author | Jelle Besseling <jelle@pingiun.com> | 2021-08-17 08:26:20 +0200
committer | GitHub <noreply@github.com> | 2021-08-17 08:26:20 +0200
commit | 0305db28c98fd6cf43a3c50ba92c76215e99d512 (patch)
tree | 33b753a19728d9f453c1aa4f19b36ac797e5fe80 /server/tests
parent | f88ae8f5bc223579313b28582de9101944a4a814 (diff)
Add support for saving video files to object storage (#4290)
* Add support for saving video files to object storage
* Add support for custom url generation on s3 stored files
Uses two config keys to support URL generation that doesn't point directly
at the S3-compatible storage, so URLs can be generated for any cache server
or CDN (see the config sketch below).
* Upload files to s3 concurrently and delete originals afterwards
* Only publish after move to object storage is complete
* Use base url instead of url template
* Fix mistyped config field
* Add rudimentary way to download before transcoding
* Implement Chocobozzz suggestions
https://github.com/Chocobozzz/PeerTube/pull/4290#issuecomment-891670478
The remarks in question:
- Try to use objectStorage prefix instead of s3 prefix for your function/variable/config names
- Prefer to use a tree for the config: s3.streaming_playlists_bucket -> object_storage.streaming_playlists.bucket
- Use uppercase for config: S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket -> OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET (maybe BUCKET_NAME instead of BUCKET)
- I suggest renaming moveJobsRunning to pendingMovingJobs (or better, create a dedicated videoJobInfo table with pendingMove & videoId columns so we could also use this table to track pending transcoding jobs)
- https://github.com/Chocobozzz/PeerTube/pull/4290/files#diff-3e26d41ca4bda1de8e1747af70ca2af642abcc1e9e0bfb94239ff2165acfbde5R19 uses a string instead of an integer
- I think we should store the origin object storage URL in fileUrl, without base_url injection. Instead, inject the base_url at "runtime" so admins can easily change this configuration without running a script to update DB URLs (see the URL-building sketch below)
* Import correct function
* Support multipart upload
* Remove import of node 15.0 module stream/promises
* Extend maximum upload job length
Using the same value as for redundancy downloading seems logical
* Use dynamic part size for really large uploads
Also adds a very small part size for local testing (see the part size sketch below)
* Fix decreasePendingMove query
* Resolve various PR comments
* Move to object storage after optimize
* Make upload size configurable and increase default
* Prune webtorrent files that are stored in object storage
* Move files after transcoding jobs
* Fix federation
* Add video path manager
* Support move to external storage job in client
* Fix live object storage tests
Co-authored-by: Chocobozzz <me@florianbigard.com>
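For context, here is a minimal sketch of the resulting `object_storage` configuration tree, mirroring the structure the tests below pass to `createSingleServer`/`createMultipleServers`. All endpoint, credential, bucket, prefix and base_url values are placeholders, not defaults shipped by PeerTube.

```typescript
// Sketch only: object_storage config tree with placeholder values.
// base_url lets generated URLs point at a cache server or CDN instead of
// the S3-compatible endpoint itself.
const config = {
  object_storage: {
    enabled: true,
    endpoint: 'http://s3.example.com',
    region: 'us-east-1',

    credentials: {
      access_key_id: 'AKIAIOSFODNN7EXAMPLE',
      secret_access_key: 'aJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'
    },

    max_upload_part: '2MB',

    streaming_playlists: {
      bucket_name: 'streaming-playlists',
      prefix: 'hls_',
      base_url: 'https://cdn.example.com/streaming-playlists'
    },

    videos: {
      bucket_name: 'videos',
      prefix: 'webtorrent_',
      base_url: 'https://cdn.example.com/videos'
    }
  }
}
```

The test suites below exercise this tree with separate buckets, a shared bucket with prefixes, a mock base_url, and a deliberately tiny upload part size.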
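To illustrate the last remark (store the origin object storage URL in fileUrl and inject base_url when serving), here is a hypothetical sketch; `buildFileUrl` and its parameters are made-up names, not code from this PR.

```typescript
// Hypothetical sketch: the DB keeps the origin object storage URL, and the
// configured base_url (CDN or cache front-end) is swapped in at serve time,
// so changing the config never requires rewriting URLs in the database.
function buildFileUrl (storedFileUrl: string, bucketEndpointUrl: string, baseUrl?: string): string {
  // No base_url configured: serve the origin object storage URL as stored
  if (!baseUrl) return storedFileUrl

  // Replace the bucket endpoint prefix with the configured base_url
  return baseUrl + storedFileUrl.slice(bucketEndpointUrl.length)
}

// Example:
//   stored in DB:  http://videos.s3.example.com/webtorrent_abcd-720.mp4
//   served to web: https://cdn.example.com/videos/webtorrent_abcd-720.mp4
buildFileUrl(
  'http://videos.s3.example.com/webtorrent_abcd-720.mp4',
  'http://videos.s3.example.com/',
  'https://cdn.example.com/videos/'
)
```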
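The dynamic part size bullet can be sketched as follows, assuming the goal is to keep multipart uploads under S3's 10,000-part limit while using the configured `max_upload_part` as the baseline; the function name and exact policy are assumptions, not the PR's implementation.

```typescript
// Sketch only: grow the multipart part size for very large files so the
// upload never needs more than the S3 maximum of 10 000 parts. The
// configured max_upload_part ('2MB' in the tests, '5KB' in the small-part
// test suite below) acts as the baseline part size.
const S3_MAX_PARTS = 10000

function computePartSize (fileSizeBytes: number, configuredPartSizeBytes: number): number {
  // Smallest part size that still fits the whole file in 10 000 parts
  const minForPartLimit = Math.ceil(fileSizeBytes / S3_MAX_PARTS)

  return Math.max(configuredPartSizeBytes, minForPartLimit)
}

// Example: a 100 GiB file with a 2 MiB configured part size would need
// 51 200 parts, so the part size grows to ~10.2 MiB (10 000 parts).
console.log(computePartSize(100 * 1024 ** 3, 2 * 1024 * 1024))
```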
Diffstat (limited to 'server/tests')
-rw-r--r-- | server/tests/api/index.ts | 1
-rw-r--r-- | server/tests/api/live/live-save-replay.ts | 30
-rw-r--r-- | server/tests/api/object-storage/index.ts | 3
-rw-r--r-- | server/tests/api/object-storage/live.ts | 136
-rw-r--r-- | server/tests/api/object-storage/video-imports.ts | 112
-rw-r--r-- | server/tests/api/object-storage/videos.ts | 391
-rw-r--r-- | server/tests/api/redundancy/redundancy.ts | 6
-rw-r--r-- | server/tests/api/videos/video-hls.ts | 73
-rw-r--r-- | server/tests/cli/create-import-video-file-job.ts | 56
-rw-r--r-- | server/tests/cli/create-transcoding-job.ts | 95
-rw-r--r-- | server/tests/helpers/request.ts | 8
11 files changed, 832 insertions, 79 deletions
diff --git a/server/tests/api/index.ts b/server/tests/api/index.ts
index b62e2f5f7..19301c0b9 100644
--- a/server/tests/api/index.ts
+++ b/server/tests/api/index.ts
@@ -2,6 +2,7 @@ | |||
2 | import './activitypub' | 2 | import './activitypub' |
3 | import './check-params' | 3 | import './check-params' |
4 | import './moderation' | 4 | import './moderation' |
5 | import './object-storage' | ||
5 | import './notifications' | 6 | import './notifications' |
6 | import './redundancy' | 7 | import './redundancy' |
7 | import './search' | 8 | import './search' |
diff --git a/server/tests/api/live/live-save-replay.ts b/server/tests/api/live/live-save-replay.ts
index 8f1fb78a5..6c4ea90ca 100644
--- a/server/tests/api/live/live-save-replay.ts
+++ b/server/tests/api/live/live-save-replay.ts
@@ -15,7 +15,9 @@ import { | |||
15 | stopFfmpeg, | 15 | stopFfmpeg, |
16 | testFfmpegStreamError, | 16 | testFfmpegStreamError, |
17 | wait, | 17 | wait, |
18 | waitJobs | 18 | waitJobs, |
19 | waitUntilLivePublishedOnAllServers, | ||
20 | waitUntilLiveSavedOnAllServers | ||
19 | } from '@shared/extra-utils' | 21 | } from '@shared/extra-utils' |
20 | import { HttpStatusCode, LiveVideoCreate, VideoPrivacy, VideoState } from '@shared/models' | 22 | import { HttpStatusCode, LiveVideoCreate, VideoPrivacy, VideoState } from '@shared/models' |
21 | 23 | ||
@@ -66,18 +68,6 @@ describe('Save replay setting', function () { | |||
66 | } | 68 | } |
67 | } | 69 | } |
68 | 70 | ||
69 | async function waitUntilLivePublishedOnAllServers (videoId: string) { | ||
70 | for (const server of servers) { | ||
71 | await server.live.waitUntilPublished({ videoId }) | ||
72 | } | ||
73 | } | ||
74 | |||
75 | async function waitUntilLiveSavedOnAllServers (videoId: string) { | ||
76 | for (const server of servers) { | ||
77 | await server.live.waitUntilSaved({ videoId }) | ||
78 | } | ||
79 | } | ||
80 | |||
81 | before(async function () { | 71 | before(async function () { |
82 | this.timeout(120000) | 72 | this.timeout(120000) |
83 | 73 | ||
@@ -127,7 +117,7 @@ describe('Save replay setting', function () { | |||
127 | 117 | ||
128 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) | 118 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) |
129 | 119 | ||
130 | await waitUntilLivePublishedOnAllServers(liveVideoUUID) | 120 | await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) |
131 | 121 | ||
132 | await waitJobs(servers) | 122 | await waitJobs(servers) |
133 | 123 | ||
@@ -160,7 +150,7 @@ describe('Save replay setting', function () { | |||
160 | 150 | ||
161 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) | 151 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) |
162 | 152 | ||
163 | await waitUntilLivePublishedOnAllServers(liveVideoUUID) | 153 | await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) |
164 | 154 | ||
165 | await waitJobs(servers) | 155 | await waitJobs(servers) |
166 | await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) | 156 | await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) |
@@ -189,7 +179,7 @@ describe('Save replay setting', function () { | |||
189 | 179 | ||
190 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) | 180 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) |
191 | 181 | ||
192 | await waitUntilLivePublishedOnAllServers(liveVideoUUID) | 182 | await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) |
193 | 183 | ||
194 | await waitJobs(servers) | 184 | await waitJobs(servers) |
195 | await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) | 185 | await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) |
@@ -224,7 +214,7 @@ describe('Save replay setting', function () { | |||
224 | this.timeout(20000) | 214 | this.timeout(20000) |
225 | 215 | ||
226 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) | 216 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) |
227 | await waitUntilLivePublishedOnAllServers(liveVideoUUID) | 217 | await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) |
228 | 218 | ||
229 | await waitJobs(servers) | 219 | await waitJobs(servers) |
230 | 220 | ||
@@ -237,7 +227,7 @@ describe('Save replay setting', function () { | |||
237 | 227 | ||
238 | await stopFfmpeg(ffmpegCommand) | 228 | await stopFfmpeg(ffmpegCommand) |
239 | 229 | ||
240 | await waitUntilLiveSavedOnAllServers(liveVideoUUID) | 230 | await waitUntilLiveSavedOnAllServers(servers, liveVideoUUID) |
241 | await waitJobs(servers) | 231 | await waitJobs(servers) |
242 | 232 | ||
243 | // Live has been transcoded | 233 | // Live has been transcoded |
@@ -268,7 +258,7 @@ describe('Save replay setting', function () { | |||
268 | liveVideoUUID = await createLiveWrapper(true) | 258 | liveVideoUUID = await createLiveWrapper(true) |
269 | 259 | ||
270 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) | 260 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) |
271 | await waitUntilLivePublishedOnAllServers(liveVideoUUID) | 261 | await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) |
272 | 262 | ||
273 | await waitJobs(servers) | 263 | await waitJobs(servers) |
274 | await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) | 264 | await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) |
@@ -296,7 +286,7 @@ describe('Save replay setting', function () { | |||
296 | liveVideoUUID = await createLiveWrapper(true) | 286 | liveVideoUUID = await createLiveWrapper(true) |
297 | 287 | ||
298 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) | 288 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID }) |
299 | await waitUntilLivePublishedOnAllServers(liveVideoUUID) | 289 | await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID) |
300 | 290 | ||
301 | await waitJobs(servers) | 291 | await waitJobs(servers) |
302 | await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) | 292 | await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200) |
diff --git a/server/tests/api/object-storage/index.ts b/server/tests/api/object-storage/index.ts
new file mode 100644
index 000000000..f319d6ef5
--- /dev/null
+++ b/server/tests/api/object-storage/index.ts
@@ -0,0 +1,3 @@ | |||
1 | export * from './live' | ||
2 | export * from './video-imports' | ||
3 | export * from './videos' | ||
diff --git a/server/tests/api/object-storage/live.ts b/server/tests/api/object-storage/live.ts
new file mode 100644
index 000000000..d3e6777f2
--- /dev/null
+++ b/server/tests/api/object-storage/live.ts
@@ -0,0 +1,136 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import 'mocha' | ||
4 | import * as chai from 'chai' | ||
5 | import { FfmpegCommand } from 'fluent-ffmpeg' | ||
6 | import { | ||
7 | areObjectStorageTestsDisabled, | ||
8 | createMultipleServers, | ||
9 | doubleFollow, | ||
10 | expectStartWith, | ||
11 | killallServers, | ||
12 | makeRawRequest, | ||
13 | ObjectStorageCommand, | ||
14 | PeerTubeServer, | ||
15 | setAccessTokensToServers, | ||
16 | setDefaultVideoChannel, | ||
17 | stopFfmpeg, | ||
18 | waitJobs, | ||
19 | waitUntilLivePublishedOnAllServers, | ||
20 | waitUntilLiveSavedOnAllServers | ||
21 | } from '@shared/extra-utils' | ||
22 | import { HttpStatusCode, LiveVideoCreate, VideoFile, VideoPrivacy } from '@shared/models' | ||
23 | |||
24 | const expect = chai.expect | ||
25 | |||
26 | async function createLive (server: PeerTubeServer) { | ||
27 | const attributes: LiveVideoCreate = { | ||
28 | channelId: server.store.channel.id, | ||
29 | privacy: VideoPrivacy.PUBLIC, | ||
30 | name: 'my super live', | ||
31 | saveReplay: true | ||
32 | } | ||
33 | |||
34 | const { uuid } = await server.live.create({ fields: attributes }) | ||
35 | |||
36 | return uuid | ||
37 | } | ||
38 | |||
39 | async function checkFiles (files: VideoFile[]) { | ||
40 | for (const file of files) { | ||
41 | expectStartWith(file.fileUrl, ObjectStorageCommand.getPlaylistBaseUrl()) | ||
42 | |||
43 | await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) | ||
44 | } | ||
45 | } | ||
46 | |||
47 | describe('Object storage for lives', function () { | ||
48 | if (areObjectStorageTestsDisabled()) return | ||
49 | |||
50 | let ffmpegCommand: FfmpegCommand | ||
51 | let servers: PeerTubeServer[] | ||
52 | let videoUUID: string | ||
53 | |||
54 | before(async function () { | ||
55 | this.timeout(120000) | ||
56 | |||
57 | await ObjectStorageCommand.prepareDefaultBuckets() | ||
58 | |||
59 | servers = await createMultipleServers(2, ObjectStorageCommand.getDefaultConfig()) | ||
60 | |||
61 | await setAccessTokensToServers(servers) | ||
62 | await setDefaultVideoChannel(servers) | ||
63 | await doubleFollow(servers[0], servers[1]) | ||
64 | |||
65 | await servers[0].config.enableTranscoding() | ||
66 | }) | ||
67 | |||
68 | describe('Without live transcoding', async function () { | ||
69 | |||
70 | before(async function () { | ||
71 | await servers[0].config.enableLive({ transcoding: false }) | ||
72 | |||
73 | videoUUID = await createLive(servers[0]) | ||
74 | }) | ||
75 | |||
76 | it('Should create a live and save the replay on object storage', async function () { | ||
77 | this.timeout(220000) | ||
78 | |||
79 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUID }) | ||
80 | await waitUntilLivePublishedOnAllServers(servers, videoUUID) | ||
81 | |||
82 | await stopFfmpeg(ffmpegCommand) | ||
83 | |||
84 | await waitUntilLiveSavedOnAllServers(servers, videoUUID) | ||
85 | await waitJobs(servers) | ||
86 | |||
87 | for (const server of servers) { | ||
88 | const video = await server.videos.get({ id: videoUUID }) | ||
89 | |||
90 | expect(video.files).to.have.lengthOf(0) | ||
91 | expect(video.streamingPlaylists).to.have.lengthOf(1) | ||
92 | |||
93 | const files = video.streamingPlaylists[0].files | ||
94 | |||
95 | await checkFiles(files) | ||
96 | } | ||
97 | }) | ||
98 | }) | ||
99 | |||
100 | describe('With live transcoding', async function () { | ||
101 | |||
102 | before(async function () { | ||
103 | await servers[0].config.enableLive({ transcoding: true }) | ||
104 | |||
105 | videoUUID = await createLive(servers[0]) | ||
106 | }) | ||
107 | |||
108 | it('Should import a video and have sent it to object storage', async function () { | ||
109 | this.timeout(240000) | ||
110 | |||
111 | ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUID }) | ||
112 | await waitUntilLivePublishedOnAllServers(servers, videoUUID) | ||
113 | |||
114 | await stopFfmpeg(ffmpegCommand) | ||
115 | |||
116 | await waitUntilLiveSavedOnAllServers(servers, videoUUID) | ||
117 | await waitJobs(servers) | ||
118 | |||
119 | for (const server of servers) { | ||
120 | const video = await server.videos.get({ id: videoUUID }) | ||
121 | |||
122 | expect(video.files).to.have.lengthOf(0) | ||
123 | expect(video.streamingPlaylists).to.have.lengthOf(1) | ||
124 | |||
125 | const files = video.streamingPlaylists[0].files | ||
126 | expect(files).to.have.lengthOf(4) | ||
127 | |||
128 | await checkFiles(files) | ||
129 | } | ||
130 | }) | ||
131 | }) | ||
132 | |||
133 | after(async function () { | ||
134 | await killallServers(servers) | ||
135 | }) | ||
136 | }) | ||
diff --git a/server/tests/api/object-storage/video-imports.ts b/server/tests/api/object-storage/video-imports.ts
new file mode 100644
index 000000000..efc01f550
--- /dev/null
+++ b/server/tests/api/object-storage/video-imports.ts
@@ -0,0 +1,112 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import 'mocha' | ||
4 | import * as chai from 'chai' | ||
5 | import { | ||
6 | areObjectStorageTestsDisabled, | ||
7 | createSingleServer, | ||
8 | expectStartWith, | ||
9 | FIXTURE_URLS, | ||
10 | killallServers, | ||
11 | makeRawRequest, | ||
12 | ObjectStorageCommand, | ||
13 | PeerTubeServer, | ||
14 | setAccessTokensToServers, | ||
15 | setDefaultVideoChannel, | ||
16 | waitJobs | ||
17 | } from '@shared/extra-utils' | ||
18 | import { HttpStatusCode, VideoPrivacy } from '@shared/models' | ||
19 | |||
20 | const expect = chai.expect | ||
21 | |||
22 | async function importVideo (server: PeerTubeServer) { | ||
23 | const attributes = { | ||
24 | name: 'import 2', | ||
25 | privacy: VideoPrivacy.PUBLIC, | ||
26 | channelId: server.store.channel.id, | ||
27 | targetUrl: FIXTURE_URLS.goodVideo720 | ||
28 | } | ||
29 | |||
30 | const { video: { uuid } } = await server.imports.importVideo({ attributes }) | ||
31 | |||
32 | return uuid | ||
33 | } | ||
34 | |||
35 | describe('Object storage for video import', function () { | ||
36 | if (areObjectStorageTestsDisabled()) return | ||
37 | |||
38 | let server: PeerTubeServer | ||
39 | |||
40 | before(async function () { | ||
41 | this.timeout(120000) | ||
42 | |||
43 | await ObjectStorageCommand.prepareDefaultBuckets() | ||
44 | |||
45 | server = await createSingleServer(1, ObjectStorageCommand.getDefaultConfig()) | ||
46 | |||
47 | await setAccessTokensToServers([ server ]) | ||
48 | await setDefaultVideoChannel([ server ]) | ||
49 | |||
50 | await server.config.enableImports() | ||
51 | }) | ||
52 | |||
53 | describe('Without transcoding', async function () { | ||
54 | |||
55 | before(async function () { | ||
56 | await server.config.disableTranscoding() | ||
57 | }) | ||
58 | |||
59 | it('Should import a video and have sent it to object storage', async function () { | ||
60 | this.timeout(120000) | ||
61 | |||
62 | const uuid = await importVideo(server) | ||
63 | await waitJobs(server) | ||
64 | |||
65 | const video = await server.videos.get({ id: uuid }) | ||
66 | |||
67 | expect(video.files).to.have.lengthOf(1) | ||
68 | expect(video.streamingPlaylists).to.have.lengthOf(0) | ||
69 | |||
70 | const fileUrl = video.files[0].fileUrl | ||
71 | expectStartWith(fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl()) | ||
72 | |||
73 | await makeRawRequest(fileUrl, HttpStatusCode.OK_200) | ||
74 | }) | ||
75 | }) | ||
76 | |||
77 | describe('With transcoding', async function () { | ||
78 | |||
79 | before(async function () { | ||
80 | await server.config.enableTranscoding() | ||
81 | }) | ||
82 | |||
83 | it('Should import a video and have sent it to object storage', async function () { | ||
84 | this.timeout(120000) | ||
85 | |||
86 | const uuid = await importVideo(server) | ||
87 | await waitJobs(server) | ||
88 | |||
89 | const video = await server.videos.get({ id: uuid }) | ||
90 | |||
91 | expect(video.files).to.have.lengthOf(4) | ||
92 | expect(video.streamingPlaylists).to.have.lengthOf(1) | ||
93 | expect(video.streamingPlaylists[0].files).to.have.lengthOf(4) | ||
94 | |||
95 | for (const file of video.files) { | ||
96 | expectStartWith(file.fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl()) | ||
97 | |||
98 | await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) | ||
99 | } | ||
100 | |||
101 | for (const file of video.streamingPlaylists[0].files) { | ||
102 | expectStartWith(file.fileUrl, ObjectStorageCommand.getPlaylistBaseUrl()) | ||
103 | |||
104 | await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) | ||
105 | } | ||
106 | }) | ||
107 | }) | ||
108 | |||
109 | after(async function () { | ||
110 | await killallServers([ server ]) | ||
111 | }) | ||
112 | }) | ||
diff --git a/server/tests/api/object-storage/videos.ts b/server/tests/api/object-storage/videos.ts
new file mode 100644
index 000000000..3958bd3d7
--- /dev/null
+++ b/server/tests/api/object-storage/videos.ts
@@ -0,0 +1,391 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import 'mocha' | ||
4 | import * as chai from 'chai' | ||
5 | import { merge } from 'lodash' | ||
6 | import { | ||
7 | areObjectStorageTestsDisabled, | ||
8 | checkTmpIsEmpty, | ||
9 | cleanupTests, | ||
10 | createMultipleServers, | ||
11 | createSingleServer, | ||
12 | doubleFollow, | ||
13 | expectStartWith, | ||
14 | killallServers, | ||
15 | makeRawRequest, | ||
16 | MockObjectStorage, | ||
17 | ObjectStorageCommand, | ||
18 | PeerTubeServer, | ||
19 | setAccessTokensToServers, | ||
20 | waitJobs, | ||
21 | webtorrentAdd | ||
22 | } from '@shared/extra-utils' | ||
23 | import { HttpStatusCode, VideoDetails } from '@shared/models' | ||
24 | |||
25 | const expect = chai.expect | ||
26 | |||
27 | async function checkFiles (options: { | ||
28 | video: VideoDetails | ||
29 | |||
30 | baseMockUrl?: string | ||
31 | |||
32 | playlistBucket: string | ||
33 | playlistPrefix?: string | ||
34 | |||
35 | webtorrentBucket: string | ||
36 | webtorrentPrefix?: string | ||
37 | }) { | ||
38 | const { | ||
39 | video, | ||
40 | playlistBucket, | ||
41 | webtorrentBucket, | ||
42 | baseMockUrl, | ||
43 | playlistPrefix, | ||
44 | webtorrentPrefix | ||
45 | } = options | ||
46 | |||
47 | let allFiles = video.files | ||
48 | |||
49 | for (const file of video.files) { | ||
50 | const baseUrl = baseMockUrl | ||
51 | ? `${baseMockUrl}/${webtorrentBucket}/` | ||
52 | : `http://${webtorrentBucket}.${ObjectStorageCommand.getEndpointHost()}/` | ||
53 | |||
54 | const prefix = webtorrentPrefix || '' | ||
55 | const start = baseUrl + prefix | ||
56 | |||
57 | expectStartWith(file.fileUrl, start) | ||
58 | |||
59 | const res = await makeRawRequest(file.fileDownloadUrl, HttpStatusCode.FOUND_302) | ||
60 | const location = res.headers['location'] | ||
61 | expectStartWith(location, start) | ||
62 | |||
63 | await makeRawRequest(location, HttpStatusCode.OK_200) | ||
64 | } | ||
65 | |||
66 | const hls = video.streamingPlaylists[0] | ||
67 | |||
68 | if (hls) { | ||
69 | allFiles = allFiles.concat(hls.files) | ||
70 | |||
71 | const baseUrl = baseMockUrl | ||
72 | ? `${baseMockUrl}/${playlistBucket}/` | ||
73 | : `http://${playlistBucket}.${ObjectStorageCommand.getEndpointHost()}/` | ||
74 | |||
75 | const prefix = playlistPrefix || '' | ||
76 | const start = baseUrl + prefix | ||
77 | |||
78 | expectStartWith(hls.playlistUrl, start) | ||
79 | expectStartWith(hls.segmentsSha256Url, start) | ||
80 | |||
81 | await makeRawRequest(hls.playlistUrl, HttpStatusCode.OK_200) | ||
82 | |||
83 | const resSha = await makeRawRequest(hls.segmentsSha256Url, HttpStatusCode.OK_200) | ||
84 | expect(JSON.stringify(resSha.body)).to.not.throw | ||
85 | |||
86 | for (const file of hls.files) { | ||
87 | expectStartWith(file.fileUrl, start) | ||
88 | |||
89 | const res = await makeRawRequest(file.fileDownloadUrl, HttpStatusCode.FOUND_302) | ||
90 | const location = res.headers['location'] | ||
91 | expectStartWith(location, start) | ||
92 | |||
93 | await makeRawRequest(location, HttpStatusCode.OK_200) | ||
94 | } | ||
95 | } | ||
96 | |||
97 | for (const file of allFiles) { | ||
98 | const torrent = await webtorrentAdd(file.magnetUri, true) | ||
99 | |||
100 | expect(torrent.files).to.be.an('array') | ||
101 | expect(torrent.files.length).to.equal(1) | ||
102 | expect(torrent.files[0].path).to.exist.and.to.not.equal('') | ||
103 | |||
104 | const res = await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) | ||
105 | expect(res.body).to.have.length.above(100) | ||
106 | } | ||
107 | |||
108 | return allFiles.map(f => f.fileUrl) | ||
109 | } | ||
110 | |||
111 | function runTestSuite (options: { | ||
112 | playlistBucket: string | ||
113 | playlistPrefix?: string | ||
114 | |||
115 | webtorrentBucket: string | ||
116 | webtorrentPrefix?: string | ||
117 | |||
118 | useMockBaseUrl?: boolean | ||
119 | |||
120 | maxUploadPart?: string | ||
121 | }) { | ||
122 | const mockObjectStorage = new MockObjectStorage() | ||
123 | let baseMockUrl: string | ||
124 | |||
125 | let servers: PeerTubeServer[] | ||
126 | |||
127 | let keptUrls: string[] = [] | ||
128 | |||
129 | const uuidsToDelete: string[] = [] | ||
130 | let deletedUrls: string[] = [] | ||
131 | |||
132 | before(async function () { | ||
133 | this.timeout(120000) | ||
134 | |||
135 | const port = await mockObjectStorage.initialize() | ||
136 | baseMockUrl = options.useMockBaseUrl ? `http://localhost:${port}` : undefined | ||
137 | |||
138 | await ObjectStorageCommand.createBucket(options.playlistBucket) | ||
139 | await ObjectStorageCommand.createBucket(options.webtorrentBucket) | ||
140 | |||
141 | const config = { | ||
142 | object_storage: { | ||
143 | enabled: true, | ||
144 | endpoint: 'http://' + ObjectStorageCommand.getEndpointHost(), | ||
145 | region: ObjectStorageCommand.getRegion(), | ||
146 | |||
147 | credentials: ObjectStorageCommand.getCredentialsConfig(), | ||
148 | |||
149 | max_upload_part: options.maxUploadPart || '2MB', | ||
150 | |||
151 | streaming_playlists: { | ||
152 | bucket_name: options.playlistBucket, | ||
153 | prefix: options.playlistPrefix, | ||
154 | base_url: baseMockUrl | ||
155 | ? `${baseMockUrl}/${options.playlistBucket}` | ||
156 | : undefined | ||
157 | }, | ||
158 | |||
159 | videos: { | ||
160 | bucket_name: options.webtorrentBucket, | ||
161 | prefix: options.webtorrentPrefix, | ||
162 | base_url: baseMockUrl | ||
163 | ? `${baseMockUrl}/${options.webtorrentBucket}` | ||
164 | : undefined | ||
165 | } | ||
166 | } | ||
167 | } | ||
168 | |||
169 | servers = await createMultipleServers(2, config) | ||
170 | |||
171 | await setAccessTokensToServers(servers) | ||
172 | await doubleFollow(servers[0], servers[1]) | ||
173 | |||
174 | for (const server of servers) { | ||
175 | const { uuid } = await server.videos.quickUpload({ name: 'video to keep' }) | ||
176 | await waitJobs(servers) | ||
177 | |||
178 | const files = await server.videos.listFiles({ id: uuid }) | ||
179 | keptUrls = keptUrls.concat(files.map(f => f.fileUrl)) | ||
180 | } | ||
181 | }) | ||
182 | |||
183 | it('Should upload a video and move it to the object storage without transcoding', async function () { | ||
184 | this.timeout(20000) | ||
185 | |||
186 | const { uuid } = await servers[0].videos.quickUpload({ name: 'video 1' }) | ||
187 | uuidsToDelete.push(uuid) | ||
188 | |||
189 | await waitJobs(servers) | ||
190 | |||
191 | for (const server of servers) { | ||
192 | const video = await server.videos.get({ id: uuid }) | ||
193 | const files = await checkFiles({ ...options, video, baseMockUrl }) | ||
194 | |||
195 | deletedUrls = deletedUrls.concat(files) | ||
196 | } | ||
197 | }) | ||
198 | |||
199 | it('Should upload a video and move it to the object storage with transcoding', async function () { | ||
200 | this.timeout(40000) | ||
201 | |||
202 | const { uuid } = await servers[1].videos.quickUpload({ name: 'video 2' }) | ||
203 | uuidsToDelete.push(uuid) | ||
204 | |||
205 | await waitJobs(servers) | ||
206 | |||
207 | for (const server of servers) { | ||
208 | const video = await server.videos.get({ id: uuid }) | ||
209 | const files = await checkFiles({ ...options, video, baseMockUrl }) | ||
210 | |||
211 | deletedUrls = deletedUrls.concat(files) | ||
212 | } | ||
213 | }) | ||
214 | |||
215 | it('Should correctly delete the files', async function () { | ||
216 | await servers[0].videos.remove({ id: uuidsToDelete[0] }) | ||
217 | await servers[1].videos.remove({ id: uuidsToDelete[1] }) | ||
218 | |||
219 | await waitJobs(servers) | ||
220 | |||
221 | for (const url of deletedUrls) { | ||
222 | await makeRawRequest(url, HttpStatusCode.NOT_FOUND_404) | ||
223 | } | ||
224 | }) | ||
225 | |||
226 | it('Should have kept other files', async function () { | ||
227 | for (const url of keptUrls) { | ||
228 | await makeRawRequest(url, HttpStatusCode.OK_200) | ||
229 | } | ||
230 | }) | ||
231 | |||
232 | it('Should have an empty tmp directory', async function () { | ||
233 | for (const server of servers) { | ||
234 | await checkTmpIsEmpty(server) | ||
235 | } | ||
236 | }) | ||
237 | |||
238 | after(async function () { | ||
239 | mockObjectStorage.terminate() | ||
240 | |||
241 | await cleanupTests(servers) | ||
242 | }) | ||
243 | } | ||
244 | |||
245 | describe('Object storage for videos', function () { | ||
246 | if (areObjectStorageTestsDisabled()) return | ||
247 | |||
248 | describe('Test config', function () { | ||
249 | let server: PeerTubeServer | ||
250 | |||
251 | const baseConfig = { | ||
252 | object_storage: { | ||
253 | enabled: true, | ||
254 | endpoint: 'http://' + ObjectStorageCommand.getEndpointHost(), | ||
255 | region: ObjectStorageCommand.getRegion(), | ||
256 | |||
257 | credentials: ObjectStorageCommand.getCredentialsConfig(), | ||
258 | |||
259 | streaming_playlists: { | ||
260 | bucket_name: ObjectStorageCommand.DEFAULT_PLAYLIST_BUCKET | ||
261 | }, | ||
262 | |||
263 | videos: { | ||
264 | bucket_name: ObjectStorageCommand.DEFAULT_WEBTORRENT_BUCKET | ||
265 | } | ||
266 | } | ||
267 | } | ||
268 | |||
269 | const badCredentials = { | ||
270 | access_key_id: 'AKIAIOSFODNN7EXAMPLE', | ||
271 | secret_access_key: 'aJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY' | ||
272 | } | ||
273 | |||
274 | it('Should fail with same bucket names without prefix', function (done) { | ||
275 | const config = merge({}, baseConfig, { | ||
276 | object_storage: { | ||
277 | streaming_playlists: { | ||
278 | bucket_name: 'aaa' | ||
279 | }, | ||
280 | |||
281 | videos: { | ||
282 | bucket_name: 'aaa' | ||
283 | } | ||
284 | } | ||
285 | }) | ||
286 | |||
287 | createSingleServer(1, config) | ||
288 | .then(() => done(new Error('Did not throw'))) | ||
289 | .catch(() => done()) | ||
290 | }) | ||
291 | |||
292 | it('Should fail with bad credentials', async function () { | ||
293 | this.timeout(60000) | ||
294 | |||
295 | await ObjectStorageCommand.prepareDefaultBuckets() | ||
296 | |||
297 | const config = merge({}, baseConfig, { | ||
298 | object_storage: { | ||
299 | credentials: badCredentials | ||
300 | } | ||
301 | }) | ||
302 | |||
303 | server = await createSingleServer(1, config) | ||
304 | await setAccessTokensToServers([ server ]) | ||
305 | |||
306 | const { uuid } = await server.videos.quickUpload({ name: 'video' }) | ||
307 | |||
308 | await waitJobs([ server ], true) | ||
309 | const video = await server.videos.get({ id: uuid }) | ||
310 | |||
311 | expectStartWith(video.files[0].fileUrl, server.url) | ||
312 | |||
313 | await killallServers([ server ]) | ||
314 | }) | ||
315 | |||
316 | it('Should succeed with credentials from env', async function () { | ||
317 | this.timeout(60000) | ||
318 | |||
319 | await ObjectStorageCommand.prepareDefaultBuckets() | ||
320 | |||
321 | const config = merge({}, baseConfig, { | ||
322 | object_storage: { | ||
323 | credentials: { | ||
324 | access_key_id: '', | ||
325 | secret_access_key: '' | ||
326 | } | ||
327 | } | ||
328 | }) | ||
329 | |||
330 | const goodCredentials = ObjectStorageCommand.getCredentialsConfig() | ||
331 | |||
332 | server = await createSingleServer(1, config, { | ||
333 | env: { | ||
334 | AWS_ACCESS_KEY_ID: goodCredentials.access_key_id, | ||
335 | AWS_SECRET_ACCESS_KEY: goodCredentials.secret_access_key | ||
336 | } | ||
337 | }) | ||
338 | |||
339 | await setAccessTokensToServers([ server ]) | ||
340 | |||
341 | const { uuid } = await server.videos.quickUpload({ name: 'video' }) | ||
342 | |||
343 | await waitJobs([ server ], true) | ||
344 | const video = await server.videos.get({ id: uuid }) | ||
345 | |||
346 | expectStartWith(video.files[0].fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl()) | ||
347 | }) | ||
348 | |||
349 | after(async function () { | ||
350 | await killallServers([ server ]) | ||
351 | }) | ||
352 | }) | ||
353 | |||
354 | describe('Test simple object storage', function () { | ||
355 | runTestSuite({ | ||
356 | playlistBucket: 'streaming-playlists', | ||
357 | webtorrentBucket: 'videos' | ||
358 | }) | ||
359 | }) | ||
360 | |||
361 | describe('Test object storage with prefix', function () { | ||
362 | runTestSuite({ | ||
363 | playlistBucket: 'mybucket', | ||
364 | webtorrentBucket: 'mybucket', | ||
365 | |||
366 | playlistPrefix: 'streaming-playlists_', | ||
367 | webtorrentPrefix: 'webtorrent_' | ||
368 | }) | ||
369 | }) | ||
370 | |||
371 | describe('Test object storage with prefix and base URL', function () { | ||
372 | runTestSuite({ | ||
373 | playlistBucket: 'mybucket', | ||
374 | webtorrentBucket: 'mybucket', | ||
375 | |||
376 | playlistPrefix: 'streaming-playlists_', | ||
377 | webtorrentPrefix: 'webtorrent_', | ||
378 | |||
379 | useMockBaseUrl: true | ||
380 | }) | ||
381 | }) | ||
382 | |||
383 | describe('Test object storage with small upload part', function () { | ||
384 | runTestSuite({ | ||
385 | playlistBucket: 'streaming-playlists', | ||
386 | webtorrentBucket: 'videos', | ||
387 | |||
388 | maxUploadPart: '5KB' | ||
389 | }) | ||
390 | }) | ||
391 | }) | ||
diff --git a/server/tests/api/redundancy/redundancy.ts b/server/tests/api/redundancy/redundancy.ts
index e1a12f5f8..3400b1d9a 100644
--- a/server/tests/api/redundancy/redundancy.ts
+++ b/server/tests/api/redundancy/redundancy.ts
@@ -207,14 +207,14 @@ async function check1PlaylistRedundancies (videoUUID?: string) { | |||
207 | expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID) | 207 | expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID) |
208 | } | 208 | } |
209 | 209 | ||
210 | const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls' | 210 | const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls/' + videoUUID |
211 | const baseUrlSegment = servers[0].url + '/static/redundancy/hls' | 211 | const baseUrlSegment = servers[0].url + '/static/redundancy/hls/' + videoUUID |
212 | 212 | ||
213 | const video = await servers[0].videos.get({ id: videoUUID }) | 213 | const video = await servers[0].videos.get({ id: videoUUID }) |
214 | const hlsPlaylist = video.streamingPlaylists[0] | 214 | const hlsPlaylist = video.streamingPlaylists[0] |
215 | 215 | ||
216 | for (const resolution of [ 240, 360, 480, 720 ]) { | 216 | for (const resolution of [ 240, 360, 480, 720 ]) { |
217 | await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, videoUUID, resolution, hlsPlaylist }) | 217 | await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist }) |
218 | } | 218 | } |
219 | 219 | ||
220 | const { hlsFilenames } = await ensureSameFilenames(videoUUID) | 220 | const { hlsFilenames } = await ensureSameFilenames(videoUUID) |
diff --git a/server/tests/api/videos/video-hls.ts b/server/tests/api/videos/video-hls.ts
index 961f0e617..2c829f532 100644
--- a/server/tests/api/videos/video-hls.ts
+++ b/server/tests/api/videos/video-hls.ts
@@ -5,6 +5,7 @@ import * as chai from 'chai' | |||
5 | import { basename, join } from 'path' | 5 | import { basename, join } from 'path' |
6 | import { removeFragmentedMP4Ext, uuidRegex } from '@shared/core-utils' | 6 | import { removeFragmentedMP4Ext, uuidRegex } from '@shared/core-utils' |
7 | import { | 7 | import { |
8 | areObjectStorageTestsDisabled, | ||
8 | checkDirectoryIsEmpty, | 9 | checkDirectoryIsEmpty, |
9 | checkResolutionsInMasterPlaylist, | 10 | checkResolutionsInMasterPlaylist, |
10 | checkSegmentHash, | 11 | checkSegmentHash, |
@@ -12,7 +13,9 @@ import { | |||
12 | cleanupTests, | 13 | cleanupTests, |
13 | createMultipleServers, | 14 | createMultipleServers, |
14 | doubleFollow, | 15 | doubleFollow, |
16 | expectStartWith, | ||
15 | makeRawRequest, | 17 | makeRawRequest, |
18 | ObjectStorageCommand, | ||
16 | PeerTubeServer, | 19 | PeerTubeServer, |
17 | setAccessTokensToServers, | 20 | setAccessTokensToServers, |
18 | waitJobs, | 21 | waitJobs, |
@@ -23,8 +26,19 @@ import { DEFAULT_AUDIO_RESOLUTION } from '../../../initializers/constants' | |||
23 | 26 | ||
24 | const expect = chai.expect | 27 | const expect = chai.expect |
25 | 28 | ||
26 | async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, hlsOnly: boolean, resolutions = [ 240, 360, 480, 720 ]) { | 29 | async function checkHlsPlaylist (options: { |
27 | for (const server of servers) { | 30 | servers: PeerTubeServer[] |
31 | videoUUID: string | ||
32 | hlsOnly: boolean | ||
33 | |||
34 | resolutions?: number[] | ||
35 | objectStorageBaseUrl: string | ||
36 | }) { | ||
37 | const { videoUUID, hlsOnly, objectStorageBaseUrl } = options | ||
38 | |||
39 | const resolutions = options.resolutions ?? [ 240, 360, 480, 720 ] | ||
40 | |||
41 | for (const server of options.servers) { | ||
28 | const videoDetails = await server.videos.get({ id: videoUUID }) | 42 | const videoDetails = await server.videos.get({ id: videoUUID }) |
29 | const baseUrl = `http://${videoDetails.account.host}` | 43 | const baseUrl = `http://${videoDetails.account.host}` |
30 | 44 | ||
@@ -48,9 +62,15 @@ async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, h | |||
48 | expect(file.torrentUrl).to.match( | 62 | expect(file.torrentUrl).to.match( |
49 | new RegExp(`http://${server.host}/lazy-static/torrents/${uuidRegex}-${file.resolution.id}-hls.torrent`) | 63 | new RegExp(`http://${server.host}/lazy-static/torrents/${uuidRegex}-${file.resolution.id}-hls.torrent`) |
50 | ) | 64 | ) |
51 | expect(file.fileUrl).to.match( | 65 | |
52 | new RegExp(`${baseUrl}/static/streaming-playlists/hls/${videoDetails.uuid}/${uuidRegex}-${file.resolution.id}-fragmented.mp4`) | 66 | if (objectStorageBaseUrl) { |
53 | ) | 67 | expectStartWith(file.fileUrl, objectStorageBaseUrl) |
68 | } else { | ||
69 | expect(file.fileUrl).to.match( | ||
70 | new RegExp(`${baseUrl}/static/streaming-playlists/hls/${videoDetails.uuid}/${uuidRegex}-${file.resolution.id}-fragmented.mp4`) | ||
71 | ) | ||
72 | } | ||
73 | |||
54 | expect(file.resolution.label).to.equal(resolution + 'p') | 74 | expect(file.resolution.label).to.equal(resolution + 'p') |
55 | 75 | ||
56 | await makeRawRequest(file.torrentUrl, HttpStatusCode.OK_200) | 76 | await makeRawRequest(file.torrentUrl, HttpStatusCode.OK_200) |
@@ -80,9 +100,11 @@ async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, h | |||
80 | const file = hlsFiles.find(f => f.resolution.id === resolution) | 100 | const file = hlsFiles.find(f => f.resolution.id === resolution) |
81 | const playlistName = removeFragmentedMP4Ext(basename(file.fileUrl)) + '.m3u8' | 101 | const playlistName = removeFragmentedMP4Ext(basename(file.fileUrl)) + '.m3u8' |
82 | 102 | ||
83 | const subPlaylist = await server.streamingPlaylists.get({ | 103 | const url = objectStorageBaseUrl |
84 | url: `${baseUrl}/static/streaming-playlists/hls/${videoUUID}/${playlistName}` | 104 | ? `${objectStorageBaseUrl}hls_${videoUUID}/${playlistName}` |
85 | }) | 105 | : `${baseUrl}/static/streaming-playlists/hls/${videoUUID}/${playlistName}` |
106 | |||
107 | const subPlaylist = await server.streamingPlaylists.get({ url }) | ||
86 | 108 | ||
87 | expect(subPlaylist).to.match(new RegExp(`${uuidRegex}-${resolution}-fragmented.mp4`)) | 109 | expect(subPlaylist).to.match(new RegExp(`${uuidRegex}-${resolution}-fragmented.mp4`)) |
88 | expect(subPlaylist).to.contain(basename(file.fileUrl)) | 110 | expect(subPlaylist).to.contain(basename(file.fileUrl)) |
@@ -90,14 +112,15 @@ async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, h | |||
90 | } | 112 | } |
91 | 113 | ||
92 | { | 114 | { |
93 | const baseUrlAndPath = baseUrl + '/static/streaming-playlists/hls' | 115 | const baseUrlAndPath = objectStorageBaseUrl |
116 | ? objectStorageBaseUrl + 'hls_' + videoUUID | ||
117 | : baseUrl + '/static/streaming-playlists/hls/' + videoUUID | ||
94 | 118 | ||
95 | for (const resolution of resolutions) { | 119 | for (const resolution of resolutions) { |
96 | await checkSegmentHash({ | 120 | await checkSegmentHash({ |
97 | server, | 121 | server, |
98 | baseUrlPlaylist: baseUrlAndPath, | 122 | baseUrlPlaylist: baseUrlAndPath, |
99 | baseUrlSegment: baseUrlAndPath, | 123 | baseUrlSegment: baseUrlAndPath, |
100 | videoUUID, | ||
101 | resolution, | 124 | resolution, |
102 | hlsPlaylist | 125 | hlsPlaylist |
103 | }) | 126 | }) |
@@ -111,7 +134,7 @@ describe('Test HLS videos', function () { | |||
111 | let videoUUID = '' | 134 | let videoUUID = '' |
112 | let videoAudioUUID = '' | 135 | let videoAudioUUID = '' |
113 | 136 | ||
114 | function runTestSuite (hlsOnly: boolean) { | 137 | function runTestSuite (hlsOnly: boolean, objectStorageBaseUrl?: string) { |
115 | 138 | ||
116 | it('Should upload a video and transcode it to HLS', async function () { | 139 | it('Should upload a video and transcode it to HLS', async function () { |
117 | this.timeout(120000) | 140 | this.timeout(120000) |
@@ -121,7 +144,7 @@ describe('Test HLS videos', function () { | |||
121 | 144 | ||
122 | await waitJobs(servers) | 145 | await waitJobs(servers) |
123 | 146 | ||
124 | await checkHlsPlaylist(servers, videoUUID, hlsOnly) | 147 | await checkHlsPlaylist({ servers, videoUUID, hlsOnly, objectStorageBaseUrl }) |
125 | }) | 148 | }) |
126 | 149 | ||
127 | it('Should upload an audio file and transcode it to HLS', async function () { | 150 | it('Should upload an audio file and transcode it to HLS', async function () { |
@@ -132,7 +155,13 @@ describe('Test HLS videos', function () { | |||
132 | 155 | ||
133 | await waitJobs(servers) | 156 | await waitJobs(servers) |
134 | 157 | ||
135 | await checkHlsPlaylist(servers, videoAudioUUID, hlsOnly, [ DEFAULT_AUDIO_RESOLUTION, 360, 240 ]) | 158 | await checkHlsPlaylist({ |
159 | servers, | ||
160 | videoUUID: videoAudioUUID, | ||
161 | hlsOnly, | ||
162 | resolutions: [ DEFAULT_AUDIO_RESOLUTION, 360, 240 ], | ||
163 | objectStorageBaseUrl | ||
164 | }) | ||
136 | }) | 165 | }) |
137 | 166 | ||
138 | it('Should update the video', async function () { | 167 | it('Should update the video', async function () { |
@@ -142,7 +171,7 @@ describe('Test HLS videos', function () { | |||
142 | 171 | ||
143 | await waitJobs(servers) | 172 | await waitJobs(servers) |
144 | 173 | ||
145 | await checkHlsPlaylist(servers, videoUUID, hlsOnly) | 174 | await checkHlsPlaylist({ servers, videoUUID, hlsOnly, objectStorageBaseUrl }) |
146 | }) | 175 | }) |
147 | 176 | ||
148 | it('Should delete videos', async function () { | 177 | it('Should delete videos', async function () { |
@@ -229,6 +258,22 @@ describe('Test HLS videos', function () { | |||
229 | runTestSuite(true) | 258 | runTestSuite(true) |
230 | }) | 259 | }) |
231 | 260 | ||
261 | describe('With object storage enabled', function () { | ||
262 | if (areObjectStorageTestsDisabled()) return | ||
263 | |||
264 | before(async function () { | ||
265 | this.timeout(120000) | ||
266 | |||
267 | const configOverride = ObjectStorageCommand.getDefaultConfig() | ||
268 | await ObjectStorageCommand.prepareDefaultBuckets() | ||
269 | |||
270 | await servers[0].kill() | ||
271 | await servers[0].run(configOverride) | ||
272 | }) | ||
273 | |||
274 | runTestSuite(true, ObjectStorageCommand.getPlaylistBaseUrl()) | ||
275 | }) | ||
276 | |||
232 | after(async function () { | 277 | after(async function () { |
233 | await cleanupTests(servers) | 278 | await cleanupTests(servers) |
234 | }) | 279 | }) |
diff --git a/server/tests/cli/create-import-video-file-job.ts b/server/tests/cli/create-import-video-file-job.ts
index bddcff5e7..9f1b57a2e 100644
--- a/server/tests/cli/create-import-video-file-job.ts
+++ b/server/tests/cli/create-import-video-file-job.ts
@@ -2,8 +2,19 @@ | |||
2 | 2 | ||
3 | import 'mocha' | 3 | import 'mocha' |
4 | import * as chai from 'chai' | 4 | import * as chai from 'chai' |
5 | import { cleanupTests, createMultipleServers, doubleFollow, PeerTubeServer, setAccessTokensToServers, waitJobs } from '@shared/extra-utils' | 5 | import { |
6 | import { VideoFile } from '@shared/models' | 6 | areObjectStorageTestsDisabled, |
7 | cleanupTests, | ||
8 | createMultipleServers, | ||
9 | doubleFollow, | ||
10 | expectStartWith, | ||
11 | makeRawRequest, | ||
12 | ObjectStorageCommand, | ||
13 | PeerTubeServer, | ||
14 | setAccessTokensToServers, | ||
15 | waitJobs | ||
16 | } from '@shared/extra-utils' | ||
17 | import { HttpStatusCode, VideoDetails, VideoFile } from '@shared/models' | ||
7 | 18 | ||
8 | const expect = chai.expect | 19 | const expect = chai.expect |
9 | 20 | ||
@@ -17,22 +28,35 @@ function assertVideoProperties (video: VideoFile, resolution: number, extname: s | |||
17 | if (size) expect(video.size).to.equal(size) | 28 | if (size) expect(video.size).to.equal(size) |
18 | } | 29 | } |
19 | 30 | ||
20 | describe('Test create import video jobs', function () { | 31 | async function checkFiles (video: VideoDetails, objectStorage: boolean) { |
21 | this.timeout(60000) | 32 | for (const file of video.files) { |
33 | if (objectStorage) expectStartWith(file.fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl()) | ||
22 | 34 | ||
23 | let servers: PeerTubeServer[] = [] | 35 | await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) |
36 | } | ||
37 | } | ||
38 | |||
39 | function runTests (objectStorage: boolean) { | ||
24 | let video1UUID: string | 40 | let video1UUID: string |
25 | let video2UUID: string | 41 | let video2UUID: string |
26 | 42 | ||
43 | let servers: PeerTubeServer[] = [] | ||
44 | |||
27 | before(async function () { | 45 | before(async function () { |
28 | this.timeout(90000) | 46 | this.timeout(90000) |
29 | 47 | ||
48 | const config = objectStorage | ||
49 | ? ObjectStorageCommand.getDefaultConfig() | ||
50 | : {} | ||
51 | |||
30 | // Run server 2 to have transcoding enabled | 52 | // Run server 2 to have transcoding enabled |
31 | servers = await createMultipleServers(2) | 53 | servers = await createMultipleServers(2, config) |
32 | await setAccessTokensToServers(servers) | 54 | await setAccessTokensToServers(servers) |
33 | 55 | ||
34 | await doubleFollow(servers[0], servers[1]) | 56 | await doubleFollow(servers[0], servers[1]) |
35 | 57 | ||
58 | if (objectStorage) await ObjectStorageCommand.prepareDefaultBuckets() | ||
59 | |||
36 | // Upload two videos for our needs | 60 | // Upload two videos for our needs |
37 | { | 61 | { |
38 | const { uuid } = await servers[0].videos.upload({ attributes: { name: 'video1' } }) | 62 | const { uuid } = await servers[0].videos.upload({ attributes: { name: 'video1' } }) |
@@ -44,7 +68,6 @@ describe('Test create import video jobs', function () { | |||
44 | video2UUID = uuid | 68 | video2UUID = uuid |
45 | } | 69 | } |
46 | 70 | ||
47 | // Transcoding | ||
48 | await waitJobs(servers) | 71 | await waitJobs(servers) |
49 | }) | 72 | }) |
50 | 73 | ||
@@ -65,6 +88,8 @@ describe('Test create import video jobs', function () { | |||
65 | const [ originalVideo, transcodedVideo ] = videoDetails.files | 88 | const [ originalVideo, transcodedVideo ] = videoDetails.files |
66 | assertVideoProperties(originalVideo, 720, 'webm', 218910) | 89 | assertVideoProperties(originalVideo, 720, 'webm', 218910) |
67 | assertVideoProperties(transcodedVideo, 480, 'webm', 69217) | 90 | assertVideoProperties(transcodedVideo, 480, 'webm', 69217) |
91 | |||
92 | await checkFiles(videoDetails, objectStorage) | ||
68 | } | 93 | } |
69 | }) | 94 | }) |
70 | 95 | ||
@@ -87,6 +112,8 @@ describe('Test create import video jobs', function () { | |||
87 | assertVideoProperties(transcodedVideo420, 480, 'mp4') | 112 | assertVideoProperties(transcodedVideo420, 480, 'mp4') |
88 | assertVideoProperties(transcodedVideo320, 360, 'mp4') | 113 | assertVideoProperties(transcodedVideo320, 360, 'mp4') |
89 | assertVideoProperties(transcodedVideo240, 240, 'mp4') | 114 | assertVideoProperties(transcodedVideo240, 240, 'mp4') |
115 | |||
116 | await checkFiles(videoDetails, objectStorage) | ||
90 | } | 117 | } |
91 | }) | 118 | }) |
92 | 119 | ||
@@ -107,10 +134,25 @@ describe('Test create import video jobs', function () { | |||
107 | const [ video720, video480 ] = videoDetails.files | 134 | const [ video720, video480 ] = videoDetails.files |
108 | assertVideoProperties(video720, 720, 'webm', 942961) | 135 | assertVideoProperties(video720, 720, 'webm', 942961) |
109 | assertVideoProperties(video480, 480, 'webm', 69217) | 136 | assertVideoProperties(video480, 480, 'webm', 69217) |
137 | |||
138 | await checkFiles(videoDetails, objectStorage) | ||
110 | } | 139 | } |
111 | }) | 140 | }) |
112 | 141 | ||
113 | after(async function () { | 142 | after(async function () { |
114 | await cleanupTests(servers) | 143 | await cleanupTests(servers) |
115 | }) | 144 | }) |
145 | } | ||
146 | |||
147 | describe('Test create import video jobs', function () { | ||
148 | |||
149 | describe('On filesystem', function () { | ||
150 | runTests(false) | ||
151 | }) | ||
152 | |||
153 | describe('On object storage', function () { | ||
154 | if (areObjectStorageTestsDisabled()) return | ||
155 | |||
156 | runTests(true) | ||
157 | }) | ||
116 | }) | 158 | }) |
diff --git a/server/tests/cli/create-transcoding-job.ts b/server/tests/cli/create-transcoding-job.ts
index df787ccdc..3313a492f 100644
--- a/server/tests/cli/create-transcoding-job.ts
+++ b/server/tests/cli/create-transcoding-job.ts
@@ -2,10 +2,15 @@ | |||
2 | 2 | ||
3 | import 'mocha' | 3 | import 'mocha' |
4 | import * as chai from 'chai' | 4 | import * as chai from 'chai' |
5 | import { HttpStatusCode, VideoFile } from '@shared/models' | ||
5 | import { | 6 | import { |
7 | areObjectStorageTestsDisabled, | ||
6 | cleanupTests, | 8 | cleanupTests, |
7 | createMultipleServers, | 9 | createMultipleServers, |
8 | doubleFollow, | 10 | doubleFollow, |
11 | expectStartWith, | ||
12 | makeRawRequest, | ||
13 | ObjectStorageCommand, | ||
9 | PeerTubeServer, | 14 | PeerTubeServer, |
10 | setAccessTokensToServers, | 15 | setAccessTokensToServers, |
11 | waitJobs | 16 | waitJobs |
@@ -13,39 +18,39 @@ import { | |||
13 | 18 | ||
14 | const expect = chai.expect | 19 | const expect = chai.expect |
15 | 20 | ||
16 | describe('Test create transcoding jobs', function () { | 21 | async function checkFilesInObjectStorage (files: VideoFile[], type: 'webtorrent' | 'playlist') { |
17 | let servers: PeerTubeServer[] = [] | 22 | for (const file of files) { |
18 | const videosUUID: string[] = [] | 23 | const shouldStartWith = type === 'webtorrent' |
24 | ? ObjectStorageCommand.getWebTorrentBaseUrl() | ||
25 | : ObjectStorageCommand.getPlaylistBaseUrl() | ||
19 | 26 | ||
20 | const config = { | 27 | expectStartWith(file.fileUrl, shouldStartWith) |
21 | transcoding: { | 28 | |
22 | enabled: false, | 29 | await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200) |
23 | resolutions: { | ||
24 | '240p': true, | ||
25 | '360p': true, | ||
26 | '480p': true, | ||
27 | '720p': true, | ||
28 | '1080p': true, | ||
29 | '1440p': true, | ||
30 | '2160p': true | ||
31 | }, | ||
32 | hls: { | ||
33 | enabled: false | ||
34 | } | ||
35 | } | ||
36 | } | 30 | } |
31 | } | ||
32 | |||
33 | function runTests (objectStorage: boolean) { | ||
34 | let servers: PeerTubeServer[] = [] | ||
35 | const videosUUID: string[] = [] | ||
37 | 36 | ||
38 | before(async function () { | 37 | before(async function () { |
39 | this.timeout(60000) | 38 | this.timeout(60000) |
40 | 39 | ||
40 | const config = objectStorage | ||
41 | ? ObjectStorageCommand.getDefaultConfig() | ||
42 | : {} | ||
43 | |||
41 | // Run server 2 to have transcoding enabled | 44 | // Run server 2 to have transcoding enabled |
42 | servers = await createMultipleServers(2) | 45 | servers = await createMultipleServers(2, config) |
43 | await setAccessTokensToServers(servers) | 46 | await setAccessTokensToServers(servers) |
44 | 47 | ||
45 | await servers[0].config.updateCustomSubConfig({ newConfig: config }) | 48 | await servers[0].config.disableTranscoding() |
46 | 49 | ||
47 | await doubleFollow(servers[0], servers[1]) | 50 | await doubleFollow(servers[0], servers[1]) |
48 | 51 | ||
52 | if (objectStorage) await ObjectStorageCommand.prepareDefaultBuckets() | ||
53 | |||
49 | for (let i = 1; i <= 5; i++) { | 54 | for (let i = 1; i <= 5; i++) { |
50 | const { uuid } = await servers[0].videos.upload({ attributes: { name: 'video' + i } }) | 55 | const { uuid } = await servers[0].videos.upload({ attributes: { name: 'video' + i } }) |
51 | videosUUID.push(uuid) | 56 | videosUUID.push(uuid) |
@@ -81,27 +86,29 @@ describe('Test create transcoding jobs', function () { | |||
81 | let infoHashes: { [id: number]: string } | 86 | let infoHashes: { [id: number]: string } |
82 | 87 | ||
83 | for (const video of data) { | 88 | for (const video of data) { |
84 | const videoDetail = await server.videos.get({ id: video.uuid }) | 89 | const videoDetails = await server.videos.get({ id: video.uuid }) |
85 | 90 | ||
86 | if (video.uuid === videosUUID[1]) { | 91 | if (video.uuid === videosUUID[1]) { |
87 | expect(videoDetail.files).to.have.lengthOf(4) | 92 | expect(videoDetails.files).to.have.lengthOf(4) |
88 | expect(videoDetail.streamingPlaylists).to.have.lengthOf(0) | 93 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(0) |
94 | |||
95 | if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') | ||
89 | 96 | ||
90 | if (!infoHashes) { | 97 | if (!infoHashes) { |
91 | infoHashes = {} | 98 | infoHashes = {} |
92 | 99 | ||
93 | for (const file of videoDetail.files) { | 100 | for (const file of videoDetails.files) { |
94 | infoHashes[file.resolution.id.toString()] = file.magnetUri | 101 | infoHashes[file.resolution.id.toString()] = file.magnetUri |
95 | } | 102 | } |
96 | } else { | 103 | } else { |
97 | for (const resolution of Object.keys(infoHashes)) { | 104 | for (const resolution of Object.keys(infoHashes)) { |
98 | const file = videoDetail.files.find(f => f.resolution.id.toString() === resolution) | 105 | const file = videoDetails.files.find(f => f.resolution.id.toString() === resolution) |
99 | expect(file.magnetUri).to.equal(infoHashes[resolution]) | 106 | expect(file.magnetUri).to.equal(infoHashes[resolution]) |
100 | } | 107 | } |
101 | } | 108 | } |
102 | } else { | 109 | } else { |
103 | expect(videoDetail.files).to.have.lengthOf(1) | 110 | expect(videoDetails.files).to.have.lengthOf(1) |
104 | expect(videoDetail.streamingPlaylists).to.have.lengthOf(0) | 111 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(0) |
105 | } | 112 | } |
106 | } | 113 | } |
107 | } | 114 | } |
@@ -125,6 +132,8 @@ describe('Test create transcoding jobs', function () { | |||
125 | expect(videoDetails.files[1].resolution.id).to.equal(480) | 132 | expect(videoDetails.files[1].resolution.id).to.equal(480) |
126 | 133 | ||
127 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(0) | 134 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(0) |
135 | |||
136 | if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') | ||
128 | } | 137 | } |
129 | }) | 138 | }) |
130 | 139 | ||
@@ -139,11 +148,15 @@ describe('Test create transcoding jobs', function () { | |||
139 | const videoDetails = await server.videos.get({ id: videosUUID[2] }) | 148 | const videoDetails = await server.videos.get({ id: videosUUID[2] }) |
140 | 149 | ||
141 | expect(videoDetails.files).to.have.lengthOf(1) | 150 | expect(videoDetails.files).to.have.lengthOf(1) |
151 | if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') | ||
152 | |||
142 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) | 153 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) |
143 | 154 | ||
144 | const files = videoDetails.streamingPlaylists[0].files | 155 | const files = videoDetails.streamingPlaylists[0].files |
145 | expect(files).to.have.lengthOf(1) | 156 | expect(files).to.have.lengthOf(1) |
146 | expect(files[0].resolution.id).to.equal(480) | 157 | expect(files[0].resolution.id).to.equal(480) |
158 | |||
159 | if (objectStorage) await checkFilesInObjectStorage(files, 'playlist') | ||
147 | } | 160 | } |
148 | }) | 161 | }) |
149 | 162 | ||
@@ -160,6 +173,8 @@ describe('Test create transcoding jobs', function () { | |||
160 | const files = videoDetails.streamingPlaylists[0].files | 173 | const files = videoDetails.streamingPlaylists[0].files |
161 | expect(files).to.have.lengthOf(1) | 174 | expect(files).to.have.lengthOf(1) |
162 | expect(files[0].resolution.id).to.equal(480) | 175 | expect(files[0].resolution.id).to.equal(480) |
176 | |||
177 | if (objectStorage) await checkFilesInObjectStorage(files, 'playlist') | ||
163 | } | 178 | } |
164 | }) | 179 | }) |
165 | 180 | ||
@@ -178,15 +193,15 @@ describe('Test create transcoding jobs', function () { | |||
178 | 193 | ||
179 | const files = videoDetails.streamingPlaylists[0].files | 194 | const files = videoDetails.streamingPlaylists[0].files |
180 | expect(files).to.have.lengthOf(4) | 195 | expect(files).to.have.lengthOf(4) |
196 | |||
197 | if (objectStorage) await checkFilesInObjectStorage(files, 'playlist') | ||
181 | } | 198 | } |
182 | }) | 199 | }) |
183 | 200 | ||
184 | it('Should optimize the video file and generate HLS videos if enabled in config', async function () { | 201 | it('Should optimize the video file and generate HLS videos if enabled in config', async function () { |
185 | this.timeout(120000) | 202 | this.timeout(120000) |
186 | 203 | ||
187 | config.transcoding.hls.enabled = true | 204 | await servers[0].config.enableTranscoding() |
188 | await servers[0].config.updateCustomSubConfig({ newConfig: config }) | ||
189 | |||
190 | await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[4]}`) | 205 | await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[4]}`) |
191 | 206 | ||
192 | await waitJobs(servers) | 207 | await waitJobs(servers) |
@@ -197,10 +212,28 @@ describe('Test create transcoding jobs', function () { | |||
197 | expect(videoDetails.files).to.have.lengthOf(4) | 212 | expect(videoDetails.files).to.have.lengthOf(4) |
198 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) | 213 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) |
199 | expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(4) | 214 | expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(4) |
215 | |||
216 | if (objectStorage) { | ||
217 | await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') | ||
218 | await checkFilesInObjectStorage(videoDetails.streamingPlaylists[0].files, 'playlist') | ||
219 | } | ||
200 | } | 220 | } |
201 | }) | 221 | }) |
202 | 222 | ||
203 | after(async function () { | 223 | after(async function () { |
204 | await cleanupTests(servers) | 224 | await cleanupTests(servers) |
205 | }) | 225 | }) |
226 | } | ||
227 | |||
228 | describe('Test create transcoding jobs', function () { | ||
229 | |||
230 | describe('On filesystem', function () { | ||
231 | runTests(false) | ||
232 | }) | ||
233 | |||
234 | describe('On object storage', function () { | ||
235 | if (areObjectStorageTestsDisabled()) return | ||
236 | |||
237 | runTests(true) | ||
238 | }) | ||
206 | }) | 239 | }) |
diff --git a/server/tests/helpers/request.ts b/server/tests/helpers/request.ts
index 7f7873df3..c9a2eb831 100644
--- a/server/tests/helpers/request.ts
+++ b/server/tests/helpers/request.ts
@@ -13,7 +13,7 @@ describe('Request helpers', function () { | |||
13 | 13 | ||
14 | it('Should throw an error when the bytes limit is exceeded for request', async function () { | 14 | it('Should throw an error when the bytes limit is exceeded for request', async function () { |
15 | try { | 15 | try { |
16 | await doRequest(FIXTURE_URLS.video4K, { bodyKBLimit: 3 }) | 16 | await doRequest(FIXTURE_URLS.file4K, { bodyKBLimit: 3 }) |
17 | } catch { | 17 | } catch { |
18 | return | 18 | return |
19 | } | 19 | } |
@@ -23,7 +23,7 @@ describe('Request helpers', function () { | |||
23 | 23 | ||
24 | it('Should throw an error when the bytes limit is exceeded for request and save file', async function () { | 24 | it('Should throw an error when the bytes limit is exceeded for request and save file', async function () { |
25 | try { | 25 | try { |
26 | await doRequestAndSaveToFile(FIXTURE_URLS.video4K, destPath1, { bodyKBLimit: 3 }) | 26 | await doRequestAndSaveToFile(FIXTURE_URLS.file4K, destPath1, { bodyKBLimit: 3 }) |
27 | } catch { | 27 | } catch { |
28 | 28 | ||
29 | await wait(500) | 29 | await wait(500) |
@@ -35,8 +35,8 @@ describe('Request helpers', function () { | |||
35 | }) | 35 | }) |
36 | 36 | ||
37 | it('Should succeed if the file is below the limit', async function () { | 37 | it('Should succeed if the file is below the limit', async function () { |
38 | await doRequest(FIXTURE_URLS.video4K, { bodyKBLimit: 5 }) | 38 | await doRequest(FIXTURE_URLS.file4K, { bodyKBLimit: 5 }) |
39 | await doRequestAndSaveToFile(FIXTURE_URLS.video4K, destPath2, { bodyKBLimit: 5 }) | 39 | await doRequestAndSaveToFile(FIXTURE_URLS.file4K, destPath2, { bodyKBLimit: 5 }) |
40 | 40 | ||
41 | expect(await pathExists(destPath2)).to.be.true | 41 | expect(await pathExists(destPath2)).to.be.true |
42 | }) | 42 | }) |