diff options
author | Chocobozzz <me@florianbigard.com> | 2023-07-31 14:34:36 +0200 |
---|---|---|
committer | Chocobozzz <me@florianbigard.com> | 2023-08-11 15:02:33 +0200 |
commit | 3a4992633ee62d5edfbb484d9c6bcb3cf158489d (patch) | |
tree | e4510b39bdac9c318fdb4b47018d08f15368b8f0 /server/tests/api/videos/resumable-upload.ts | |
parent | 04d1da5621d25d59bd5fa1543b725c497bf5d9a8 (diff) | |
download | PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.tar.gz PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.tar.zst PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.zip |
Migrate server to ESM
Sorry for the very big commit that may lead to git log issues and merge
conflicts, but it's a major step forward:
* Server can be faster at startup because imports() are async and we can
easily lazy import big modules
* Angular doesn't seem to support ES import (with .js extension), so we
had to correctly organize peertube into a monorepo:
* Use yarn workspace feature
* Use typescript reference projects for dependencies
* Shared projects have been moved into "packages", each one is now a
node module (with a dedicated package.json/tsconfig.json)
* server/tools have been moved into apps/ and is now a dedicated app
bundled and published on NPM so users don't have to build peertube
cli tools manually
* server/tests have been moved into packages/ so we don't compile
them every time we want to run the server
* Use isolatedModule option:
* Had to move from const enum to const
(https://www.typescriptlang.org/docs/handbook/enums.html#objects-vs-enums)
* Had to explicitly specify "type" imports when used in decorators
* Prefer tsx (that uses esbuild under the hood) instead of ts-node to
load typescript files (tests with mocha or scripts):
* To reduce test complexity as esbuild doesn't support decorator
metadata, we only test server files that do not import server
models
* We still build tests files into js files for a faster CI
* Remove unmaintained peertube CLI import script
* Removed some barrels to speed up execution (less imports)
Diffstat (limited to 'server/tests/api/videos/resumable-upload.ts')
-rw-r--r-- | server/tests/api/videos/resumable-upload.ts | 310 |
1 file changed, 0 insertions, 310 deletions
diff --git a/server/tests/api/videos/resumable-upload.ts b/server/tests/api/videos/resumable-upload.ts deleted file mode 100644 index cac1201e9..000000000 --- a/server/tests/api/videos/resumable-upload.ts +++ /dev/null | |||
@@ -1,310 +0,0 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import { expect } from 'chai' | ||
4 | import { pathExists, readdir, stat } from 'fs-extra' | ||
5 | import { join } from 'path' | ||
6 | import { buildAbsoluteFixturePath } from '@shared/core-utils' | ||
7 | import { sha1 } from '@shared/extra-utils' | ||
8 | import { HttpStatusCode, VideoPrivacy } from '@shared/models' | ||
9 | import { cleanupTests, createSingleServer, PeerTubeServer, setAccessTokensToServers, setDefaultVideoChannel } from '@shared/server-commands' | ||
10 | |||
11 | // Most classic resumable upload tests are done in other test suites | ||
12 | |||
13 | describe('Test resumable upload', function () { | ||
14 | const path = '/api/v1/videos/upload-resumable' | ||
15 | const defaultFixture = 'video_short.mp4' | ||
16 | let server: PeerTubeServer | ||
17 | let rootId: number | ||
18 | let userAccessToken: string | ||
19 | let userChannelId: number | ||
20 | |||
21 | async function buildSize (fixture: string, size?: number) { | ||
22 | if (size !== undefined) return size | ||
23 | |||
24 | const baseFixture = buildAbsoluteFixturePath(fixture) | ||
25 | return (await stat(baseFixture)).size | ||
26 | } | ||
27 | |||
28 | async function prepareUpload (options: { | ||
29 | channelId?: number | ||
30 | token?: string | ||
31 | size?: number | ||
32 | originalName?: string | ||
33 | lastModified?: number | ||
34 | } = {}) { | ||
35 | const { token, originalName, lastModified } = options | ||
36 | |||
37 | const size = await buildSize(defaultFixture, options.size) | ||
38 | |||
39 | const attributes = { | ||
40 | name: 'video', | ||
41 | channelId: options.channelId ?? server.store.channel.id, | ||
42 | privacy: VideoPrivacy.PUBLIC, | ||
43 | fixture: defaultFixture | ||
44 | } | ||
45 | |||
46 | const mimetype = 'video/mp4' | ||
47 | |||
48 | const res = await server.videos.prepareResumableUpload({ path, token, attributes, size, mimetype, originalName, lastModified }) | ||
49 | |||
50 | return res.header['location'].split('?')[1] | ||
51 | } | ||
52 | |||
53 | async function sendChunks (options: { | ||
54 | token?: string | ||
55 | pathUploadId: string | ||
56 | size?: number | ||
57 | expectedStatus?: HttpStatusCode | ||
58 | contentLength?: number | ||
59 | contentRange?: string | ||
60 | contentRangeBuilder?: (start: number, chunk: any) => string | ||
61 | digestBuilder?: (chunk: any) => string | ||
62 | }) { | ||
63 | const { token, pathUploadId, expectedStatus, contentLength, contentRangeBuilder, digestBuilder } = options | ||
64 | |||
65 | const size = await buildSize(defaultFixture, options.size) | ||
66 | const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture) | ||
67 | |||
68 | return server.videos.sendResumableChunks({ | ||
69 | token, | ||
70 | path, | ||
71 | pathUploadId, | ||
72 | videoFilePath: absoluteFilePath, | ||
73 | size, | ||
74 | contentLength, | ||
75 | contentRangeBuilder, | ||
76 | digestBuilder, | ||
77 | expectedStatus | ||
78 | }) | ||
79 | } | ||
80 | |||
81 | async function checkFileSize (uploadIdArg: string, expectedSize: number | null) { | ||
82 | const uploadId = uploadIdArg.replace(/^upload_id=/, '') | ||
83 | |||
84 | const subPath = join('tmp', 'resumable-uploads', `${rootId}-${uploadId}.mp4`) | ||
85 | const filePath = server.servers.buildDirectory(subPath) | ||
86 | const exists = await pathExists(filePath) | ||
87 | |||
88 | if (expectedSize === null) { | ||
89 | expect(exists).to.be.false | ||
90 | return | ||
91 | } | ||
92 | |||
93 | expect(exists).to.be.true | ||
94 | |||
95 | expect((await stat(filePath)).size).to.equal(expectedSize) | ||
96 | } | ||
97 | |||
98 | async function countResumableUploads (wait?: number) { | ||
99 | const subPath = join('tmp', 'resumable-uploads') | ||
100 | const filePath = server.servers.buildDirectory(subPath) | ||
101 | await new Promise(resolve => setTimeout(resolve, wait)) | ||
102 | const files = await readdir(filePath) | ||
103 | return files.length | ||
104 | } | ||
105 | |||
106 | before(async function () { | ||
107 | this.timeout(30000) | ||
108 | |||
109 | server = await createSingleServer(1) | ||
110 | await setAccessTokensToServers([ server ]) | ||
111 | await setDefaultVideoChannel([ server ]) | ||
112 | |||
113 | const body = await server.users.getMyInfo() | ||
114 | rootId = body.id | ||
115 | |||
116 | { | ||
117 | userAccessToken = await server.users.generateUserAndToken('user1') | ||
118 | const { videoChannels } = await server.users.getMyInfo({ token: userAccessToken }) | ||
119 | userChannelId = videoChannels[0].id | ||
120 | } | ||
121 | |||
122 | await server.users.update({ userId: rootId, videoQuota: 10_000_000 }) | ||
123 | }) | ||
124 | |||
125 | describe('Directory cleaning', function () { | ||
126 | |||
127 | it('Should correctly delete files after an upload', async function () { | ||
128 | const uploadId = await prepareUpload() | ||
129 | await sendChunks({ pathUploadId: uploadId }) | ||
130 | await server.videos.endResumableUpload({ path, pathUploadId: uploadId }) | ||
131 | |||
132 | expect(await countResumableUploads()).to.equal(0) | ||
133 | }) | ||
134 | |||
135 | it('Should correctly delete corrupt files', async function () { | ||
136 | const uploadId = await prepareUpload({ size: 8 * 1024 }) | ||
137 | await sendChunks({ pathUploadId: uploadId, size: 8 * 1024, expectedStatus: HttpStatusCode.UNPROCESSABLE_ENTITY_422 }) | ||
138 | |||
139 | expect(await countResumableUploads(2000)).to.equal(0) | ||
140 | }) | ||
141 | |||
142 | it('Should not delete files after an unfinished upload', async function () { | ||
143 | await prepareUpload() | ||
144 | |||
145 | expect(await countResumableUploads()).to.equal(2) | ||
146 | }) | ||
147 | |||
148 | it('Should not delete recent uploads', async function () { | ||
149 | await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } }) | ||
150 | |||
151 | expect(await countResumableUploads()).to.equal(2) | ||
152 | }) | ||
153 | |||
154 | it('Should delete old uploads', async function () { | ||
155 | await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } }) | ||
156 | |||
157 | expect(await countResumableUploads()).to.equal(0) | ||
158 | }) | ||
159 | }) | ||
160 | |||
161 | describe('Resumable upload and chunks', function () { | ||
162 | |||
163 | it('Should accept the same amount of chunks', async function () { | ||
164 | const uploadId = await prepareUpload() | ||
165 | await sendChunks({ pathUploadId: uploadId }) | ||
166 | |||
167 | await checkFileSize(uploadId, null) | ||
168 | }) | ||
169 | |||
170 | it('Should not accept more chunks than expected', async function () { | ||
171 | const uploadId = await prepareUpload({ size: 100 }) | ||
172 | |||
173 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 }) | ||
174 | await checkFileSize(uploadId, 0) | ||
175 | }) | ||
176 | |||
177 | it('Should not accept more chunks than expected with an invalid content length/content range', async function () { | ||
178 | const uploadId = await prepareUpload({ size: 1500 }) | ||
179 | |||
180 | // Content length check can be different depending on the node version | ||
181 | try { | ||
182 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409, contentLength: 1000 }) | ||
183 | await checkFileSize(uploadId, 0) | ||
184 | } catch { | ||
185 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 }) | ||
186 | await checkFileSize(uploadId, 0) | ||
187 | } | ||
188 | }) | ||
189 | |||
190 | it('Should not accept more chunks than expected with an invalid content length', async function () { | ||
191 | const uploadId = await prepareUpload({ size: 500 }) | ||
192 | |||
193 | const size = 1000 | ||
194 | |||
195 | // Content length check seems to have changed in v16 | ||
196 | const expectedStatus = process.version.startsWith('v16') | ||
197 | ? HttpStatusCode.CONFLICT_409 | ||
198 | : HttpStatusCode.BAD_REQUEST_400 | ||
199 | |||
200 | const contentRangeBuilder = (start: number) => `bytes ${start}-${start + size - 1}/${size}` | ||
201 | await sendChunks({ pathUploadId: uploadId, expectedStatus, contentRangeBuilder, contentLength: size }) | ||
202 | await checkFileSize(uploadId, 0) | ||
203 | }) | ||
204 | |||
205 | it('Should be able to accept 2 PUT requests', async function () { | ||
206 | const uploadId = await prepareUpload() | ||
207 | |||
208 | const result1 = await sendChunks({ pathUploadId: uploadId }) | ||
209 | const result2 = await sendChunks({ pathUploadId: uploadId }) | ||
210 | |||
211 | expect(result1.body.video.uuid).to.exist | ||
212 | expect(result1.body.video.uuid).to.equal(result2.body.video.uuid) | ||
213 | |||
214 | expect(result1.headers['x-resumable-upload-cached']).to.not.exist | ||
215 | expect(result2.headers['x-resumable-upload-cached']).to.equal('true') | ||
216 | |||
217 | await checkFileSize(uploadId, null) | ||
218 | }) | ||
219 | |||
220 | it('Should not have the same upload id with 2 different users', async function () { | ||
221 | const originalName = 'toto.mp4' | ||
222 | const lastModified = new Date().getTime() | ||
223 | |||
224 | const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken }) | ||
225 | const uploadId2 = await prepareUpload({ originalName, lastModified, channelId: userChannelId, token: userAccessToken }) | ||
226 | |||
227 | expect(uploadId1).to.not.equal(uploadId2) | ||
228 | }) | ||
229 | |||
230 | it('Should have the same upload id with the same user', async function () { | ||
231 | const originalName = 'toto.mp4' | ||
232 | const lastModified = new Date().getTime() | ||
233 | |||
234 | const uploadId1 = await prepareUpload({ originalName, lastModified }) | ||
235 | const uploadId2 = await prepareUpload({ originalName, lastModified }) | ||
236 | |||
237 | expect(uploadId1).to.equal(uploadId2) | ||
238 | }) | ||
239 | |||
240 | it('Should not cache a request with 2 different users', async function () { | ||
241 | const originalName = 'toto.mp4' | ||
242 | const lastModified = new Date().getTime() | ||
243 | |||
244 | const uploadId = await prepareUpload({ originalName, lastModified, token: server.accessToken }) | ||
245 | |||
246 | await sendChunks({ pathUploadId: uploadId, token: server.accessToken }) | ||
247 | await sendChunks({ pathUploadId: uploadId, token: userAccessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) | ||
248 | }) | ||
249 | |||
250 | it('Should not cache a request after a delete', async function () { | ||
251 | const originalName = 'toto.mp4' | ||
252 | const lastModified = new Date().getTime() | ||
253 | const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken }) | ||
254 | |||
255 | await sendChunks({ pathUploadId: uploadId1 }) | ||
256 | await server.videos.endResumableUpload({ path, pathUploadId: uploadId1 }) | ||
257 | |||
258 | const uploadId2 = await prepareUpload({ originalName, lastModified, token: server.accessToken }) | ||
259 | expect(uploadId1).to.equal(uploadId2) | ||
260 | |||
261 | const result2 = await sendChunks({ pathUploadId: uploadId1 }) | ||
262 | expect(result2.headers['x-resumable-upload-cached']).to.not.exist | ||
263 | }) | ||
264 | |||
265 | it('Should not cache after video deletion', async function () { | ||
266 | const originalName = 'toto.mp4' | ||
267 | const lastModified = new Date().getTime() | ||
268 | |||
269 | const uploadId1 = await prepareUpload({ originalName, lastModified }) | ||
270 | const result1 = await sendChunks({ pathUploadId: uploadId1 }) | ||
271 | await server.videos.remove({ id: result1.body.video.uuid }) | ||
272 | |||
273 | const uploadId2 = await prepareUpload({ originalName, lastModified }) | ||
274 | const result2 = await sendChunks({ pathUploadId: uploadId2 }) | ||
275 | expect(result1.body.video.uuid).to.not.equal(result2.body.video.uuid) | ||
276 | |||
277 | expect(result2.headers['x-resumable-upload-cached']).to.not.exist | ||
278 | |||
279 | await checkFileSize(uploadId1, null) | ||
280 | await checkFileSize(uploadId2, null) | ||
281 | }) | ||
282 | |||
283 | it('Should refuse an invalid digest', async function () { | ||
284 | const uploadId = await prepareUpload({ token: server.accessToken }) | ||
285 | |||
286 | await sendChunks({ | ||
287 | pathUploadId: uploadId, | ||
288 | token: server.accessToken, | ||
289 | digestBuilder: () => 'sha=' + 'a'.repeat(40), | ||
290 | expectedStatus: 460 as any | ||
291 | }) | ||
292 | }) | ||
293 | |||
294 | it('Should accept an appropriate digest', async function () { | ||
295 | const uploadId = await prepareUpload({ token: server.accessToken }) | ||
296 | |||
297 | await sendChunks({ | ||
298 | pathUploadId: uploadId, | ||
299 | token: server.accessToken, | ||
300 | digestBuilder: (chunk: Buffer) => { | ||
301 | return 'sha1=' + sha1(chunk, 'base64') | ||
302 | } | ||
303 | }) | ||
304 | }) | ||
305 | }) | ||
306 | |||
307 | after(async function () { | ||
308 | await cleanupTests([ server ]) | ||
309 | }) | ||
310 | }) | ||