Diffstat (limited to 'packages/tests/src/api/videos/resumable-upload.ts')
-rw-r--r--  packages/tests/src/api/videos/resumable-upload.ts  316
1 files changed, 316 insertions, 0 deletions
diff --git a/packages/tests/src/api/videos/resumable-upload.ts b/packages/tests/src/api/videos/resumable-upload.ts
new file mode 100644
index 000000000..628e0298c
--- /dev/null
+++ b/packages/tests/src/api/videos/resumable-upload.ts
@@ -0,0 +1,316 @@
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import { expect } from 'chai'
import { pathExists } from 'fs-extra/esm'
import { readdir, stat } from 'fs/promises'
import { join } from 'path'
import { HttpStatusCode, HttpStatusCodeType, VideoPrivacy } from '@peertube/peertube-models'
import { buildAbsoluteFixturePath, sha1 } from '@peertube/peertube-node-utils'
import {
  cleanupTests,
  createSingleServer,
  PeerTubeServer,
  setAccessTokensToServers,
  setDefaultVideoChannel
} from '@peertube/peertube-server-commands'

// Most classic resumable upload tests are done in other test suites

describe('Test resumable upload', function () {
  const path = '/api/v1/videos/upload-resumable'
  const defaultFixture = 'video_short.mp4'
  let server: PeerTubeServer
  let rootId: number
  let userAccessToken: string
  let userChannelId: number

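  // Return the explicit size override when provided, otherwise the on-disk size of the fixture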
  async function buildSize (fixture: string, size?: number) {
    if (size !== undefined) return size

    const baseFixture = buildAbsoluteFixturePath(fixture)
    return (await stat(baseFixture)).size
  }

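  // Initiate a resumable upload session and return the "upload_id=..." query string from the Location header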
  async function prepareUpload (options: {
    channelId?: number
    token?: string
    size?: number
    originalName?: string
    lastModified?: number
  } = {}) {
    const { token, originalName, lastModified } = options

    const size = await buildSize(defaultFixture, options.size)

    const attributes = {
      name: 'video',
      channelId: options.channelId ?? server.store.channel.id,
      privacy: VideoPrivacy.PUBLIC,
      fixture: defaultFixture
    }

    const mimetype = 'video/mp4'

    const res = await server.videos.prepareResumableUpload({ path, token, attributes, size, mimetype, originalName, lastModified })

    return res.header['location'].split('?')[1]
  }

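  // Send the fixture chunks to an existing upload session, optionally overriding headers to trigger error paths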
  async function sendChunks (options: {
    token?: string
    pathUploadId: string
    size?: number
    expectedStatus?: HttpStatusCodeType
    contentLength?: number
    contentRange?: string
    contentRangeBuilder?: (start: number, chunk: any) => string
    digestBuilder?: (chunk: any) => string
  }) {
    const { token, pathUploadId, expectedStatus, contentLength, contentRangeBuilder, digestBuilder } = options

    const size = await buildSize(defaultFixture, options.size)
    const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture)

    return server.videos.sendResumableChunks({
      token,
      path,
      pathUploadId,
      videoFilePath: absoluteFilePath,
      size,
      contentLength,
      contentRangeBuilder,
      digestBuilder,
      expectedStatus
    })
  }

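  // Check the size of the partial file in tmp/resumable-uploads (expectedSize === null means the file must not exist)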
  async function checkFileSize (uploadIdArg: string, expectedSize: number | null) {
    const uploadId = uploadIdArg.replace(/^upload_id=/, '')

    const subPath = join('tmp', 'resumable-uploads', `${rootId}-${uploadId}.mp4`)
    const filePath = server.servers.buildDirectory(subPath)
    const exists = await pathExists(filePath)

    if (expectedSize === null) {
      expect(exists).to.be.false
      return
    }

    expect(exists).to.be.true

    expect((await stat(filePath)).size).to.equal(expectedSize)
  }

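  // Count files left in tmp/resumable-uploads, optionally waiting first so the server has time to clean up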
  async function countResumableUploads (wait?: number) {
    const subPath = join('tmp', 'resumable-uploads')
    const filePath = server.servers.buildDirectory(subPath)
    await new Promise(resolve => setTimeout(resolve, wait))
    const files = await readdir(filePath)
    return files.length
  }

  before(async function () {
    this.timeout(30000)

    server = await createSingleServer(1)
    await setAccessTokensToServers([ server ])
    await setDefaultVideoChannel([ server ])

    const body = await server.users.getMyInfo()
    rootId = body.id

    {
      userAccessToken = await server.users.generateUserAndToken('user1')
      const { videoChannels } = await server.users.getMyInfo({ token: userAccessToken })
      userChannelId = videoChannels[0].id
    }

    await server.users.update({ userId: rootId, videoQuota: 10_000_000 })
  })

  describe('Directory cleaning', function () {

    it('Should correctly delete files after an upload', async function () {
      const uploadId = await prepareUpload()
      await sendChunks({ pathUploadId: uploadId })
      await server.videos.endResumableUpload({ path, pathUploadId: uploadId })

      expect(await countResumableUploads()).to.equal(0)
    })

    it('Should correctly delete corrupt files', async function () {
      const uploadId = await prepareUpload({ size: 8 * 1024 })
      await sendChunks({ pathUploadId: uploadId, size: 8 * 1024, expectedStatus: HttpStatusCode.UNPROCESSABLE_ENTITY_422 })

      expect(await countResumableUploads(2000)).to.equal(0)
    })

    it('Should not delete files after an unfinished upload', async function () {
      await prepareUpload()

      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should not delete recent uploads', async function () {
      await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should delete old uploads', async function () {
      await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(0)
    })
  })

  describe('Resumable upload and chunks', function () {

    it('Should accept the same amount of chunks', async function () {
      const uploadId = await prepareUpload()
      await sendChunks({ pathUploadId: uploadId })

      await checkFileSize(uploadId, null)
    })

    it('Should not accept more chunks than expected', async function () {
      const uploadId = await prepareUpload({ size: 100 })

      await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 })
      await checkFileSize(uploadId, 0)
    })

    it('Should not accept more chunks than expected with an invalid content length/content range', async function () {
      const uploadId = await prepareUpload({ size: 1500 })

      // Content length check can be different depending on the node version
      try {
        await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409, contentLength: 1000 })
        await checkFileSize(uploadId, 0)
      } catch {
        await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 })
        await checkFileSize(uploadId, 0)
      }
    })

    it('Should not accept more chunks than expected with an invalid content length', async function () {
      const uploadId = await prepareUpload({ size: 500 })

      const size = 1000

      // Content length check seems to have changed in v16
      const expectedStatus = process.version.startsWith('v16')
        ? HttpStatusCode.CONFLICT_409
        : HttpStatusCode.BAD_REQUEST_400

      const contentRangeBuilder = (start: number) => `bytes ${start}-${start + size - 1}/${size}`
      await sendChunks({ pathUploadId: uploadId, expectedStatus, contentRangeBuilder, contentLength: size })
      await checkFileSize(uploadId, 0)
    })

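    // A second PUT on an already finished upload is expected to be answered from the upload cache,
    // which the server signals with the x-resumable-upload-cached header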
    it('Should be able to accept 2 PUT requests', async function () {
      const uploadId = await prepareUpload()

      const result1 = await sendChunks({ pathUploadId: uploadId })
      const result2 = await sendChunks({ pathUploadId: uploadId })

      expect(result1.body.video.uuid).to.exist
      expect(result1.body.video.uuid).to.equal(result2.body.video.uuid)

      expect(result1.headers['x-resumable-upload-cached']).to.not.exist
      expect(result2.headers['x-resumable-upload-cached']).to.equal('true')

      await checkFileSize(uploadId, null)
    })

    it('Should not have the same upload id with 2 different users', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
      const uploadId2 = await prepareUpload({ originalName, lastModified, channelId: userChannelId, token: userAccessToken })

      expect(uploadId1).to.not.equal(uploadId2)
    })

    it('Should have the same upload id with the same user', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified })
      const uploadId2 = await prepareUpload({ originalName, lastModified })

      expect(uploadId1).to.equal(uploadId2)
    })

    it('Should not cache a request with 2 different users', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId = await prepareUpload({ originalName, lastModified, token: server.accessToken })

      await sendChunks({ pathUploadId: uploadId, token: server.accessToken })
      await sendChunks({ pathUploadId: uploadId, token: userAccessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
    })

    it('Should not cache a request after a delete', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()
      const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })

      await sendChunks({ pathUploadId: uploadId1 })
      await server.videos.endResumableUpload({ path, pathUploadId: uploadId1 })

      const uploadId2 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
      expect(uploadId1).to.equal(uploadId2)

      const result2 = await sendChunks({ pathUploadId: uploadId1 })
      expect(result2.headers['x-resumable-upload-cached']).to.not.exist
    })

    it('Should not cache after video deletion', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified })
      const result1 = await sendChunks({ pathUploadId: uploadId1 })
      await server.videos.remove({ id: result1.body.video.uuid })

      const uploadId2 = await prepareUpload({ originalName, lastModified })
      const result2 = await sendChunks({ pathUploadId: uploadId2 })
      expect(result1.body.video.uuid).to.not.equal(result2.body.video.uuid)

      expect(result2.headers['x-resumable-upload-cached']).to.not.exist

      await checkFileSize(uploadId1, null)
      await checkFileSize(uploadId2, null)
    })

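    // The Digest request header is built by digestBuilder; a digest that does not match the chunk
    // is expected to be rejected with the custom 460 status asserted below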
    it('Should refuse an invalid digest', async function () {
      const uploadId = await prepareUpload({ token: server.accessToken })

      await sendChunks({
        pathUploadId: uploadId,
        token: server.accessToken,
        digestBuilder: () => 'sha=' + 'a'.repeat(40),
        expectedStatus: 460 as any
      })
    })

    it('Should accept an appropriate digest', async function () {
      const uploadId = await prepareUpload({ token: server.accessToken })

      await sendChunks({
        pathUploadId: uploadId,
        token: server.accessToken,
        digestBuilder: (chunk: Buffer) => {
          return 'sha1=' + sha1(chunk, 'base64')
        }
      })
    })
  })

  after(async function () {
    await cleanupTests([ server ])
  })
})