server/tests/api/videos/resumable-upload.ts
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import * as chai from 'chai'
import { pathExists, readdir, stat } from 'fs-extra'
import { join } from 'path'
import { buildAbsoluteFixturePath } from '@shared/core-utils'
import { sha1 } from '@shared/extra-utils'
import { HttpStatusCode, VideoPrivacy } from '@shared/models'
import { cleanupTests, createSingleServer, PeerTubeServer, setAccessTokensToServers, setDefaultVideoChannel } from '@shared/server-commands'

const expect = chai.expect

// Most classic resumable upload tests are done in other test suites

describe('Test resumable upload', function () {
  const defaultFixture = 'video_short.mp4'
  let server: PeerTubeServer
  let rootId: number
  let userAccessToken: string
  let userChannelId: number

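  // Return the explicit size override when provided, otherwise the on-disk size of the fixture file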
  async function buildSize (fixture: string, size?: number) {
    if (size !== undefined) return size

    const baseFixture = buildAbsoluteFixturePath(fixture)
    return (await stat(baseFixture)).size
  }

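  // Initiate a resumable upload: send the video metadata and the expected size to the server,
  // then return the "upload_id=..." query string extracted from the Location header of the response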
  async function prepareUpload (options: {
    channelId?: number
    token?: string
    size?: number
    originalName?: string
    lastModified?: number
  } = {}) {
    const { token, originalName, lastModified } = options

    const size = await buildSize(defaultFixture, options.size)

    const attributes = {
      name: 'video',
      channelId: options.channelId ?? server.store.channel.id,
      privacy: VideoPrivacy.PUBLIC,
      fixture: defaultFixture
    }

    const mimetype = 'video/mp4'

    const res = await server.videos.prepareResumableUpload({ token, attributes, size, mimetype, originalName, lastModified })

    return res.header['location'].split('?')[1]
  }

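  // Send the fixture file to the resumable upload endpoint; callers can override the declared size,
  // Content-Length, Content-Range and Digest headers to exercise the error paths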
  async function sendChunks (options: {
    token?: string
    pathUploadId: string
    size?: number
    expectedStatus?: HttpStatusCode
    contentLength?: number
    contentRange?: string
    contentRangeBuilder?: (start: number, chunk: any) => string
    digestBuilder?: (chunk: any) => string
  }) {
    const { token, pathUploadId, expectedStatus, contentLength, contentRangeBuilder, digestBuilder } = options

    const size = await buildSize(defaultFixture, options.size)
    const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture)

    return server.videos.sendResumableChunks({
      token,
      pathUploadId,
      videoFilePath: absoluteFilePath,
      size,
      contentLength,
      contentRangeBuilder,
      digestBuilder,
      expectedStatus
    })
  }

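  // Check the temporary upload file stored under tmp/resumable-uploads/<uploadId>:
  // a null expectedSize asserts the file does not exist (anymore), otherwise its size must match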
  async function checkFileSize (uploadIdArg: string, expectedSize: number | null) {
    const uploadId = uploadIdArg.replace(/^upload_id=/, '')

    const subPath = join('tmp', 'resumable-uploads', uploadId)
    const filePath = server.servers.buildDirectory(subPath)
    const exists = await pathExists(filePath)

    if (expectedSize === null) {
      expect(exists).to.be.false
      return
    }

    expect(exists).to.be.true

    expect((await stat(filePath)).size).to.equal(expectedSize)
  }

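  // Count the entries in the tmp/resumable-uploads directory
  // (a single pending upload is expected to produce two entries, presumably the data file plus its metadata,
  // which is why the directory cleaning tests below expect 2 per pending upload)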
  async function countResumableUploads () {
    const subPath = join('tmp', 'resumable-uploads')
    const filePath = server.servers.buildDirectory(subPath)

    const files = await readdir(filePath)
    return files.length
  }

  before(async function () {
    this.timeout(30000)

    server = await createSingleServer(1)
    await setAccessTokensToServers([ server ])
    await setDefaultVideoChannel([ server ])

    const body = await server.users.getMyInfo()
    rootId = body.id

    {
      userAccessToken = await server.users.generateUserAndToken('user1')
      const { videoChannels } = await server.users.getMyInfo({ token: userAccessToken })
      userChannelId = videoChannels[0].id
    }

    await server.users.update({ userId: rootId, videoQuota: 10_000_000 })
  })

  describe('Directory cleaning', function () {

    // FIXME: https://github.com/kukhariev/node-uploadx/pull/524/files#r852989382
    // it('Should correctly delete files after an upload', async function () {
    //   const uploadId = await prepareUpload()
    //   await sendChunks({ pathUploadId: uploadId })
    //   await server.videos.endResumableUpload({ pathUploadId: uploadId })

    //   expect(await countResumableUploads()).to.equal(0)
    // })

    it('Should not delete files after an unfinished upload', async function () {
      await prepareUpload()

      expect(await countResumableUploads()).to.equal(2)
    })

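    // The debug command below triggers the dangling resumable upload cleanup
    // (note the 'dandling' spelling matches the command name expected by the server).
    // The cleanup is assumed to only target uploads considered old, which is why the files
    // survive this first run and are removed by the second run in the next test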
    it('Should not delete recent uploads', async function () {
      await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should delete old uploads', async function () {
      await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(0)
    })
  })

  describe('Resumable upload and chunks', function () {

    it('Should accept the same amount of chunks', async function () {
      const uploadId = await prepareUpload()
      await sendChunks({ pathUploadId: uploadId })

      await checkFileSize(uploadId, null)
    })

    it('Should not accept more chunks than expected', async function () {
      const uploadId = await prepareUpload({ size: 100 })

      await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 })
      await checkFileSize(uploadId, 0)
    })

    it('Should not accept more chunks than expected with an invalid content length/content range', async function () {
      const uploadId = await prepareUpload({ size: 1500 })

      // Content length check seems to have changed in v16
      if (process.version.startsWith('v16')) {
        await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409, contentLength: 1000 })
        await checkFileSize(uploadId, 1000)
      } else {
        await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 })
        await checkFileSize(uploadId, 0)
      }
    })

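    // The Content-Range header declares both the byte range of the chunk and the total upload size;
    // here the declared total (1000 bytes) does not match the size announced at initialization (500 bytes)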
    it('Should not accept more chunks than expected with an invalid content length', async function () {
      const uploadId = await prepareUpload({ size: 500 })

      const size = 1000

      // Content length check seems to have changed in v16
      const expectedStatus = process.version.startsWith('v16')
        ? HttpStatusCode.CONFLICT_409
        : HttpStatusCode.BAD_REQUEST_400

      const contentRangeBuilder = (start: number) => `bytes ${start}-${start + size - 1}/${size}`
      await sendChunks({ pathUploadId: uploadId, expectedStatus, contentRangeBuilder, contentLength: size })
      await checkFileSize(uploadId, 0)
    })

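    // Re-sending a fully uploaded file with the same upload id is answered from cache:
    // the server returns the same video and flags the response with the x-resumable-upload-cached header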
    it('Should be able to accept 2 PUT requests', async function () {
      const uploadId = await prepareUpload()

      const result1 = await sendChunks({ pathUploadId: uploadId })
      const result2 = await sendChunks({ pathUploadId: uploadId })

      expect(result1.body.video.uuid).to.exist
      expect(result1.body.video.uuid).to.equal(result2.body.video.uuid)

      expect(result1.headers['x-resumable-upload-cached']).to.not.exist
      expect(result2.headers['x-resumable-upload-cached']).to.equal('true')

      await checkFileSize(uploadId, null)
    })

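    // The upload id appears to be derived from the uploader and the file metadata (originalName, lastModified):
    // different users must get different ids, while the same user retrying gets the same id back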
    it('Should not have the same upload id with 2 different users', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
      const uploadId2 = await prepareUpload({ originalName, lastModified, channelId: userChannelId, token: userAccessToken })

      expect(uploadId1).to.not.equal(uploadId2)
    })

    it('Should have the same upload id with the same user', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified })
      const uploadId2 = await prepareUpload({ originalName, lastModified })

      expect(uploadId1).to.equal(uploadId2)
    })

    it('Should not cache a request with 2 different users', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId = await prepareUpload({ originalName, lastModified, token: server.accessToken })

      await sendChunks({ pathUploadId: uploadId, token: server.accessToken })
      await sendChunks({ pathUploadId: uploadId, token: userAccessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
    })

    it('Should not cache a request after a delete', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()
      const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })

      await sendChunks({ pathUploadId: uploadId1 })
      await server.videos.endResumableUpload({ pathUploadId: uploadId1 })

      const uploadId2 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
      expect(uploadId1).to.equal(uploadId2)

      const result2 = await sendChunks({ pathUploadId: uploadId1 })
      expect(result2.headers['x-resumable-upload-cached']).to.not.exist
    })

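    // The Digest request header lets the server verify the integrity of each chunk:
    // a bogus digest is rejected with the non-standard 460 status used here,
    // while a valid 'sha1=<base64>' digest computed on the chunk is accepted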
    it('Should refuse an invalid digest', async function () {
      const uploadId = await prepareUpload({ token: server.accessToken })

      await sendChunks({
        pathUploadId: uploadId,
        token: server.accessToken,
        digestBuilder: () => 'sha=' + 'a'.repeat(40),
        expectedStatus: 460
      })
    })

    it('Should accept an appropriate digest', async function () {
      const uploadId = await prepareUpload({ token: server.accessToken })

      await sendChunks({
        pathUploadId: uploadId,
        token: server.accessToken,
        digestBuilder: (chunk: Buffer) => {
          return 'sha1=' + sha1(chunk, 'base64')
        }
      })
    })
  })

  after(async function () {
    await cleanupTests([ server ])
  })
})