// server/tests/api/videos/resumable-upload.ts — PeerTube resumable upload API tests (recovered from a git-blame listing)
f6d6e7f8 1/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
2
3import 'mocha'
4import * as chai from 'chai'
5import { pathExists, readdir, stat } from 'fs-extra'
6import { join } from 'path'
c55e3d72 7import { buildAbsoluteFixturePath } from '@shared/core-utils'
33ac85bf 8import { sha1 } from '@shared/extra-utils'
4c7e60bc 9import { HttpStatusCode, VideoPrivacy } from '@shared/models'
c55e3d72 10import { cleanupTests, createSingleServer, PeerTubeServer, setAccessTokensToServers, setDefaultVideoChannel } from '@shared/server-commands'
f6d6e7f8 11
12const expect = chai.expect
13
14// Most classic resumable upload tests are done in other test suites
15
16describe('Test resumable upload', function () {
17 const defaultFixture = 'video_short.mp4'
254d3579 18 let server: PeerTubeServer
f6d6e7f8 19 let rootId: number
020d3d3d
C
20 let userAccessToken: string
21 let userChannelId: number
f6d6e7f8 22
23 async function buildSize (fixture: string, size?: number) {
24 if (size !== undefined) return size
25
26 const baseFixture = buildAbsoluteFixturePath(fixture)
27 return (await stat(baseFixture)).size
28 }
29
020d3d3d
C
30 async function prepareUpload (options: {
31 channelId?: number
32 token?: string
33 size?: number
34 originalName?: string
35 lastModified?: number
36 } = {}) {
37 const { token, originalName, lastModified } = options
38
39 const size = await buildSize(defaultFixture, options.size)
f6d6e7f8 40
41 const attributes = {
42 name: 'video',
020d3d3d 43 channelId: options.channelId ?? server.store.channel.id,
f6d6e7f8 44 privacy: VideoPrivacy.PUBLIC,
45 fixture: defaultFixture
46 }
47
48 const mimetype = 'video/mp4'
49
020d3d3d 50 const res = await server.videos.prepareResumableUpload({ token, attributes, size, mimetype, originalName, lastModified })
f6d6e7f8 51
52 return res.header['location'].split('?')[1]
53 }
54
55 async function sendChunks (options: {
020d3d3d 56 token?: string
f6d6e7f8 57 pathUploadId: string
58 size?: number
59 expectedStatus?: HttpStatusCode
60 contentLength?: number
61 contentRange?: string
62 contentRangeBuilder?: (start: number, chunk: any) => string
33ac85bf 63 digestBuilder?: (chunk: any) => string
f6d6e7f8 64 }) {
33ac85bf 65 const { token, pathUploadId, expectedStatus, contentLength, contentRangeBuilder, digestBuilder } = options
f6d6e7f8 66
67 const size = await buildSize(defaultFixture, options.size)
68 const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture)
69
89d241a7 70 return server.videos.sendResumableChunks({
020d3d3d 71 token,
f6d6e7f8 72 pathUploadId,
73 videoFilePath: absoluteFilePath,
74 size,
75 contentLength,
76 contentRangeBuilder,
33ac85bf 77 digestBuilder,
d23dd9fb 78 expectedStatus
f6d6e7f8 79 })
80 }
81
82 async function checkFileSize (uploadIdArg: string, expectedSize: number | null) {
83 const uploadId = uploadIdArg.replace(/^upload_id=/, '')
84
85 const subPath = join('tmp', 'resumable-uploads', uploadId)
89d241a7 86 const filePath = server.servers.buildDirectory(subPath)
f6d6e7f8 87 const exists = await pathExists(filePath)
88
89 if (expectedSize === null) {
90 expect(exists).to.be.false
91 return
92 }
93
94 expect(exists).to.be.true
95
96 expect((await stat(filePath)).size).to.equal(expectedSize)
97 }
98
99 async function countResumableUploads () {
100 const subPath = join('tmp', 'resumable-uploads')
89d241a7 101 const filePath = server.servers.buildDirectory(subPath)
f6d6e7f8 102
103 const files = await readdir(filePath)
104 return files.length
105 }
106
107 before(async function () {
108 this.timeout(30000)
109
254d3579 110 server = await createSingleServer(1)
83903cb6
C
111 await setAccessTokensToServers([ server ])
112 await setDefaultVideoChannel([ server ])
f6d6e7f8 113
89d241a7 114 const body = await server.users.getMyInfo()
7926c5f9 115 rootId = body.id
f6d6e7f8 116
020d3d3d
C
117 {
118 userAccessToken = await server.users.generateUserAndToken('user1')
119 const { videoChannels } = await server.users.getMyInfo({ token: userAccessToken })
120 userChannelId = videoChannels[0].id
121 }
122
89d241a7 123 await server.users.update({ userId: rootId, videoQuota: 10_000_000 })
f6d6e7f8 124 })
125
126 describe('Directory cleaning', function () {
127
33ac85bf
C
128 // FIXME: https://github.com/kukhariev/node-uploadx/pull/524/files#r852989382
129 // it('Should correctly delete files after an upload', async function () {
130 // const uploadId = await prepareUpload()
131 // await sendChunks({ pathUploadId: uploadId })
132 // await server.videos.endResumableUpload({ pathUploadId: uploadId })
f6d6e7f8 133
33ac85bf
C
134 // expect(await countResumableUploads()).to.equal(0)
135 // })
f6d6e7f8 136
137 it('Should not delete files after an unfinished upload', async function () {
138 await prepareUpload()
139
140 expect(await countResumableUploads()).to.equal(2)
141 })
142
143 it('Should not delete recent uploads', async function () {
89d241a7 144 await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })
f6d6e7f8 145
146 expect(await countResumableUploads()).to.equal(2)
147 })
148
149 it('Should delete old uploads', async function () {
89d241a7 150 await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })
f6d6e7f8 151
152 expect(await countResumableUploads()).to.equal(0)
153 })
154 })
155
156 describe('Resumable upload and chunks', function () {
157
158 it('Should accept the same amount of chunks', async function () {
159 const uploadId = await prepareUpload()
160 await sendChunks({ pathUploadId: uploadId })
161
162 await checkFileSize(uploadId, null)
163 })
164
165 it('Should not accept more chunks than expected', async function () {
020d3d3d 166 const uploadId = await prepareUpload({ size: 100 })
f6d6e7f8 167
168 await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 })
169 await checkFileSize(uploadId, 0)
170 })
171
172 it('Should not accept more chunks than expected with an invalid content length/content range', async function () {
020d3d3d 173 const uploadId = await prepareUpload({ size: 1500 })
f6d6e7f8 174
fea11cf2
C
175 // Content length check seems to have changed in v16
176 if (process.version.startsWith('v16')) {
177 await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409, contentLength: 1000 })
178 await checkFileSize(uploadId, 1000)
179 } else {
180 await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 })
181 await checkFileSize(uploadId, 0)
182 }
f6d6e7f8 183 })
184
185 it('Should not accept more chunks than expected with an invalid content length', async function () {
020d3d3d 186 const uploadId = await prepareUpload({ size: 500 })
f6d6e7f8 187
188 const size = 1000
189
764b1a14
C
190 // Content length check seems to have changed in v16
191 const expectedStatus = process.version.startsWith('v16')
192 ? HttpStatusCode.CONFLICT_409
193 : HttpStatusCode.BAD_REQUEST_400
194
83903cb6 195 const contentRangeBuilder = (start: number) => `bytes ${start}-${start + size - 1}/${size}`
764b1a14 196 await sendChunks({ pathUploadId: uploadId, expectedStatus, contentRangeBuilder, contentLength: size })
f6d6e7f8 197 await checkFileSize(uploadId, 0)
198 })
276250f0
RK
199
200 it('Should be able to accept 2 PUT requests', async function () {
201 const uploadId = await prepareUpload()
202
203 const result1 = await sendChunks({ pathUploadId: uploadId })
204 const result2 = await sendChunks({ pathUploadId: uploadId })
205
206 expect(result1.body.video.uuid).to.exist
207 expect(result1.body.video.uuid).to.equal(result2.body.video.uuid)
208
209 expect(result1.headers['x-resumable-upload-cached']).to.not.exist
210 expect(result2.headers['x-resumable-upload-cached']).to.equal('true')
211
212 await checkFileSize(uploadId, null)
213 })
020d3d3d
C
214
215 it('Should not have the same upload id with 2 different users', async function () {
216 const originalName = 'toto.mp4'
217 const lastModified = new Date().getTime()
218
219 const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
220 const uploadId2 = await prepareUpload({ originalName, lastModified, channelId: userChannelId, token: userAccessToken })
221
222 expect(uploadId1).to.not.equal(uploadId2)
223 })
224
225 it('Should have the same upload id with the same user', async function () {
226 const originalName = 'toto.mp4'
227 const lastModified = new Date().getTime()
228
229 const uploadId1 = await prepareUpload({ originalName, lastModified })
230 const uploadId2 = await prepareUpload({ originalName, lastModified })
231
232 expect(uploadId1).to.equal(uploadId2)
233 })
234
235 it('Should not cache a request with 2 different users', async function () {
236 const originalName = 'toto.mp4'
237 const lastModified = new Date().getTime()
238
239 const uploadId = await prepareUpload({ originalName, lastModified, token: server.accessToken })
240
241 await sendChunks({ pathUploadId: uploadId, token: server.accessToken })
242 await sendChunks({ pathUploadId: uploadId, token: userAccessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
243 })
244
245 it('Should not cache a request after a delete', async function () {
246 const originalName = 'toto.mp4'
247 const lastModified = new Date().getTime()
248 const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
249
250 await sendChunks({ pathUploadId: uploadId1 })
251 await server.videos.endResumableUpload({ pathUploadId: uploadId1 })
252
253 const uploadId2 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
254 expect(uploadId1).to.equal(uploadId2)
255
256 const result2 = await sendChunks({ pathUploadId: uploadId1 })
257 expect(result2.headers['x-resumable-upload-cached']).to.not.exist
258 })
33ac85bf
C
259
260 it('Should refuse an invalid digest', async function () {
261 const uploadId = await prepareUpload({ token: server.accessToken })
262
263 await sendChunks({
264 pathUploadId: uploadId,
265 token: server.accessToken,
266 digestBuilder: () => 'sha=' + 'a'.repeat(40),
267 expectedStatus: 460
268 })
269 })
270
271 it('Should accept an appropriate digest', async function () {
272 const uploadId = await prepareUpload({ token: server.accessToken })
273
274 await sendChunks({
275 pathUploadId: uploadId,
276 token: server.accessToken,
277 digestBuilder: (chunk: Buffer) => {
278 return 'sha1=' + sha1(chunk, 'base64')
279 }
280 })
281 })
f6d6e7f8 282 })
283
06c27593 284 after(async function () {
83903cb6 285 await cleanupTests([ server ])
06c27593 286 })
f6d6e7f8 287})