/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import 'mocha'
import * as chai from 'chai'
import { pathExists, readdir, stat } from 'fs-extra'
import { join } from 'path'
import {
  buildAbsoluteFixturePath,
  cleanupTests,
  createSingleServer,
  PeerTubeServer,
  setAccessTokensToServers,
  setDefaultVideoChannel
} from '@shared/server-commands'
import { HttpStatusCode, VideoPrivacy } from '@shared/models'

const expect = chai.expect

// Most classic resumable upload tests are done in other test suites
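//
// Rough shape of the flow exercised below: prepareUpload() sends the initial
// POST and returns the upload id extracted from the "Location" header query
// string, sendChunks() PUTs the file data against that id, and
// endResumableUpload() deletes the session. While an upload is in progress its
// data is staged on disk under tmp/resumable-uploads.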

describe('Test resumable upload', function () {
  const defaultFixture = 'video_short.mp4'
  let server: PeerTubeServer
  let rootId: number
  let userAccessToken: string
  let userChannelId: number

  async function buildSize (fixture: string, size?: number) {
    if (size !== undefined) return size

    const baseFixture = buildAbsoluteFixturePath(fixture)
    return (await stat(baseFixture)).size
  }

  // Initiates a resumable upload and returns the upload id, taken from the
  // query string of the "Location" response header ("upload_id=...")
  async function prepareUpload (options: {
    channelId?: number
    token?: string
    size?: number
    originalName?: string
    lastModified?: number
  } = {}) {
    const { token, originalName, lastModified } = options

    const size = await buildSize(defaultFixture, options.size)

    const attributes = {
      name: 'video',
      channelId: options.channelId ?? server.store.channel.id,
      privacy: VideoPrivacy.PUBLIC,
      fixture: defaultFixture
    }

    const mimetype = 'video/mp4'

    const res = await server.videos.prepareResumableUpload({ token, attributes, size, mimetype, originalName, lastModified })

    return res.header['location'].split('?')[1]
  }

  async function sendChunks (options: {
    token?: string
    pathUploadId: string
    size?: number
    expectedStatus?: HttpStatusCode
    contentLength?: number
    contentRange?: string
    contentRangeBuilder?: (start: number, chunk: any) => string
  }) {
    const { token, pathUploadId, expectedStatus, contentLength, contentRangeBuilder } = options

    const size = await buildSize(defaultFixture, options.size)
    const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture)

    return server.videos.sendResumableChunks({
      token,
      pathUploadId,
      videoFilePath: absoluteFilePath,
      size,
      contentLength,
      contentRangeBuilder,
      expectedStatus
    })
  }

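  // While an upload is in progress, its data is expected to be staged in a file
  // named after the upload id under the server's tmp/resumable-uploads directory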
  async function checkFileSize (uploadIdArg: string, expectedSize: number | null) {
    const uploadId = uploadIdArg.replace(/^upload_id=/, '')

    const subPath = join('tmp', 'resumable-uploads', uploadId)
    const filePath = server.servers.buildDirectory(subPath)
    const exists = await pathExists(filePath)

    if (expectedSize === null) {
      expect(exists).to.be.false
      return
    }

    expect(exists).to.be.true

    expect((await stat(filePath)).size).to.equal(expectedSize)
  }

  async function countResumableUploads () {
    const subPath = join('tmp', 'resumable-uploads')
    const filePath = server.servers.buildDirectory(subPath)

    const files = await readdir(filePath)
    return files.length
  }

  before(async function () {
    this.timeout(30000)

    server = await createSingleServer(1)
    await setAccessTokensToServers([ server ])
    await setDefaultVideoChannel([ server ])

    const body = await server.users.getMyInfo()
    rootId = body.id

    {
      userAccessToken = await server.users.generateUserAndToken('user1')
      const { videoChannels } = await server.users.getMyInfo({ token: userAccessToken })
      userChannelId = videoChannels[0].id
    }

    await server.users.update({ userId: rootId, videoQuota: 10_000_000 })
  })

  describe('Directory cleaning', function () {

    it('Should correctly delete files after an upload', async function () {
      const uploadId = await prepareUpload()
      await sendChunks({ pathUploadId: uploadId })
      await server.videos.endResumableUpload({ pathUploadId: uploadId })

      expect(await countResumableUploads()).to.equal(0)
    })

    it('Should not delete files after an unfinished upload', async function () {
      await prepareUpload()

      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should not delete recent uploads', async function () {
      await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should delete old uploads', async function () {
      await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(0)
    })
  })

  describe('Resumable upload and chunks', function () {

    it('Should accept the same amount of chunks', async function () {
      const uploadId = await prepareUpload()
      await sendChunks({ pathUploadId: uploadId })

      await checkFileSize(uploadId, null)
    })

    it('Should not accept more chunks than expected', async function () {
      const uploadId = await prepareUpload({ size: 100 })

      await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 })
      await checkFileSize(uploadId, 0)
    })

    it('Should not accept more chunks than expected with an invalid content length/content range', async function () {
      const uploadId = await prepareUpload({ size: 1500 })

      // Content length check seems to have changed in v16
      if (process.version.startsWith('v16')) {
        await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409, contentLength: 1000 })
        await checkFileSize(uploadId, 1000)
      } else {
        await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 })
        await checkFileSize(uploadId, 0)
      }
    })

    it('Should not accept more chunks than expected with an invalid content length', async function () {
      const uploadId = await prepareUpload({ size: 500 })

      const size = 1000

      // Content length check seems to have changed in v16
      const expectedStatus = process.version.startsWith('v16')
        ? HttpStatusCode.CONFLICT_409
        : HttpStatusCode.BAD_REQUEST_400

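      // Content-Range follows the "bytes <start>-<end>/<total>" format; here the
      // declared total (1000) is deliberately larger than the size announced at
      // upload creation (500), so the server should reject the chunk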
      const contentRangeBuilder = (start: number) => `bytes ${start}-${start + size - 1}/${size}`
      await sendChunks({ pathUploadId: uploadId, expectedStatus, contentRangeBuilder, contentLength: size })
      await checkFileSize(uploadId, 0)
    })

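    // A completed upload should tolerate the whole PUT being replayed: the second
    // request is expected to return the same video and to be flagged with the
    // "x-resumable-upload-cached" response header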
    it('Should be able to accept 2 PUT requests', async function () {
      const uploadId = await prepareUpload()

      const result1 = await sendChunks({ pathUploadId: uploadId })
      const result2 = await sendChunks({ pathUploadId: uploadId })

      expect(result1.body.video.uuid).to.exist
      expect(result1.body.video.uuid).to.equal(result2.body.video.uuid)

      expect(result1.headers['x-resumable-upload-cached']).to.not.exist
      expect(result2.headers['x-resumable-upload-cached']).to.equal('true')

      await checkFileSize(uploadId, null)
    })

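    // The upload id appears to be derived from the uploader and the file metadata
    // (originalName/lastModified), which is what the following tests rely on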
    it('Should not have the same upload id with 2 different users', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
      const uploadId2 = await prepareUpload({ originalName, lastModified, channelId: userChannelId, token: userAccessToken })

      expect(uploadId1).to.not.equal(uploadId2)
    })

    it('Should have the same upload id with the same user', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified })
      const uploadId2 = await prepareUpload({ originalName, lastModified })

      expect(uploadId1).to.equal(uploadId2)
    })

    it('Should not cache a request with 2 different users', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId = await prepareUpload({ originalName, lastModified, token: server.accessToken })

      await sendChunks({ pathUploadId: uploadId, token: server.accessToken })
      await sendChunks({ pathUploadId: uploadId, token: userAccessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
    })

    it('Should not cache a request after a delete', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()
      const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })

      await sendChunks({ pathUploadId: uploadId1 })
      await server.videos.endResumableUpload({ pathUploadId: uploadId1 })

      const uploadId2 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
      expect(uploadId1).to.equal(uploadId2)

      const result2 = await sendChunks({ pathUploadId: uploadId1 })
      expect(result2.headers['x-resumable-upload-cached']).to.not.exist
    })
  })

  after(async function () {
    await cleanupTests([ server ])
  })
})