/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import 'mocha'
import * as chai from 'chai'
import { pathExists, readdir, stat } from 'fs-extra'
import { join } from 'path'
import { buildAbsoluteFixturePath } from '@shared/core-utils'
import { HttpStatusCode, VideoPrivacy } from '@shared/models'
import { cleanupTests, createSingleServer, PeerTubeServer, setAccessTokensToServers, setDefaultVideoChannel } from '@shared/server-commands'

const expect = chai.expect

// Most classic resumable upload tests are done in other test suites

describe('Test resumable upload', function () {
  const defaultFixture = 'video_short.mp4'
  let server: PeerTubeServer
  let rootId: number
  let userAccessToken: string
  let userChannelId: number

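  // Returns the explicit size if one was provided, otherwise the size of the fixture file on disk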
  async function buildSize (fixture: string, size?: number) {
    if (size !== undefined) return size

    const baseFixture = buildAbsoluteFixturePath(fixture)
    return (await stat(baseFixture)).size
  }

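  // Initiates a resumable upload session and returns the "upload_id=..." query string
  // extracted from the Location header of the initial request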
  async function prepareUpload (options: {
    channelId?: number
    token?: string
    size?: number
    originalName?: string
    lastModified?: number
  } = {}) {
    const { token, originalName, lastModified } = options

    const size = await buildSize(defaultFixture, options.size)

    const attributes = {
      name: 'video',
      channelId: options.channelId ?? server.store.channel.id,
      privacy: VideoPrivacy.PUBLIC,
      fixture: defaultFixture
    }

    const mimetype = 'video/mp4'

    const res = await server.videos.prepareResumableUpload({ token, attributes, size, mimetype, originalName, lastModified })

    return res.header['location'].split('?')[1]
  }

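  // Sends the fixture to the resumable upload endpoint, optionally overriding the declared
  // size, Content-Length or Content-Range of the requests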
  async function sendChunks (options: {
    token?: string
    pathUploadId: string
    size?: number
    expectedStatus?: HttpStatusCode
    contentLength?: number
    contentRange?: string
    contentRangeBuilder?: (start: number, chunk: any) => string
  }) {
    const { token, pathUploadId, expectedStatus, contentLength, contentRangeBuilder } = options

    const size = await buildSize(defaultFixture, options.size)
    const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture)

    return server.videos.sendResumableChunks({
      token,
      pathUploadId,
      videoFilePath: absoluteFilePath,
      size,
      contentLength,
      contentRangeBuilder,
      expectedStatus
    })
  }

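  // Checks the partially uploaded file in tmp/resumable-uploads:
  // a null expectedSize means the file must not exist at all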
  async function checkFileSize (uploadIdArg: string, expectedSize: number | null) {
    const uploadId = uploadIdArg.replace(/^upload_id=/, '')

    const subPath = join('tmp', 'resumable-uploads', uploadId)
    const filePath = server.servers.buildDirectory(subPath)
    const exists = await pathExists(filePath)

    if (expectedSize === null) {
      expect(exists).to.be.false
      return
    }

    expect(exists).to.be.true

    expect((await stat(filePath)).size).to.equal(expectedSize)
  }

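  // Counts the entries remaining in the server's tmp/resumable-uploads directory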
  async function countResumableUploads () {
    const subPath = join('tmp', 'resumable-uploads')
    const filePath = server.servers.buildDirectory(subPath)

    const files = await readdir(filePath)
    return files.length
  }

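  // Create a server with its default channel, a secondary user and a limited video quota for root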
  before(async function () {
    this.timeout(30000)

    server = await createSingleServer(1)
    await setAccessTokensToServers([ server ])
    await setDefaultVideoChannel([ server ])

    const body = await server.users.getMyInfo()
    rootId = body.id

    {
      userAccessToken = await server.users.generateUserAndToken('user1')
      const { videoChannels } = await server.users.getMyInfo({ token: userAccessToken })
      userChannelId = videoChannels[0].id
    }

    await server.users.update({ userId: rootId, videoQuota: 10_000_000 })
  })

  describe('Directory cleaning', function () {

    it('Should correctly delete files after an upload', async function () {
      const uploadId = await prepareUpload()
      await sendChunks({ pathUploadId: uploadId })
      await server.videos.endResumableUpload({ pathUploadId: uploadId })

      expect(await countResumableUploads()).to.equal(0)
    })

    it('Should not delete files after an unfinished upload', async function () {
      await prepareUpload()

      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should not delete recent uploads', async function () {
      await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should delete old uploads', async function () {
      await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(0)
    })
  })

  describe('Resumable upload and chunks', function () {

    it('Should accept the same amount of chunks', async function () {
      const uploadId = await prepareUpload()
      await sendChunks({ pathUploadId: uploadId })

      await checkFileSize(uploadId, null)
    })

    it('Should not accept more chunks than expected', async function () {
      const uploadId = await prepareUpload({ size: 100 })

      await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 })
      await checkFileSize(uploadId, 0)
    })

    it('Should not accept more chunks than expected with an invalid content length/content range', async function () {
      const uploadId = await prepareUpload({ size: 1500 })

      // Content length check seems to have changed in v16
      if (process.version.startsWith('v16')) {
        await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409, contentLength: 1000 })
        await checkFileSize(uploadId, 1000)
      } else {
        await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 })
        await checkFileSize(uploadId, 0)
      }
    })

    it('Should not accept more chunks than expected with an invalid content length', async function () {
      const uploadId = await prepareUpload({ size: 500 })

      const size = 1000

      // Content length check seems to have changed in v16
      const expectedStatus = process.version.startsWith('v16')
        ? HttpStatusCode.CONFLICT_409
        : HttpStatusCode.BAD_REQUEST_400

      const contentRangeBuilder = (start: number) => `bytes ${start}-${start + size - 1}/${size}`
      await sendChunks({ pathUploadId: uploadId, expectedStatus, contentRangeBuilder, contentLength: size })
      await checkFileSize(uploadId, 0)
    })

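    // Re-sending all the chunks of an already finished upload should return the cached
    // response (same video UUID) instead of creating a new video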
    it('Should be able to accept 2 PUT requests', async function () {
      const uploadId = await prepareUpload()

      const result1 = await sendChunks({ pathUploadId: uploadId })
      const result2 = await sendChunks({ pathUploadId: uploadId })

      expect(result1.body.video.uuid).to.exist
      expect(result1.body.video.uuid).to.equal(result2.body.video.uuid)

      expect(result1.headers['x-resumable-upload-cached']).to.not.exist
      expect(result2.headers['x-resumable-upload-cached']).to.equal('true')

      await checkFileSize(uploadId, null)
    })

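    // The upload id should depend on the uploading user, not only on the original
    // file name and last modified date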
    it('Should not have the same upload id with 2 different users', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
      const uploadId2 = await prepareUpload({ originalName, lastModified, channelId: userChannelId, token: userAccessToken })

      expect(uploadId1).to.not.equal(uploadId2)
    })

    it('Should have the same upload id with the same user', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified })
      const uploadId2 = await prepareUpload({ originalName, lastModified })

      expect(uploadId1).to.equal(uploadId2)
    })

    it('Should not cache a request with 2 different users', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId = await prepareUpload({ originalName, lastModified, token: server.accessToken })

      await sendChunks({ pathUploadId: uploadId, token: server.accessToken })
      await sendChunks({ pathUploadId: uploadId, token: userAccessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
    })

    it('Should not cache a request after a delete', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()
      const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })

      await sendChunks({ pathUploadId: uploadId1 })
      await server.videos.endResumableUpload({ pathUploadId: uploadId1 })

      const uploadId2 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
      expect(uploadId1).to.equal(uploadId2)

      const result2 = await sendChunks({ pathUploadId: uploadId1 })
      expect(result2.headers['x-resumable-upload-cached']).to.not.exist
    })
  })

  after(async function () {
    await cleanupTests([ server ])
  })
})