// NOTE: extraction artifact removed here (residual git-blame table header: "Commit | Line | Data")
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | |
2 | ||
3 | import 'mocha' | |
4 | import * as chai from 'chai' | |
5 | import { pathExists, readdir, stat } from 'fs-extra' | |
6 | import { join } from 'path' | |
7 | import { buildAbsoluteFixturePath } from '@shared/core-utils' | |
8 | import { HttpStatusCode, VideoPrivacy } from '@shared/models' | |
9 | import { cleanupTests, createSingleServer, PeerTubeServer, setAccessTokensToServers, setDefaultVideoChannel } from '@shared/server-commands' | |
10 | ||
11 | const expect = chai.expect | |
12 | ||
13 | // Most classic resumable upload tests are done in other test suites | |
14 | ||
15 | describe('Test resumable upload', function () { | |
16 | const defaultFixture = 'video_short.mp4' | |
17 | let server: PeerTubeServer | |
18 | let rootId: number | |
19 | let userAccessToken: string | |
20 | let userChannelId: number | |
21 | ||
22 | async function buildSize (fixture: string, size?: number) { | |
23 | if (size !== undefined) return size | |
24 | ||
25 | const baseFixture = buildAbsoluteFixturePath(fixture) | |
26 | return (await stat(baseFixture)).size | |
27 | } | |
28 | ||
29 | async function prepareUpload (options: { | |
30 | channelId?: number | |
31 | token?: string | |
32 | size?: number | |
33 | originalName?: string | |
34 | lastModified?: number | |
35 | } = {}) { | |
36 | const { token, originalName, lastModified } = options | |
37 | ||
38 | const size = await buildSize(defaultFixture, options.size) | |
39 | ||
40 | const attributes = { | |
41 | name: 'video', | |
42 | channelId: options.channelId ?? server.store.channel.id, | |
43 | privacy: VideoPrivacy.PUBLIC, | |
44 | fixture: defaultFixture | |
45 | } | |
46 | ||
47 | const mimetype = 'video/mp4' | |
48 | ||
49 | const res = await server.videos.prepareResumableUpload({ token, attributes, size, mimetype, originalName, lastModified }) | |
50 | ||
51 | return res.header['location'].split('?')[1] | |
52 | } | |
53 | ||
54 | async function sendChunks (options: { | |
55 | token?: string | |
56 | pathUploadId: string | |
57 | size?: number | |
58 | expectedStatus?: HttpStatusCode | |
59 | contentLength?: number | |
60 | contentRange?: string | |
61 | contentRangeBuilder?: (start: number, chunk: any) => string | |
62 | }) { | |
63 | const { token, pathUploadId, expectedStatus, contentLength, contentRangeBuilder } = options | |
64 | ||
65 | const size = await buildSize(defaultFixture, options.size) | |
66 | const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture) | |
67 | ||
68 | return server.videos.sendResumableChunks({ | |
69 | token, | |
70 | pathUploadId, | |
71 | videoFilePath: absoluteFilePath, | |
72 | size, | |
73 | contentLength, | |
74 | contentRangeBuilder, | |
75 | expectedStatus | |
76 | }) | |
77 | } | |
78 | ||
79 | async function checkFileSize (uploadIdArg: string, expectedSize: number | null) { | |
80 | const uploadId = uploadIdArg.replace(/^upload_id=/, '') | |
81 | ||
82 | const subPath = join('tmp', 'resumable-uploads', uploadId) | |
83 | const filePath = server.servers.buildDirectory(subPath) | |
84 | const exists = await pathExists(filePath) | |
85 | ||
86 | if (expectedSize === null) { | |
87 | expect(exists).to.be.false | |
88 | return | |
89 | } | |
90 | ||
91 | expect(exists).to.be.true | |
92 | ||
93 | expect((await stat(filePath)).size).to.equal(expectedSize) | |
94 | } | |
95 | ||
96 | async function countResumableUploads () { | |
97 | const subPath = join('tmp', 'resumable-uploads') | |
98 | const filePath = server.servers.buildDirectory(subPath) | |
99 | ||
100 | const files = await readdir(filePath) | |
101 | return files.length | |
102 | } | |
103 | ||
104 | before(async function () { | |
105 | this.timeout(30000) | |
106 | ||
107 | server = await createSingleServer(1) | |
108 | await setAccessTokensToServers([ server ]) | |
109 | await setDefaultVideoChannel([ server ]) | |
110 | ||
111 | const body = await server.users.getMyInfo() | |
112 | rootId = body.id | |
113 | ||
114 | { | |
115 | userAccessToken = await server.users.generateUserAndToken('user1') | |
116 | const { videoChannels } = await server.users.getMyInfo({ token: userAccessToken }) | |
117 | userChannelId = videoChannels[0].id | |
118 | } | |
119 | ||
120 | await server.users.update({ userId: rootId, videoQuota: 10_000_000 }) | |
121 | }) | |
122 | ||
  describe('Directory cleaning', function () {

    // NOTE: these tests are order dependent — each one relies on the uploads
    // left behind (or cleaned up) by the previous one.

    it('Should correctly delete files after an upload', async function () {
      const uploadId = await prepareUpload()
      await sendChunks({ pathUploadId: uploadId })
      await server.videos.endResumableUpload({ pathUploadId: uploadId })

      // A completed + ended upload must leave nothing in the tmp directory
      expect(await countResumableUploads()).to.equal(0)
    })

    it('Should not delete files after an unfinished upload', async function () {
      await prepareUpload()

      // An initiated-but-unfinished upload leaves 2 entries behind —
      // presumably the partial file plus its metadata; confirm against the
      // upload middleware implementation
      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should not delete recent uploads', async function () {
      // "dandling" is the literal command name the debug endpoint expects — do not "fix" the spelling here
      await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      // The upload from the previous test is too recent to be cleaned up
      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should delete old uploads', async function () {
      // NOTE(review): relies on the pending upload being considered "old" by
      // the time this runs — confirm how the cleaner computes age
      await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(0)
    })
  })
151 | ||
  describe('Resumable upload and chunks', function () {

    it('Should accept the same amount of chunks', async function () {
      const uploadId = await prepareUpload()
      await sendChunks({ pathUploadId: uploadId })

      // Upload finished: the partial file must have been removed from tmp
      await checkFileSize(uploadId, null)
    })

    it('Should not accept more chunks than expected', async function () {
      // Announce a 100-byte upload but send the whole fixture
      const uploadId = await prepareUpload({ size: 100 })

      await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 })
      // Nothing must have been written for the rejected upload
      await checkFileSize(uploadId, 0)
    })

    it('Should not accept more chunks than expected with an invalid content length/content range', async function () {
      const uploadId = await prepareUpload({ size: 1500 })

      // Content length check seems to have changed in v16
      if (process.version.startsWith('v16')) {
        // In v16 the undersized Content-Length is accepted per-request, so the
        // server ends up with 1000 written bytes and a conflict
        await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409, contentLength: 1000 })
        await checkFileSize(uploadId, 1000)
      } else {
        await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 })
        await checkFileSize(uploadId, 0)
      }
    })

    it('Should not accept more chunks than expected with an invalid content length', async function () {
      const uploadId = await prepareUpload({ size: 500 })

      // Claim 1000 bytes for a session that only expects 500
      const size = 1000

      // Content length check seems to have changed in v16
      const expectedStatus = process.version.startsWith('v16')
        ? HttpStatusCode.CONFLICT_409
        : HttpStatusCode.BAD_REQUEST_400

      // Build a Content-Range consistent with the (wrong) claimed size
      const contentRangeBuilder = (start: number) => `bytes ${start}-${start + size - 1}/${size}`
      await sendChunks({ pathUploadId: uploadId, expectedStatus, contentRangeBuilder, contentLength: size })
      await checkFileSize(uploadId, 0)
    })

    it('Should be able to accept 2 PUT requests', async function () {
      const uploadId = await prepareUpload()

      const result1 = await sendChunks({ pathUploadId: uploadId })
      const result2 = await sendChunks({ pathUploadId: uploadId })

      // Both requests must resolve to the same created video
      expect(result1.body.video.uuid).to.exist
      expect(result1.body.video.uuid).to.equal(result2.body.video.uuid)

      // Only the second (duplicate) request is served from the cache
      expect(result1.headers['x-resumable-upload-cached']).to.not.exist
      expect(result2.headers['x-resumable-upload-cached']).to.equal('true')

      await checkFileSize(uploadId, null)
    })

    it('Should not have the same upload id with 2 different users', async function () {
      // Identical file metadata, different users: ids must not collide
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
      const uploadId2 = await prepareUpload({ originalName, lastModified, channelId: userChannelId, token: userAccessToken })

      expect(uploadId1).to.not.equal(uploadId2)
    })

    it('Should have the same upload id with the same user', async function () {
      // Re-initiating the same upload must resume the same session
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId1 = await prepareUpload({ originalName, lastModified })
      const uploadId2 = await prepareUpload({ originalName, lastModified })

      expect(uploadId1).to.equal(uploadId2)
    })

    it('Should not cache a request with 2 different users', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()

      const uploadId = await prepareUpload({ originalName, lastModified, token: server.accessToken })

      await sendChunks({ pathUploadId: uploadId, token: server.accessToken })
      // Another user must not be able to reuse someone else's session
      await sendChunks({ pathUploadId: uploadId, token: userAccessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
    })

    it('Should not cache a request after a delete', async function () {
      const originalName = 'toto.mp4'
      const lastModified = new Date().getTime()
      const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken })

      await sendChunks({ pathUploadId: uploadId1 })
      await server.videos.endResumableUpload({ pathUploadId: uploadId1 })

      // Same metadata after ending the session yields the same id again...
      const uploadId2 = await prepareUpload({ originalName, lastModified, token: server.accessToken })
      expect(uploadId1).to.equal(uploadId2)

      // ...but the response must be freshly computed, not served from the cache
      const result2 = await sendChunks({ pathUploadId: uploadId1 })
      expect(result2.headers['x-resumable-upload-cached']).to.not.exist
    })
  })
256 | ||
257 | after(async function () { | |
258 | await cleanupTests([ server ]) | |
259 | }) | |
260 | }) |