/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import { expect } from 'chai'
import { pathExists, readdir, stat } from 'fs-extra'
import { join } from 'path'
import { buildAbsoluteFixturePath } from '@shared/core-utils'
import { sha1 } from '@shared/extra-utils'
import { HttpStatusCode, VideoPrivacy } from '@shared/models'
import { cleanupTests, createSingleServer, PeerTubeServer, setAccessTokensToServers, setDefaultVideoChannel } from '@shared/server-commands'

// Most classic resumable upload tests are done in other test suites
describe('Test resumable upload', function () {
  const defaultFixture = 'video_short.mp4'
  let server: PeerTubeServer
  let rootId: number
  let userAccessToken: string
  let userChannelId: number

20 | async function buildSize (fixture: string, size?: number) { | |
21 | if (size !== undefined) return size | |
22 | ||
23 | const baseFixture = buildAbsoluteFixturePath(fixture) | |
24 | return (await stat(baseFixture)).size | |
25 | } | |
26 | ||
27 | async function prepareUpload (options: { | |
28 | channelId?: number | |
29 | token?: string | |
30 | size?: number | |
31 | originalName?: string | |
32 | lastModified?: number | |
33 | } = {}) { | |
34 | const { token, originalName, lastModified } = options | |
35 | ||
36 | const size = await buildSize(defaultFixture, options.size) | |
37 | ||
38 | const attributes = { | |
39 | name: 'video', | |
40 | channelId: options.channelId ?? server.store.channel.id, | |
41 | privacy: VideoPrivacy.PUBLIC, | |
42 | fixture: defaultFixture | |
43 | } | |
44 | ||
45 | const mimetype = 'video/mp4' | |
46 | ||
47 | const res = await server.videos.prepareResumableUpload({ token, attributes, size, mimetype, originalName, lastModified }) | |
48 | ||
49 | return res.header['location'].split('?')[1] | |
50 | } | |
51 | ||
52 | async function sendChunks (options: { | |
53 | token?: string | |
54 | pathUploadId: string | |
55 | size?: number | |
56 | expectedStatus?: HttpStatusCode | |
57 | contentLength?: number | |
58 | contentRange?: string | |
59 | contentRangeBuilder?: (start: number, chunk: any) => string | |
60 | digestBuilder?: (chunk: any) => string | |
61 | }) { | |
62 | const { token, pathUploadId, expectedStatus, contentLength, contentRangeBuilder, digestBuilder } = options | |
63 | ||
64 | const size = await buildSize(defaultFixture, options.size) | |
65 | const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture) | |
66 | ||
67 | return server.videos.sendResumableChunks({ | |
68 | token, | |
69 | pathUploadId, | |
70 | videoFilePath: absoluteFilePath, | |
71 | size, | |
72 | contentLength, | |
73 | contentRangeBuilder, | |
74 | digestBuilder, | |
75 | expectedStatus | |
76 | }) | |
77 | } | |
78 | ||
79 | async function checkFileSize (uploadIdArg: string, expectedSize: number | null) { | |
80 | const uploadId = uploadIdArg.replace(/^upload_id=/, '') | |
81 | ||
82 | const subPath = join('tmp', 'resumable-uploads', `${rootId}-${uploadId}.mp4`) | |
83 | const filePath = server.servers.buildDirectory(subPath) | |
84 | const exists = await pathExists(filePath) | |
85 | ||
86 | if (expectedSize === null) { | |
87 | expect(exists).to.be.false | |
88 | return | |
89 | } | |
90 | ||
91 | expect(exists).to.be.true | |
92 | ||
93 | expect((await stat(filePath)).size).to.equal(expectedSize) | |
94 | } | |
95 | ||
96 | async function countResumableUploads () { | |
97 | const subPath = join('tmp', 'resumable-uploads') | |
98 | const filePath = server.servers.buildDirectory(subPath) | |
99 | ||
100 | const files = await readdir(filePath) | |
101 | return files.length | |
102 | } | |
103 | ||
104 | before(async function () { | |
105 | this.timeout(30000) | |
106 | ||
107 | server = await createSingleServer(1) | |
108 | await setAccessTokensToServers([ server ]) | |
109 | await setDefaultVideoChannel([ server ]) | |
110 | ||
111 | const body = await server.users.getMyInfo() | |
112 | rootId = body.id | |
113 | ||
114 | { | |
115 | userAccessToken = await server.users.generateUserAndToken('user1') | |
116 | const { videoChannels } = await server.users.getMyInfo({ token: userAccessToken }) | |
117 | userChannelId = videoChannels[0].id | |
118 | } | |
119 | ||
120 | await server.users.update({ userId: rootId, videoQuota: 10_000_000 }) | |
121 | }) | |
122 | ||
123 | describe('Directory cleaning', function () { | |
124 | ||
125 | // FIXME: https://github.com/kukhariev/node-uploadx/pull/524/files#r852989382 | |
126 | // it('Should correctly delete files after an upload', async function () { | |
127 | // const uploadId = await prepareUpload() | |
128 | // await sendChunks({ pathUploadId: uploadId }) | |
129 | // await server.videos.endResumableUpload({ pathUploadId: uploadId }) | |
130 | ||
131 | // expect(await countResumableUploads()).to.equal(0) | |
132 | // }) | |
133 | ||
134 | it('Should not delete files after an unfinished upload', async function () { | |
135 | await prepareUpload() | |
136 | ||
137 | expect(await countResumableUploads()).to.equal(2) | |
138 | }) | |
139 | ||
140 | it('Should not delete recent uploads', async function () { | |
141 | await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } }) | |
142 | ||
143 | expect(await countResumableUploads()).to.equal(2) | |
144 | }) | |
145 | ||
146 | it('Should delete old uploads', async function () { | |
147 | await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } }) | |
148 | ||
149 | expect(await countResumableUploads()).to.equal(0) | |
150 | }) | |
151 | }) | |
152 | ||
153 | describe('Resumable upload and chunks', function () { | |
154 | ||
155 | it('Should accept the same amount of chunks', async function () { | |
156 | const uploadId = await prepareUpload() | |
157 | await sendChunks({ pathUploadId: uploadId }) | |
158 | ||
159 | await checkFileSize(uploadId, null) | |
160 | }) | |
161 | ||
162 | it('Should not accept more chunks than expected', async function () { | |
163 | const uploadId = await prepareUpload({ size: 100 }) | |
164 | ||
165 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 }) | |
166 | await checkFileSize(uploadId, 0) | |
167 | }) | |
168 | ||
169 | it('Should not accept more chunks than expected with an invalid content length/content range', async function () { | |
170 | const uploadId = await prepareUpload({ size: 1500 }) | |
171 | ||
172 | // Content length check can be different depending on the node version | |
173 | try { | |
174 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409, contentLength: 1000 }) | |
175 | await checkFileSize(uploadId, 0) | |
176 | } catch { | |
177 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 }) | |
178 | await checkFileSize(uploadId, 0) | |
179 | } | |
180 | }) | |
181 | ||
182 | it('Should not accept more chunks than expected with an invalid content length', async function () { | |
183 | const uploadId = await prepareUpload({ size: 500 }) | |
184 | ||
185 | const size = 1000 | |
186 | ||
187 | // Content length check seems to have changed in v16 | |
188 | const expectedStatus = process.version.startsWith('v16') | |
189 | ? HttpStatusCode.CONFLICT_409 | |
190 | : HttpStatusCode.BAD_REQUEST_400 | |
191 | ||
192 | const contentRangeBuilder = (start: number) => `bytes ${start}-${start + size - 1}/${size}` | |
193 | await sendChunks({ pathUploadId: uploadId, expectedStatus, contentRangeBuilder, contentLength: size }) | |
194 | await checkFileSize(uploadId, 0) | |
195 | }) | |
196 | ||
197 | it('Should be able to accept 2 PUT requests', async function () { | |
198 | const uploadId = await prepareUpload() | |
199 | ||
200 | const result1 = await sendChunks({ pathUploadId: uploadId }) | |
201 | const result2 = await sendChunks({ pathUploadId: uploadId }) | |
202 | ||
203 | expect(result1.body.video.uuid).to.exist | |
204 | expect(result1.body.video.uuid).to.equal(result2.body.video.uuid) | |
205 | ||
206 | expect(result1.headers['x-resumable-upload-cached']).to.not.exist | |
207 | expect(result2.headers['x-resumable-upload-cached']).to.equal('true') | |
208 | ||
209 | await checkFileSize(uploadId, null) | |
210 | }) | |
211 | ||
212 | it('Should not have the same upload id with 2 different users', async function () { | |
213 | const originalName = 'toto.mp4' | |
214 | const lastModified = new Date().getTime() | |
215 | ||
216 | const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken }) | |
217 | const uploadId2 = await prepareUpload({ originalName, lastModified, channelId: userChannelId, token: userAccessToken }) | |
218 | ||
219 | expect(uploadId1).to.not.equal(uploadId2) | |
220 | }) | |
221 | ||
222 | it('Should have the same upload id with the same user', async function () { | |
223 | const originalName = 'toto.mp4' | |
224 | const lastModified = new Date().getTime() | |
225 | ||
226 | const uploadId1 = await prepareUpload({ originalName, lastModified }) | |
227 | const uploadId2 = await prepareUpload({ originalName, lastModified }) | |
228 | ||
229 | expect(uploadId1).to.equal(uploadId2) | |
230 | }) | |
231 | ||
232 | it('Should not cache a request with 2 different users', async function () { | |
233 | const originalName = 'toto.mp4' | |
234 | const lastModified = new Date().getTime() | |
235 | ||
236 | const uploadId = await prepareUpload({ originalName, lastModified, token: server.accessToken }) | |
237 | ||
238 | await sendChunks({ pathUploadId: uploadId, token: server.accessToken }) | |
239 | await sendChunks({ pathUploadId: uploadId, token: userAccessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) | |
240 | }) | |
241 | ||
242 | it('Should not cache a request after a delete', async function () { | |
243 | const originalName = 'toto.mp4' | |
244 | const lastModified = new Date().getTime() | |
245 | const uploadId1 = await prepareUpload({ originalName, lastModified, token: server.accessToken }) | |
246 | ||
247 | await sendChunks({ pathUploadId: uploadId1 }) | |
248 | await server.videos.endResumableUpload({ pathUploadId: uploadId1 }) | |
249 | ||
250 | const uploadId2 = await prepareUpload({ originalName, lastModified, token: server.accessToken }) | |
251 | expect(uploadId1).to.equal(uploadId2) | |
252 | ||
253 | const result2 = await sendChunks({ pathUploadId: uploadId1 }) | |
254 | expect(result2.headers['x-resumable-upload-cached']).to.not.exist | |
255 | }) | |
256 | ||
257 | it('Should refuse an invalid digest', async function () { | |
258 | const uploadId = await prepareUpload({ token: server.accessToken }) | |
259 | ||
260 | await sendChunks({ | |
261 | pathUploadId: uploadId, | |
262 | token: server.accessToken, | |
263 | digestBuilder: () => 'sha=' + 'a'.repeat(40), | |
264 | expectedStatus: 460 as any | |
265 | }) | |
266 | }) | |
267 | ||
268 | it('Should accept an appropriate digest', async function () { | |
269 | const uploadId = await prepareUpload({ token: server.accessToken }) | |
270 | ||
271 | await sendChunks({ | |
272 | pathUploadId: uploadId, | |
273 | token: server.accessToken, | |
274 | digestBuilder: (chunk: Buffer) => { | |
275 | return 'sha1=' + sha1(chunk, 'base64') | |
276 | } | |
277 | }) | |
278 | }) | |
279 | }) | |
280 | ||
281 | after(async function () { | |
282 | await cleanupTests([ server ]) | |
283 | }) | |
})