]>
Commit | Line | Data |
---|---|---|
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | |
2 | ||
3 | import 'mocha' | |
4 | import * as chai from 'chai' | |
5 | import { pathExists, readdir, stat } from 'fs-extra' | |
6 | import { join } from 'path' | |
7 | import { HttpStatusCode } from '@shared/core-utils' | |
8 | import { | |
9 | buildAbsoluteFixturePath, | |
10 | buildServerDirectory, | |
11 | cleanupTests, | |
12 | flushAndRunServer, | |
13 | getMyUserInformation, | |
14 | prepareResumableUpload, | |
15 | sendResumableChunks, | |
16 | ServerInfo, | |
17 | setAccessTokensToServers, | |
18 | setDefaultVideoChannel, | |
19 | updateUser | |
20 | } from '@shared/extra-utils' | |
21 | import { MyUser, VideoPrivacy } from '@shared/models' | |
22 | ||
23 | const expect = chai.expect | |
24 | ||
25 | // Most classic resumable upload tests are done in other test suites | |
26 | ||
27 | describe('Test resumable upload', function () { | |
28 | const defaultFixture = 'video_short.mp4' | |
29 | let server: ServerInfo | |
30 | let rootId: number | |
31 | ||
32 | async function buildSize (fixture: string, size?: number) { | |
33 | if (size !== undefined) return size | |
34 | ||
35 | const baseFixture = buildAbsoluteFixturePath(fixture) | |
36 | return (await stat(baseFixture)).size | |
37 | } | |
38 | ||
39 | async function prepareUpload (sizeArg?: number) { | |
40 | const size = await buildSize(defaultFixture, sizeArg) | |
41 | ||
42 | const attributes = { | |
43 | name: 'video', | |
44 | channelId: server.videoChannel.id, | |
45 | privacy: VideoPrivacy.PUBLIC, | |
46 | fixture: defaultFixture | |
47 | } | |
48 | ||
49 | const mimetype = 'video/mp4' | |
50 | ||
51 | const res = await prepareResumableUpload({ url: server.url, token: server.accessToken, attributes, size, mimetype }) | |
52 | ||
53 | return res.header['location'].split('?')[1] | |
54 | } | |
55 | ||
56 | async function sendChunks (options: { | |
57 | pathUploadId: string | |
58 | size?: number | |
59 | expectedStatus?: HttpStatusCode | |
60 | contentLength?: number | |
61 | contentRange?: string | |
62 | contentRangeBuilder?: (start: number, chunk: any) => string | |
63 | }) { | |
64 | const { pathUploadId, expectedStatus, contentLength, contentRangeBuilder } = options | |
65 | ||
66 | const size = await buildSize(defaultFixture, options.size) | |
67 | const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture) | |
68 | ||
69 | return sendResumableChunks({ | |
70 | url: server.url, | |
71 | token: server.accessToken, | |
72 | pathUploadId, | |
73 | videoFilePath: absoluteFilePath, | |
74 | size, | |
75 | contentLength, | |
76 | contentRangeBuilder, | |
77 | specialStatus: expectedStatus | |
78 | }) | |
79 | } | |
80 | ||
81 | async function checkFileSize (uploadIdArg: string, expectedSize: number | null) { | |
82 | const uploadId = uploadIdArg.replace(/^upload_id=/, '') | |
83 | ||
84 | const subPath = join('tmp', 'resumable-uploads', uploadId) | |
85 | const filePath = buildServerDirectory(server, subPath) | |
86 | const exists = await pathExists(filePath) | |
87 | ||
88 | if (expectedSize === null) { | |
89 | expect(exists).to.be.false | |
90 | return | |
91 | } | |
92 | ||
93 | expect(exists).to.be.true | |
94 | ||
95 | expect((await stat(filePath)).size).to.equal(expectedSize) | |
96 | } | |
97 | ||
98 | async function countResumableUploads () { | |
99 | const subPath = join('tmp', 'resumable-uploads') | |
100 | const filePath = buildServerDirectory(server, subPath) | |
101 | ||
102 | const files = await readdir(filePath) | |
103 | return files.length | |
104 | } | |
105 | ||
106 | before(async function () { | |
107 | this.timeout(30000) | |
108 | ||
109 | server = await flushAndRunServer(1) | |
110 | await setAccessTokensToServers([ server ]) | |
111 | await setDefaultVideoChannel([ server ]) | |
112 | ||
113 | const res = await getMyUserInformation(server.url, server.accessToken) | |
114 | rootId = (res.body as MyUser).id | |
115 | ||
116 | await updateUser({ | |
117 | url: server.url, | |
118 | userId: rootId, | |
119 | accessToken: server.accessToken, | |
120 | videoQuota: 10_000_000 | |
121 | }) | |
122 | }) | |
123 | ||
  // These tests are order dependent: each one relies on the files left behind
  // (or cleaned up) by the previous test in this describe block.
  describe('Directory cleaning', function () {

    it('Should correctly delete files after an upload', async function () {
      const uploadId = await prepareUpload()
      await sendChunks({ pathUploadId: uploadId })

      // A completed upload must not leave anything in tmp/resumable-uploads
      expect(await countResumableUploads()).to.equal(0)
    })

    it('Should not delete files after an unfinished upload', async function () {
      await prepareUpload()

      // NOTE(review): 2 entries presumably means data file + metadata file for
      // the single pending upload — confirm against the server implementation
      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should not delete recent uploads', async function () {
      // "dandling" is intentional: it must match the debug command name the server exposes
      await server.debugCommand.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(2)
    })

    it('Should delete old uploads', async function () {
      // NOTE(review): nothing visible here ages the pending upload between the
      // previous test and this one — the server/test config must make it count
      // as "old" by now; verify against the debug command implementation
      await server.debugCommand.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })

      expect(await countResumableUploads()).to.equal(0)
    })
  })
151 | ||
152 | describe('Resumable upload and chunks', function () { | |
153 | ||
154 | it('Should accept the same amount of chunks', async function () { | |
155 | const uploadId = await prepareUpload() | |
156 | await sendChunks({ pathUploadId: uploadId }) | |
157 | ||
158 | await checkFileSize(uploadId, null) | |
159 | }) | |
160 | ||
161 | it('Should not accept more chunks than expected', async function () { | |
162 | const size = 100 | |
163 | const uploadId = await prepareUpload(size) | |
164 | ||
165 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 }) | |
166 | await checkFileSize(uploadId, 0) | |
167 | }) | |
168 | ||
169 | it('Should not accept more chunks than expected with an invalid content length/content range', async function () { | |
170 | const uploadId = await prepareUpload(1500) | |
171 | ||
172 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 }) | |
173 | await checkFileSize(uploadId, 0) | |
174 | }) | |
175 | ||
176 | it('Should not accept more chunks than expected with an invalid content length', async function () { | |
177 | const uploadId = await prepareUpload(500) | |
178 | ||
179 | const size = 1000 | |
180 | ||
181 | const contentRangeBuilder = start => `bytes ${start}-${start + size - 1}/${size}` | |
182 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentRangeBuilder, contentLength: size }) | |
183 | await checkFileSize(uploadId, 0) | |
184 | }) | |
185 | }) | |
186 | ||
187 | after(async function () { | |
188 | await cleanupTests([ server ]) | |
189 | }) | |
190 | }) |