/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import 'mocha'
import * as chai from 'chai'
import { pathExists, readdir, stat } from 'fs-extra'
import { join } from 'path'
import { HttpStatusCode } from '@shared/core-utils'
import {
  buildAbsoluteFixturePath,
  buildServerDirectory,
  cleanupTests,
  flushAndRunServer,
  getMyUserInformation,
  prepareResumableUpload,
  sendDebugCommand,
  sendResumableChunks,
  ServerInfo,
  setAccessTokensToServers,
  setDefaultVideoChannel,
  updateUser
} from '@shared/extra-utils'
import { MyUser, VideoPrivacy } from '@shared/models'
24 | const expect = chai.expect | |
25 | ||
26 | // Most classic resumable upload tests are done in other test suites | |
27 | ||
28 | describe('Test resumable upload', function () { | |
29 | const defaultFixture = 'video_short.mp4' | |
30 | let server: ServerInfo | |
31 | let rootId: number | |
32 | ||
33 | async function buildSize (fixture: string, size?: number) { | |
34 | if (size !== undefined) return size | |
35 | ||
36 | const baseFixture = buildAbsoluteFixturePath(fixture) | |
37 | return (await stat(baseFixture)).size | |
38 | } | |
39 | ||
40 | async function prepareUpload (sizeArg?: number) { | |
41 | const size = await buildSize(defaultFixture, sizeArg) | |
42 | ||
43 | const attributes = { | |
44 | name: 'video', | |
45 | channelId: server.videoChannel.id, | |
46 | privacy: VideoPrivacy.PUBLIC, | |
47 | fixture: defaultFixture | |
48 | } | |
49 | ||
50 | const mimetype = 'video/mp4' | |
51 | ||
52 | const res = await prepareResumableUpload({ url: server.url, token: server.accessToken, attributes, size, mimetype }) | |
53 | ||
54 | return res.header['location'].split('?')[1] | |
55 | } | |
56 | ||
57 | async function sendChunks (options: { | |
58 | pathUploadId: string | |
59 | size?: number | |
60 | expectedStatus?: HttpStatusCode | |
61 | contentLength?: number | |
62 | contentRange?: string | |
63 | contentRangeBuilder?: (start: number, chunk: any) => string | |
64 | }) { | |
65 | const { pathUploadId, expectedStatus, contentLength, contentRangeBuilder } = options | |
66 | ||
67 | const size = await buildSize(defaultFixture, options.size) | |
68 | const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture) | |
69 | ||
70 | return sendResumableChunks({ | |
71 | url: server.url, | |
72 | token: server.accessToken, | |
73 | pathUploadId, | |
74 | videoFilePath: absoluteFilePath, | |
75 | size, | |
76 | contentLength, | |
77 | contentRangeBuilder, | |
78 | specialStatus: expectedStatus | |
79 | }) | |
80 | } | |
81 | ||
82 | async function checkFileSize (uploadIdArg: string, expectedSize: number | null) { | |
83 | const uploadId = uploadIdArg.replace(/^upload_id=/, '') | |
84 | ||
85 | const subPath = join('tmp', 'resumable-uploads', uploadId) | |
86 | const filePath = buildServerDirectory(server, subPath) | |
87 | const exists = await pathExists(filePath) | |
88 | ||
89 | if (expectedSize === null) { | |
90 | expect(exists).to.be.false | |
91 | return | |
92 | } | |
93 | ||
94 | expect(exists).to.be.true | |
95 | ||
96 | expect((await stat(filePath)).size).to.equal(expectedSize) | |
97 | } | |
98 | ||
99 | async function countResumableUploads () { | |
100 | const subPath = join('tmp', 'resumable-uploads') | |
101 | const filePath = buildServerDirectory(server, subPath) | |
102 | ||
103 | const files = await readdir(filePath) | |
104 | return files.length | |
105 | } | |
106 | ||
107 | before(async function () { | |
108 | this.timeout(30000) | |
109 | ||
110 | server = await flushAndRunServer(1) | |
111 | await setAccessTokensToServers([ server ]) | |
112 | await setDefaultVideoChannel([ server ]) | |
113 | ||
114 | const res = await getMyUserInformation(server.url, server.accessToken) | |
115 | rootId = (res.body as MyUser).id | |
116 | ||
117 | await updateUser({ | |
118 | url: server.url, | |
119 | userId: rootId, | |
120 | accessToken: server.accessToken, | |
121 | videoQuota: 10_000_000 | |
122 | }) | |
123 | }) | |
124 | ||
125 | describe('Directory cleaning', function () { | |
126 | ||
127 | it('Should correctly delete files after an upload', async function () { | |
128 | const uploadId = await prepareUpload() | |
129 | await sendChunks({ pathUploadId: uploadId }) | |
130 | ||
131 | expect(await countResumableUploads()).to.equal(0) | |
132 | }) | |
133 | ||
134 | it('Should not delete files after an unfinished upload', async function () { | |
135 | await prepareUpload() | |
136 | ||
137 | expect(await countResumableUploads()).to.equal(2) | |
138 | }) | |
139 | ||
140 | it('Should not delete recent uploads', async function () { | |
141 | await sendDebugCommand(server.url, server.accessToken, { command: 'remove-dandling-resumable-uploads' }) | |
142 | ||
143 | expect(await countResumableUploads()).to.equal(2) | |
144 | }) | |
145 | ||
146 | it('Should delete old uploads', async function () { | |
147 | await sendDebugCommand(server.url, server.accessToken, { command: 'remove-dandling-resumable-uploads' }) | |
148 | ||
149 | expect(await countResumableUploads()).to.equal(0) | |
150 | }) | |
151 | }) | |
152 | ||
153 | describe('Resumable upload and chunks', function () { | |
154 | ||
155 | it('Should accept the same amount of chunks', async function () { | |
156 | const uploadId = await prepareUpload() | |
157 | await sendChunks({ pathUploadId: uploadId }) | |
158 | ||
159 | await checkFileSize(uploadId, null) | |
160 | }) | |
161 | ||
162 | it('Should not accept more chunks than expected', async function () { | |
163 | const size = 100 | |
164 | const uploadId = await prepareUpload(size) | |
165 | ||
166 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 }) | |
167 | await checkFileSize(uploadId, 0) | |
168 | }) | |
169 | ||
170 | it('Should not accept more chunks than expected with an invalid content length/content range', async function () { | |
171 | const uploadId = await prepareUpload(1500) | |
172 | ||
173 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 }) | |
174 | await checkFileSize(uploadId, 0) | |
175 | }) | |
176 | ||
177 | it('Should not accept more chunks than expected with an invalid content length', async function () { | |
178 | const uploadId = await prepareUpload(500) | |
179 | ||
180 | const size = 1000 | |
181 | ||
182 | const contentRangeBuilder = start => `bytes ${start}-${start + size - 1}/${size}` | |
183 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentRangeBuilder, contentLength: size }) | |
184 | await checkFileSize(uploadId, 0) | |
185 | }) | |
186 | }) | |
187 | ||
188 | after(async function () { | |
189 | await cleanupTests([ server ]) | |
190 | }) | |
191 | }) |