/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import 'mocha'
import * as chai from 'chai'
import { pathExists, readdir, stat } from 'fs-extra'
import { join } from 'path'
import {
  buildAbsoluteFixturePath,
  cleanupTests,
  createSingleServer,
  PeerTubeServer,
  setAccessTokensToServers,
  setDefaultVideoChannel
} from '@shared/extra-utils'
import { HttpStatusCode, VideoPrivacy } from '@shared/models'
f6d6e7f8 | 16 | |
17 | const expect = chai.expect | |
18 | ||
19 | // Most classic resumable upload tests are done in other test suites | |
20 | ||
21 | describe('Test resumable upload', function () { | |
22 | const defaultFixture = 'video_short.mp4' | |
254d3579 | 23 | let server: PeerTubeServer |
f6d6e7f8 | 24 | let rootId: number |
25 | ||
26 | async function buildSize (fixture: string, size?: number) { | |
27 | if (size !== undefined) return size | |
28 | ||
29 | const baseFixture = buildAbsoluteFixturePath(fixture) | |
30 | return (await stat(baseFixture)).size | |
31 | } | |
32 | ||
33 | async function prepareUpload (sizeArg?: number) { | |
34 | const size = await buildSize(defaultFixture, sizeArg) | |
35 | ||
36 | const attributes = { | |
37 | name: 'video', | |
89d241a7 | 38 | channelId: server.store.channel.id, |
f6d6e7f8 | 39 | privacy: VideoPrivacy.PUBLIC, |
40 | fixture: defaultFixture | |
41 | } | |
42 | ||
43 | const mimetype = 'video/mp4' | |
44 | ||
89d241a7 | 45 | const res = await server.videos.prepareResumableUpload({ attributes, size, mimetype }) |
f6d6e7f8 | 46 | |
47 | return res.header['location'].split('?')[1] | |
48 | } | |
49 | ||
50 | async function sendChunks (options: { | |
51 | pathUploadId: string | |
52 | size?: number | |
53 | expectedStatus?: HttpStatusCode | |
54 | contentLength?: number | |
55 | contentRange?: string | |
56 | contentRangeBuilder?: (start: number, chunk: any) => string | |
57 | }) { | |
58 | const { pathUploadId, expectedStatus, contentLength, contentRangeBuilder } = options | |
59 | ||
60 | const size = await buildSize(defaultFixture, options.size) | |
61 | const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture) | |
62 | ||
89d241a7 | 63 | return server.videos.sendResumableChunks({ |
f6d6e7f8 | 64 | pathUploadId, |
65 | videoFilePath: absoluteFilePath, | |
66 | size, | |
67 | contentLength, | |
68 | contentRangeBuilder, | |
d23dd9fb | 69 | expectedStatus |
f6d6e7f8 | 70 | }) |
71 | } | |
72 | ||
73 | async function checkFileSize (uploadIdArg: string, expectedSize: number | null) { | |
74 | const uploadId = uploadIdArg.replace(/^upload_id=/, '') | |
75 | ||
76 | const subPath = join('tmp', 'resumable-uploads', uploadId) | |
89d241a7 | 77 | const filePath = server.servers.buildDirectory(subPath) |
f6d6e7f8 | 78 | const exists = await pathExists(filePath) |
79 | ||
80 | if (expectedSize === null) { | |
81 | expect(exists).to.be.false | |
82 | return | |
83 | } | |
84 | ||
85 | expect(exists).to.be.true | |
86 | ||
87 | expect((await stat(filePath)).size).to.equal(expectedSize) | |
88 | } | |
89 | ||
90 | async function countResumableUploads () { | |
91 | const subPath = join('tmp', 'resumable-uploads') | |
89d241a7 | 92 | const filePath = server.servers.buildDirectory(subPath) |
f6d6e7f8 | 93 | |
94 | const files = await readdir(filePath) | |
95 | return files.length | |
96 | } | |
97 | ||
98 | before(async function () { | |
99 | this.timeout(30000) | |
100 | ||
254d3579 | 101 | server = await createSingleServer(1) |
83903cb6 C |
102 | await setAccessTokensToServers([ server ]) |
103 | await setDefaultVideoChannel([ server ]) | |
f6d6e7f8 | 104 | |
89d241a7 | 105 | const body = await server.users.getMyInfo() |
7926c5f9 | 106 | rootId = body.id |
f6d6e7f8 | 107 | |
89d241a7 | 108 | await server.users.update({ userId: rootId, videoQuota: 10_000_000 }) |
f6d6e7f8 | 109 | }) |
110 | ||
111 | describe('Directory cleaning', function () { | |
112 | ||
113 | it('Should correctly delete files after an upload', async function () { | |
114 | const uploadId = await prepareUpload() | |
115 | await sendChunks({ pathUploadId: uploadId }) | |
116 | ||
117 | expect(await countResumableUploads()).to.equal(0) | |
118 | }) | |
119 | ||
120 | it('Should not delete files after an unfinished upload', async function () { | |
121 | await prepareUpload() | |
122 | ||
123 | expect(await countResumableUploads()).to.equal(2) | |
124 | }) | |
125 | ||
126 | it('Should not delete recent uploads', async function () { | |
89d241a7 | 127 | await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } }) |
f6d6e7f8 | 128 | |
129 | expect(await countResumableUploads()).to.equal(2) | |
130 | }) | |
131 | ||
132 | it('Should delete old uploads', async function () { | |
89d241a7 | 133 | await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } }) |
f6d6e7f8 | 134 | |
135 | expect(await countResumableUploads()).to.equal(0) | |
136 | }) | |
137 | }) | |
138 | ||
139 | describe('Resumable upload and chunks', function () { | |
140 | ||
141 | it('Should accept the same amount of chunks', async function () { | |
142 | const uploadId = await prepareUpload() | |
143 | await sendChunks({ pathUploadId: uploadId }) | |
144 | ||
145 | await checkFileSize(uploadId, null) | |
146 | }) | |
147 | ||
148 | it('Should not accept more chunks than expected', async function () { | |
fea11cf2 | 149 | const uploadId = await prepareUpload(100) |
f6d6e7f8 | 150 | |
151 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 }) | |
152 | await checkFileSize(uploadId, 0) | |
153 | }) | |
154 | ||
155 | it('Should not accept more chunks than expected with an invalid content length/content range', async function () { | |
156 | const uploadId = await prepareUpload(1500) | |
157 | ||
fea11cf2 C |
158 | // Content length check seems to have changed in v16 |
159 | if (process.version.startsWith('v16')) { | |
160 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409, contentLength: 1000 }) | |
161 | await checkFileSize(uploadId, 1000) | |
162 | } else { | |
163 | await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 }) | |
164 | await checkFileSize(uploadId, 0) | |
165 | } | |
f6d6e7f8 | 166 | }) |
167 | ||
168 | it('Should not accept more chunks than expected with an invalid content length', async function () { | |
169 | const uploadId = await prepareUpload(500) | |
170 | ||
171 | const size = 1000 | |
172 | ||
764b1a14 C |
173 | // Content length check seems to have changed in v16 |
174 | const expectedStatus = process.version.startsWith('v16') | |
175 | ? HttpStatusCode.CONFLICT_409 | |
176 | : HttpStatusCode.BAD_REQUEST_400 | |
177 | ||
83903cb6 | 178 | const contentRangeBuilder = (start: number) => `bytes ${start}-${start + size - 1}/${size}` |
764b1a14 | 179 | await sendChunks({ pathUploadId: uploadId, expectedStatus, contentRangeBuilder, contentLength: size }) |
f6d6e7f8 | 180 | await checkFileSize(uploadId, 0) |
181 | }) | |
182 | }) | |
183 | ||
06c27593 | 184 | after(async function () { |
83903cb6 | 185 | await cleanupTests([ server ]) |
06c27593 | 186 | }) |
f6d6e7f8 | 187 | }) |