]> git.immae.eu Git - github/Chocobozzz/PeerTube.git/blob - server/lib/object-storage/shared/object-storage-helpers.ts
Forward 206 status code for object storage proxy
[github/Chocobozzz/PeerTube.git] / server / lib / object-storage / shared / object-storage-helpers.ts
1 import { map } from 'bluebird'
2 import { createReadStream, createWriteStream, ensureDir, ReadStream } from 'fs-extra'
3 import { dirname } from 'path'
4 import { Readable } from 'stream'
5 import {
6 _Object,
7 CompleteMultipartUploadCommandOutput,
8 DeleteObjectCommand,
9 GetObjectCommand,
10 ListObjectsV2Command,
11 PutObjectAclCommand,
12 PutObjectCommandInput,
13 S3Client
14 } from '@aws-sdk/client-s3'
15 import { Upload } from '@aws-sdk/lib-storage'
16 import { pipelinePromise } from '@server/helpers/core-utils'
17 import { isArray } from '@server/helpers/custom-validators/misc'
18 import { logger } from '@server/helpers/logger'
19 import { CONFIG } from '@server/initializers/config'
20 import { getInternalUrl } from '../urls'
21 import { getClient } from './client'
22 import { lTags } from './logger'
23
// Target bucket plus the optional key prefix every object of this storage lives under
type BucketInfo = {
  BUCKET_NAME: string
  PREFIX?: string
}
28
29 async function listKeysOfPrefix (prefix: string, bucketInfo: BucketInfo) {
30 const s3Client = getClient()
31
32 const commandPrefix = bucketInfo.PREFIX + prefix
33 const listCommand = new ListObjectsV2Command({
34 Bucket: bucketInfo.BUCKET_NAME,
35 Prefix: commandPrefix
36 })
37
38 const listedObjects = await s3Client.send(listCommand)
39
40 if (isArray(listedObjects.Contents) !== true) return []
41
42 return listedObjects.Contents.map(c => c.Key)
43 }
44
45 // ---------------------------------------------------------------------------
46
47 async function storeObject (options: {
48 inputPath: string
49 objectStorageKey: string
50 bucketInfo: BucketInfo
51 isPrivate: boolean
52 }): Promise<string> {
53 const { inputPath, objectStorageKey, bucketInfo, isPrivate } = options
54
55 logger.debug('Uploading file %s to %s%s in bucket %s', inputPath, bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags())
56
57 const fileStream = createReadStream(inputPath)
58
59 return uploadToStorage({ objectStorageKey, content: fileStream, bucketInfo, isPrivate })
60 }
61
62 // ---------------------------------------------------------------------------
63
64 function updateObjectACL (options: {
65 objectStorageKey: string
66 bucketInfo: BucketInfo
67 isPrivate: boolean
68 }) {
69 const { objectStorageKey, bucketInfo, isPrivate } = options
70
71 const key = buildKey(objectStorageKey, bucketInfo)
72
73 logger.debug('Updating ACL file %s in bucket %s', key, bucketInfo.BUCKET_NAME, lTags())
74
75 const command = new PutObjectAclCommand({
76 Bucket: bucketInfo.BUCKET_NAME,
77 Key: key,
78 ACL: getACL(isPrivate)
79 })
80
81 return getClient().send(command)
82 }
83
84 function updatePrefixACL (options: {
85 prefix: string
86 bucketInfo: BucketInfo
87 isPrivate: boolean
88 }) {
89 const { prefix, bucketInfo, isPrivate } = options
90
91 logger.debug('Updating ACL of files in prefix %s in bucket %s', prefix, bucketInfo.BUCKET_NAME, lTags())
92
93 return applyOnPrefix({
94 prefix,
95 bucketInfo,
96 commandBuilder: obj => {
97 logger.debug('Updating ACL of %s inside prefix %s in bucket %s', obj.Key, prefix, bucketInfo.BUCKET_NAME, lTags())
98
99 return new PutObjectAclCommand({
100 Bucket: bucketInfo.BUCKET_NAME,
101 Key: obj.Key,
102 ACL: getACL(isPrivate)
103 })
104 }
105 })
106 }
107
108 // ---------------------------------------------------------------------------
109
110 function removeObject (objectStorageKey: string, bucketInfo: BucketInfo) {
111 const key = buildKey(objectStorageKey, bucketInfo)
112
113 return removeObjectByFullKey(key, bucketInfo)
114 }
115
116 function removeObjectByFullKey (fullKey: string, bucketInfo: BucketInfo) {
117 logger.debug('Removing file %s in bucket %s', fullKey, bucketInfo.BUCKET_NAME, lTags())
118
119 const command = new DeleteObjectCommand({
120 Bucket: bucketInfo.BUCKET_NAME,
121 Key: fullKey
122 })
123
124 return getClient().send(command)
125 }
126
127 async function removePrefix (prefix: string, bucketInfo: BucketInfo) {
128 // FIXME: use bulk delete when s3ninja will support this operation
129
130 logger.debug('Removing prefix %s in bucket %s', prefix, bucketInfo.BUCKET_NAME, lTags())
131
132 return applyOnPrefix({
133 prefix,
134 bucketInfo,
135 commandBuilder: obj => {
136 logger.debug('Removing %s inside prefix %s in bucket %s', obj.Key, prefix, bucketInfo.BUCKET_NAME, lTags())
137
138 return new DeleteObjectCommand({
139 Bucket: bucketInfo.BUCKET_NAME,
140 Key: obj.Key
141 })
142 }
143 })
144 }
145
146 // ---------------------------------------------------------------------------
147
148 async function makeAvailable (options: {
149 key: string
150 destination: string
151 bucketInfo: BucketInfo
152 }) {
153 const { key, destination, bucketInfo } = options
154
155 await ensureDir(dirname(options.destination))
156
157 const command = new GetObjectCommand({
158 Bucket: bucketInfo.BUCKET_NAME,
159 Key: buildKey(key, bucketInfo)
160 })
161 const response = await getClient().send(command)
162
163 const file = createWriteStream(destination)
164 await pipelinePromise(response.Body as Readable, file)
165
166 file.close()
167 }
168
169 function buildKey (key: string, bucketInfo: BucketInfo) {
170 return bucketInfo.PREFIX + key
171 }
172
173 // ---------------------------------------------------------------------------
174
175 async function createObjectReadStream (options: {
176 key: string
177 bucketInfo: BucketInfo
178 rangeHeader: string
179 }) {
180 const { key, bucketInfo, rangeHeader } = options
181
182 const command = new GetObjectCommand({
183 Bucket: bucketInfo.BUCKET_NAME,
184 Key: buildKey(key, bucketInfo),
185 Range: rangeHeader
186 })
187
188 const response = await getClient().send(command)
189
190 return {
191 response,
192 stream: response.Body as Readable
193 }
194 }
195
196 // ---------------------------------------------------------------------------
197
// Public API of the object-storage helpers
export {
  BucketInfo,
  buildKey,

  storeObject,

  removeObject,
  removeObjectByFullKey,
  removePrefix,

  makeAvailable,

  updateObjectACL,
  updatePrefixACL,

  listKeysOfPrefix,
  createObjectReadStream
}
216
217 // ---------------------------------------------------------------------------
218
219 async function uploadToStorage (options: {
220 content: ReadStream
221 objectStorageKey: string
222 bucketInfo: BucketInfo
223 isPrivate: boolean
224 }) {
225 const { content, objectStorageKey, bucketInfo, isPrivate } = options
226
227 const input: PutObjectCommandInput = {
228 Body: content,
229 Bucket: bucketInfo.BUCKET_NAME,
230 Key: buildKey(objectStorageKey, bucketInfo),
231 ACL: getACL(isPrivate)
232 }
233
234 const parallelUploads3 = new Upload({
235 client: getClient(),
236 queueSize: 4,
237 partSize: CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART,
238
239 // `leavePartsOnError` must be set to `true` to avoid silently dropping failed parts
240 // More detailed explanation:
241 // https://github.com/aws/aws-sdk-js-v3/blob/v3.164.0/lib/lib-storage/src/Upload.ts#L274
242 // https://github.com/aws/aws-sdk-js-v3/issues/2311#issuecomment-939413928
243 leavePartsOnError: true,
244 params: input
245 })
246
247 const response = (await parallelUploads3.done()) as CompleteMultipartUploadCommandOutput
248 // Check is needed even if the HTTP status code is 200 OK
249 // For more information, see https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html
250 if (!response.Bucket) {
251 const message = `Error uploading ${objectStorageKey} to bucket ${bucketInfo.BUCKET_NAME}`
252 logger.error(message, { response, ...lTags() })
253 throw new Error(message)
254 }
255
256 logger.debug(
257 'Completed %s%s in bucket %s',
258 bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags()
259 )
260
261 return getInternalUrl(bucketInfo, objectStorageKey)
262 }
263
264 async function applyOnPrefix (options: {
265 prefix: string
266 bucketInfo: BucketInfo
267 commandBuilder: (obj: _Object) => Parameters<S3Client['send']>[0]
268
269 continuationToken?: string
270 }) {
271 const { prefix, bucketInfo, commandBuilder, continuationToken } = options
272
273 const s3Client = getClient()
274
275 const commandPrefix = buildKey(prefix, bucketInfo)
276 const listCommand = new ListObjectsV2Command({
277 Bucket: bucketInfo.BUCKET_NAME,
278 Prefix: commandPrefix,
279 ContinuationToken: continuationToken
280 })
281
282 const listedObjects = await s3Client.send(listCommand)
283
284 if (isArray(listedObjects.Contents) !== true) {
285 const message = `Cannot apply function on ${commandPrefix} prefix in bucket ${bucketInfo.BUCKET_NAME}: no files listed.`
286
287 logger.error(message, { response: listedObjects, ...lTags() })
288 throw new Error(message)
289 }
290
291 await map(listedObjects.Contents, object => {
292 const command = commandBuilder(object)
293
294 return s3Client.send(command)
295 }, { concurrency: 10 })
296
297 // Repeat if not all objects could be listed at once (limit of 1000?)
298 if (listedObjects.IsTruncated) {
299 await applyOnPrefix({ ...options, continuationToken: listedObjects.ContinuationToken })
300 }
301 }
302
303 function getACL (isPrivate: boolean) {
304 return isPrivate
305 ? CONFIG.OBJECT_STORAGE.UPLOAD_ACL.PRIVATE
306 : CONFIG.OBJECT_STORAGE.UPLOAD_ACL.PUBLIC
307 }