]> git.immae.eu Git - github/Chocobozzz/PeerTube.git/blob - server/lib/object-storage/shared/object-storage-helpers.ts
Merge branch 'release/5.0.0' into develop
[github/Chocobozzz/PeerTube.git] / server / lib / object-storage / shared / object-storage-helpers.ts
1 import { map } from 'bluebird'
2 import { createReadStream, createWriteStream, ensureDir, ReadStream } from 'fs-extra'
3 import { dirname } from 'path'
4 import { Readable } from 'stream'
5 import {
6 _Object,
7 CompleteMultipartUploadCommandOutput,
8 DeleteObjectCommand,
9 GetObjectCommand,
10 ListObjectsV2Command,
11 PutObjectAclCommand,
12 PutObjectCommandInput,
13 S3Client
14 } from '@aws-sdk/client-s3'
15 import { Upload } from '@aws-sdk/lib-storage'
16 import { pipelinePromise } from '@server/helpers/core-utils'
17 import { isArray } from '@server/helpers/custom-validators/misc'
18 import { logger } from '@server/helpers/logger'
19 import { CONFIG } from '@server/initializers/config'
20 import { getInternalUrl } from '../urls'
21 import { getClient } from './client'
22 import { lTags } from './logger'
23
// Describes where objects live: the bucket itself plus an optional key
// prefix that is prepended to every object key (see buildKey()).
type BucketInfo = {
  // Name of the S3 bucket to operate on
  BUCKET_NAME: string
  // Optional key prefix (e.g. 'videos/'); callers appear to always provide
  // it — NOTE(review): an undefined PREFIX would stringify to 'undefined'
  // in buildKey, confirm it is always set by config
  PREFIX?: string
}
28
29 async function listKeysOfPrefix (prefix: string, bucketInfo: BucketInfo) {
30 const s3Client = getClient()
31
32 const commandPrefix = bucketInfo.PREFIX + prefix
33 const listCommand = new ListObjectsV2Command({
34 Bucket: bucketInfo.BUCKET_NAME,
35 Prefix: commandPrefix
36 })
37
38 const listedObjects = await s3Client.send(listCommand)
39
40 if (isArray(listedObjects.Contents) !== true) return []
41
42 return listedObjects.Contents.map(c => c.Key)
43 }
44
45 // ---------------------------------------------------------------------------
46
47 async function storeObject (options: {
48 inputPath: string
49 objectStorageKey: string
50 bucketInfo: BucketInfo
51 isPrivate: boolean
52 }): Promise<string> {
53 const { inputPath, objectStorageKey, bucketInfo, isPrivate } = options
54
55 logger.debug('Uploading file %s to %s%s in bucket %s', inputPath, bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags())
56
57 const fileStream = createReadStream(inputPath)
58
59 return uploadToStorage({ objectStorageKey, content: fileStream, bucketInfo, isPrivate })
60 }
61
62 // ---------------------------------------------------------------------------
63
64 async function updateObjectACL (options: {
65 objectStorageKey: string
66 bucketInfo: BucketInfo
67 isPrivate: boolean
68 }) {
69 const { objectStorageKey, bucketInfo, isPrivate } = options
70
71 const acl = getACL(isPrivate)
72 if (!acl) return
73
74 const key = buildKey(objectStorageKey, bucketInfo)
75
76 logger.debug('Updating ACL file %s in bucket %s', key, bucketInfo.BUCKET_NAME, lTags())
77
78 const command = new PutObjectAclCommand({
79 Bucket: bucketInfo.BUCKET_NAME,
80 Key: key,
81 ACL: acl
82 })
83
84 await getClient().send(command)
85 }
86
87 function updatePrefixACL (options: {
88 prefix: string
89 bucketInfo: BucketInfo
90 isPrivate: boolean
91 }) {
92 const { prefix, bucketInfo, isPrivate } = options
93
94 const acl = getACL(isPrivate)
95 if (!acl) return
96
97 logger.debug('Updating ACL of files in prefix %s in bucket %s', prefix, bucketInfo.BUCKET_NAME, lTags())
98
99 return applyOnPrefix({
100 prefix,
101 bucketInfo,
102 commandBuilder: obj => {
103 logger.debug('Updating ACL of %s inside prefix %s in bucket %s', obj.Key, prefix, bucketInfo.BUCKET_NAME, lTags())
104
105 return new PutObjectAclCommand({
106 Bucket: bucketInfo.BUCKET_NAME,
107 Key: obj.Key,
108 ACL: acl
109 })
110 }
111 })
112 }
113
114 // ---------------------------------------------------------------------------
115
116 function removeObject (objectStorageKey: string, bucketInfo: BucketInfo) {
117 const key = buildKey(objectStorageKey, bucketInfo)
118
119 return removeObjectByFullKey(key, bucketInfo)
120 }
121
122 function removeObjectByFullKey (fullKey: string, bucketInfo: BucketInfo) {
123 logger.debug('Removing file %s in bucket %s', fullKey, bucketInfo.BUCKET_NAME, lTags())
124
125 const command = new DeleteObjectCommand({
126 Bucket: bucketInfo.BUCKET_NAME,
127 Key: fullKey
128 })
129
130 return getClient().send(command)
131 }
132
133 async function removePrefix (prefix: string, bucketInfo: BucketInfo) {
134 // FIXME: use bulk delete when s3ninja will support this operation
135
136 logger.debug('Removing prefix %s in bucket %s', prefix, bucketInfo.BUCKET_NAME, lTags())
137
138 return applyOnPrefix({
139 prefix,
140 bucketInfo,
141 commandBuilder: obj => {
142 logger.debug('Removing %s inside prefix %s in bucket %s', obj.Key, prefix, bucketInfo.BUCKET_NAME, lTags())
143
144 return new DeleteObjectCommand({
145 Bucket: bucketInfo.BUCKET_NAME,
146 Key: obj.Key
147 })
148 }
149 })
150 }
151
152 // ---------------------------------------------------------------------------
153
154 async function makeAvailable (options: {
155 key: string
156 destination: string
157 bucketInfo: BucketInfo
158 }) {
159 const { key, destination, bucketInfo } = options
160
161 await ensureDir(dirname(options.destination))
162
163 const command = new GetObjectCommand({
164 Bucket: bucketInfo.BUCKET_NAME,
165 Key: buildKey(key, bucketInfo)
166 })
167 const response = await getClient().send(command)
168
169 const file = createWriteStream(destination)
170 await pipelinePromise(response.Body as Readable, file)
171
172 file.close()
173 }
174
175 function buildKey (key: string, bucketInfo: BucketInfo) {
176 return bucketInfo.PREFIX + key
177 }
178
179 // ---------------------------------------------------------------------------
180
181 async function createObjectReadStream (options: {
182 key: string
183 bucketInfo: BucketInfo
184 rangeHeader: string
185 }) {
186 const { key, bucketInfo, rangeHeader } = options
187
188 const command = new GetObjectCommand({
189 Bucket: bucketInfo.BUCKET_NAME,
190 Key: buildKey(key, bucketInfo),
191 Range: rangeHeader
192 })
193
194 const response = await getClient().send(command)
195
196 return {
197 response,
198 stream: response.Body as Readable
199 }
200 }
201
202 // ---------------------------------------------------------------------------
203
// Public API of this module; helpers defined below the export block are
// internal to this file.
export {
  BucketInfo,
  buildKey,

  storeObject,

  removeObject,
  removeObjectByFullKey,
  removePrefix,

  makeAvailable,

  updateObjectACL,
  updatePrefixACL,

  listKeysOfPrefix,
  createObjectReadStream
}
222
223 // ---------------------------------------------------------------------------
224
225 async function uploadToStorage (options: {
226 content: ReadStream
227 objectStorageKey: string
228 bucketInfo: BucketInfo
229 isPrivate: boolean
230 }) {
231 const { content, objectStorageKey, bucketInfo, isPrivate } = options
232
233 const input: PutObjectCommandInput = {
234 Body: content,
235 Bucket: bucketInfo.BUCKET_NAME,
236 Key: buildKey(objectStorageKey, bucketInfo)
237 }
238
239 const acl = getACL(isPrivate)
240 if (acl) input.ACL = acl
241
242 const parallelUploads3 = new Upload({
243 client: getClient(),
244 queueSize: 4,
245 partSize: CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART,
246
247 // `leavePartsOnError` must be set to `true` to avoid silently dropping failed parts
248 // More detailed explanation:
249 // https://github.com/aws/aws-sdk-js-v3/blob/v3.164.0/lib/lib-storage/src/Upload.ts#L274
250 // https://github.com/aws/aws-sdk-js-v3/issues/2311#issuecomment-939413928
251 leavePartsOnError: true,
252 params: input
253 })
254
255 const response = (await parallelUploads3.done()) as CompleteMultipartUploadCommandOutput
256 // Check is needed even if the HTTP status code is 200 OK
257 // For more information, see https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html
258 if (!response.Bucket) {
259 const message = `Error uploading ${objectStorageKey} to bucket ${bucketInfo.BUCKET_NAME}`
260 logger.error(message, { response, ...lTags() })
261 throw new Error(message)
262 }
263
264 logger.debug(
265 'Completed %s%s in bucket %s',
266 bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags()
267 )
268
269 return getInternalUrl(bucketInfo, objectStorageKey)
270 }
271
272 async function applyOnPrefix (options: {
273 prefix: string
274 bucketInfo: BucketInfo
275 commandBuilder: (obj: _Object) => Parameters<S3Client['send']>[0]
276
277 continuationToken?: string
278 }) {
279 const { prefix, bucketInfo, commandBuilder, continuationToken } = options
280
281 const s3Client = getClient()
282
283 const commandPrefix = buildKey(prefix, bucketInfo)
284 const listCommand = new ListObjectsV2Command({
285 Bucket: bucketInfo.BUCKET_NAME,
286 Prefix: commandPrefix,
287 ContinuationToken: continuationToken
288 })
289
290 const listedObjects = await s3Client.send(listCommand)
291
292 if (isArray(listedObjects.Contents) !== true) {
293 const message = `Cannot apply function on ${commandPrefix} prefix in bucket ${bucketInfo.BUCKET_NAME}: no files listed.`
294
295 logger.error(message, { response: listedObjects, ...lTags() })
296 throw new Error(message)
297 }
298
299 await map(listedObjects.Contents, object => {
300 const command = commandBuilder(object)
301
302 return s3Client.send(command)
303 }, { concurrency: 10 })
304
305 // Repeat if not all objects could be listed at once (limit of 1000?)
306 if (listedObjects.IsTruncated) {
307 await applyOnPrefix({ ...options, continuationToken: listedObjects.ContinuationToken })
308 }
309 }
310
311 function getACL (isPrivate: boolean) {
312 return isPrivate
313 ? CONFIG.OBJECT_STORAGE.UPLOAD_ACL.PRIVATE
314 : CONFIG.OBJECT_STORAGE.UPLOAD_ACL.PUBLIC
315 }