]> git.immae.eu Git - github/Chocobozzz/PeerTube.git/blob - server/lib/object-storage/shared/object-storage-helpers.ts
Merge branch 'release/4.3.0' into develop
[github/Chocobozzz/PeerTube.git] / server / lib / object-storage / shared / object-storage-helpers.ts
import { map } from 'bluebird'
import { createReadStream, createWriteStream, ensureDir, ReadStream, remove } from 'fs-extra'
import { dirname } from 'path'
import { Readable } from 'stream'
import {
  _Object,
  CompleteMultipartUploadCommandOutput,
  DeleteObjectCommand,
  GetObjectCommand,
  ListObjectsV2Command,
  PutObjectAclCommand,
  PutObjectCommandInput,
  S3Client
} from '@aws-sdk/client-s3'
import { Upload } from '@aws-sdk/lib-storage'
import { pipelinePromise } from '@server/helpers/core-utils'
import { isArray } from '@server/helpers/custom-validators/misc'
import { logger } from '@server/helpers/logger'
import { CONFIG } from '@server/initializers/config'
import { getInternalUrl } from '../urls'
import { getClient } from './client'
import { lTags } from './logger'
23
24 type BucketInfo = {
25 BUCKET_NAME: string
26 PREFIX?: string
27 }
28
29 async function listKeysOfPrefix (prefix: string, bucketInfo: BucketInfo) {
30 const s3Client = getClient()
31
32 const commandPrefix = bucketInfo.PREFIX + prefix
33 const listCommand = new ListObjectsV2Command({
34 Bucket: bucketInfo.BUCKET_NAME,
35 Prefix: commandPrefix
36 })
37
38 const listedObjects = await s3Client.send(listCommand)
39
40 if (isArray(listedObjects.Contents) !== true) return []
41
42 return listedObjects.Contents.map(c => c.Key)
43 }
44
45 // ---------------------------------------------------------------------------
46
47 async function storeObject (options: {
48 inputPath: string
49 objectStorageKey: string
50 bucketInfo: BucketInfo
51 isPrivate: boolean
52 }): Promise<string> {
53 const { inputPath, objectStorageKey, bucketInfo, isPrivate } = options
54
55 logger.debug('Uploading file %s to %s%s in bucket %s', inputPath, bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags())
56
57 const fileStream = createReadStream(inputPath)
58
59 return uploadToStorage({ objectStorageKey, content: fileStream, bucketInfo, isPrivate })
60 }
61
62 // ---------------------------------------------------------------------------
63
64 function updateObjectACL (options: {
65 objectStorageKey: string
66 bucketInfo: BucketInfo
67 isPrivate: boolean
68 }) {
69 const { objectStorageKey, bucketInfo, isPrivate } = options
70
71 const key = buildKey(objectStorageKey, bucketInfo)
72
73 logger.debug('Updating ACL file %s in bucket %s', key, bucketInfo.BUCKET_NAME, lTags())
74
75 const command = new PutObjectAclCommand({
76 Bucket: bucketInfo.BUCKET_NAME,
77 Key: key,
78 ACL: getACL(isPrivate)
79 })
80
81 return getClient().send(command)
82 }
83
84 function updatePrefixACL (options: {
85 prefix: string
86 bucketInfo: BucketInfo
87 isPrivate: boolean
88 }) {
89 const { prefix, bucketInfo, isPrivate } = options
90
91 logger.debug('Updating ACL of files in prefix %s in bucket %s', prefix, bucketInfo.BUCKET_NAME, lTags())
92
93 return applyOnPrefix({
94 prefix,
95 bucketInfo,
96 commandBuilder: obj => {
97 logger.debug('Updating ACL of %s inside prefix %s in bucket %s', obj.Key, prefix, bucketInfo.BUCKET_NAME, lTags())
98
99 return new PutObjectAclCommand({
100 Bucket: bucketInfo.BUCKET_NAME,
101 Key: obj.Key,
102 ACL: getACL(isPrivate)
103 })
104 }
105 })
106 }
107
108 // ---------------------------------------------------------------------------
109
110 function removeObject (objectStorageKey: string, bucketInfo: BucketInfo) {
111 const key = buildKey(objectStorageKey, bucketInfo)
112
113 return removeObjectByFullKey(key, bucketInfo)
114 }
115
116 function removeObjectByFullKey (fullKey: string, bucketInfo: BucketInfo) {
117 logger.debug('Removing file %s in bucket %s', fullKey, bucketInfo.BUCKET_NAME, lTags())
118
119 const command = new DeleteObjectCommand({
120 Bucket: bucketInfo.BUCKET_NAME,
121 Key: fullKey
122 })
123
124 return getClient().send(command)
125 }
126
127 async function removePrefix (prefix: string, bucketInfo: BucketInfo) {
128 // FIXME: use bulk delete when s3ninja will support this operation
129
130 logger.debug('Removing prefix %s in bucket %s', prefix, bucketInfo.BUCKET_NAME, lTags())
131
132 return applyOnPrefix({
133 prefix,
134 bucketInfo,
135 commandBuilder: obj => {
136 logger.debug('Removing %s inside prefix %s in bucket %s', obj.Key, prefix, bucketInfo.BUCKET_NAME, lTags())
137
138 return new DeleteObjectCommand({
139 Bucket: bucketInfo.BUCKET_NAME,
140 Key: obj.Key
141 })
142 }
143 })
144 }
145
146 // ---------------------------------------------------------------------------
147
148 async function makeAvailable (options: {
149 key: string
150 destination: string
151 bucketInfo: BucketInfo
152 }) {
153 const { key, destination, bucketInfo } = options
154
155 await ensureDir(dirname(options.destination))
156
157 const command = new GetObjectCommand({
158 Bucket: bucketInfo.BUCKET_NAME,
159 Key: buildKey(key, bucketInfo)
160 })
161 const response = await getClient().send(command)
162
163 const file = createWriteStream(destination)
164 await pipelinePromise(response.Body as Readable, file)
165
166 file.close()
167 }
168
169 function buildKey (key: string, bucketInfo: BucketInfo) {
170 return bucketInfo.PREFIX + key
171 }
172
173 // ---------------------------------------------------------------------------
174
175 async function createObjectReadStream (options: {
176 key: string
177 bucketInfo: BucketInfo
178 rangeHeader: string
179 }) {
180 const { key, bucketInfo, rangeHeader } = options
181
182 const command = new GetObjectCommand({
183 Bucket: bucketInfo.BUCKET_NAME,
184 Key: buildKey(key, bucketInfo),
185 Range: rangeHeader
186 })
187
188 const response = await getClient().send(command)
189
190 return response.Body as Readable
191 }
192
193 // ---------------------------------------------------------------------------
194
// Public API of this object-storage helper module
export {
  BucketInfo,
  buildKey,

  storeObject,

  removeObject,
  removeObjectByFullKey,
  removePrefix,

  makeAvailable,

  updateObjectACL,
  updatePrefixACL,

  listKeysOfPrefix,
  createObjectReadStream
}
213
214 // ---------------------------------------------------------------------------
215
216 async function uploadToStorage (options: {
217 content: ReadStream
218 objectStorageKey: string
219 bucketInfo: BucketInfo
220 isPrivate: boolean
221 }) {
222 const { content, objectStorageKey, bucketInfo, isPrivate } = options
223
224 const input: PutObjectCommandInput = {
225 Body: content,
226 Bucket: bucketInfo.BUCKET_NAME,
227 Key: buildKey(objectStorageKey, bucketInfo),
228 ACL: getACL(isPrivate)
229 }
230
231 const parallelUploads3 = new Upload({
232 client: getClient(),
233 queueSize: 4,
234 partSize: CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART,
235
236 // `leavePartsOnError` must be set to `true` to avoid silently dropping failed parts
237 // More detailed explanation:
238 // https://github.com/aws/aws-sdk-js-v3/blob/v3.164.0/lib/lib-storage/src/Upload.ts#L274
239 // https://github.com/aws/aws-sdk-js-v3/issues/2311#issuecomment-939413928
240 leavePartsOnError: true,
241 params: input
242 })
243
244 const response = (await parallelUploads3.done()) as CompleteMultipartUploadCommandOutput
245 // Check is needed even if the HTTP status code is 200 OK
246 // For more information, see https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html
247 if (!response.Bucket) {
248 const message = `Error uploading ${objectStorageKey} to bucket ${bucketInfo.BUCKET_NAME}`
249 logger.error(message, { response, ...lTags() })
250 throw new Error(message)
251 }
252
253 logger.debug(
254 'Completed %s%s in bucket %s',
255 bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags()
256 )
257
258 return getInternalUrl(bucketInfo, objectStorageKey)
259 }
260
261 async function applyOnPrefix (options: {
262 prefix: string
263 bucketInfo: BucketInfo
264 commandBuilder: (obj: _Object) => Parameters<S3Client['send']>[0]
265
266 continuationToken?: string
267 }) {
268 const { prefix, bucketInfo, commandBuilder, continuationToken } = options
269
270 const s3Client = getClient()
271
272 const commandPrefix = buildKey(prefix, bucketInfo)
273 const listCommand = new ListObjectsV2Command({
274 Bucket: bucketInfo.BUCKET_NAME,
275 Prefix: commandPrefix,
276 ContinuationToken: continuationToken
277 })
278
279 const listedObjects = await s3Client.send(listCommand)
280
281 if (isArray(listedObjects.Contents) !== true) {
282 const message = `Cannot apply function on ${commandPrefix} prefix in bucket ${bucketInfo.BUCKET_NAME}: no files listed.`
283
284 logger.error(message, { response: listedObjects, ...lTags() })
285 throw new Error(message)
286 }
287
288 await map(listedObjects.Contents, object => {
289 const command = commandBuilder(object)
290
291 return s3Client.send(command)
292 }, { concurrency: 10 })
293
294 // Repeat if not all objects could be listed at once (limit of 1000?)
295 if (listedObjects.IsTruncated) {
296 await applyOnPrefix({ ...options, continuationToken: listedObjects.ContinuationToken })
297 }
298 }
299
300 function getACL (isPrivate: boolean) {
301 return isPrivate
302 ? CONFIG.OBJECT_STORAGE.UPLOAD_ACL.PRIVATE
303 : CONFIG.OBJECT_STORAGE.UPLOAD_ACL.PUBLIC
304 }