-rw-r--r--   config/default.yaml                                           2
-rw-r--r--   config/production.yaml.example                                2
-rw-r--r--   server/lib/object-storage/shared/object-storage-helpers.ts   53
3 files changed, 20 insertions, 37 deletions
diff --git a/config/default.yaml b/config/default.yaml
index 9bf1ca284..0b0a54eef 100644
--- a/config/default.yaml
+++ b/config/default.yaml
@@ -152,7 +152,7 @@ object_storage:
     secret_access_key: ''

   # Maximum amount to upload in one request to object storage
-  max_upload_part: 2GB
+  max_upload_part: 100MB

   streaming_playlists:
     bucket_name: 'streaming-playlists'
diff --git a/config/production.yaml.example b/config/production.yaml.example
index f6dc6ccdb..209aaa56a 100644
--- a/config/production.yaml.example
+++ b/config/production.yaml.example
@@ -150,7 +150,7 @@ object_storage:
     secret_access_key: ''

   # Maximum amount to upload in one request to object storage
-  max_upload_part: 2GB
+  max_upload_part: 100MB

   streaming_playlists:
     bucket_name: 'streaming-playlists'
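For scale, here is a rough back-of-the-envelope sketch of what the smaller default part size implies. The 10,000-part ceiling is a standard S3 multipart-upload limit and not part of this change; the numbers are illustrative only.

```typescript
// Illustrative arithmetic for the new default part size (not from the patch itself).
const partSize = 100 * 1024 * 1024    // max_upload_part: 100MB
const maxParts = 10_000               // S3 allows at most 10,000 parts per multipart upload

// A 4GB video is now split into ~41 parts of 100MB each.
const fileSize = 4 * 1024 * 1024 * 1024
console.log(Math.ceil(fileSize / partSize))        // 41

// Largest single object with 100MB parts: 10,000 * 100MB ≈ 0.95 TiB.
console.log((maxParts * partSize) / 1024 ** 4)     // ≈ 0.95
```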
diff --git a/server/lib/object-storage/shared/object-storage-helpers.ts b/server/lib/object-storage/shared/object-storage-helpers.ts
index a2de92532..16161362c 100644
--- a/server/lib/object-storage/shared/object-storage-helpers.ts
+++ b/server/lib/object-storage/shared/object-storage-helpers.ts
@@ -1,11 +1,11 @@
-import { createReadStream, createWriteStream, ensureDir, ReadStream, stat } from 'fs-extra'
+import { createReadStream, createWriteStream, ensureDir, ReadStream } from 'fs-extra'
 import { dirname } from 'path'
 import { Readable } from 'stream'
 import {
+  CompleteMultipartUploadCommandOutput,
   DeleteObjectCommand,
   GetObjectCommand,
   ListObjectsV2Command,
-  PutObjectCommand,
   PutObjectCommandInput
 } from '@aws-sdk/client-s3'
 import { Upload } from '@aws-sdk/lib-storage'
@@ -31,14 +31,9 @@ async function storeObject (options: {

   logger.debug('Uploading file %s to %s%s in bucket %s', inputPath, bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags())

-  const stats = await stat(inputPath)
   const fileStream = createReadStream(inputPath)

-  if (stats.size > CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART) {
-    return multiPartUpload({ content: fileStream, objectStorageKey, bucketInfo })
-  }
-
-  return objectStoragePut({ objectStorageKey, content: fileStream, bucketInfo })
+  return uploadToStorage({ objectStorageKey, content: fileStream, bucketInfo })
 }

 async function removeObject (filename: string, bucketInfo: BucketInfo) {
@@ -132,31 +127,7 @@ export {

 // ---------------------------------------------------------------------------

-async function objectStoragePut (options: {
-  objectStorageKey: string
-  content: ReadStream
-  bucketInfo: BucketInfo
-}) {
-  const { objectStorageKey, content, bucketInfo } = options
-
-  const input: PutObjectCommandInput = {
-    Bucket: bucketInfo.BUCKET_NAME,
-    Key: buildKey(objectStorageKey, bucketInfo),
-    Body: content
-  }
-
-  if (CONFIG.OBJECT_STORAGE.UPLOAD_ACL) {
-    input.ACL = CONFIG.OBJECT_STORAGE.UPLOAD_ACL
-  }
-
-  const command = new PutObjectCommand(input)
-
-  await getClient().send(command)
-
-  return getPrivateUrl(bucketInfo, objectStorageKey)
-}
-
-async function multiPartUpload (options: {
+async function uploadToStorage (options: {
   content: ReadStream
   objectStorageKey: string
   bucketInfo: BucketInfo
@@ -177,11 +148,23 @@ async function multiPartUpload (options: {
     client: getClient(),
     queueSize: 4,
     partSize: CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART,
-    leavePartsOnError: false,
+
+    // `leavePartsOnError` must be set to `true` to avoid silently dropping failed parts
+    // More detailed explanation:
+    // https://github.com/aws/aws-sdk-js-v3/blob/v3.164.0/lib/lib-storage/src/Upload.ts#L274
+    // https://github.com/aws/aws-sdk-js-v3/issues/2311#issuecomment-939413928
+    leavePartsOnError: true,
     params: input
   })

-  await parallelUploads3.done()
+  const response = (await parallelUploads3.done()) as CompleteMultipartUploadCommandOutput
+  // Check is needed even if the HTTP status code is 200 OK
+  // For more information, see https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html
+  if (!response.Bucket) {
+    const message = `Error uploading ${objectStorageKey} to bucket ${bucketInfo.BUCKET_NAME}`
+    logger.error(message, { response, ...lTags() })
+    throw new Error(message)
+  }

   logger.debug(
     'Completed %s%s in bucket %s',
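Pieced together, the patched helper reduces to the pattern below. This is a minimal self-contained sketch assuming the same `Upload` API from `@aws-sdk/lib-storage`; PeerTube's internals (`CONFIG`, `getClient()`, `buildKey()`, `getPrivateUrl()`, `lTags()`, the logger) are replaced with plain placeholders, and the helper name `uploadToBucket` is hypothetical.

```typescript
import { createReadStream } from 'fs'
import { S3Client, CompleteMultipartUploadCommandOutput } from '@aws-sdk/client-s3'
import { Upload } from '@aws-sdk/lib-storage'

// Stand-in for PeerTube's uploadToStorage(): every file goes through lib-storage's
// Upload, which handles the single-part vs multipart decision itself.
async function uploadToBucket (inputPath: string, bucket: string, key: string) {
  const client = new S3Client({ region: 'us-east-1' }) // placeholder client config

  const parallelUploads3 = new Upload({
    client,
    queueSize: 4,
    partSize: 100 * 1024 * 1024, // mirrors the new max_upload_part default of 100MB
    // true = failed parts are not silently dropped; the error surfaces to the caller
    leavePartsOnError: true,
    params: { Bucket: bucket, Key: key, Body: createReadStream(inputPath) }
  })

  const response = (await parallelUploads3.done()) as CompleteMultipartUploadCommandOutput

  // CompleteMultipartUpload can answer 200 OK and still carry an error in the body,
  // so the payload has to be validated as well.
  if (!response.Bucket) {
    throw new Error(`Error uploading ${key} to bucket ${bucket}`)
  }

  return response
}
```

With `leavePartsOnError: true` a failing part rejects the `done()` promise instead of being quietly skipped, and the explicit `Bucket` check covers the case where CompleteMultipartUpload returns 200 OK with an error payload.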