-import { map } from 'bluebird'
-import { readdir, remove, stat } from 'fs-extra'
+
import { logger, loggerTagsFactory } from '@server/helpers/logger'
-import { getResumableUploadPath } from '@server/helpers/upload'
import { SCHEDULER_INTERVALS_MS } from '@server/initializers/constants'
-import { METAFILE_EXTNAME } from '@uploadx/core'
+import { uploadx } from '../uploadx'
import { AbstractScheduler } from './abstract-scheduler'
const lTags = loggerTagsFactory('scheduler', 'resumable-upload', 'cleaner')
}
protected async internalExecute () {
- const path = getResumableUploadPath()
- const files = await readdir(path)
-
- const metafiles = files.filter(f => f.endsWith(METAFILE_EXTNAME))
+ logger.debug('Removing dangling resumable uploads', lTags())
- if (metafiles.length === 0) return
-
- logger.debug('Reading resumable video upload folder %s with %d files', path, metafiles.length, lTags())
+ const now = new Date().getTime()
try {
- await map(metafiles, metafile => {
- return this.deleteIfOlderThan(metafile, this.lastExecutionTimeMs)
- }, { concurrency: 5 })
+ // Purge upload files whose last modification predates the previous scheduler execution
+ await uploadx.storage.purge(now - this.lastExecutionTimeMs)
} catch (error) {
logger.error('Failed to handle file during resumable video upload folder cleanup', { error, ...lTags() })
} finally {
- this.lastExecutionTimeMs = new Date().getTime()
- }
- }
-
- private async deleteIfOlderThan (metafile: string, olderThan: number) {
- const metafilePath = getResumableUploadPath(metafile)
- const statResult = await stat(metafilePath)
-
- // Delete uploads that started since a long time
- if (statResult.ctimeMs < olderThan) {
- await remove(metafilePath)
-
- const datafile = metafilePath.replace(new RegExp(`${METAFILE_EXTNAME}$`), '')
- await remove(datafile)
+ this.lastExecutionTimeMs = now
}
}