Rewrite youtube-dl import
author     Chocobozzz <me@florianbigard.com>    Thu, 21 Oct 2021 14:28:39 +0000 (16:28 +0200)
committer  Chocobozzz <me@florianbigard.com>    Fri, 22 Oct 2021 08:25:24 +0000 (10:25 +0200)
Use the python3 binary
Allow using a custom youtube-dl release URL
Allow using yt-dlp (a youtube-dl fork)
Remove the proxy settings from the configuration; use the HTTP_PROXY and HTTPS_PROXY
environment variables instead
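
For instance, the new import.videos.http.youtube_dl_release keys introduced below can point an instance at yt-dlp instead of the default youtube-dl build. A minimal sketch, using the example URLs from the config comments (when a GitHub releases API URL is configured, the latest non-prerelease asset whose name matches the configured binary name is downloaded into the new storage.bin directory):

    import:
      videos:
        http:
          enabled: true
          youtube_dl_release:
            url: 'https://api.github.com/repos/yt-dlp/yt-dlp/releases'
            name: 'yt-dlp'

Any proxy is now taken from the HTTP_PROXY / HTTPS_PROXY environment variables of the PeerTube process instead of a dedicated import.videos.http.proxy block.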

30 files changed:
config/default.yaml
config/production.yaml.example
config/test-1.yaml
config/test-2.yaml
config/test-3.yaml
config/test-4.yaml
config/test-5.yaml
config/test-6.yaml
config/test.yaml
package.json
server/controllers/api/videos/import.ts
server/helpers/requests.ts
server/helpers/youtube-dl.ts [deleted file]
server/helpers/youtube-dl/index.ts [new file with mode: 0644]
server/helpers/youtube-dl/youtube-dl-cli.ts [new file with mode: 0644]
server/helpers/youtube-dl/youtube-dl-info-builder.ts [new file with mode: 0644]
server/helpers/youtube-dl/youtube-dl-wrapper.ts [new file with mode: 0644]
server/initializers/config.ts
server/initializers/constants.ts
server/lib/job-queue/handlers/video-import.ts
server/lib/schedulers/youtube-dl-update-scheduler.ts
server/tests/api/server/proxy.ts
server/tests/api/videos/video-imports.ts
server/tests/fixtures/video_import_preview_yt_dlp.jpg [new file with mode: 0644]
server/tests/fixtures/video_import_thumbnail_yt_dlp.jpg [new file with mode: 0644]
server/tools/peertube-import-videos.ts
shared/extra-utils/miscs/tests.ts
shared/extra-utils/videos/captions.ts
support/docker/production/config/production.yaml
yarn.lock

index c46d0e883a9c594845413f8702685f9d9fbe3ae6..ec962247765712521f51df714a66a8ea57cb43cd 100644 (file)
@@ -85,6 +85,7 @@ client:
 # From the project root directory
 storage:
   tmp: 'storage/tmp/' # Use to download data (imports etc), store uploaded files before and during processing...
+  bin: 'storage/bin/'
   avatars: 'storage/avatars/'
   videos: 'storage/videos/'
   streaming_playlists: 'storage/streaming-playlists/'
@@ -394,13 +395,21 @@ import:
     http: # Classic HTTP or all sites supported by youtube-dl https://rg3.github.io/youtube-dl/supportedsites.html
       enabled: false
 
+      youtube_dl_release:
+        # Direct download URL to youtube-dl binary
+        # Github releases API is also supported
+        # Examples:
+        #   * https://api.github.com/repos/ytdl-org/youtube-dl/releases
+        #   * https://api.github.com/repos/yt-dlp/yt-dlp/releases
+        url: 'https://yt-dl.org/downloads/latest/youtube-dl'
+
+        # youtube-dl binary name
+        # yt-dlp is also supported
+        name: 'youtube-dl'
+
       # IPv6 is very strongly rate-limited on most sites supported by youtube-dl
       force_ipv4: false
 
-      # You can use an HTTP/HTTPS/SOCKS proxy with youtube-dl
-      proxy:
-        enabled: false
-        url: ''
     torrent: # Magnet URI or torrent file (use classic TCP/UDP/WebSeed to download the file)
       enabled: false
 
index d023070e3c7a47341ee6bc7e9a3778d095433b60..588d6a3a58aaf1652309d12f0767a4ee28bd236f 100644 (file)
@@ -83,6 +83,7 @@ client:
 # From the project root directory
 storage:
   tmp: '/var/www/peertube/storage/tmp/' # Use to download data (imports etc), store uploaded files before and during processing...
+  bin: '/var/www/peertube/storage/bin/'
   avatars: '/var/www/peertube/storage/avatars/'
   videos: '/var/www/peertube/storage/videos/'
   streaming_playlists: '/var/www/peertube/storage/streaming-playlists/'
@@ -407,10 +408,6 @@ import:
       # IPv6 is very strongly rate-limited on most sites supported by youtube-dl
       force_ipv4: false
 
-      # You can use an HTTP/HTTPS/SOCKS proxy with youtube-dl
-      proxy:
-        enabled: false
-        url: ''
     torrent: # Magnet URI or torrent file (use classic TCP/UDP/WebSeed to download the file)
       enabled: false
 
index fe5b3cf4488abe711f4b0a36d0a76d67bee3fb1c..d5f8299e06aa6f4d020d2c83158eb70c7e5c0da9 100644 (file)
@@ -11,6 +11,7 @@ database:
 # From the project root directory
 storage:
   tmp: 'test1/tmp/'
+  bin: 'test1/bin/'
   avatars: 'test1/avatars/'
   videos: 'test1/videos/'
   streaming_playlists: 'test1/streaming-playlists/'
index b559769c350c607e95eec60d4994b2d21afd0b59..9da79da16d1dc3f548d2a0c9d8305032277e23ca 100644 (file)
@@ -11,6 +11,7 @@ database:
 # From the project root directory
 storage:
   tmp: 'test2/tmp/'
+  bin: 'test2/bin/'
   avatars: 'test2/avatars/'
   videos: 'test2/videos/'
   streaming_playlists: 'test2/streaming-playlists/'
index 9a7a944e99ef76bf1d18abea598230d129d6fd1b..594439b623563a42b31f81bff9f32c71e19bfff5 100644 (file)
@@ -11,6 +11,7 @@ database:
 # From the project root directory
 storage:
   tmp: 'test3/tmp/'
+  bin: 'test3/bin/'
   avatars: 'test3/avatars/'
   videos: 'test3/videos/'
   streaming_playlists: 'test3/streaming-playlists/'
index 1e4bee9748d6adbb4f3499099c4fadca470b507d..1e6368bf76eba96d8880a5d8f8a4ab25c2ef35c0 100644 (file)
@@ -11,6 +11,7 @@ database:
 # From the project root directory
 storage:
   tmp: 'test4/tmp/'
+  bin: 'test4/bin/'
   avatars: 'test4/avatars/'
   videos: 'test4/videos/'
   streaming_playlists: 'test4/streaming-playlists/'
index 9725e84f42d4777ca3c912f5f82aaf69d0cb23c7..97f18a7a0d1acd0ec60fad2a15a738bec806940a 100644 (file)
@@ -11,6 +11,7 @@ database:
 # From the project root directory
 storage:
   tmp: 'test5/tmp/'
+  bin: 'test5/bin/'
   avatars: 'test5/avatars/'
   videos: 'test5/videos/'
   streaming_playlists: 'test5/streaming-playlists/'
index a04c8a6a935b59bd1df1dff92c9dd1b7eb6cc3e4..156da84d2935f4b38d22b0110387e5ea975e9ef7 100644 (file)
@@ -11,6 +11,7 @@ database:
 # From the project root directory
 storage:
   tmp: 'test6/tmp/'
+  bin: 'test6/bin/'
   avatars: 'test6/avatars/'
   videos: 'test6/videos/'
   streaming_playlists: 'test6/streaming-playlists/'
index 9a522a983819fe80d3148de551ac3065a2c9300a..3eb2f04d83be9807522be28ac93dae0b2c9f007f 100644 (file)
@@ -118,9 +118,6 @@ import:
     concurrency: 2
     http:
       enabled: true
-      proxy:
-        enabled: false
-        url: ""
     torrent:
       enabled: true
 
index e1ddf1168313dda204d4d04fa41457053e1a5962..0737df7d52e30071a58d77f3d82b886bb6b4c42b 100644 (file)
@@ -91,6 +91,7 @@
     "decache": "^4.6.0",
     "deep-object-diff": "^1.1.0",
     "email-templates": "^8.0.3",
+    "execa": "^5.1.1",
     "express": "^4.12.4",
     "express-rate-limit": "^5.0.0",
     "express-validator": "^6.4.0",
     "webfinger.js": "^2.6.6",
     "webtorrent": "^1.0.0",
     "winston": "3.3.3",
-    "ws": "^8.0.0",
-    "youtube-dl": "^3.0.2"
+    "ws": "^8.0.0"
   },
   "devDependencies": {
     "@types/async": "^3.0.0",
index 4265f3217d0b7fe5e7349de3adabd18c1f3ebfb5..eddb9b32db58d6a494f78eb3ebe332103031de76 100644 (file)
@@ -26,7 +26,7 @@ import { isArray } from '../../../helpers/custom-validators/misc'
 import { cleanUpReqFiles, createReqFiles } from '../../../helpers/express-utils'
 import { logger } from '../../../helpers/logger'
 import { getSecureTorrentName } from '../../../helpers/utils'
-import { YoutubeDL, YoutubeDLInfo } from '../../../helpers/youtube-dl'
+import { YoutubeDLWrapper, YoutubeDLInfo } from '../../../helpers/youtube-dl'
 import { CONFIG } from '../../../initializers/config'
 import { MIMETYPES } from '../../../initializers/constants'
 import { sequelizeTypescript } from '../../../initializers/database'
@@ -134,12 +134,12 @@ async function addYoutubeDLImport (req: express.Request, res: express.Response)
   const targetUrl = body.targetUrl
   const user = res.locals.oauth.token.User
 
-  const youtubeDL = new YoutubeDL(targetUrl, ServerConfigManager.Instance.getEnabledResolutions('vod'))
+  const youtubeDL = new YoutubeDLWrapper(targetUrl, ServerConfigManager.Instance.getEnabledResolutions('vod'))
 
   // Get video infos
   let youtubeDLInfo: YoutubeDLInfo
   try {
-    youtubeDLInfo = await youtubeDL.getYoutubeDLInfo()
+    youtubeDLInfo = await youtubeDL.getInfoForDownload()
   } catch (err) {
     logger.info('Cannot fetch information from import for URL %s.', targetUrl, { err })
 
@@ -373,9 +373,9 @@ function extractNameFromArray (name: string | string[]) {
   return isArray(name) ? name[0] : name
 }
 
-async function processYoutubeSubtitles (youtubeDL: YoutubeDL, targetUrl: string, videoId: number) {
+async function processYoutubeSubtitles (youtubeDL: YoutubeDLWrapper, targetUrl: string, videoId: number) {
   try {
-    const subtitles = await youtubeDL.getYoutubeDLSubs()
+    const subtitles = await youtubeDL.getSubtitles()
 
     logger.info('Will create %s subtitles from youtube import %s.', subtitles.length, targetUrl)
 
index 99127095210638a9390502311626dab425325f1a..d93f55776f62d9980aa78862764ff841a195e4ff 100644 (file)
@@ -1,9 +1,9 @@
 import { createWriteStream, remove } from 'fs-extra'
-import got, { CancelableRequest, Options as GotOptions, RequestError } from 'got'
+import got, { CancelableRequest, Options as GotOptions, RequestError, Response } from 'got'
 import { HttpProxyAgent, HttpsProxyAgent } from 'hpagent'
 import { join } from 'path'
 import { CONFIG } from '../initializers/config'
-import { ACTIVITY_PUB, PEERTUBE_VERSION, REQUEST_TIMEOUT, WEBSERVER } from '../initializers/constants'
+import { ACTIVITY_PUB, BINARY_CONTENT_TYPES, PEERTUBE_VERSION, REQUEST_TIMEOUT, WEBSERVER } from '../initializers/constants'
 import { pipelinePromise } from './core-utils'
 import { processImage } from './image-utils'
 import { logger } from './logger'
@@ -180,12 +180,17 @@ function getUserAgent () {
   return `PeerTube/${PEERTUBE_VERSION} (+${WEBSERVER.URL})`
 }
 
+function isBinaryResponse (result: Response<any>) {
+  return BINARY_CONTENT_TYPES.has(result.headers['content-type'])
+}
+
 // ---------------------------------------------------------------------------
 
 export {
   doRequest,
   doJSONRequest,
   doRequestAndSaveToFile,
+  isBinaryResponse,
   downloadImage,
   peertubeGot
 }
diff --git a/server/helpers/youtube-dl.ts b/server/helpers/youtube-dl.ts
deleted file mode 100644 (file)
index 0392ec4..0000000
+++ /dev/null
@@ -1,394 +0,0 @@
-import { createWriteStream } from 'fs'
-import { ensureDir, move, pathExists, remove, writeFile } from 'fs-extra'
-import { join } from 'path'
-import { CONFIG } from '@server/initializers/config'
-import { HttpStatusCode } from '../../shared/models/http/http-error-codes'
-import { VideoResolution } from '../../shared/models/videos'
-import { CONSTRAINTS_FIELDS, VIDEO_CATEGORIES, VIDEO_LANGUAGES, VIDEO_LICENCES } from '../initializers/constants'
-import { peertubeTruncate, pipelinePromise, root } from './core-utils'
-import { isVideoFileExtnameValid } from './custom-validators/videos'
-import { logger } from './logger'
-import { peertubeGot } from './requests'
-import { generateVideoImportTmpPath } from './utils'
-
-export type YoutubeDLInfo = {
-  name?: string
-  description?: string
-  category?: number
-  language?: string
-  licence?: number
-  nsfw?: boolean
-  tags?: string[]
-  thumbnailUrl?: string
-  ext?: string
-  originallyPublishedAt?: Date
-}
-
-export type YoutubeDLSubs = {
-  language: string
-  filename: string
-  path: string
-}[]
-
-const processOptions = {
-  maxBuffer: 1024 * 1024 * 10 // 10MB
-}
-
-class YoutubeDL {
-
-  constructor (private readonly url: string = '', private readonly enabledResolutions: number[] = []) {
-
-  }
-
-  getYoutubeDLInfo (opts?: string[]): Promise<YoutubeDLInfo> {
-    return new Promise<YoutubeDLInfo>((res, rej) => {
-      let args = opts || []
-
-      if (CONFIG.IMPORT.VIDEOS.HTTP.FORCE_IPV4) {
-        args.push('--force-ipv4')
-      }
-
-      args = this.wrapWithProxyOptions(args)
-      args = [ '-f', this.getYoutubeDLVideoFormat() ].concat(args)
-
-      YoutubeDL.safeGetYoutubeDL()
-        .then(youtubeDL => {
-          youtubeDL.getInfo(this.url, args, processOptions, (err, info) => {
-            if (err) return rej(err)
-            if (info.is_live === true) return rej(new Error('Cannot download a live streaming.'))
-
-            const obj = this.buildVideoInfo(this.normalizeObject(info))
-            if (obj.name && obj.name.length < CONSTRAINTS_FIELDS.VIDEOS.NAME.min) obj.name += ' video'
-
-            return res(obj)
-          })
-        })
-        .catch(err => rej(err))
-    })
-  }
-
-  getYoutubeDLSubs (opts?: object): Promise<YoutubeDLSubs> {
-    return new Promise<YoutubeDLSubs>((res, rej) => {
-      const cwd = CONFIG.STORAGE.TMP_DIR
-      const options = opts || { all: true, format: 'vtt', cwd }
-
-      YoutubeDL.safeGetYoutubeDL()
-        .then(youtubeDL => {
-          youtubeDL.getSubs(this.url, options, (err, files) => {
-            if (err) return rej(err)
-            if (!files) return []
-
-            logger.debug('Get subtitles from youtube dl.', { url: this.url, files })
-
-            const subtitles = files.reduce((acc, filename) => {
-              const matched = filename.match(/\.([a-z]{2})(-[a-z]+)?\.(vtt|ttml)/i)
-              if (!matched || !matched[1]) return acc
-
-              return [
-                ...acc,
-                {
-                  language: matched[1],
-                  path: join(cwd, filename),
-                  filename
-                }
-              ]
-            }, [])
-
-            return res(subtitles)
-          })
-        })
-        .catch(err => rej(err))
-    })
-  }
-
-  getYoutubeDLVideoFormat () {
-    /**
-     * list of format selectors in order or preference
-     * see https://github.com/ytdl-org/youtube-dl#format-selection
-     *
-     * case #1 asks for a mp4 using h264 (avc1) and the exact resolution in the hope
-     * of being able to do a "quick-transcode"
-     * case #2 is the first fallback. No "quick-transcode" means we can get anything else (like vp9)
-     * case #3 is the resolution-degraded equivalent of #1, and already a pretty safe fallback
-     *
-     * in any case we avoid AV1, see https://github.com/Chocobozzz/PeerTube/issues/3499
-     **/
-    const resolution = this.enabledResolutions.length === 0
-      ? VideoResolution.H_720P
-      : Math.max(...this.enabledResolutions)
-
-    return [
-      `bestvideo[vcodec^=avc1][height=${resolution}]+bestaudio[ext=m4a]`, // case #1
-      `bestvideo[vcodec!*=av01][vcodec!*=vp9.2][height=${resolution}]+bestaudio`, // case #2
-      `bestvideo[vcodec^=avc1][height<=${resolution}]+bestaudio[ext=m4a]`, // case #3
-      `bestvideo[vcodec!*=av01][vcodec!*=vp9.2]+bestaudio`,
-      'best[vcodec!*=av01][vcodec!*=vp9.2]', // case fallback for known formats
-      'best' // Ultimate fallback
-    ].join('/')
-  }
-
-  downloadYoutubeDLVideo (fileExt: string, timeout: number) {
-    // Leave empty the extension, youtube-dl will add it
-    const pathWithoutExtension = generateVideoImportTmpPath(this.url, '')
-
-    let timer
-
-    logger.info('Importing youtubeDL video %s to %s', this.url, pathWithoutExtension)
-
-    let options = [ '-f', this.getYoutubeDLVideoFormat(), '-o', pathWithoutExtension ]
-    options = this.wrapWithProxyOptions(options)
-
-    if (process.env.FFMPEG_PATH) {
-      options = options.concat([ '--ffmpeg-location', process.env.FFMPEG_PATH ])
-    }
-
-    logger.debug('YoutubeDL options for %s.', this.url, { options })
-
-    return new Promise<string>((res, rej) => {
-      YoutubeDL.safeGetYoutubeDL()
-        .then(youtubeDL => {
-          youtubeDL.exec(this.url, options, processOptions, async err => {
-            clearTimeout(timer)
-
-            try {
-              // If youtube-dl did not guess an extension for our file, just use .mp4 as default
-              if (await pathExists(pathWithoutExtension)) {
-                await move(pathWithoutExtension, pathWithoutExtension + '.mp4')
-              }
-
-              const path = await this.guessVideoPathWithExtension(pathWithoutExtension, fileExt)
-
-              if (err) {
-                remove(path)
-                  .catch(err => logger.error('Cannot delete path on YoutubeDL error.', { err }))
-
-                return rej(err)
-              }
-
-              return res(path)
-            } catch (err) {
-              return rej(err)
-            }
-          })
-
-          timer = setTimeout(() => {
-            const err = new Error('YoutubeDL download timeout.')
-
-            this.guessVideoPathWithExtension(pathWithoutExtension, fileExt)
-              .then(path => remove(path))
-              .finally(() => rej(err))
-              .catch(err => {
-                logger.error('Cannot remove file in youtubeDL timeout.', { err })
-                return rej(err)
-              })
-          }, timeout)
-        })
-        .catch(err => rej(err))
-    })
-  }
-
-  buildOriginallyPublishedAt (obj: any) {
-    let originallyPublishedAt: Date = null
-
-    const uploadDateMatcher = /^(\d{4})(\d{2})(\d{2})$/.exec(obj.upload_date)
-    if (uploadDateMatcher) {
-      originallyPublishedAt = new Date()
-      originallyPublishedAt.setHours(0, 0, 0, 0)
-
-      const year = parseInt(uploadDateMatcher[1], 10)
-      // Month starts from 0
-      const month = parseInt(uploadDateMatcher[2], 10) - 1
-      const day = parseInt(uploadDateMatcher[3], 10)
-
-      originallyPublishedAt.setFullYear(year, month, day)
-    }
-
-    return originallyPublishedAt
-  }
-
-  private async guessVideoPathWithExtension (tmpPath: string, sourceExt: string) {
-    if (!isVideoFileExtnameValid(sourceExt)) {
-      throw new Error('Invalid video extension ' + sourceExt)
-    }
-
-    const extensions = [ sourceExt, '.mp4', '.mkv', '.webm' ]
-
-    for (const extension of extensions) {
-      const path = tmpPath + extension
-
-      if (await pathExists(path)) return path
-    }
-
-    throw new Error('Cannot guess path of ' + tmpPath)
-  }
-
-  private normalizeObject (obj: any) {
-    const newObj: any = {}
-
-    for (const key of Object.keys(obj)) {
-      // Deprecated key
-      if (key === 'resolution') continue
-
-      const value = obj[key]
-
-      if (typeof value === 'string') {
-        newObj[key] = value.normalize()
-      } else {
-        newObj[key] = value
-      }
-    }
-
-    return newObj
-  }
-
-  private buildVideoInfo (obj: any): YoutubeDLInfo {
-    return {
-      name: this.titleTruncation(obj.title),
-      description: this.descriptionTruncation(obj.description),
-      category: this.getCategory(obj.categories),
-      licence: this.getLicence(obj.license),
-      language: this.getLanguage(obj.language),
-      nsfw: this.isNSFW(obj),
-      tags: this.getTags(obj.tags),
-      thumbnailUrl: obj.thumbnail || undefined,
-      originallyPublishedAt: this.buildOriginallyPublishedAt(obj),
-      ext: obj.ext
-    }
-  }
-
-  private titleTruncation (title: string) {
-    return peertubeTruncate(title, {
-      length: CONSTRAINTS_FIELDS.VIDEOS.NAME.max,
-      separator: /,? +/,
-      omission: ' […]'
-    })
-  }
-
-  private descriptionTruncation (description: string) {
-    if (!description || description.length < CONSTRAINTS_FIELDS.VIDEOS.DESCRIPTION.min) return undefined
-
-    return peertubeTruncate(description, {
-      length: CONSTRAINTS_FIELDS.VIDEOS.DESCRIPTION.max,
-      separator: /,? +/,
-      omission: ' […]'
-    })
-  }
-
-  private isNSFW (info: any) {
-    return info.age_limit && info.age_limit >= 16
-  }
-
-  private getTags (tags: any) {
-    if (Array.isArray(tags) === false) return []
-
-    return tags
-      .filter(t => t.length < CONSTRAINTS_FIELDS.VIDEOS.TAG.max && t.length > CONSTRAINTS_FIELDS.VIDEOS.TAG.min)
-      .map(t => t.normalize())
-      .slice(0, 5)
-  }
-
-  private getLicence (licence: string) {
-    if (!licence) return undefined
-
-    if (licence.includes('Creative Commons Attribution')) return 1
-
-    for (const key of Object.keys(VIDEO_LICENCES)) {
-      const peertubeLicence = VIDEO_LICENCES[key]
-      if (peertubeLicence.toLowerCase() === licence.toLowerCase()) return parseInt(key, 10)
-    }
-
-    return undefined
-  }
-
-  private getCategory (categories: string[]) {
-    if (!categories) return undefined
-
-    const categoryString = categories[0]
-    if (!categoryString || typeof categoryString !== 'string') return undefined
-
-    if (categoryString === 'News & Politics') return 11
-
-    for (const key of Object.keys(VIDEO_CATEGORIES)) {
-      const category = VIDEO_CATEGORIES[key]
-      if (categoryString.toLowerCase() === category.toLowerCase()) return parseInt(key, 10)
-    }
-
-    return undefined
-  }
-
-  private getLanguage (language: string) {
-    return VIDEO_LANGUAGES[language] ? language : undefined
-  }
-
-  private wrapWithProxyOptions (options: string[]) {
-    if (CONFIG.IMPORT.VIDEOS.HTTP.PROXY.ENABLED) {
-      logger.debug('Using proxy for YoutubeDL')
-
-      return [ '--proxy', CONFIG.IMPORT.VIDEOS.HTTP.PROXY.URL ].concat(options)
-    }
-
-    return options
-  }
-
-  // Thanks: https://github.com/przemyslawpluta/node-youtube-dl/blob/master/lib/downloader.js
-  // We rewrote it to avoid sync calls
-  static async updateYoutubeDLBinary () {
-    logger.info('Updating youtubeDL binary.')
-
-    const binDirectory = join(root(), 'node_modules', 'youtube-dl', 'bin')
-    const bin = join(binDirectory, 'youtube-dl')
-    const detailsPath = join(binDirectory, 'details')
-    const url = process.env.YOUTUBE_DL_DOWNLOAD_HOST || 'https://yt-dl.org/downloads/latest/youtube-dl'
-
-    await ensureDir(binDirectory)
-
-    try {
-      const gotContext = { bodyKBLimit: 20_000 }
-
-      const result = await peertubeGot(url, { followRedirect: false, context: gotContext })
-
-      if (result.statusCode !== HttpStatusCode.FOUND_302) {
-        logger.error('youtube-dl update error: did not get redirect for the latest version link. Status %d', result.statusCode)
-        return
-      }
-
-      const newUrl = result.headers.location
-      const newVersion = /\/(\d{4}\.\d\d\.\d\d(\.\d)?)\/youtube-dl$/.exec(newUrl)[1]
-
-      const downloadFileStream = peertubeGot.stream(newUrl, { context: gotContext })
-      const writeStream = createWriteStream(bin, { mode: 493 })
-
-      await pipelinePromise(
-        downloadFileStream,
-        writeStream
-      )
-
-      const details = JSON.stringify({ version: newVersion, path: bin, exec: 'youtube-dl' })
-      await writeFile(detailsPath, details, { encoding: 'utf8' })
-
-      logger.info('youtube-dl updated to version %s.', newVersion)
-    } catch (err) {
-      logger.error('Cannot update youtube-dl.', { err })
-    }
-  }
-
-  static async safeGetYoutubeDL () {
-    let youtubeDL
-
-    try {
-      youtubeDL = require('youtube-dl')
-    } catch (e) {
-      // Download binary
-      await this.updateYoutubeDLBinary()
-      youtubeDL = require('youtube-dl')
-    }
-
-    return youtubeDL
-  }
-}
-
-// ---------------------------------------------------------------------------
-
-export {
-  YoutubeDL
-}
diff --git a/server/helpers/youtube-dl/index.ts b/server/helpers/youtube-dl/index.ts
new file mode 100644 (file)
index 0000000..6afc77d
--- /dev/null
@@ -0,0 +1,3 @@
+export * from './youtube-dl-cli'
+export * from './youtube-dl-info-builder'
+export * from './youtube-dl-wrapper'
diff --git a/server/helpers/youtube-dl/youtube-dl-cli.ts b/server/helpers/youtube-dl/youtube-dl-cli.ts
new file mode 100644 (file)
index 0000000..4408692
--- /dev/null
@@ -0,0 +1,198 @@
+import execa from 'execa'
+import { pathExists, writeFile } from 'fs-extra'
+import { join } from 'path'
+import { CONFIG } from '@server/initializers/config'
+import { VideoResolution } from '@shared/models'
+import { logger, loggerTagsFactory } from '../logger'
+import { getProxy, isProxyEnabled } from '../proxy'
+import { isBinaryResponse, peertubeGot } from '../requests'
+
+const lTags = loggerTagsFactory('youtube-dl')
+
+const youtubeDLBinaryPath = join(CONFIG.STORAGE.BIN_DIR, CONFIG.IMPORT.VIDEOS.HTTP.YOUTUBE_DL_RELEASE.NAME)
+
+export class YoutubeDLCLI {
+
+  static async safeGet () {
+    if (!await pathExists(youtubeDLBinaryPath)) {
+      await this.updateYoutubeDLBinary()
+    }
+
+    return new YoutubeDLCLI()
+  }
+
+  static async updateYoutubeDLBinary () {
+    const url = CONFIG.IMPORT.VIDEOS.HTTP.YOUTUBE_DL_RELEASE.URL
+
+    logger.info('Updating youtubeDL binary from %s.', url, lTags())
+
+    const gotOptions = { context: { bodyKBLimit: 20_000 }, responseType: 'buffer' as 'buffer' }
+
+    try {
+      let gotResult = await peertubeGot(url, gotOptions)
+
+      if (!isBinaryResponse(gotResult)) {
+        const json = JSON.parse(gotResult.body.toString())
+        const latest = json.filter(release => release.prerelease === false)[0]
+        if (!latest) throw new Error('Cannot find latest release')
+
+        const releaseName = CONFIG.IMPORT.VIDEOS.HTTP.YOUTUBE_DL_RELEASE.NAME
+        const releaseAsset = latest.assets.find(a => a.name === releaseName)
+        if (!releaseAsset) throw new Error(`Cannot find appropriate release with name ${releaseName} in release assets`)
+
+        gotResult = await peertubeGot(releaseAsset.browser_download_url, gotOptions)
+      }
+
+      if (!isBinaryResponse(gotResult)) {
+        throw new Error('Not a binary response')
+      }
+
+      await writeFile(youtubeDLBinaryPath, gotResult.body)
+
+      logger.info('youtube-dl updated %s.', youtubeDLBinaryPath, lTags())
+    } catch (err) {
+      logger.error('Cannot update youtube-dl from %s.', url, { err, ...lTags() })
+    }
+  }
+
+  static getYoutubeDLVideoFormat (enabledResolutions: VideoResolution[]) {
+    /**
+     * list of format selectors in order of preference
+     * see https://github.com/ytdl-org/youtube-dl#format-selection
+     *
+     * case #1 asks for an mp4 using h264 (avc1) and the exact resolution in the hope
+     * of being able to do a "quick-transcode"
+     * case #2 is the first fallback. No "quick-transcode" means we can get anything else (like vp9)
+     * case #3 is the resolution-degraded equivalent of #1, and already a pretty safe fallback
+     *
+     * in any case we avoid AV1, see https://github.com/Chocobozzz/PeerTube/issues/3499
+     **/
+    const resolution = enabledResolutions.length === 0
+      ? VideoResolution.H_720P
+      : Math.max(...enabledResolutions)
+
+    return [
+      `bestvideo[vcodec^=avc1][height=${resolution}]+bestaudio[ext=m4a]`, // case #1
+      `bestvideo[vcodec!*=av01][vcodec!*=vp9.2][height=${resolution}]+bestaudio`, // case #2
+      `bestvideo[vcodec^=avc1][height<=${resolution}]+bestaudio[ext=m4a]`, // case #3
+      `bestvideo[vcodec!*=av01][vcodec!*=vp9.2]+bestaudio`,
+      'best[vcodec!*=av01][vcodec!*=vp9.2]', // case fallback for known formats
+      'best' // Ultimate fallback
+    ].join('/')
+  }
+
+  private constructor () {
+
+  }
+
+  download (options: {
+    url: string
+    format: string
+    output: string
+    processOptions: execa.NodeOptions
+    additionalYoutubeDLArgs?: string[]
+  }) {
+    return this.run({
+      url: options.url,
+      processOptions: options.processOptions,
+      args: (options.additionalYoutubeDLArgs || []).concat([ '-f', options.format, '-o', options.output ])
+    })
+  }
+
+  async getInfo (options: {
+    url: string
+    format: string
+    processOptions: execa.NodeOptions
+    additionalYoutubeDLArgs?: string[]
+  }) {
+    const { url, format, additionalYoutubeDLArgs = [], processOptions } = options
+
+    const completeArgs = additionalYoutubeDLArgs.concat([ '--dump-json', '-f', format ])
+
+    const data = await this.run({ url, args: completeArgs, processOptions })
+    const info = data.map(this.parseInfo)
+
+    return info.length === 1
+      ? info[0]
+      : info
+  }
+
+  async getSubs (options: {
+    url: string
+    format: 'vtt'
+    processOptions: execa.NodeOptions
+  }) {
+    const { url, format, processOptions } = options
+
+    const args = [ '--skip-download', '--all-subs', `--sub-format=${format}` ]
+
+    const data = await this.run({ url, args, processOptions })
+    const files: string[] = []
+
+    const skipString = '[info] Writing video subtitles to: '
+
+    for (let i = 0, len = data.length; i < len; i++) {
+      const line = data[i]
+
+      if (line.indexOf(skipString) === 0) {
+        files.push(line.slice(skipString.length))
+      }
+    }
+
+    return files
+  }
+
+  private async run (options: {
+    url: string
+    args: string[]
+    processOptions: execa.NodeOptions
+  }) {
+    const { url, args, processOptions } = options
+
+    let completeArgs = this.wrapWithProxyOptions(args)
+    completeArgs = this.wrapWithIPOptions(completeArgs)
+    completeArgs = this.wrapWithFFmpegOptions(completeArgs)
+
+    const output = await execa('python', [ youtubeDLBinaryPath, ...completeArgs, url ], processOptions)
+
+    logger.debug('Ran youtube-dl command.', { command: output.command, stdout: output.stdout, ...lTags() })
+
+    return output.stdout
+      ? output.stdout.trim().split(/\r?\n/)
+      : undefined
+  }
+
+  private wrapWithProxyOptions (args: string[]) {
+    if (isProxyEnabled()) {
+      logger.debug('Using proxy %s for YoutubeDL', getProxy(), lTags())
+
+      return [ '--proxy', getProxy() ].concat(args)
+    }
+
+    return args
+  }
+
+  private wrapWithIPOptions (args: string[]) {
+    if (CONFIG.IMPORT.VIDEOS.HTTP.FORCE_IPV4) {
+      logger.debug('Force ipv4 for YoutubeDL')
+
+      return [ '--force-ipv4' ].concat(args)
+    }
+
+    return args
+  }
+
+  private wrapWithFFmpegOptions (args: string[]) {
+    if (process.env.FFMPEG_PATH) {
+      logger.debug('Using ffmpeg location %s for YoutubeDL', process.env.FFMPEG_PATH, lTags())
+
+      return [ '--ffmpeg-location', process.env.FFMPEG_PATH ].concat(args)
+    }
+
+    return args
+  }
+
+  private parseInfo (data: string) {
+    return JSON.parse(data)
+  }
+}
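
For reference, a sketch of what the format selector above resolves to when only 720p is enabled (the same value is used when the enabled resolution list is empty, since it falls back to 720p):

    // Illustrative only: the string passed to youtube-dl via '-f' on a 720p-only instance
    YoutubeDLCLI.getYoutubeDLVideoFormat([ VideoResolution.H_720P ])
    // => 'bestvideo[vcodec^=avc1][height=720]+bestaudio[ext=m4a]'
    //  + '/bestvideo[vcodec!*=av01][vcodec!*=vp9.2][height=720]+bestaudio'
    //  + '/bestvideo[vcodec^=avc1][height<=720]+bestaudio[ext=m4a]'
    //  + '/bestvideo[vcodec!*=av01][vcodec!*=vp9.2]+bestaudio'
    //  + '/best[vcodec!*=av01][vcodec!*=vp9.2]'
    //  + '/best'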
diff --git a/server/helpers/youtube-dl/youtube-dl-info-builder.ts b/server/helpers/youtube-dl/youtube-dl-info-builder.ts
new file mode 100644 (file)
index 0000000..9746a70
--- /dev/null
@@ -0,0 +1,154 @@
+import { CONSTRAINTS_FIELDS, VIDEO_CATEGORIES, VIDEO_LANGUAGES, VIDEO_LICENCES } from '../../initializers/constants'
+import { peertubeTruncate } from '../core-utils'
+
+type YoutubeDLInfo = {
+  name?: string
+  description?: string
+  category?: number
+  language?: string
+  licence?: number
+  nsfw?: boolean
+  tags?: string[]
+  thumbnailUrl?: string
+  ext?: string
+  originallyPublishedAt?: Date
+}
+
+class YoutubeDLInfoBuilder {
+  private readonly info: any
+
+  constructor (info: any) {
+    this.info = { ...info }
+  }
+
+  getInfo () {
+    const obj = this.buildVideoInfo(this.normalizeObject(this.info))
+    if (obj.name && obj.name.length < CONSTRAINTS_FIELDS.VIDEOS.NAME.min) obj.name += ' video'
+
+    return obj
+  }
+
+  private normalizeObject (obj: any) {
+    const newObj: any = {}
+
+    for (const key of Object.keys(obj)) {
+      // Deprecated key
+      if (key === 'resolution') continue
+
+      const value = obj[key]
+
+      if (typeof value === 'string') {
+        newObj[key] = value.normalize()
+      } else {
+        newObj[key] = value
+      }
+    }
+
+    return newObj
+  }
+
+  private buildOriginallyPublishedAt (obj: any) {
+    let originallyPublishedAt: Date = null
+
+    const uploadDateMatcher = /^(\d{4})(\d{2})(\d{2})$/.exec(obj.upload_date)
+    if (uploadDateMatcher) {
+      originallyPublishedAt = new Date()
+      originallyPublishedAt.setHours(0, 0, 0, 0)
+
+      const year = parseInt(uploadDateMatcher[1], 10)
+      // Month starts from 0
+      const month = parseInt(uploadDateMatcher[2], 10) - 1
+      const day = parseInt(uploadDateMatcher[3], 10)
+
+      originallyPublishedAt.setFullYear(year, month, day)
+    }
+
+    return originallyPublishedAt
+  }
+
+  private buildVideoInfo (obj: any): YoutubeDLInfo {
+    return {
+      name: this.titleTruncation(obj.title),
+      description: this.descriptionTruncation(obj.description),
+      category: this.getCategory(obj.categories),
+      licence: this.getLicence(obj.license),
+      language: this.getLanguage(obj.language),
+      nsfw: this.isNSFW(obj),
+      tags: this.getTags(obj.tags),
+      thumbnailUrl: obj.thumbnail || undefined,
+      originallyPublishedAt: this.buildOriginallyPublishedAt(obj),
+      ext: obj.ext
+    }
+  }
+
+  private titleTruncation (title: string) {
+    return peertubeTruncate(title, {
+      length: CONSTRAINTS_FIELDS.VIDEOS.NAME.max,
+      separator: /,? +/,
+      omission: ' […]'
+    })
+  }
+
+  private descriptionTruncation (description: string) {
+    if (!description || description.length < CONSTRAINTS_FIELDS.VIDEOS.DESCRIPTION.min) return undefined
+
+    return peertubeTruncate(description, {
+      length: CONSTRAINTS_FIELDS.VIDEOS.DESCRIPTION.max,
+      separator: /,? +/,
+      omission: ' […]'
+    })
+  }
+
+  private isNSFW (info: any) {
+    return info?.age_limit >= 16
+  }
+
+  private getTags (tags: string[]) {
+    if (Array.isArray(tags) === false) return []
+
+    return tags
+      .filter(t => t.length < CONSTRAINTS_FIELDS.VIDEOS.TAG.max && t.length > CONSTRAINTS_FIELDS.VIDEOS.TAG.min)
+      .map(t => t.normalize())
+      .slice(0, 5)
+  }
+
+  private getLicence (licence: string) {
+    if (!licence) return undefined
+
+    if (licence.includes('Creative Commons Attribution')) return 1
+
+    for (const key of Object.keys(VIDEO_LICENCES)) {
+      const peertubeLicence = VIDEO_LICENCES[key]
+      if (peertubeLicence.toLowerCase() === licence.toLowerCase()) return parseInt(key, 10)
+    }
+
+    return undefined
+  }
+
+  private getCategory (categories: string[]) {
+    if (!categories) return undefined
+
+    const categoryString = categories[0]
+    if (!categoryString || typeof categoryString !== 'string') return undefined
+
+    if (categoryString === 'News & Politics') return 11
+
+    for (const key of Object.keys(VIDEO_CATEGORIES)) {
+      const category = VIDEO_CATEGORIES[key]
+      if (categoryString.toLowerCase() === category.toLowerCase()) return parseInt(key, 10)
+    }
+
+    return undefined
+  }
+
+  private getLanguage (language: string) {
+    return VIDEO_LANGUAGES[language] ? language : undefined
+  }
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+  YoutubeDLInfo,
+  YoutubeDLInfoBuilder
+}
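
A minimal sketch of the builder in action, with illustrative raw youtube-dl JSON (the field values are not from this commit; the upload_date handling matches the test below, which expects 14 January 2019 with a 0-based month):

    const builder = new YoutubeDLInfoBuilder({
      title: 'small video - youtube',
      upload_date: '20190114',                // YYYYMMDD -> local midnight on 2019-01-14
      license: 'Creative Commons Attribution license (reuse allowed)', // mapped to PeerTube licence 1 (Attribution)
      tags: [ 'tag1', 'tag2' ],
      ext: 'mp4'
    })

    const info = builder.getInfo()
    // { name: 'small video - youtube', licence: 1, tags: [ 'tag1', 'tag2' ], nsfw: false,
    //   originallyPublishedAt: 2019-01-14, ext: 'mp4', ... }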
diff --git a/server/helpers/youtube-dl/youtube-dl-wrapper.ts b/server/helpers/youtube-dl/youtube-dl-wrapper.ts
new file mode 100644 (file)
index 0000000..6960fba
--- /dev/null
@@ -0,0 +1,135 @@
+import { move, pathExists, readdir, remove } from 'fs-extra'
+import { dirname, join } from 'path'
+import { CONFIG } from '@server/initializers/config'
+import { isVideoFileExtnameValid } from '../custom-validators/videos'
+import { logger, loggerTagsFactory } from '../logger'
+import { generateVideoImportTmpPath } from '../utils'
+import { YoutubeDLCLI } from './youtube-dl-cli'
+import { YoutubeDLInfo, YoutubeDLInfoBuilder } from './youtube-dl-info-builder'
+
+const lTags = loggerTagsFactory('youtube-dl')
+
+export type YoutubeDLSubs = {
+  language: string
+  filename: string
+  path: string
+}[]
+
+const processOptions = {
+  maxBuffer: 1024 * 1024 * 10 // 10MB
+}
+
+class YoutubeDLWrapper {
+
+  constructor (private readonly url: string = '', private readonly enabledResolutions: number[] = []) {
+
+  }
+
+  async getInfoForDownload (youtubeDLArgs: string[] = []): Promise<YoutubeDLInfo> {
+    const youtubeDL = await YoutubeDLCLI.safeGet()
+
+    const info = await youtubeDL.getInfo({
+      url: this.url,
+      format: YoutubeDLCLI.getYoutubeDLVideoFormat(this.enabledResolutions),
+      additionalYoutubeDLArgs: youtubeDLArgs,
+      processOptions
+    })
+
+    if (info.is_live === true) throw new Error('Cannot download a live streaming.')
+
+    const infoBuilder = new YoutubeDLInfoBuilder(info)
+
+    return infoBuilder.getInfo()
+  }
+
+  async getSubtitles (): Promise<YoutubeDLSubs> {
+    const cwd = CONFIG.STORAGE.TMP_DIR
+
+    const youtubeDL = await YoutubeDLCLI.safeGet()
+
+    const files = await youtubeDL.getSubs({ url: this.url, format: 'vtt', processOptions: { cwd } })
+    if (!files) return []
+
+    logger.debug('Get subtitles from youtube dl.', { url: this.url, files, ...lTags() })
+
+    const subtitles = files.reduce((acc, filename) => {
+      const matched = filename.match(/\.([a-z]{2})(-[a-z]+)?\.(vtt|ttml)/i)
+      if (!matched || !matched[1]) return acc
+
+      return [
+        ...acc,
+        {
+          language: matched[1],
+          path: join(cwd, filename),
+          filename
+        }
+      ]
+    }, [])
+
+    return subtitles
+  }
+
+  async downloadVideo (fileExt: string, timeout: number): Promise<string> {
+    // Leave the extension empty, youtube-dl will add it
+    const pathWithoutExtension = generateVideoImportTmpPath(this.url, '')
+
+    let timer: NodeJS.Timeout
+
+    logger.info('Importing youtubeDL video %s to %s', this.url, pathWithoutExtension, lTags())
+
+    const youtubeDL = await YoutubeDLCLI.safeGet()
+
+    const timeoutPromise = new Promise<string>((_, rej) => {
+      timer = setTimeout(() => rej(new Error('YoutubeDL download timeout.')), timeout)
+    })
+
+    const downloadPromise = youtubeDL.download({
+      url: this.url,
+      format: YoutubeDLCLI.getYoutubeDLVideoFormat(this.enabledResolutions),
+      output: pathWithoutExtension,
+      processOptions
+    }).then(() => clearTimeout(timer))
+      .then(async () => {
+        // If youtube-dl did not guess an extension for our file, just use .mp4 as default
+        if (await pathExists(pathWithoutExtension)) {
+          await move(pathWithoutExtension, pathWithoutExtension + '.mp4')
+        }
+
+        return this.guessVideoPathWithExtension(pathWithoutExtension, fileExt)
+      })
+
+    return Promise.race([ downloadPromise, timeoutPromise ])
+      .catch(async err => {
+        const path = await this.guessVideoPathWithExtension(pathWithoutExtension, fileExt)
+
+        remove(path)
+          .catch(err => logger.error('Cannot remove file in youtubeDL timeout.', { err, ...lTags() }))
+
+        throw err
+      })
+  }
+
+  private async guessVideoPathWithExtension (tmpPath: string, sourceExt: string) {
+    if (!isVideoFileExtnameValid(sourceExt)) {
+      throw new Error('Invalid video extension ' + sourceExt)
+    }
+
+    const extensions = [ sourceExt, '.mp4', '.mkv', '.webm' ]
+
+    for (const extension of extensions) {
+      const path = tmpPath + extension
+
+      if (await pathExists(path)) return path
+    }
+
+    const directoryContent = await readdir(dirname(tmpPath))
+
+    throw new Error(`Cannot guess path of ${tmpPath}. Directory content: ${directoryContent.join(', ')}`)
+  }
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+  YoutubeDLWrapper
+}
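
A condensed sketch of how the wrapper is driven by the import controller and the video-import job handler changed in this commit (targetUrl is a placeholder; info.ext stands in for the fileExt carried on the job payload):

    const youtubeDL = new YoutubeDLWrapper(targetUrl, ServerConfigManager.Instance.getEnabledResolutions('vod'))

    const info = await youtubeDL.getInfoForDownload()   // metadata used to build the video + import rows
    const subtitles = await youtubeDL.getSubtitles()    // .vtt files written into CONFIG.STORAGE.TMP_DIR
    const path = await youtubeDL.downloadVideo(info.ext, VIDEO_IMPORT_TIMEOUT)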
index 3a7c72a1caf376e71cdc61d619c23f5ec40e6ff7..e20efe02c02e99f5bb3d293cf770f96b8e57a0b8 100644 (file)
@@ -69,6 +69,7 @@ const CONFIG = {
 
   STORAGE: {
     TMP_DIR: buildPath(config.get<string>('storage.tmp')),
+    BIN_DIR: buildPath(config.get<string>('storage.bin')),
     ACTOR_IMAGES: buildPath(config.get<string>('storage.avatars')),
     LOG_DIR: buildPath(config.get<string>('storage.logs')),
     VIDEOS_DIR: buildPath(config.get<string>('storage.videos')),
@@ -292,11 +293,13 @@ const CONFIG = {
 
       HTTP: {
         get ENABLED () { return config.get<boolean>('import.videos.http.enabled') },
-        get FORCE_IPV4 () { return config.get<boolean>('import.videos.http.force_ipv4') },
-        PROXY: {
-          get ENABLED () { return config.get<boolean>('import.videos.http.proxy.enabled') },
-          get URL () { return config.get<string>('import.videos.http.proxy.url') }
-        }
+
+        YOUTUBE_DL_RELEASE: {
+          get URL () { return config.get<string>('import.videos.http.youtube_dl_release.url') },
+          get NAME () { return config.get<string>('import.videos.http.youtube_dl_release.name') }
+        },
+
+        get FORCE_IPV4 () { return config.get<boolean>('import.videos.http.force_ipv4') }
       },
       TORRENT: {
         get ENABLED () { return config.get<boolean>('import.videos.torrent.enabled') }
index dcbad92644a0aa995f3e94a0e14eb73dbc94066c..1d434d5abcaa1c240d7f32658618277645a63600 100644 (file)
@@ -497,6 +497,12 @@ const MIMETYPES = {
 MIMETYPES.AUDIO.EXT_MIMETYPE = invert(MIMETYPES.AUDIO.MIMETYPE_EXT)
 MIMETYPES.IMAGE.EXT_MIMETYPE = invert(MIMETYPES.IMAGE.MIMETYPE_EXT)
 
+const BINARY_CONTENT_TYPES = new Set([
+  'binary/octet-stream',
+  'application/octet-stream',
+  'application/x-binary'
+])
+
 // ---------------------------------------------------------------------------
 
 const OVERVIEWS = {
@@ -903,6 +909,7 @@ export {
   MIMETYPES,
   CRAWL_REQUEST_CONCURRENCY,
   DEFAULT_AUDIO_RESOLUTION,
+  BINARY_CONTENT_TYPES,
   JOB_COMPLETED_LIFETIME,
   HTTP_SIGNATURE,
   VIDEO_IMPORT_STATES,
index 8313c256180ec84e55781e6893bb4d72dc2f66c4..4ce1a6c30d9b52d22dc0e1a6fdeaacee353cfe7a 100644 (file)
@@ -2,7 +2,7 @@ import { Job } from 'bull'
 import { move, remove, stat } from 'fs-extra'
 import { getLowercaseExtension } from '@server/helpers/core-utils'
 import { retryTransactionWrapper } from '@server/helpers/database-utils'
-import { YoutubeDL } from '@server/helpers/youtube-dl'
+import { YoutubeDLWrapper } from '@server/helpers/youtube-dl'
 import { isPostImportVideoAccepted } from '@server/lib/moderation'
 import { generateWebTorrentVideoFilename } from '@server/lib/paths'
 import { Hooks } from '@server/lib/plugins/hooks'
@@ -77,10 +77,10 @@ async function processYoutubeDLImport (job: Job, payload: VideoImportYoutubeDLPa
     videoImportId: videoImport.id
   }
 
-  const youtubeDL = new YoutubeDL(videoImport.targetUrl, ServerConfigManager.Instance.getEnabledResolutions('vod'))
+  const youtubeDL = new YoutubeDLWrapper(videoImport.targetUrl, ServerConfigManager.Instance.getEnabledResolutions('vod'))
 
   return processFile(
-    () => youtubeDL.downloadYoutubeDLVideo(payload.fileExt, VIDEO_IMPORT_TIMEOUT),
+    () => youtubeDL.downloadVideo(payload.fileExt, VIDEO_IMPORT_TIMEOUT),
     videoImport,
     options
   )
index 898691c13279dfb57169005ea1643f55358aa097..93d02f8a97bbbf8d297eb39d41e8f21b9cf6da3a 100644 (file)
@@ -1,4 +1,4 @@
-import { YoutubeDL } from '@server/helpers/youtube-dl'
+import { YoutubeDLCLI } from '@server/helpers/youtube-dl'
 import { SCHEDULER_INTERVALS_MS } from '../../initializers/constants'
 import { AbstractScheduler } from './abstract-scheduler'
 
@@ -13,7 +13,7 @@ export class YoutubeDlUpdateScheduler extends AbstractScheduler {
   }
 
   protected internalExecute () {
-    return YoutubeDL.updateYoutubeDLBinary()
+    return YoutubeDLCLI.updateYoutubeDLBinary()
   }
 
   static get Instance () {
index 72bd49078dd6ba27e8c5292a6beb75910fbb54ee..29f3e10d89a68e757b612a7f4ba03928adbc7ad7 100644 (file)
@@ -2,8 +2,18 @@
 
 import 'mocha'
 import * as chai from 'chai'
-import { cleanupTests, createMultipleServers, doubleFollow, PeerTubeServer, setAccessTokensToServers, waitJobs } from '@shared/extra-utils'
+import {
+  cleanupTests,
+  createMultipleServers,
+  doubleFollow,
+  FIXTURE_URLS,
+  PeerTubeServer,
+  setAccessTokensToServers,
+  setDefaultVideoChannel,
+  waitJobs
+} from '@shared/extra-utils'
 import { MockProxy } from '@shared/extra-utils/mock-servers/mock-proxy'
+import { HttpStatusCode, VideoPrivacy } from '@shared/models'
 
 const expect = chai.expect
 
@@ -25,43 +35,90 @@ describe('Test proxy', function () {
     goodEnv.HTTP_PROXY = 'http://localhost:' + proxyPort
 
     await setAccessTokensToServers(servers)
+    await setDefaultVideoChannel(servers)
     await doubleFollow(servers[0], servers[1])
   })
 
-  it('Should succeed federation with the appropriate proxy config', async function () {
-    await servers[0].kill()
-    await servers[0].run({}, { env: goodEnv })
+  describe('Federation', function () {
 
-    await servers[0].videos.quickUpload({ name: 'video 1' })
+    it('Should succeed federation with the appropriate proxy config', async function () {
+      this.timeout(40000)
 
-    await waitJobs(servers)
+      await servers[0].kill()
+      await servers[0].run({}, { env: goodEnv })
 
-    for (const server of servers) {
-      const { total, data } = await server.videos.list()
-      expect(total).to.equal(1)
-      expect(data).to.have.lengthOf(1)
-    }
+      await servers[0].videos.quickUpload({ name: 'video 1' })
+
+      await waitJobs(servers)
+
+      for (const server of servers) {
+        const { total, data } = await server.videos.list()
+        expect(total).to.equal(1)
+        expect(data).to.have.lengthOf(1)
+      }
+    })
+
+    it('Should fail federation with a wrong proxy config', async function () {
+      this.timeout(40000)
+
+      await servers[0].kill()
+      await servers[0].run({}, { env: badEnv })
+
+      await servers[0].videos.quickUpload({ name: 'video 2' })
+
+      await waitJobs(servers)
+
+      {
+        const { total, data } = await servers[0].videos.list()
+        expect(total).to.equal(2)
+        expect(data).to.have.lengthOf(2)
+      }
+
+      {
+        const { total, data } = await servers[1].videos.list()
+        expect(total).to.equal(1)
+        expect(data).to.have.lengthOf(1)
+      }
+    })
   })
 
-  it('Should fail federation with a wrong proxy config', async function () {
-    await servers[0].kill()
-    await servers[0].run({}, { env: badEnv })
+  describe('Videos import', async function () {
+
+    function quickImport (expectedStatus: HttpStatusCode = HttpStatusCode.OK_200) {
+      return servers[0].imports.importVideo({
+        attributes: {
+          name: 'video import',
+          channelId: servers[0].store.channel.id,
+          privacy: VideoPrivacy.PUBLIC,
+          targetUrl: FIXTURE_URLS.peertube_long
+        },
+        expectedStatus
+      })
+    }
+
+    it('Should succeed import with the appropriate proxy config', async function () {
+      this.timeout(40000)
+
+      await servers[0].kill()
+      await servers[0].run({}, { env: goodEnv })
 
-    await servers[0].videos.quickUpload({ name: 'video 2' })
+      await quickImport()
 
-    await waitJobs(servers)
+      await waitJobs(servers)
 
-    {
       const { total, data } = await servers[0].videos.list()
-      expect(total).to.equal(2)
-      expect(data).to.have.lengthOf(2)
-    }
+      expect(total).to.equal(3)
+      expect(data).to.have.lengthOf(3)
+    })
 
-    {
-      const { total, data } = await servers[1].videos.list()
-      expect(total).to.equal(1)
-      expect(data).to.have.lengthOf(1)
-    }
+    it('Should fail import with a wrong proxy config', async function () {
+      this.timeout(40000)
+
+      await servers[0].kill()
+      await servers[0].run({}, { env: badEnv })
+
+      await quickImport(HttpStatusCode.BAD_REQUEST_400)
+    })
   })
 
   after(async function () {
index 948c779e8057d13e953537b7e98798e0cf502ba9..cfb18806093f3783da52a05e5ca9eb10d94407ba 100644 (file)
 /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
 
 import 'mocha'
-import * as chai from 'chai'
+import { expect } from 'chai'
+import { pathExists, remove } from 'fs-extra'
+import { join } from 'path'
 import {
   areHttpImportTestsDisabled,
   cleanupTests,
   createMultipleServers,
+  createSingleServer,
   doubleFollow,
   FIXTURE_URLS,
   PeerTubeServer,
   setAccessTokensToServers,
+  setDefaultVideoChannel,
   testCaptionFile,
   testImage,
   waitJobs
 } from '@shared/extra-utils'
 import { VideoPrivacy, VideoResolution } from '@shared/models'
 
-const expect = chai.expect
+async function checkVideosServer1 (server: PeerTubeServer, idHttp: string, idMagnet: string, idTorrent: string) {
+  const videoHttp = await server.videos.get({ id: idHttp })
+
+  expect(videoHttp.name).to.equal('small video - youtube')
+  // FIXME: youtube-dl seems broken
+  // expect(videoHttp.category.label).to.equal('News & Politics')
+  // expect(videoHttp.licence.label).to.equal('Attribution')
+  expect(videoHttp.language.label).to.equal('Unknown')
+  expect(videoHttp.nsfw).to.be.false
+  expect(videoHttp.description).to.equal('this is a super description')
+  expect(videoHttp.tags).to.deep.equal([ 'tag1', 'tag2' ])
+  expect(videoHttp.files).to.have.lengthOf(1)
+
+  const originallyPublishedAt = new Date(videoHttp.originallyPublishedAt)
+  expect(originallyPublishedAt.getDate()).to.equal(14)
+  expect(originallyPublishedAt.getMonth()).to.equal(0)
+  expect(originallyPublishedAt.getFullYear()).to.equal(2019)
+
+  const videoMagnet = await server.videos.get({ id: idMagnet })
+  const videoTorrent = await server.videos.get({ id: idTorrent })
+
+  for (const video of [ videoMagnet, videoTorrent ]) {
+    expect(video.category.label).to.equal('Misc')
+    expect(video.licence.label).to.equal('Unknown')
+    expect(video.language.label).to.equal('Unknown')
+    expect(video.nsfw).to.be.false
+    expect(video.description).to.equal('this is a super torrent description')
+    expect(video.tags).to.deep.equal([ 'tag_torrent1', 'tag_torrent2' ])
+    expect(video.files).to.have.lengthOf(1)
+  }
+
+  expect(videoTorrent.name).to.contain('你好 世界 720p.mp4')
+  expect(videoMagnet.name).to.contain('super peertube2 video')
+
+  const bodyCaptions = await server.captions.list({ videoId: idHttp })
+  expect(bodyCaptions.total).to.equal(2)
+}
+
+async function checkVideoServer2 (server: PeerTubeServer, id: number | string) {
+  const video = await server.videos.get({ id })
+
+  expect(video.name).to.equal('my super name')
+  expect(video.category.label).to.equal('Entertainment')
+  expect(video.licence.label).to.equal('Public Domain Dedication')
+  expect(video.language.label).to.equal('English')
+  expect(video.nsfw).to.be.false
+  expect(video.description).to.equal('my super description')
+  expect(video.tags).to.deep.equal([ 'supertag1', 'supertag2' ])
+
+  expect(video.files).to.have.lengthOf(1)
+
+  const bodyCaptions = await server.captions.list({ videoId: id })
+  expect(bodyCaptions.total).to.equal(2)
+}
 
 describe('Test video imports', function () {
-  let servers: PeerTubeServer[] = []
-  let channelIdServer1: number
-  let channelIdServer2: number
 
   if (areHttpImportTestsDisabled()) return
 
-  async function checkVideosServer1 (server: PeerTubeServer, idHttp: string, idMagnet: string, idTorrent: string) {
-    const videoHttp = await server.videos.get({ id: idHttp })
-
-    expect(videoHttp.name).to.equal('small video - youtube')
-    // FIXME: youtube-dl seems broken
-    // expect(videoHttp.category.label).to.equal('News & Politics')
-    // expect(videoHttp.licence.label).to.equal('Attribution')
-    expect(videoHttp.language.label).to.equal('Unknown')
-    expect(videoHttp.nsfw).to.be.false
-    expect(videoHttp.description).to.equal('this is a super description')
-    expect(videoHttp.tags).to.deep.equal([ 'tag1', 'tag2' ])
-    expect(videoHttp.files).to.have.lengthOf(1)
-
-    const originallyPublishedAt = new Date(videoHttp.originallyPublishedAt)
-    expect(originallyPublishedAt.getDate()).to.equal(14)
-    expect(originallyPublishedAt.getMonth()).to.equal(0)
-    expect(originallyPublishedAt.getFullYear()).to.equal(2019)
-
-    const videoMagnet = await server.videos.get({ id: idMagnet })
-    const videoTorrent = await server.videos.get({ id: idTorrent })
-
-    for (const video of [ videoMagnet, videoTorrent ]) {
-      expect(video.category.label).to.equal('Misc')
-      expect(video.licence.label).to.equal('Unknown')
-      expect(video.language.label).to.equal('Unknown')
-      expect(video.nsfw).to.be.false
-      expect(video.description).to.equal('this is a super torrent description')
-      expect(video.tags).to.deep.equal([ 'tag_torrent1', 'tag_torrent2' ])
-      expect(video.files).to.have.lengthOf(1)
-    }
+  function runSuite (mode: 'youtube-dl' | 'yt-dlp') {
 
-    expect(videoTorrent.name).to.contain('你好 世界 720p.mp4')
-    expect(videoMagnet.name).to.contain('super peertube2 video')
+    describe('Import ' + mode, function () {
+      let servers: PeerTubeServer[] = []
 
-    const bodyCaptions = await server.captions.list({ videoId: idHttp })
-    expect(bodyCaptions.total).to.equal(2)
-  }
+      before(async function () {
+        this.timeout(30_000)
 
-  async function checkVideoServer2 (server: PeerTubeServer, id: number | string) {
-    const video = await server.videos.get({ id })
+        // Run servers
+        servers = await createMultipleServers(2, {
+          import: {
+            videos: {
+              http: {
+                youtube_dl_release: {
+                  url: mode === 'youtube-dl'
+                    ? 'https://yt-dl.org/downloads/latest/youtube-dl'
+                    : 'https://api.github.com/repos/yt-dlp/yt-dlp/releases',
 
-    expect(video.name).to.equal('my super name')
-    expect(video.category.label).to.equal('Entertainment')
-    expect(video.licence.label).to.equal('Public Domain Dedication')
-    expect(video.language.label).to.equal('English')
-    expect(video.nsfw).to.be.false
-    expect(video.description).to.equal('my super description')
-    expect(video.tags).to.deep.equal([ 'supertag1', 'supertag2' ])
+                  name: mode
+                }
+              }
+            }
+          }
+        })
 
-    expect(video.files).to.have.lengthOf(1)
+        await setAccessTokensToServers(servers)
+        await setDefaultVideoChannel(servers)
 
-    const bodyCaptions = await server.captions.list({ videoId: id })
-    expect(bodyCaptions.total).to.equal(2)
-  }
+        await doubleFollow(servers[0], servers[1])
+      })
 
-  before(async function () {
-    this.timeout(30_000)
+      it('Should import videos on server 1', async function () {
+        this.timeout(60_000)
 
-    // Run servers
-    servers = await createMultipleServers(2)
+        const baseAttributes = {
+          channelId: servers[0].store.channel.id,
+          privacy: VideoPrivacy.PUBLIC
+        }
 
-    await setAccessTokensToServers(servers)
+        {
+          const attributes = { ...baseAttributes, targetUrl: FIXTURE_URLS.youtube }
+          const { video } = await servers[0].imports.importVideo({ attributes })
+          expect(video.name).to.equal('small video - youtube')
 
-    {
-      const { videoChannels } = await servers[0].users.getMyInfo()
-      channelIdServer1 = videoChannels[0].id
-    }
+          {
+            expect(video.thumbnailPath).to.match(new RegExp(`^/static/thumbnails/.+.jpg$`))
+            expect(video.previewPath).to.match(new RegExp(`^/lazy-static/previews/.+.jpg$`))
 
-    {
-      const { videoChannels } = await servers[1].users.getMyInfo()
-      channelIdServer2 = videoChannels[0].id
-    }
+            const suffix = mode === 'yt-dlp'
+              ? '_yt_dlp'
+              : ''
 
-    await doubleFollow(servers[0], servers[1])
-  })
+            await testImage(servers[0].url, 'video_import_thumbnail' + suffix, video.thumbnailPath)
+            await testImage(servers[0].url, 'video_import_preview' + suffix, video.previewPath)
+          }
 
-  it('Should import videos on server 1', async function () {
-    this.timeout(60_000)
+          const bodyCaptions = await servers[0].captions.list({ videoId: video.id })
+          const videoCaptions = bodyCaptions.data
+          expect(videoCaptions).to.have.lengthOf(2)
 
-    const baseAttributes = {
-      channelId: channelIdServer1,
-      privacy: VideoPrivacy.PUBLIC
-    }
+          {
+            const enCaption = videoCaptions.find(caption => caption.language.id === 'en')
+            expect(enCaption).to.exist
+            expect(enCaption.language.label).to.equal('English')
+            expect(enCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-en.vtt$`))
 
-    {
-      const attributes = { ...baseAttributes, targetUrl: FIXTURE_URLS.youtube }
-      const { video } = await servers[0].imports.importVideo({ attributes })
-      expect(video.name).to.equal('small video - youtube')
+            const regex = `WEBVTT[ \n]+Kind: captions[ \n]+Language: en[ \n]+00:00:01.600 --> 00:00:04.200[ \n]+English \\(US\\)[ \n]+` +
+              `00:00:05.900 --> 00:00:07.999[ \n]+This is a subtitle in American English[ \n]+` +
+              `00:00:10.000 --> 00:00:14.000[ \n]+Adding subtitles is very easy to do`
+            await testCaptionFile(servers[0].url, enCaption.captionPath, new RegExp(regex))
+          }
 
-      expect(video.thumbnailPath).to.match(new RegExp(`^/static/thumbnails/.+.jpg$`))
-      expect(video.previewPath).to.match(new RegExp(`^/lazy-static/previews/.+.jpg$`))
+          {
+            const frCaption = videoCaptions.find(caption => caption.language.id === 'fr')
+            expect(frCaption).to.exist
+            expect(frCaption.language.label).to.equal('French')
+            expect(frCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-fr.vtt`))
 
-      await testImage(servers[0].url, 'video_import_thumbnail', video.thumbnailPath)
-      await testImage(servers[0].url, 'video_import_preview', video.previewPath)
+            const regex = `WEBVTT[ \n]+Kind: captions[ \n]+Language: fr[ \n]+00:00:01.600 --> 00:00:04.200[ \n]+` +
+              `Français \\(FR\\)[ \n]+00:00:05.900 --> 00:00:07.999[ \n]+C'est un sous-titre français[ \n]+` +
+              `00:00:10.000 --> 00:00:14.000[ \n]+Ajouter un sous-titre est vraiment facile`
 
-      const bodyCaptions = await servers[0].captions.list({ videoId: video.id })
-      const videoCaptions = bodyCaptions.data
-      expect(videoCaptions).to.have.lengthOf(2)
+            await testCaptionFile(servers[0].url, frCaption.captionPath, new RegExp(regex))
+          }
+        }
 
-      const enCaption = videoCaptions.find(caption => caption.language.id === 'en')
-      expect(enCaption).to.exist
-      expect(enCaption.language.label).to.equal('English')
-      expect(enCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-en.vtt$`))
-      await testCaptionFile(servers[0].url, enCaption.captionPath, `WEBVTT
-Kind: captions
-Language: en
+        {
+          const attributes = {
+            ...baseAttributes,
+            magnetUri: FIXTURE_URLS.magnet,
+            description: 'this is a super torrent description',
+            tags: [ 'tag_torrent1', 'tag_torrent2' ]
+          }
+          const { video } = await servers[0].imports.importVideo({ attributes })
+          expect(video.name).to.equal('super peertube2 video')
+        }
 
-00:00:01.600 --> 00:00:04.200
-English (US)
+        {
+          const attributes = {
+            ...baseAttributes,
+            torrentfile: 'video-720p.torrent' as any,
+            description: 'this is a super torrent description',
+            tags: [ 'tag_torrent1', 'tag_torrent2' ]
+          }
+          const { video } = await servers[0].imports.importVideo({ attributes })
+          expect(video.name).to.equal('你好 世界 720p.mp4')
+        }
+      })
 
-00:00:05.900 --> 00:00:07.999
-This is a subtitle in American English
+      it('Should list the videos to import in my videos on server 1', async function () {
+        const { total, data } = await servers[0].videos.listMyVideos({ sort: 'createdAt' })
 
-00:00:10.000 --> 00:00:14.000
-Adding subtitles is very easy to do`)
+        expect(total).to.equal(3)
 
-      const frCaption = videoCaptions.find(caption => caption.language.id === 'fr')
-      expect(frCaption).to.exist
-      expect(frCaption.language.label).to.equal('French')
-      expect(frCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-fr.vtt`))
-      await testCaptionFile(servers[0].url, frCaption.captionPath, `WEBVTT
-Kind: captions
-Language: fr
+        expect(data).to.have.lengthOf(3)
+        expect(data[0].name).to.equal('small video - youtube')
+        expect(data[1].name).to.equal('super peertube2 video')
+        expect(data[2].name).to.equal('你好 世界 720p.mp4')
+      })
 
-00:00:01.600 --> 00:00:04.200
-Français (FR)
+      it('Should list the videos to import in my imports on server 1', async function () {
+        const { total, data: videoImports } = await servers[0].imports.getMyVideoImports({ sort: '-createdAt' })
+        expect(total).to.equal(3)
 
-00:00:05.900 --> 00:00:07.999
-C'est un sous-titre français
+        expect(videoImports).to.have.lengthOf(3)
 
-00:00:10.000 --> 00:00:14.000
-Ajouter un sous-titre est vraiment facile`)
-    }
+        expect(videoImports[2].targetUrl).to.equal(FIXTURE_URLS.youtube)
+        expect(videoImports[2].magnetUri).to.be.null
+        expect(videoImports[2].torrentName).to.be.null
+        expect(videoImports[2].video.name).to.equal('small video - youtube')
 
-    {
-      const attributes = {
-        ...baseAttributes,
-        magnetUri: FIXTURE_URLS.magnet,
-        description: 'this is a super torrent description',
-        tags: [ 'tag_torrent1', 'tag_torrent2' ]
-      }
-      const { video } = await servers[0].imports.importVideo({ attributes })
-      expect(video.name).to.equal('super peertube2 video')
-    }
+        expect(videoImports[1].targetUrl).to.be.null
+        expect(videoImports[1].magnetUri).to.equal(FIXTURE_URLS.magnet)
+        expect(videoImports[1].torrentName).to.be.null
+        expect(videoImports[1].video.name).to.equal('super peertube2 video')
 
-    {
-      const attributes = {
-        ...baseAttributes,
-        torrentfile: 'video-720p.torrent' as any,
-        description: 'this is a super torrent description',
-        tags: [ 'tag_torrent1', 'tag_torrent2' ]
-      }
-      const { video } = await servers[0].imports.importVideo({ attributes })
-      expect(video.name).to.equal('你好 世界 720p.mp4')
-    }
-  })
+        expect(videoImports[0].targetUrl).to.be.null
+        expect(videoImports[0].magnetUri).to.be.null
+        expect(videoImports[0].torrentName).to.equal('video-720p.torrent')
+        expect(videoImports[0].video.name).to.equal('你好 世界 720p.mp4')
+      })
 
-  it('Should list the videos to import in my videos on server 1', async function () {
-    const { total, data } = await servers[0].videos.listMyVideos({ sort: 'createdAt' })
+      it('Should have the video listed on the two instances', async function () {
+        this.timeout(120_000)
 
-    expect(total).to.equal(3)
+        await waitJobs(servers)
 
-    expect(data).to.have.lengthOf(3)
-    expect(data[0].name).to.equal('small video - youtube')
-    expect(data[1].name).to.equal('super peertube2 video')
-    expect(data[2].name).to.equal('你好 世界 720p.mp4')
-  })
+        for (const server of servers) {
+          const { total, data } = await server.videos.list()
+          expect(total).to.equal(3)
+          expect(data).to.have.lengthOf(3)
 
-  it('Should list the videos to import in my imports on server 1', async function () {
-    const { total, data: videoImports } = await servers[0].imports.getMyVideoImports({ sort: '-createdAt' })
-    expect(total).to.equal(3)
+          const [ videoHttp, videoMagnet, videoTorrent ] = data
+          await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid)
+        }
+      })
+
+      it('Should import a video on server 2 with some fields', async function () {
+        this.timeout(60_000)
+
+        const attributes = {
+          targetUrl: FIXTURE_URLS.youtube,
+          channelId: servers[1].store.channel.id,
+          privacy: VideoPrivacy.PUBLIC,
+          category: 10,
+          licence: 7,
+          language: 'en',
+          name: 'my super name',
+          description: 'my super description',
+          tags: [ 'supertag1', 'supertag2' ]
+        }
+        const { video } = await servers[1].imports.importVideo({ attributes })
+        expect(video.name).to.equal('my super name')
+      })
 
-    expect(videoImports).to.have.lengthOf(3)
+      it('Should have the videos listed on the two instances', async function () {
+        this.timeout(120_000)
 
-    expect(videoImports[2].targetUrl).to.equal(FIXTURE_URLS.youtube)
-    expect(videoImports[2].magnetUri).to.be.null
-    expect(videoImports[2].torrentName).to.be.null
-    expect(videoImports[2].video.name).to.equal('small video - youtube')
+        await waitJobs(servers)
 
-    expect(videoImports[1].targetUrl).to.be.null
-    expect(videoImports[1].magnetUri).to.equal(FIXTURE_URLS.magnet)
-    expect(videoImports[1].torrentName).to.be.null
-    expect(videoImports[1].video.name).to.equal('super peertube2 video')
+        for (const server of servers) {
+          const { total, data } = await server.videos.list()
+          expect(total).to.equal(4)
+          expect(data).to.have.lengthOf(4)
 
-    expect(videoImports[0].targetUrl).to.be.null
-    expect(videoImports[0].magnetUri).to.be.null
-    expect(videoImports[0].torrentName).to.equal('video-720p.torrent')
-    expect(videoImports[0].video.name).to.equal('你好 世界 720p.mp4')
-  })
+          await checkVideoServer2(server, data[0].uuid)
 
-  it('Should have the video listed on the two instances', async function () {
-    this.timeout(120_000)
+          const [ , videoHttp, videoMagnet, videoTorrent ] = data
+          await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid)
+        }
+      })
 
-    await waitJobs(servers)
+      it('Should import a video that will be transcoded', async function () {
+        this.timeout(240_000)
 
-    for (const server of servers) {
-      const { total, data } = await server.videos.list()
-      expect(total).to.equal(3)
-      expect(data).to.have.lengthOf(3)
+        const attributes = {
+          name: 'transcoded video',
+          magnetUri: FIXTURE_URLS.magnet,
+          channelId: servers[1].store.channel.id,
+          privacy: VideoPrivacy.PUBLIC
+        }
+        const { video } = await servers[1].imports.importVideo({ attributes })
+        const videoUUID = video.uuid
 
-      const [ videoHttp, videoMagnet, videoTorrent ] = data
-      await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid)
-    }
-  })
+        await waitJobs(servers)
 
-  it('Should import a video on server 2 with some fields', async function () {
-    this.timeout(60_000)
-
-    const attributes = {
-      targetUrl: FIXTURE_URLS.youtube,
-      channelId: channelIdServer2,
-      privacy: VideoPrivacy.PUBLIC,
-      category: 10,
-      licence: 7,
-      language: 'en',
-      name: 'my super name',
-      description: 'my super description',
-      tags: [ 'supertag1', 'supertag2' ]
-    }
-    const { video } = await servers[1].imports.importVideo({ attributes })
-    expect(video.name).to.equal('my super name')
-  })
+        for (const server of servers) {
+          const video = await server.videos.get({ id: videoUUID })
 
-  it('Should have the videos listed on the two instances', async function () {
-    this.timeout(120_000)
+          expect(video.name).to.equal('transcoded video')
+          expect(video.files).to.have.lengthOf(4)
+        }
+      })
+
+      it('Should import no HDR version on a HDR video', async function () {
+        this.timeout(300_000)
+
+        const config = {
+          transcoding: {
+            enabled: true,
+            resolutions: {
+              '240p': true,
+              '360p': false,
+              '480p': false,
+              '720p': false,
+              '1080p': false, // the resulting resolution shouldn't be higher than this, and not vp9.2/av01
+              '1440p': false,
+              '2160p': false
+            },
+            webtorrent: { enabled: true },
+            hls: { enabled: false }
+          },
+          import: {
+            videos: {
+              http: {
+                enabled: true
+              },
+              torrent: {
+                enabled: true
+              }
+            }
+          }
+        }
+        await servers[0].config.updateCustomSubConfig({ newConfig: config })
 
-    await waitJobs(servers)
+        const attributes = {
+          name: 'hdr video',
+          targetUrl: FIXTURE_URLS.youtubeHDR,
+          channelId: servers[0].store.channel.id,
+          privacy: VideoPrivacy.PUBLIC
+        }
+        const { video: videoImported } = await servers[0].imports.importVideo({ attributes })
+        const videoUUID = videoImported.uuid
+
+        await waitJobs(servers)
+
+        // test resolution
+        const video = await servers[0].videos.get({ id: videoUUID })
+        expect(video.name).to.equal('hdr video')
+        const maxResolution = Math.max.apply(Math, video.files.map(function (o) { return o.resolution.id }))
+        expect(maxResolution, 'expected max resolution not met').to.equals(VideoResolution.H_240P)
+      })
+
+      it('Should import a peertube video', async function () {
+        this.timeout(120_000)
+
+        // TODO: include peertube_short when https://github.com/ytdl-org/youtube-dl/pull/29475 is merged
+        for (const targetUrl of [ FIXTURE_URLS.peertube_long ]) {
+        // for (const targetUrl of [ FIXTURE_URLS.peertube_long, FIXTURE_URLS.peertube_short ]) {
+          await servers[0].config.disableTranscoding()
+
+          const attributes = {
+            targetUrl,
+            channelId: servers[0].store.channel.id,
+            privacy: VideoPrivacy.PUBLIC
+          }
+          const { video } = await servers[0].imports.importVideo({ attributes })
+          const videoUUID = video.uuid
 
-    for (const server of servers) {
-      const { total, data } = await server.videos.list()
-      expect(total).to.equal(4)
-      expect(data).to.have.lengthOf(4)
+          await waitJobs(servers)
 
-      await checkVideoServer2(server, data[0].uuid)
+          for (const server of servers) {
+            const video = await server.videos.get({ id: videoUUID })
 
-      const [ , videoHttp, videoMagnet, videoTorrent ] = data
-      await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid)
-    }
-  })
+            expect(video.name).to.equal('E2E tests')
+          }
+        }
+      })
 
-  it('Should import a video that will be transcoded', async function () {
-    this.timeout(240_000)
+      after(async function () {
+        await cleanupTests(servers)
+      })
+    })
+  }
 
-    const attributes = {
-      name: 'transcoded video',
-      magnetUri: FIXTURE_URLS.magnet,
-      channelId: channelIdServer2,
-      privacy: VideoPrivacy.PUBLIC
-    }
-    const { video } = await servers[1].imports.importVideo({ attributes })
-    const videoUUID = video.uuid
+  runSuite('youtube-dl')
 
-    await waitJobs(servers)
+  runSuite('yt-dlp')
 
-    for (const server of servers) {
-      const video = await server.videos.get({ id: videoUUID })
+  describe('Auto update', function () {
+    let server: PeerTubeServer
 
-      expect(video.name).to.equal('transcoded video')
-      expect(video.files).to.have.lengthOf(4)
+    function quickPeerTubeImport () {
+      const attributes = {
+        targetUrl: FIXTURE_URLS.peertube_long,
+        channelId: server.store.channel.id,
+        privacy: VideoPrivacy.PUBLIC
+      }
+
+      return server.imports.importVideo({ attributes })
     }
-  })
 
-  it('Should import no HDR version on a HDR video', async function () {
-    this.timeout(300_000)
-
-    const config = {
-      transcoding: {
-        enabled: true,
-        resolutions: {
-          '240p': true,
-          '360p': false,
-          '480p': false,
-          '720p': false,
-          '1080p': false, // the resulting resolution shouldn't be higher than this, and not vp9.2/av01
-          '1440p': false,
-          '2160p': false
-        },
-        webtorrent: { enabled: true },
-        hls: { enabled: false }
-      },
-      import: {
-        videos: {
-          http: {
-            enabled: true
-          },
-          torrent: {
-            enabled: true
+    async function testBinaryUpdate (releaseUrl: string, releaseName: string) {
+      await remove(join(server.servers.buildDirectory('bin'), releaseName))
+
+      await server.kill()
+      await server.run({
+        import: {
+          videos: {
+            http: {
+              youtube_dl_release: {
+                url: releaseUrl,
+                name: releaseName
+              }
+            }
           }
         }
-      }
-    }
-    await servers[0].config.updateCustomSubConfig({ newConfig: config })
+      })
+
+      await quickPeerTubeImport()
 
-    const attributes = {
-      name: 'hdr video',
-      targetUrl: FIXTURE_URLS.youtubeHDR,
-      channelId: channelIdServer1,
-      privacy: VideoPrivacy.PUBLIC
+      expect(await pathExists(join(server.servers.buildDirectory('bin'), releaseName))).to.be.true
     }
-    const { video: videoImported } = await servers[0].imports.importVideo({ attributes })
-    const videoUUID = videoImported.uuid
 
-    await waitJobs(servers)
+    before(async function () {
+      this.timeout(30_000)
 
-    // test resolution
-    const video = await servers[0].videos.get({ id: videoUUID })
-    expect(video.name).to.equal('hdr video')
-    const maxResolution = Math.max.apply(Math, video.files.map(function (o) { return o.resolution.id }))
-    expect(maxResolution, 'expected max resolution not met').to.equals(VideoResolution.H_240P)
-  })
+      // Run servers
+      server = await createSingleServer(1)
 
-  it('Should import a peertube video', async function () {
-    this.timeout(120_000)
+      await setAccessTokensToServers([ server ])
+      await setDefaultVideoChannel([ server ])
+    })
 
-    // TODO: include peertube_short when https://github.com/ytdl-org/youtube-dl/pull/29475 is merged
-    for (const targetUrl of [ FIXTURE_URLS.peertube_long ]) {
-    // for (const targetUrl of [ FIXTURE_URLS.peertube_long, FIXTURE_URLS.peertube_short ]) {
-      await servers[0].config.disableTranscoding()
+    it('Should update youtube-dl from github URL', async function () {
+      this.timeout(120_000)
 
-      const attributes = {
-        targetUrl,
-        channelId: channelIdServer1,
-        privacy: VideoPrivacy.PUBLIC
-      }
-      const { video } = await servers[0].imports.importVideo({ attributes })
-      const videoUUID = video.uuid
+      await testBinaryUpdate('https://api.github.com/repos/ytdl-org/youtube-dl/releases', 'youtube-dl')
+    })
 
-      await waitJobs(servers)
+    it('Should update youtube-dl from raw URL', async function () {
+      this.timeout(120_000)
 
-      for (const server of servers) {
-        const video = await server.videos.get({ id: videoUUID })
+      await testBinaryUpdate('https://yt-dl.org/downloads/latest/youtube-dl', 'youtube-dl')
+    })
 
-        expect(video.name).to.equal('E2E tests')
-      }
-    }
-  })
+    it('Should update youtube-dl from youtube-dl fork', async function () {
+      this.timeout(120_000)
 
-  after(async function () {
-    await cleanupTests(servers)
+      await testBinaryUpdate('https://api.github.com/repos/yt-dlp/yt-dlp/releases', 'yt-dlp')
+    })
   })
 })
diff --git a/server/tests/fixtures/video_import_preview_yt_dlp.jpg b/server/tests/fixtures/video_import_preview_yt_dlp.jpg
new file mode 100644 (file)
index 0000000..9e8833b
Binary files /dev/null and b/server/tests/fixtures/video_import_preview_yt_dlp.jpg differ
diff --git a/server/tests/fixtures/video_import_thumbnail_yt_dlp.jpg b/server/tests/fixtures/video_import_thumbnail_yt_dlp.jpg
new file mode 100644 (file)
index 0000000..f672a78
Binary files /dev/null and b/server/tests/fixtures/video_import_thumbnail_yt_dlp.jpg differ
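The two fixtures above are the yt-dlp variants of the expected thumbnail and preview: the rewritten test file runs the whole import suite twice via runSuite('youtube-dl') and runSuite('yt-dlp'), switching only the image suffix. The auto-update tests also show how a server can be restarted against a specific youtube-dl release through the new youtube_dl_release configuration keys. A minimal sketch of that pattern, reusing only calls that appear in the hunk above (the helper name switchToYtDlp is illustrative, not part of the commit):

import { PeerTubeServer } from '@shared/extra-utils'

// Sketch: restart a test server so HTTP imports go through yt-dlp instead of youtube-dl.
// Mirrors the testBinaryUpdate() helper of the suite above; the URL and binary name map
// to the new import.videos.http.youtube_dl_release configuration keys.
async function switchToYtDlp (server: PeerTubeServer) {
  await server.kill()

  await server.run({
    import: {
      videos: {
        http: {
          youtube_dl_release: {
            url: 'https://api.github.com/repos/yt-dlp/yt-dlp/releases',
            name: 'yt-dlp'
          }
        }
      }
    }
  })
}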
index 758b561e1b959c4b3745fae55026c971fde5b942..54ac910e634c5d61218ef922537fb4cffe0af8d9 100644 (file)
@@ -4,13 +4,9 @@ registerTSPaths()
 import { program } from 'commander'
 import { accessSync, constants } from 'fs'
 import { remove } from 'fs-extra'
-import { truncate } from 'lodash'
 import { join } from 'path'
-import { promisify } from 'util'
-import { YoutubeDL } from '@server/helpers/youtube-dl'
 import { sha256 } from '../helpers/core-utils'
 import { doRequestAndSaveToFile } from '../helpers/requests'
-import { CONSTRAINTS_FIELDS } from '../initializers/constants'
 import {
   assignToken,
   buildCommonVideoOptions,
@@ -19,8 +15,8 @@ import {
   getLogger,
   getServerCredentials
 } from './cli'
-import { PeerTubeServer } from '@shared/extra-utils'
-
+import { wait } from '@shared/extra-utils'
+import { YoutubeDLCLI, YoutubeDLInfo, YoutubeDLInfoBuilder } from '@server/helpers/youtube-dl'
 import prompt = require('prompt')
 
 const processOptions = {
@@ -73,7 +69,7 @@ getServerCredentials(command)
 async function run (url: string, username: string, password: string) {
   if (!password) password = await promptPassword()
 
-  const youtubeDLBinary = await YoutubeDL.safeGetYoutubeDL()
+  const youtubeDLBinary = await YoutubeDLCLI.safeGet()
 
   let info = await getYoutubeDLInfo(youtubeDLBinary, options.targetUrl, command.args)
 
@@ -96,8 +92,6 @@ async function run (url: string, username: string, password: string) {
   } else if (options.last) {
     infoArray = infoArray.slice(-options.last)
   }
-  // Normalize utf8 fields
-  infoArray = infoArray.map(i => normalizeObject(i))
 
   log.info('Will download and upload %d videos.\n', infoArray.length)
 
@@ -105,8 +99,9 @@ async function run (url: string, username: string, password: string) {
     try {
       if (index > 0 && options.waitInterval) {
         log.info("Wait for %d seconds before continuing.", options.waitInterval / 1000)
-        await new Promise(res => setTimeout(res, options.waitInterval))
+        await wait(options.waitInterval)
       }
+
       await processVideo({
         cwd: options.tmpdir,
         url,
@@ -131,29 +126,26 @@ async function processVideo (parameters: {
   youtubeInfo: any
 }) {
   const { youtubeInfo, cwd, url, username, password } = parameters
-  const youtubeDL = new YoutubeDL('', [])
 
   log.debug('Fetching object.', youtubeInfo)
 
   const videoInfo = await fetchObject(youtubeInfo)
   log.debug('Fetched object.', videoInfo)
 
-  const originallyPublishedAt = youtubeDL.buildOriginallyPublishedAt(videoInfo)
-
-  if (options.since && originallyPublishedAt && originallyPublishedAt.getTime() < options.since.getTime()) {
-    log.info('Video "%s" has been published before "%s", don\'t upload it.\n', videoInfo.title, formatDate(options.since))
+  if (options.since && videoInfo.originallyPublishedAt && videoInfo.originallyPublishedAt.getTime() < options.since.getTime()) {
+    log.info('Video "%s" has been published before "%s", don\'t upload it.\n', videoInfo.name, formatDate(options.since))
     return
   }
 
-  if (options.until && originallyPublishedAt && originallyPublishedAt.getTime() > options.until.getTime()) {
-    log.info('Video "%s" has been published after "%s", don\'t upload it.\n', videoInfo.title, formatDate(options.until))
+  if (options.until && videoInfo.originallyPublishedAt && videoInfo.originallyPublishedAt.getTime() > options.until.getTime()) {
+    log.info('Video "%s" has been published after "%s", don\'t upload it.\n', videoInfo.name, formatDate(options.until))
     return
   }
 
   const server = buildServer(url)
   const { data } = await server.search.advancedVideoSearch({
     search: {
-      search: videoInfo.title,
+      search: videoInfo.name,
       sort: '-match',
       searchTarget: 'local'
     }
@@ -161,28 +153,32 @@ async function processVideo (parameters: {
 
   log.info('############################################################\n')
 
-  if (data.find(v => v.name === videoInfo.title)) {
-    log.info('Video "%s" already exists, don\'t reupload it.\n', videoInfo.title)
+  if (data.find(v => v.name === videoInfo.name)) {
+    log.info('Video "%s" already exists, don\'t reupload it.\n', videoInfo.name)
     return
   }
 
   const path = join(cwd, sha256(videoInfo.url) + '.mp4')
 
-  log.info('Downloading video "%s"...', videoInfo.title)
+  log.info('Downloading video "%s"...', videoInfo.name)
 
-  const youtubeDLOptions = [ '-f', youtubeDL.getYoutubeDLVideoFormat(), ...command.args, '-o', path ]
   try {
-    const youtubeDLBinary = await YoutubeDL.safeGetYoutubeDL()
-    const youtubeDLExec = promisify(youtubeDLBinary.exec).bind(youtubeDLBinary)
-    const output = await youtubeDLExec(videoInfo.url, youtubeDLOptions, processOptions)
+    const youtubeDLBinary = await YoutubeDLCLI.safeGet()
+    const output = await youtubeDLBinary.download({
+      url: videoInfo.url,
+      format: YoutubeDLCLI.getYoutubeDLVideoFormat([]),
+      output: path,
+      additionalYoutubeDLArgs: command.args,
+      processOptions
+    })
+
     log.info(output.join('\n'))
     await uploadVideoOnPeerTube({
-      youtubeDL,
       cwd,
       url,
       username,
       password,
-      videoInfo: normalizeObject(videoInfo),
+      videoInfo,
       videoPath: path
     })
   } catch (err) {
@@ -191,57 +187,34 @@ async function processVideo (parameters: {
 }
 
 async function uploadVideoOnPeerTube (parameters: {
-  youtubeDL: YoutubeDL
-  videoInfo: any
+  videoInfo: YoutubeDLInfo
   videoPath: string
   cwd: string
   url: string
   username: string
   password: string
 }) {
-  const { youtubeDL, videoInfo, videoPath, cwd, url, username, password } = parameters
+  const { videoInfo, videoPath, cwd, url, username, password } = parameters
 
   const server = buildServer(url)
   await assignToken(server, username, password)
 
-  const category = await getCategory(server, videoInfo.categories)
-  const licence = getLicence(videoInfo.license)
-  let tags = []
-  if (Array.isArray(videoInfo.tags)) {
-    tags = videoInfo.tags
-                    .filter(t => t.length < CONSTRAINTS_FIELDS.VIDEOS.TAG.max && t.length > CONSTRAINTS_FIELDS.VIDEOS.TAG.min)
-                    .map(t => t.normalize())
-                    .slice(0, 5)
-  }
-
-  let thumbnailfile
-  if (videoInfo.thumbnail) {
-    thumbnailfile = join(cwd, sha256(videoInfo.thumbnail) + '.jpg')
+  let thumbnailfile: string
+  if (videoInfo.thumbnailUrl) {
+    thumbnailfile = join(cwd, sha256(videoInfo.thumbnailUrl) + '.jpg')
 
-    await doRequestAndSaveToFile(videoInfo.thumbnail, thumbnailfile)
+    await doRequestAndSaveToFile(videoInfo.thumbnailUrl, thumbnailfile)
   }
 
-  const originallyPublishedAt = youtubeDL.buildOriginallyPublishedAt(videoInfo)
-
-  const defaultAttributes = {
-    name: truncate(videoInfo.title, {
-      length: CONSTRAINTS_FIELDS.VIDEOS.NAME.max,
-      separator: /,? +/,
-      omission: ' […]'
-    }),
-    category,
-    licence,
-    nsfw: isNSFW(videoInfo),
-    description: videoInfo.description,
-    tags
-  }
-
-  const baseAttributes = await buildVideoAttributesFromCommander(server, program, defaultAttributes)
+  const baseAttributes = await buildVideoAttributesFromCommander(server, program, videoInfo)
 
   const attributes = {
     ...baseAttributes,
 
-    originallyPublishedAt: originallyPublishedAt ? originallyPublishedAt.toISOString() : null,
+    originallyPublishedAt: videoInfo.originallyPublishedAt
+      ? videoInfo.originallyPublishedAt.toISOString()
+      : null,
+
     thumbnailfile,
     previewfile: thumbnailfile,
     fixture: videoPath
@@ -266,67 +239,26 @@ async function uploadVideoOnPeerTube (parameters: {
   await remove(videoPath)
   if (thumbnailfile) await remove(thumbnailfile)
 
-  log.warn('Uploaded video "%s"!\n', attributes.name)
+  log.info('Uploaded video "%s"!\n', attributes.name)
 }
 
 /* ---------------------------------------------------------- */
 
-async function getCategory (server: PeerTubeServer, categories: string[]) {
-  if (!categories) return undefined
-
-  const categoryString = categories[0]
-
-  if (categoryString === 'News & Politics') return 11
-
-  const categoriesServer = await server.videos.getCategories()
-
-  for (const key of Object.keys(categoriesServer)) {
-    const categoryServer = categoriesServer[key]
-    if (categoryString.toLowerCase() === categoryServer.toLowerCase()) return parseInt(key, 10)
-  }
-
-  return undefined
-}
-
-function getLicence (licence: string) {
-  if (!licence) return undefined
-
-  if (licence.includes('Creative Commons Attribution licence')) return 1
-
-  return undefined
-}
-
-function normalizeObject (obj: any) {
-  const newObj: any = {}
-
-  for (const key of Object.keys(obj)) {
-    // Deprecated key
-    if (key === 'resolution') continue
-
-    const value = obj[key]
-
-    if (typeof value === 'string') {
-      newObj[key] = value.normalize()
-    } else {
-      newObj[key] = value
-    }
-  }
+async function fetchObject (info: any) {
+  const url = buildUrl(info)
 
-  return newObj
-}
+  const youtubeDLCLI = await YoutubeDLCLI.safeGet()
+  const result = await youtubeDLCLI.getInfo({
+    url,
+    format: YoutubeDLCLI.getYoutubeDLVideoFormat([]),
+    processOptions
+  })
 
-function fetchObject (info: any) {
-  const url = buildUrl(info)
+  const builder = new YoutubeDLInfoBuilder(result)
 
-  return new Promise<any>(async (res, rej) => {
-    const youtubeDL = await YoutubeDL.safeGetYoutubeDL()
-    youtubeDL.getInfo(url, undefined, processOptions, (err, videoInfo) => {
-      if (err) return rej(err)
+  const videoInfo = builder.getInfo()
 
-      const videoInfoWithUrl = Object.assign(videoInfo, { url })
-      return res(normalizeObject(videoInfoWithUrl))
-    })
-  })
+  return { ...videoInfo, url }
 }
 
 function buildUrl (info: any) {
@@ -340,10 +272,6 @@ function buildUrl (info: any) {
   return 'https://www.youtube.com/watch?v=' + info.id
 }
 
-function isNSFW (info: any) {
-  return info.age_limit && info.age_limit >= 16
-}
-
 function normalizeTargetUrl (url: string) {
   let normalizedUrl = url.replace(/\/+$/, '')
 
@@ -404,14 +332,11 @@ function exitError (message: string, ...meta: any[]) {
   process.exit(-1)
 }
 
-function getYoutubeDLInfo (youtubeDL: any, url: string, args: string[]) {
-  return new Promise<any>((res, rej) => {
-    const options = [ '-j', '--flat-playlist', '--playlist-reverse', ...args ]
-
-    youtubeDL.getInfo(url, options, processOptions, (err, info) => {
-      if (err) return rej(err)
-
-      return res(info)
-    })
+function getYoutubeDLInfo (youtubeDLCLI: YoutubeDLCLI, url: string, args: string[]) {
+  return youtubeDLCLI.getInfo({
+    url,
+    format: YoutubeDLCLI.getYoutubeDLVideoFormat([]),
+    additionalYoutubeDLArgs: [ '-j', '--flat-playlist', '--playlist-reverse', ...args ],
+    processOptions
   })
 }
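With this change the CLI import script shares the server's youtube-dl helpers instead of wrapping the binary itself: YoutubeDLCLI.safeGet() fetches or locates the binary, getInfo() returns the raw youtube-dl JSON, and YoutubeDLInfoBuilder normalizes it into a YoutubeDLInfo (name, description, thumbnailUrl, originallyPublishedAt, ...). A condensed sketch of that flow, using only the calls visible in the hunk above; processOptions stands in for the options object the tool defines near its top, whose contents are not shown in this hunk:

import { YoutubeDLCLI, YoutubeDLInfoBuilder } from '@server/helpers/youtube-dl'

// Sketch of the rewritten import flow: fetch metadata, normalize it, then download the file.
async function fetchAndDownload (url: string, outputPath: string, processOptions: any) {
  const youtubeDLCLI = await YoutubeDLCLI.safeGet() // downloads the binary on first use

  const rawInfo = await youtubeDLCLI.getInfo({
    url,
    format: YoutubeDLCLI.getYoutubeDLVideoFormat([]),
    processOptions
  })

  // Normalize the raw youtube-dl JSON into the fields PeerTube uses (name, thumbnailUrl, ...)
  const videoInfo = new YoutubeDLInfoBuilder(rawInfo).getInfo()

  const output = await youtubeDLCLI.download({
    url,
    format: YoutubeDLCLI.getYoutubeDLVideoFormat([]),
    output: outputPath,
    processOptions
  })

  return { videoInfo, downloadLog: output.join('\n') }
}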
index 6299a48f5033db84562646ba717fbdae69ec690f..658fe5fd35d1d702fa0446128124baeca9ae3a25 100644 (file)
@@ -20,7 +20,7 @@ const FIXTURE_URLS = {
   youtubeHDR: 'https://www.youtube.com/watch?v=RQgnBB9z_N4',
 
   // eslint-disable-next-line max-len
-  magnet: 'magnet:?xs=https%3A%2F%2Fpeertube2.cpy.re%2Fstatic%2Ftorrents%2Fb209ca00-c8bb-4b2b-b421-1ede169f3dbc-720.torrent&xt=urn:btih:0f498834733e8057ed5c6f2ee2b4efd8d84a76ee&dn=super+peertube2+video&tr=wss%3A%2F%2Fpeertube2.cpy.re%3A443%2Ftracker%2Fsocket&tr=https%3A%2F%2Fpeertube2.cpy.re%2Ftracker%2Fannounce&ws=https%3A%2F%2Fpeertube2.cpy.re%2Fstatic%2Fwebseed%2Fb209ca00-c8bb-4b2b-b421-1ede169f3dbc-720.mp4',
+  magnet: 'magnet:?xs=https%3A%2F%2Fpeertube2.cpy.re%2Flazy-static%2Ftorrents%2Fb209ca00-c8bb-4b2b-b421-1ede169f3dbc-720.torrent&xt=urn:btih:0f498834733e8057ed5c6f2ee2b4efd8d84a76ee&dn=super+peertube2+video&tr=https%3A%2F%2Fpeertube2.cpy.re%2Ftracker%2Fannounce&tr=wss%3A%2F%2Fpeertube2.cpy.re%3A443%2Ftracker%2Fsocket&ws=https%3A%2F%2Fpeertube2.cpy.re%2Fstatic%2Fwebseed%2Fb209ca00-c8bb-4b2b-b421-1ede169f3dbc-720.mp4',
 
   badVideo: 'https://download.cpy.re/peertube/bad_video.mp4',
   goodVideo: 'https://download.cpy.re/peertube/good_video.mp4',
index fc44cd250756717fb49ad61777f0a8edd3b19cec..35e722408a036935f56d83164126fe85a12bffdc 100644 (file)
@@ -2,12 +2,16 @@ import { expect } from 'chai'
 import request from 'supertest'
 import { HttpStatusCode } from '@shared/models'
 
-async function testCaptionFile (url: string, captionPath: string, containsString: string) {
+async function testCaptionFile (url: string, captionPath: string, toTest: RegExp | string) {
   const res = await request(url)
     .get(captionPath)
     .expect(HttpStatusCode.OK_200)
 
-  expect(res.text).to.contain(containsString)
+  if (toTest instanceof RegExp) {
+    expect(res.text).to.match(toTest)
+  } else {
+    expect(res.text).to.contain(toTest)
+  }
 }
 
 // ---------------------------------------------------------------------------
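testCaptionFile() now accepts either a plain substring or a RegExp, which is what lets the import tests above match whole WEBVTT files while tolerating the whitespace differences between youtube-dl and yt-dlp caption output. A usage fragment, meant to sit inside an async test like the one above (servers[0] and enCaption as defined there):

// Substring match (previous behaviour of testCaptionFile):
await testCaptionFile(servers[0].url, enCaption.captionPath, 'This is a subtitle in American English')

// RegExp match (new), tolerant of spacing differences between binaries:
await testCaptionFile(servers[0].url, enCaption.captionPath, new RegExp('Language: en[ \\n]+00:00:01.600 --> 00:00:04.200'))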
index 54d619b1a42ea1133c41119496498a276fc13696..2a91172421997cd5584786787b46fa28ae927ac4 100644 (file)
@@ -44,6 +44,7 @@ redis:
 # From the project root directory
 storage:
   tmp: '../data/tmp/' # Use to download data (imports etc), store uploaded files before and during processing...
+  bin: '../data/bin/'
   avatars: '../data/avatars/'
   videos: '../data/videos/'
   streaming_playlists: '../data/streaming-playlists'
index d49f52b43dca505c2c0bb051e8672d73bf8f18db..4e18fbbe113b3b5118bace35d87b93cc690f4db2 100644 (file)
--- a/yarn.lock
+++ b/yarn.lock
@@ -2064,11 +2064,6 @@ array-differ@^3.0.0:
   resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-3.0.0.tgz#3cbb3d0f316810eafcc47624734237d6aee4ae6b"
   integrity sha512-THtfYS6KtME/yIAhKjZ2ul7XI96lQGHRputJQHO80LAWQnuGP4iCIN8vdMRboGbIEYBwU33q8Tch1os2+X0kMg==
 
-array-find-index@^1.0.1:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1"
-  integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=
-
 array-flatten@1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
@@ -2104,11 +2099,6 @@ arraybuffer.slice@~0.0.7:
   resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675"
   integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==
 
-arrify@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d"
-  integrity sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=
-
 arrify@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa"
@@ -2296,32 +2286,6 @@ better-assert@~1.0.0:
   dependencies:
     callsite "1.0.0"
 
-bin-version-check-cli@~2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/bin-version-check-cli/-/bin-version-check-cli-2.0.0.tgz#7d45a23dc55024bbf741b8e66dc5c0afbac7d738"
-  integrity sha512-wPASWpdpQuY/qkiT0hOLpTT/siAkmM/GXkuuQ/kgF1HuO4LEoIR6CgjnmuEv6lCbOSh2CWxAqmeyynp2OA1qhQ==
-  dependencies:
-    arrify "^1.0.1"
-    bin-version-check "^4.0.0"
-    meow "^5.0.0"
-
-bin-version-check@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/bin-version-check/-/bin-version-check-4.0.0.tgz#7d819c62496991f80d893e6e02a3032361608f71"
-  integrity sha512-sR631OrhC+1f8Cvs8WyVWOA33Y8tgwjETNPyyD/myRBXLkfS/vl74FmH/lFcRl9KY3zwGh7jFhvyk9vV3/3ilQ==
-  dependencies:
-    bin-version "^3.0.0"
-    semver "^5.6.0"
-    semver-truncate "^1.1.2"
-
-bin-version@^3.0.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/bin-version/-/bin-version-3.1.0.tgz#5b09eb280752b1bd28f0c9db3f96f2f43b6c0839"
-  integrity sha512-Mkfm4iE1VFt4xd4vH+gx+0/71esbfus2LsnCGe8Pi4mndSPyT+NGES/Eg99jx8/lUGWfu3z2yuB/bt5UB+iVbQ==
-  dependencies:
-    execa "^1.0.0"
-    find-versions "^3.0.0"
-
 binary-extensions@^2.0.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
@@ -2666,25 +2630,11 @@ callsites@^3.0.0:
   resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
   integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
 
-camelcase-keys@^4.0.0:
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-4.2.0.tgz#a2aa5fb1af688758259c32c141426d78923b9b77"
-  integrity sha1-oqpfsa9oh1glnDLBQUJteJI7m3c=
-  dependencies:
-    camelcase "^4.1.0"
-    map-obj "^2.0.0"
-    quick-lru "^1.0.0"
-
 camelcase@5.0.0:
   version "5.0.0"
   resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.0.0.tgz#03295527d58bd3cd4aa75363f35b2e8d97be2f42"
   integrity sha512-faqwZqnWxbxn+F1d399ygeamQNy3lPp/H9H6rNrqYh4FSVCtcY+3cub1MxA8o9mDd55mM8Aghuu/kuyYA6VTsA==
 
-camelcase@^4.1.0:
-  version "4.1.0"
-  resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd"
-  integrity sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=
-
 camelcase@^5.0.0:
   version "5.3.1"
   resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
@@ -3215,17 +3165,6 @@ cross-argv@^1.0.0:
   resolved "https://registry.yarnpkg.com/cross-argv/-/cross-argv-1.0.0.tgz#e7221e9ff73092a80496c699c8c45efb20f6486c"
   integrity sha512-uAVe/bgNHlPdP1VE4Sk08u9pAJ7o1x/tVQtX77T5zlhYhuwOWtVkPBEtHdvF5cq48VzeCG5i1zN4dQc8pwLYrw==
 
-cross-spawn@^6.0.0:
-  version "6.0.5"
-  resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
-  integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==
-  dependencies:
-    nice-try "^1.0.4"
-    path-key "^2.0.1"
-    semver "^5.5.0"
-    shebang-command "^1.2.0"
-    which "^1.2.9"
-
 cross-spawn@^7.0.2, cross-spawn@^7.0.3:
   version "7.0.3"
   resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
@@ -3261,13 +3200,6 @@ css-what@^5.0.0, css-what@^5.0.1:
   resolved "https://registry.yarnpkg.com/css-what/-/css-what-5.1.0.tgz#3f7b707aadf633baf62c2ceb8579b545bb40f7fe"
   integrity sha512-arSMRWIIFY0hV8pIxZMEfmMI47Wj3R/aWpZDDxWYCPEiOMv6tfOrnpDtgxBYPEQD4V0Y/958+1TdC3iWTFcUPw==
 
-currently-unhandled@^0.4.1:
-  version "0.4.1"
-  resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea"
-  integrity sha1-mI3zP+qxke95mmE2nddsF635V+o=
-  dependencies:
-    array-find-index "^1.0.1"
-
 cycle@1.0.x:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/cycle/-/cycle-1.0.3.tgz#21e80b2be8580f98b468f379430662b046c34ad2"
@@ -3355,15 +3287,7 @@ decache@^4.6.0:
   dependencies:
     callsite "^1.0.0"
 
-decamelize-keys@^1.0.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/decamelize-keys/-/decamelize-keys-1.1.0.tgz#d171a87933252807eb3cb61dc1c1445d078df2d9"
-  integrity sha1-0XGoeTMlKAfrPLYdwcFEXQeN8tk=
-  dependencies:
-    decamelize "^1.1.0"
-    map-obj "^1.0.0"
-
-decamelize@^1.1.0, decamelize@^1.2.0:
+decamelize@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
   integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
@@ -3737,13 +3661,6 @@ err-code@^3.0.1:
   resolved "https://registry.yarnpkg.com/err-code/-/err-code-3.0.1.tgz#a444c7b992705f2b120ee320b09972eef331c920"
   integrity sha512-GiaH0KJUewYok+eeY05IIgjtAe4Yltygk9Wqp1V5yVWLdhf0hYZchRjNIT9bb0mSwRcIusT3cx7PJUf3zEIfUA==
 
-error-ex@^1.3.1:
-  version "1.3.2"
-  resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
-  integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==
-  dependencies:
-    is-arrayish "^0.2.1"
-
 es-abstract@^1.19.0, es-abstract@^1.19.1:
   version "1.19.1"
   resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.19.1.tgz#d4885796876916959de78edaa0df456627115ec3"
@@ -4084,23 +4001,10 @@ event-target-shim@^5.0.0:
   resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789"
   integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==
 
-execa@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8"
-  integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==
-  dependencies:
-    cross-spawn "^6.0.0"
-    get-stream "^4.0.0"
-    is-stream "^1.1.0"
-    npm-run-path "^2.0.0"
-    p-finally "^1.0.0"
-    signal-exit "^3.0.0"
-    strip-eof "^1.0.0"
-
-execa@~5.0.0:
-  version "5.0.1"
-  resolved "https://registry.yarnpkg.com/execa/-/execa-5.0.1.tgz#aee63b871c9b2cb56bc9addcd3c70a785c6bf0d1"
-  integrity sha512-4hFTjFbFzQa3aCLobpbPJR/U+VoL1wdV5ozOWjeet0AWDeYr9UFGM1eUFWHX+VtOWFq4p0xXUXfW1YxUaP4fpw==
+execa@^5.1.1:
+  version "5.1.1"
+  resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd"
+  integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==
   dependencies:
     cross-spawn "^7.0.3"
     get-stream "^6.0.0"
@@ -4306,7 +4210,7 @@ find-up@5.0.0:
     locate-path "^6.0.0"
     path-exists "^4.0.0"
 
-find-up@^2.0.0, find-up@^2.1.0:
+find-up@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
   integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c=
@@ -4321,13 +4225,6 @@ find-up@^4.1.0:
     locate-path "^5.0.0"
     path-exists "^4.0.0"
 
-find-versions@^3.0.0:
-  version "3.2.0"
-  resolved "https://registry.yarnpkg.com/find-versions/-/find-versions-3.2.0.tgz#10297f98030a786829681690545ef659ed1d254e"
-  integrity sha512-P8WRou2S+oe222TOCHitLy8zj+SIsVJh52VP4lvXkaFVnOFFdoWv1H1Jjvel1aI6NCFOAaeAVm8qrI0odiLcww==
-  dependencies:
-    semver-regex "^2.0.0"
-
 flat-cache@^3.0.4:
   version "3.0.4"
   resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11"
@@ -4516,7 +4413,7 @@ get-stdin@^8.0.0:
   resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53"
   integrity sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==
 
-get-stream@^4.0.0, get-stream@^4.1.0:
+get-stream@^4.1.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
   integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
@@ -4630,7 +4527,7 @@ globby@^11.0.3:
     merge2 "^1.3.0"
     slash "^3.0.0"
 
-got@^11.8.2, got@~11.8.1:
+got@^11.8.2:
   version "11.8.2"
   resolved "https://registry.yarnpkg.com/got/-/got-11.8.2.tgz#7abb3959ea28c31f3576f1576c1effce23f33599"
   integrity sha512-D0QywKgIe30ODs+fm8wMZiAcZjypcCodPNuMz5H9Mny7RJ+IjJ10BdmGW7OM7fHXP+O7r6ZwapQ/YQmMSvB0UQ==
@@ -4759,18 +4656,6 @@ helmet@^4.1.0:
   resolved "https://registry.yarnpkg.com/helmet/-/helmet-4.6.0.tgz#579971196ba93c5978eb019e4e8ec0e50076b4df"
   integrity sha512-HVqALKZlR95ROkrnesdhbbZJFi/rIVSoNq6f3jA/9u6MIbTsPh3xZwihjeI5+DO/2sOV6HMHooXcEOuwskHpTg==
 
-hh-mm-ss@~1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/hh-mm-ss/-/hh-mm-ss-1.2.0.tgz#6d0f0b8280824a634cb1d1f20e0bc7bc8b689948"
-  integrity sha512-f4I9Hz1dLpX/3mrEs7yq30+FiuO3tt5NWAqAGeBTaoeoBfB8vhcQ3BphuDc5DjZb/K809agqrAaFlP0jhEU/8w==
-  dependencies:
-    zero-fill "^2.2.3"
-
-hosted-git-info@^2.1.4:
-  version "2.8.9"
-  resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9"
-  integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==
-
 hpagent@^0.1.2:
   version "0.1.2"
   resolved "https://registry.yarnpkg.com/hpagent/-/hpagent-0.1.2.tgz#cab39c66d4df2d4377dbd212295d878deb9bdaa9"
@@ -5008,11 +4893,6 @@ imurmurhash@^0.1.4:
   resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
   integrity sha1-khi5srkoojixPcT7a21XbyMUU+o=
 
-indent-string@^3.0.0:
-  version "3.2.0"
-  resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289"
-  integrity sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=
-
 indexof@0.0.1:
   version "0.0.1"
   resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d"
@@ -5114,11 +4994,6 @@ ipv6-normalize@1.0.1:
   resolved "https://registry.yarnpkg.com/ipv6-normalize/-/ipv6-normalize-1.0.1.tgz#1b3258290d365fa83239e89907dde4592e7620a8"
   integrity sha1-GzJYKQ02X6gyOeiZB93kWS52IKg=
 
-is-arrayish@^0.2.1:
-  version "0.2.1"
-  resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
-  integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
-
 is-arrayish@^0.3.1:
   version "0.3.2"
   resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03"
@@ -5294,11 +5169,6 @@ is-path-inside@^3.0.2:
   resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283"
   integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==
 
-is-plain-obj@^1.1.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e"
-  integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4=
-
 is-plain-obj@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287"
@@ -5327,11 +5197,6 @@ is-shared-array-buffer@^1.0.1:
   resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz#97b0c85fbdacb59c9c446fe653b82cf2b5b7cfe6"
   integrity sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA==
 
-is-stream@^1.1.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
-  integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
-
 is-stream@^2.0.0:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
@@ -5466,11 +5331,6 @@ json-buffer@3.0.1:
   resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13"
   integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==
 
-json-parse-better-errors@^1.0.1:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9"
-  integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==
-
 json-schema-traverse@^0.4.1:
   version "0.4.1"
   resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
@@ -5703,16 +5563,6 @@ load-ip-set@^2.2.1:
     simple-get "^4.0.0"
     split "^1.0.1"
 
-load-json-file@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b"
-  integrity sha1-L19Fq5HjMhYjT9U62rZo607AmTs=
-  dependencies:
-    graceful-fs "^4.1.2"
-    parse-json "^4.0.0"
-    pify "^3.0.0"
-    strip-bom "^3.0.0"
-
 locate-path@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
@@ -5794,14 +5644,6 @@ logform@^2.2.0:
     safe-stable-stringify "^1.1.0"
     triple-beam "^1.3.0"
 
-loud-rejection@^1.0.0:
-  version "1.6.0"
-  resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f"
-  integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8=
-  dependencies:
-    currently-unhandled "^0.4.1"
-    signal-exit "^3.0.0"
-
 lowercase-keys@^1.0.0, lowercase-keys@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f"
@@ -5937,16 +5779,6 @@ manage-path@^2.0.0:
   resolved "https://registry.yarnpkg.com/manage-path/-/manage-path-2.0.0.tgz#f4cf8457b926eeee2a83b173501414bc76eb9597"
   integrity sha1-9M+EV7km7u4qg7FzUBQUvHbrlZc=
 
-map-obj@^1.0.0:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d"
-  integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=
-
-map-obj@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-2.0.0.tgz#a65cd29087a92598b8791257a523e021222ac1f9"
-  integrity sha1-plzSkIepJZi4eRJXpSPgISIqwfk=
-
 markdown-it-emoji@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/markdown-it-emoji/-/markdown-it-emoji-2.0.0.tgz#3164ad4c009efd946e98274f7562ad611089a231"
@@ -6022,21 +5854,6 @@ mensch@^0.3.4:
   resolved "https://registry.yarnpkg.com/mensch/-/mensch-0.3.4.tgz#770f91b46cb16ea5b204ee735768c3f0c491fecd"
   integrity sha512-IAeFvcOnV9V0Yk+bFhYR07O3yNina9ANIN5MoXBKYJ/RLYPurd2d0yw14MDhpr9/momp0WofT1bPUh3hkzdi/g==
 
-meow@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/meow/-/meow-5.0.0.tgz#dfc73d63a9afc714a5e371760eb5c88b91078aa4"
-  integrity sha512-CbTqYU17ABaLefO8vCU153ZZlprKYWDljcndKKDCFcYQITzWCXZAVk4QMFZPgvzrnUQ3uItnIE/LoUOwrT15Ig==
-  dependencies:
-    camelcase-keys "^4.0.0"
-    decamelize-keys "^1.0.0"
-    loud-rejection "^1.0.0"
-    minimist-options "^3.0.1"
-    normalize-package-data "^2.3.4"
-    read-pkg-up "^3.0.0"
-    redent "^2.0.0"
-    trim-newlines "^2.0.0"
-    yargs-parser "^10.0.0"
-
 merge-descriptors@1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
@@ -6135,14 +5952,6 @@ minimatch@3.0.4, minimatch@^3.0.4:
   dependencies:
     brace-expansion "^1.1.7"
 
-minimist-options@^3.0.1:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-3.0.2.tgz#fba4c8191339e13ecf4d61beb03f070103f3d954"
-  integrity sha512-FyBrT/d0d4+uiZRbqznPXqw3IpZZG3gl3wKWiX784FycUKVwBt0uLBFkQrtE4tZOrgo78nZp2jnKz3L65T5LdQ==
-  dependencies:
-    arrify "^1.0.1"
-    is-plain-obj "^1.1.0"
-
 minimist@^1.1.0, minimist@^1.2.0, minimist@^1.2.5:
   version "1.2.5"
   resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
@@ -6180,7 +5989,7 @@ mkdirp@^0.5.1, mkdirp@^0.5.4:
   dependencies:
     minimist "^1.2.5"
 
-mkdirp@^1.0.3, mkdirp@~1.0.4:
+mkdirp@^1.0.3:
   version "1.0.4"
   resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e"
   integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
@@ -6392,11 +6201,6 @@ next-tick@~1.0.0:
   resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c"
   integrity sha1-yobR/ogoFpsBICCOPchCS524NCw=
 
-nice-try@^1.0.4:
-  version "1.0.5"
-  resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
-  integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
-
 node-addon-api@^3.1.0:
   version "3.2.1"
   resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161"
@@ -6481,16 +6285,6 @@ nopt@~1.0.10:
   dependencies:
     abbrev "1"
 
-normalize-package-data@^2.3.2, normalize-package-data@^2.3.4:
-  version "2.5.0"
-  resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8"
-  integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==
-  dependencies:
-    hosted-git-info "^2.1.4"
-    resolve "^1.10.0"
-    semver "2 || 3 || 4 || 5"
-    validate-npm-package-license "^3.0.1"
-
 normalize-path@^3.0.0, normalize-path@~3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
@@ -6506,13 +6300,6 @@ normalize-url@^6.0.1:
   resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a"
   integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==
 
-npm-run-path@^2.0.0:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
-  integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=
-  dependencies:
-    path-key "^2.0.0"
-
 npm-run-path@^4.0.1:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea"
@@ -6813,14 +6600,6 @@ parse-headers@^2.0.0:
   resolved "https://registry.yarnpkg.com/parse-headers/-/parse-headers-2.0.4.tgz#9eaf2d02bed2d1eff494331ce3df36d7924760bf"
   integrity sha512-psZ9iZoCNFLrgRjZ1d8mn0h9WRqJwFxM9q3x7iUjN/YT2OksthDJ5TiPCu2F38kS4zutqfW+YdVVkBZZx3/1aw==
 
-parse-json@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0"
-  integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=
-  dependencies:
-    error-ex "^1.3.1"
-    json-parse-better-errors "^1.0.1"
-
 parse-srcset@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/parse-srcset/-/parse-srcset-1.0.2.tgz#f2bd221f6cc970a938d88556abc589caaaa2bde1"
@@ -6908,11 +6687,6 @@ path-is-absolute@^1.0.0:
   resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
   integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
 
-path-key@^2.0.0, path-key@^2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
-  integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
-
 path-key@^3.0.0, path-key@^3.1.0:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
@@ -6928,13 +6702,6 @@ path-to-regexp@0.1.7:
   resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
   integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=
 
-path-type@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f"
-  integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==
-  dependencies:
-    pify "^3.0.0"
-
 path-type@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b"
@@ -7043,11 +6810,6 @@ piece-length@^2.0.1:
   resolved "https://registry.yarnpkg.com/piece-length/-/piece-length-2.0.1.tgz#dbed4e78976955f34466d0a65304d0cb21914ac9"
   integrity sha512-dBILiDmm43y0JPISWEmVGKBETQjwJe6mSU9GND+P9KW0SJGUwoU/odyH1nbalOP9i8WSYuqf1lQnaj92Bhw+Ug==
 
-pify@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176"
-  integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=
-
 pify@^4.0.1:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231"
@@ -7371,11 +7133,6 @@ queue-tick@^1.0.0:
   resolved "https://registry.yarnpkg.com/queue-tick/-/queue-tick-1.0.0.tgz#011104793a3309ae86bfeddd54e251dc94a36725"
   integrity sha512-ULWhjjE8BmiICGn3G8+1L9wFpERNxkf8ysxkAer4+TFdRefDaXOCV5m92aMB9FtBVmn/8sETXLXY6BfW7hyaWQ==
 
-quick-lru@^1.0.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-1.1.0.tgz#4360b17c61136ad38078397ff11416e186dcfbb8"
-  integrity sha1-Q2CxfGETatOAeDl/8RQW4Ybc+7g=
-
 quick-lru@^5.1.1:
   version "5.1.1"
   resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932"
@@ -7470,23 +7227,6 @@ rdf-canonize@^3.0.0:
   dependencies:
     setimmediate "^1.0.5"
 
-read-pkg-up@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-3.0.0.tgz#3ed496685dba0f8fe118d0691dc51f4a1ff96f07"
-  integrity sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=
-  dependencies:
-    find-up "^2.0.0"
-    read-pkg "^3.0.0"
-
-read-pkg@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-3.0.0.tgz#9cbc686978fee65d16c00e2b19c237fcf6e38389"
-  integrity sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=
-  dependencies:
-    load-json-file "^4.0.0"
-    normalize-package-data "^2.3.2"
-    path-type "^3.0.0"
-
 read@1.0.x:
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/read/-/read-1.0.7.tgz#b3da19bd052431a97671d44a42634adf710b40c4"
@@ -7555,14 +7295,6 @@ record-cache@^1.0.2:
   resolved "https://registry.yarnpkg.com/record-cache/-/record-cache-1.1.1.tgz#ba3088a489f50491a4af7b14d410822c394fb811"
   integrity sha512-L5hZlgWc7CmGbztnemQoKE1bLu9rtI2skOB0ttE4C5+TVszLE8Rd0YLTROSgvXKLAqPumS/soyN5tJW5wJLmJQ==
 
-redent@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/redent/-/redent-2.0.0.tgz#c1b2007b42d57eb1389079b3c8333639d5e1ccaa"
-  integrity sha1-wbIAe0LVfrE4kHmzyDM2OdXhzKo=
-  dependencies:
-    indent-string "^3.0.0"
-    strip-indent "^2.0.0"
-
 redis-commands@1.7.0, redis-commands@^1.7.0:
   version "1.7.0"
   resolved "https://registry.yarnpkg.com/redis-commands/-/redis-commands-1.7.0.tgz#15a6fea2d58281e27b1cd1acfb4b293e278c3a89"
@@ -7655,7 +7387,7 @@ resolve-from@^4.0.0:
   resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
   integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
 
-resolve@^1.10.0, resolve@^1.10.1, resolve@^1.15.1, resolve@^1.20.0:
+resolve@^1.10.1, resolve@^1.15.1, resolve@^1.20.0:
   version "1.20.0"
   resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975"
   integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==
@@ -7801,19 +7533,7 @@ semver-diff@^3.1.1:
   dependencies:
     semver "^6.3.0"
 
-semver-regex@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/semver-regex/-/semver-regex-2.0.0.tgz#a93c2c5844539a770233379107b38c7b4ac9d338"
-  integrity sha512-mUdIBBvdn0PLOeP3TEkMH7HHeUP3GjsXCwKarjv/kGmUFOYg1VqEemKhoQpWMu6X2I8kHeuVdGibLGkVK+/5Qw==
-
-semver-truncate@^1.1.2:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/semver-truncate/-/semver-truncate-1.1.2.tgz#57f41de69707a62709a7e0104ba2117109ea47e8"
-  integrity sha1-V/Qd5pcHpicJp+AQS6IRcQnqR+g=
-  dependencies:
-    semver "^5.3.0"
-
-"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1:
+semver@^5.7.1:
   version "5.7.1"
   resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
   integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
@@ -7917,13 +7637,6 @@ setprototypeof@1.2.0:
   resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424"
   integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==
 
-shebang-command@^1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
-  integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
-  dependencies:
-    shebang-regex "^1.0.0"
-
 shebang-command@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
@@ -7931,11 +7644,6 @@ shebang-command@^2.0.0:
   dependencies:
     shebang-regex "^3.0.0"
 
-shebang-regex@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
-  integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
-
 shebang-regex@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
@@ -8176,32 +7884,6 @@ spawn-command@^0.0.2-1:
   resolved "https://registry.yarnpkg.com/spawn-command/-/spawn-command-0.0.2-1.tgz#62f5e9466981c1b796dc5929937e11c9c6921bd0"
   integrity sha1-YvXpRmmBwbeW3Fkpk34RycaSG9A=
 
-spdx-correct@^3.0.0:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9"
-  integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==
-  dependencies:
-    spdx-expression-parse "^3.0.0"
-    spdx-license-ids "^3.0.0"
-
-spdx-exceptions@^2.1.0:
-  version "2.3.0"
-  resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d"
-  integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==
-
-spdx-expression-parse@^3.0.0:
-  version "3.0.1"
-  resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679"
-  integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==
-  dependencies:
-    spdx-exceptions "^2.1.0"
-    spdx-license-ids "^3.0.0"
-
-spdx-license-ids@^3.0.0:
-  version "3.0.10"
-  resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.10.tgz#0d9becccde7003d6c658d487dd48a32f0bf3014b"
-  integrity sha512-oie3/+gKf7QtpitB0LYLETe+k8SifzsX4KixvpOsbI6S0kRiRQ5MKOio8eMSAKQ17N06+wdEOXRiId+zOxo0hA==
-
 speed-limiter@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/speed-limiter/-/speed-limiter-1.0.2.tgz#e4632f476a1d25d32557aad7bd089b3a0d948116"
@@ -8331,11 +8013,6 @@ stream-with-known-length-to-buffer@^1.0.4:
   dependencies:
     once "^1.4.0"
 
-streamify@~1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/streamify/-/streamify-1.0.0.tgz#c80a1347d6d3b905c0382011adac67402a3b1e2b"
-  integrity sha512-pe2ZoxE+ie5wAjRgKWb5Ur4R5Oa++eoQmHLqGGy4nQn/8BetJcpHkHXRuP3ZIJ/Ptl/rbd76fdn9aQJNys8cKA==
-
 streamsearch@0.1.2:
   version "0.1.2"
   resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-0.1.2.tgz#808b9d0e56fc273d809ba57338e929919a1a9f1a"
@@ -8444,21 +8121,11 @@ strip-bom@^3.0.0:
   resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
   integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=
 
-strip-eof@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
-  integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=
-
 strip-final-newline@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad"
   integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==
 
-strip-indent@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-2.0.0.tgz#5ef8db295d01e6ed6cbf7aab96998d7822527b68"
-  integrity sha1-XvjbKV0B5u1sv3qrlpmNeCJSe2g=
-
 strip-json-comments@3.1.1, strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
@@ -8726,11 +8393,6 @@ tree-kill@^1.2.2:
   resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.2.tgz#4ca09a9092c88b73a7cdc5e8a01b507b0790a0cc"
   integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==
 
-trim-newlines@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-2.0.0.tgz#b403d0b91be50c331dfc4b82eeceb22c3de16d20"
-  integrity sha1-tAPQuRvlDDMd/EuC7s6yLD3hbSA=
-
 triple-beam@^1.2.0, triple-beam@^1.3.0:
   version "1.3.0"
   resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.3.0.tgz#a595214c7298db8339eeeee083e4d10bd8cb8dd9"
@@ -8896,7 +8558,7 @@ unique-string@^2.0.0:
   dependencies:
     crypto-random-string "^2.0.0"
 
-universalify@^2.0.0, universalify@~2.0.0:
+universalify@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717"
   integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==
@@ -9051,14 +8713,6 @@ valid-data-url@^3.0.0:
   resolved "https://registry.yarnpkg.com/valid-data-url/-/valid-data-url-3.0.1.tgz#826c1744e71b5632e847dd15dbd45b9fb38aa34f"
   integrity sha512-jOWVmzVceKlVVdwjNSenT4PbGghU0SBIizAev8ofZVgivk/TVHXSbNL8LP6M3spZvkR9/QolkyJavGSX5Cs0UA==
 
-validate-npm-package-license@^3.0.1:
-  version "3.0.4"
-  resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a"
-  integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==
-  dependencies:
-    spdx-correct "^3.0.0"
-    spdx-expression-parse "^3.0.0"
-
 validator@^13.0.0, validator@^13.6.0:
   version "13.6.0"
   resolved "https://registry.yarnpkg.com/validator/-/validator-13.6.0.tgz#1e71899c14cdc7b2068463cb24c1cc16f6ec7059"
@@ -9203,7 +8857,7 @@ which@2.0.2, which@^2.0.1, which@^2.0.2:
   dependencies:
     isexe "^2.0.0"
 
-which@^1.1.1, which@^1.2.9:
+which@^1.1.1:
   version "1.3.1"
   resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
   integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
@@ -9429,13 +9083,6 @@ yargs-parser@20.2.4:
   resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54"
   integrity sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==
 
-yargs-parser@^10.0.0:
-  version "10.1.0"
-  resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-10.1.0.tgz#7202265b89f7e9e9f2e5765e0fe735a905edbaa8"
-  integrity sha512-VCIyR1wJoEBZUqk5PA+oOBF6ypbwh5aNB3I50guxAL/quggdfs4TtNHQrSazFA3fYZ+tEqfs0zIGlv0c/rgjbQ==
-  dependencies:
-    camelcase "^4.1.0"
-
 yargs-parser@^18.1.2:
   version "18.1.3"
   resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
@@ -9504,19 +9151,6 @@ yocto-queue@^0.1.0:
   resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
   integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
 
-youtube-dl@^3.0.2:
-  version "3.5.0"
-  resolved "https://registry.yarnpkg.com/youtube-dl/-/youtube-dl-3.5.0.tgz#75e7be8647128de34244cb74606edf87b9ce60fa"
-  integrity sha512-+I9o908rD154LqaVdP1f9Xlu+qYp3/m/bZeUwaxsAV7nR8W0IObqz0oAyxvE1Qrn7oTCvvg6MZ1oqkHIA8LA+g==
-  dependencies:
-    bin-version-check-cli "~2.0.0"
-    execa "~5.0.0"
-    got "~11.8.1"
-    hh-mm-ss "~1.2.0"
-    mkdirp "~1.0.4"
-    streamify "~1.0.0"
-    universalify "~2.0.0"
-
 z-schema@^5.0.1:
   version "5.0.1"
   resolved "https://registry.yarnpkg.com/z-schema/-/z-schema-5.0.1.tgz#f4d4efb1e8763c968b5539e42d11b6a47e91da62"
@@ -9527,8 +9161,3 @@ z-schema@^5.0.1:
     validator "^13.6.0"
   optionalDependencies:
     commander "^2.7.1"
-
-zero-fill@^2.2.3:
-  version "2.2.4"
-  resolved "https://registry.yarnpkg.com/zero-fill/-/zero-fill-2.2.4.tgz#b041320973dbcb03cd90193270ac8d4a3da05fc1"
-  integrity sha512-/N5GEDauLHz2uGnuJXWO1Wfib4EC+q4yp9C1jojM7RubwEKADqIqMcYpETMm1lRop403fi3v1qTOdgDE8DIOdw==