author | Chocobozzz <me@florianbigard.com> | 2023-07-31 14:34:36 +0200 |
---|---|---|
committer | Chocobozzz <me@florianbigard.com> | 2023-08-11 15:02:33 +0200 |
commit | 3a4992633ee62d5edfbb484d9c6bcb3cf158489d (patch) | |
tree | e4510b39bdac9c318fdb4b47018d08f15368b8f0 /server/scripts | |
parent | 04d1da5621d25d59bd5fa1543b725c497bf5d9a8 (diff) | |
download | PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.tar.gz PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.tar.zst PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.zip |
Migrate server to ESM
Sorry for the very big commit that may lead to git log issues and merge
conflicts, but it's a major step forward:
* Server can start faster because import() is async, so we can easily
  lazy import big modules (see the import() sketch after this list)
* Angular doesn't seem to support ES imports (with the .js extension),
  so we had to properly organize PeerTube into a monorepo:
    * Use the yarn workspace feature
    * Use TypeScript project references for dependencies
    * Shared projects have been moved into "packages", each one is now
      a node module (with a dedicated package.json/tsconfig.json)
    * server/tools has been moved into apps/ and is now a dedicated
      app, bundled and published on NPM so users don't have to build
      the PeerTube CLI tools manually
    * server/tests has been moved into packages/ so we don't compile
      them every time we want to run the server
* Use the isolatedModules option:
    * Had to move from const enum to plain const objects, as sketched
      after this list
      (https://www.typescriptlang.org/docs/handbook/enums.html#objects-vs-enums)
    * Had to explicitly specify "type" imports when used in decorators
* Prefer tsx (which uses esbuild under the hood) over ts-node to load
  TypeScript files (tests with mocha, or scripts):
    * To reduce test complexity, as esbuild doesn't support decorator
      metadata, we only test server files that do not import server
      models
    * We still build test files into JS files for a faster CI
* Remove the unmaintained peertube CLI import script
* Removed some barrels to speed up execution (fewer imports)
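A minimal sketch of the lazy import() pattern behind the faster-startup
point, using a Node built-in module for illustration (this is not actual
PeerTube code):

    // Eager version, resolved at startup:
    // import { createHash } from 'node:crypto'

    // Lazy version: the module is only resolved the first time the
    // function runs, which keeps startup lighter for heavy modules.
    async function sha256Hex (input: string) {
      const { createHash } = await import('node:crypto')

      return createHash('sha256').update(input).digest('hex')
    }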
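And a sketch of the const enum change: when each file is transpiled in
isolation (as tsx/esbuild do with isolatedModules), const enum members can
no longer be inlined across files, so a plain object declared "as const"
plus a derived type keeps a similar usage. The names and values below are
illustrative, not taken from the PeerTube codebase:

    // Before: a const enum, whose members are inlined at compile time.
    // That inlining isn't possible when files are transpiled one by one.
    // export const enum ExampleState { PUBLISHED = 1, TO_TRANSCODE = 2 }

    // After: a plain object marked "as const", plus a type alias
    // representing its possible values.
    export const ExampleState = {
      PUBLISHED: 1,
      TO_TRANSCODE: 2
    } as const

    export type ExampleStateType = typeof ExampleState[keyof typeof ExampleState]

    // Usage stays close to the enum version:
    function isPublished (state: ExampleStateType) {
      return state === ExampleState.PUBLISHED
    }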
Diffstat (limited to 'server/scripts')
-rw-r--r-- | server/scripts/create-generate-storyboard-job.ts | 85 | ||||
-rw-r--r-- | server/scripts/create-import-video-file-job.ts | 50 | ||||
-rw-r--r-- | server/scripts/create-move-video-storage-job.ts | 99 | ||||
-rw-r--r-- | server/scripts/migrations/peertube-4.0.ts | 110 | ||||
-rw-r--r-- | server/scripts/migrations/peertube-4.2.ts | 123 | ||||
-rw-r--r-- | server/scripts/migrations/peertube-5.0.ts | 71 | ||||
-rwxr-xr-x | server/scripts/parse-log.ts | 161 | ||||
-rwxr-xr-x | server/scripts/plugin/install.ts | 41 | ||||
-rwxr-xr-x | server/scripts/plugin/uninstall.ts | 29 | ||||
-rwxr-xr-x | server/scripts/prune-storage.ts | 187 | ||||
-rw-r--r-- | server/scripts/regenerate-thumbnails.ts | 64 | ||||
-rwxr-xr-x | server/scripts/reset-password.ts | 58 | ||||
-rwxr-xr-x | server/scripts/update-host.ts | 140 | ||||
-rwxr-xr-x | server/scripts/upgrade.sh | 108 |
14 files changed, 1326 insertions, 0 deletions
diff --git a/server/scripts/create-generate-storyboard-job.ts b/server/scripts/create-generate-storyboard-job.ts new file mode 100644 index 000000000..1f70e4d15 --- /dev/null +++ b/server/scripts/create-generate-storyboard-job.ts | |||
@@ -0,0 +1,85 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { toCompleteUUID } from '@server/helpers/custom-validators/misc.js' | ||
3 | import { initDatabaseModels } from '@server/initializers/database.js' | ||
4 | import { JobQueue } from '@server/lib/job-queue/index.js' | ||
5 | import { StoryboardModel } from '@server/models/video/storyboard.js' | ||
6 | import { VideoModel } from '@server/models/video/video.js' | ||
7 | |||
8 | program | ||
9 | .description('Generate video storyboards') | ||
10 | .option('-v, --video [videoUUID]', 'Generate the storyboard of a specific video') | ||
11 | .option('-a, --all-videos', 'Generate missing storyboards of local videos') | ||
12 | .parse(process.argv) | ||
13 | |||
14 | const options = program.opts() | ||
15 | |||
16 | if (!options['video'] && !options['allVideos']) { | ||
17 | console.error('You need to choose videos for storyboard generation.') | ||
18 | process.exit(-1) | ||
19 | } | ||
20 | |||
21 | run() | ||
22 | .then(() => process.exit(0)) | ||
23 | .catch(err => { | ||
24 | console.error(err) | ||
25 | process.exit(-1) | ||
26 | }) | ||
27 | |||
28 | async function run () { | ||
29 | await initDatabaseModels(true) | ||
30 | |||
31 | JobQueue.Instance.init() | ||
32 | |||
33 | let ids: number[] = [] | ||
34 | |||
35 | if (options['video']) { | ||
36 | const video = await VideoModel.load(toCompleteUUID(options['video'])) | ||
37 | |||
38 | if (!video) { | ||
39 | console.error('Unknown video ' + options['video']) | ||
40 | process.exit(-1) | ||
41 | } | ||
42 | |||
43 | if (video.remote === true) { | ||
44 | console.error('Cannot process a remote video') | ||
45 | process.exit(-1) | ||
46 | } | ||
47 | |||
48 | if (video.isLive) { | ||
49 | console.error('Cannot process live video') | ||
50 | process.exit(-1) | ||
51 | } | ||
52 | |||
53 | ids.push(video.id) | ||
54 | } else { | ||
55 | ids = await listLocalMissingStoryboards() | ||
56 | } | ||
57 | |||
58 | for (const id of ids) { | ||
59 | const videoFull = await VideoModel.load(id) | ||
60 | |||
61 | if (videoFull.isLive) continue | ||
62 | |||
63 | await JobQueue.Instance.createJob({ | ||
64 | type: 'generate-video-storyboard', | ||
65 | payload: { | ||
66 | videoUUID: videoFull.uuid, | ||
67 | federate: true | ||
68 | } | ||
69 | }) | ||
70 | |||
71 | console.log(`Created generate-storyboard job for ${videoFull.name}.`) | ||
72 | } | ||
73 | } | ||
74 | |||
75 | async function listLocalMissingStoryboards () { | ||
76 | const ids = await VideoModel.listLocalIds() | ||
77 | const results: number[] = [] | ||
78 | |||
79 | for (const id of ids) { | ||
80 | const storyboard = await StoryboardModel.loadByVideo(id) | ||
81 | if (!storyboard) results.push(id) | ||
82 | } | ||
83 | |||
84 | return results | ||
85 | } | ||
diff --git a/server/scripts/create-import-video-file-job.ts b/server/scripts/create-import-video-file-job.ts new file mode 100644 index 000000000..33a45fba1 --- /dev/null +++ b/server/scripts/create-import-video-file-job.ts | |||
@@ -0,0 +1,50 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { resolve } from 'path' | ||
3 | import { isUUIDValid, toCompleteUUID } from '@server/helpers/custom-validators/misc.js' | ||
4 | import { initDatabaseModels } from '../server/initializers/database.js' | ||
5 | import { JobQueue } from '../server/lib/job-queue/index.js' | ||
6 | import { VideoModel } from '../server/models/video/video.js' | ||
7 | |||
8 | program | ||
9 | .option('-v, --video [videoUUID]', 'Video UUID') | ||
10 | .option('-i, --import [videoFile]', 'Video file') | ||
11 | .description('Import a video file to replace an already uploaded file or to add a new resolution') | ||
12 | .parse(process.argv) | ||
13 | |||
14 | const options = program.opts() | ||
15 | |||
16 | if (options.video === undefined || options.import === undefined) { | ||
17 | console.error('All parameters are mandatory.') | ||
18 | process.exit(-1) | ||
19 | } | ||
20 | |||
21 | run() | ||
22 | .then(() => process.exit(0)) | ||
23 | .catch(err => { | ||
24 | console.error(err) | ||
25 | process.exit(-1) | ||
26 | }) | ||
27 | |||
28 | async function run () { | ||
29 | await initDatabaseModels(true) | ||
30 | |||
31 | const uuid = toCompleteUUID(options.video) | ||
32 | |||
33 | if (isUUIDValid(uuid) === false) { | ||
34 | console.error('%s is not a valid video UUID.', options.video) | ||
35 | return | ||
36 | } | ||
37 | |||
38 | const video = await VideoModel.load(uuid) | ||
39 | if (!video) throw new Error('Video not found.') | ||
40 | if (video.isOwned() === false) throw new Error('Cannot import files of a non owned video.') | ||
41 | |||
42 | const dataInput = { | ||
43 | videoUUID: video.uuid, | ||
44 | filePath: resolve(options.import) | ||
45 | } | ||
46 | |||
47 | JobQueue.Instance.init() | ||
48 | await JobQueue.Instance.createJob({ type: 'video-file-import', payload: dataInput }) | ||
49 | console.log('Import job for video %s created.', video.uuid) | ||
50 | } | ||
diff --git a/server/scripts/create-move-video-storage-job.ts b/server/scripts/create-move-video-storage-job.ts new file mode 100644 index 000000000..a615d1f44 --- /dev/null +++ b/server/scripts/create-move-video-storage-job.ts | |||
@@ -0,0 +1,99 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { toCompleteUUID } from '@server/helpers/custom-validators/misc.js' | ||
3 | import { CONFIG } from '@server/initializers/config.js' | ||
4 | import { initDatabaseModels } from '@server/initializers/database.js' | ||
5 | import { JobQueue } from '@server/lib/job-queue/index.js' | ||
6 | import { moveToExternalStorageState } from '@server/lib/video-state.js' | ||
7 | import { VideoModel } from '@server/models/video/video.js' | ||
8 | import { VideoState, VideoStorage } from '@peertube/peertube-models' | ||
9 | |||
10 | program | ||
11 | .description('Move videos to another storage.') | ||
12 | .option('-o, --to-object-storage', 'Move videos to object storage') | ||
13 | .option('-v, --video [videoUUID]', 'Move a specific video') | ||
14 | .option('-a, --all-videos', 'Migrate all videos') | ||
15 | .parse(process.argv) | ||
16 | |||
17 | const options = program.opts() | ||
18 | |||
19 | if (!options['toObjectStorage']) { | ||
20 | console.error('You need to choose where to send video files.') | ||
21 | process.exit(-1) | ||
22 | } | ||
23 | |||
24 | if (!options['video'] && !options['allVideos']) { | ||
25 | console.error('You need to choose which videos to move.') | ||
26 | process.exit(-1) | ||
27 | } | ||
28 | |||
29 | if (options['toObjectStorage'] && !CONFIG.OBJECT_STORAGE.ENABLED) { | ||
30 | console.error('Object storage is not enabled on this instance.') | ||
31 | process.exit(-1) | ||
32 | } | ||
33 | |||
34 | run() | ||
35 | .then(() => process.exit(0)) | ||
36 | .catch(err => { | ||
37 | console.error(err) | ||
38 | process.exit(-1) | ||
39 | }) | ||
40 | |||
41 | async function run () { | ||
42 | await initDatabaseModels(true) | ||
43 | |||
44 | JobQueue.Instance.init() | ||
45 | |||
46 | let ids: number[] = [] | ||
47 | |||
48 | if (options['video']) { | ||
49 | const video = await VideoModel.load(toCompleteUUID(options['video'])) | ||
50 | |||
51 | if (!video) { | ||
52 | console.error('Unknown video ' + options['video']) | ||
53 | process.exit(-1) | ||
54 | } | ||
55 | |||
56 | if (video.remote === true) { | ||
57 | console.error('Cannot process a remote video') | ||
58 | process.exit(-1) | ||
59 | } | ||
60 | |||
61 | if (video.isLive) { | ||
62 | console.error('Cannot process live video') | ||
63 | process.exit(-1) | ||
64 | } | ||
65 | |||
66 | if (video.state === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) { | ||
67 | console.error('This video is already being moved to external storage') | ||
68 | process.exit(-1) | ||
69 | } | ||
70 | |||
71 | ids.push(video.id) | ||
72 | } else { | ||
73 | ids = await VideoModel.listLocalIds() | ||
74 | } | ||
75 | |||
76 | for (const id of ids) { | ||
77 | const videoFull = await VideoModel.loadFull(id) | ||
78 | |||
79 | if (videoFull.isLive) continue | ||
80 | |||
81 | const files = videoFull.VideoFiles || [] | ||
82 | const hls = videoFull.getHLSPlaylist() | ||
83 | |||
84 | if (files.some(f => f.storage === VideoStorage.FILE_SYSTEM) || hls?.storage === VideoStorage.FILE_SYSTEM) { | ||
85 | console.log('Processing video %s.', videoFull.name) | ||
86 | |||
87 | const success = await moveToExternalStorageState({ video: videoFull, isNewVideo: false, transaction: undefined }) | ||
88 | |||
89 | if (!success) { | ||
90 | console.error( | ||
91 | 'Cannot create move job for %s: job creation may have failed or there may be pending transcoding jobs for this video', | ||
92 | videoFull.name | ||
93 | ) | ||
94 | } | ||
95 | } | ||
96 | |||
97 | console.log(`Created move-to-object-storage job for ${videoFull.name}.`) | ||
98 | } | ||
99 | } | ||
diff --git a/server/scripts/migrations/peertube-4.0.ts b/server/scripts/migrations/peertube-4.0.ts new file mode 100644 index 000000000..619c1da71 --- /dev/null +++ b/server/scripts/migrations/peertube-4.0.ts | |||
@@ -0,0 +1,110 @@ | |||
1 | import Bluebird from 'bluebird' | ||
2 | import { move } from 'fs-extra/esm' | ||
3 | import { readFile, writeFile } from 'fs/promises' | ||
4 | import { join } from 'path' | ||
5 | import { initDatabaseModels } from '@server/initializers/database.js' | ||
6 | import { federateVideoIfNeeded } from '@server/lib/activitypub/videos/index.js' | ||
7 | import { JobQueue } from '@server/lib/job-queue/index.js' | ||
8 | import { | ||
9 | generateHLSMasterPlaylistFilename, | ||
10 | generateHlsSha256SegmentsFilename, | ||
11 | getHlsResolutionPlaylistFilename | ||
12 | } from '@server/lib/paths.js' | ||
13 | import { VideoPathManager } from '@server/lib/video-path-manager.js' | ||
14 | import { VideoStreamingPlaylistModel } from '@server/models/video/video-streaming-playlist.js' | ||
15 | import { VideoModel } from '@server/models/video/video.js' | ||
16 | |||
17 | run() | ||
18 | .then(() => process.exit(0)) | ||
19 | .catch(err => { | ||
20 | console.error(err) | ||
21 | process.exit(-1) | ||
22 | |||
23 | }) | ||
24 | |||
25 | async function run () { | ||
26 | console.log('Migrate old HLS paths to new format.') | ||
27 | |||
28 | await initDatabaseModels(true) | ||
29 | |||
30 | JobQueue.Instance.init() | ||
31 | |||
32 | const ids = await VideoModel.listLocalIds() | ||
33 | |||
34 | await Bluebird.map(ids, async id => { | ||
35 | try { | ||
36 | await processVideo(id) | ||
37 | } catch (err) { | ||
38 | console.error('Cannot process video %d.', id, err) | ||
39 | } | ||
40 | }, { concurrency: 5 }) | ||
41 | |||
42 | console.log('Migration finished!') | ||
43 | } | ||
44 | |||
45 | async function processVideo (videoId: number) { | ||
46 | const video = await VideoModel.loadWithFiles(videoId) | ||
47 | |||
48 | const hls = video.getHLSPlaylist() | ||
49 | if (video.isLive || !hls || hls.playlistFilename !== 'master.m3u8' || hls.VideoFiles.length === 0) { | ||
50 | return | ||
51 | } | ||
52 | |||
53 | console.log(`Renaming HLS playlist files of video ${video.name}.`) | ||
54 | |||
55 | const playlist = await VideoStreamingPlaylistModel.loadHLSPlaylistByVideo(video.id) | ||
56 | const hlsDirPath = VideoPathManager.Instance.getFSHLSOutputPath(video) | ||
57 | |||
58 | const masterPlaylistPath = join(hlsDirPath, playlist.playlistFilename) | ||
59 | let masterPlaylistContent = await readFile(masterPlaylistPath, 'utf8') | ||
60 | |||
61 | for (const videoFile of hls.VideoFiles) { | ||
62 | const srcName = `${videoFile.resolution}.m3u8` | ||
63 | const dstName = getHlsResolutionPlaylistFilename(videoFile.filename) | ||
64 | |||
65 | const src = join(hlsDirPath, srcName) | ||
66 | const dst = join(hlsDirPath, dstName) | ||
67 | |||
68 | try { | ||
69 | await move(src, dst) | ||
70 | |||
71 | masterPlaylistContent = masterPlaylistContent.replace(new RegExp('^' + srcName + '$', 'm'), dstName) | ||
72 | } catch (err) { | ||
73 | console.error('Cannot move video file %s to %s.', src, dst, err) | ||
74 | } | ||
75 | } | ||
76 | |||
77 | await writeFile(masterPlaylistPath, masterPlaylistContent) | ||
78 | |||
79 | if (playlist.segmentsSha256Filename === 'segments-sha256.json') { | ||
80 | try { | ||
81 | const newName = generateHlsSha256SegmentsFilename(video.isLive) | ||
82 | |||
83 | const dst = join(hlsDirPath, newName) | ||
84 | await move(join(hlsDirPath, playlist.segmentsSha256Filename), dst) | ||
85 | playlist.segmentsSha256Filename = newName | ||
86 | } catch (err) { | ||
87 | console.error(`Cannot rename ${video.name} segments-sha256.json file to a new name`, err) | ||
88 | } | ||
89 | } | ||
90 | |||
91 | if (playlist.playlistFilename === 'master.m3u8') { | ||
92 | try { | ||
93 | const newName = generateHLSMasterPlaylistFilename(video.isLive) | ||
94 | |||
95 | const dst = join(hlsDirPath, newName) | ||
96 | await move(join(hlsDirPath, playlist.playlistFilename), dst) | ||
97 | playlist.playlistFilename = newName | ||
98 | } catch (err) { | ||
99 | console.error(`Cannot rename ${video.name} master.m3u8 file to a new name`, err) | ||
100 | } | ||
101 | } | ||
102 | |||
103 | // Everything worked, we can save the playlist now | ||
104 | await playlist.save() | ||
105 | |||
106 | const allVideo = await VideoModel.loadFull(video.id) | ||
107 | await federateVideoIfNeeded(allVideo, false) | ||
108 | |||
109 | console.log(`Successfully moved HLS files of ${video.name}.`) | ||
110 | } | ||
diff --git a/server/scripts/migrations/peertube-4.2.ts b/server/scripts/migrations/peertube-4.2.ts new file mode 100644 index 000000000..6c89ee39e --- /dev/null +++ b/server/scripts/migrations/peertube-4.2.ts | |||
@@ -0,0 +1,123 @@ | |||
1 | import { ActorImageType } from '@peertube/peertube-models' | ||
2 | import { buildUUID, getLowercaseExtension } from '@peertube/peertube-node-utils' | ||
3 | import { getImageSize, processImage } from '@server/helpers/image-utils.js' | ||
4 | import { CONFIG } from '@server/initializers/config.js' | ||
5 | import { ACTOR_IMAGES_SIZE } from '@server/initializers/constants.js' | ||
6 | import { initDatabaseModels } from '@server/initializers/database.js' | ||
7 | import { updateActorImages } from '@server/lib/activitypub/actors/index.js' | ||
8 | import { sendUpdateActor } from '@server/lib/activitypub/send/index.js' | ||
9 | import { getBiggestActorImage } from '@server/lib/actor-image.js' | ||
10 | import { JobQueue } from '@server/lib/job-queue/index.js' | ||
11 | import { AccountModel } from '@server/models/account/account.js' | ||
12 | import { ActorModel } from '@server/models/actor/actor.js' | ||
13 | import { VideoChannelModel } from '@server/models/video/video-channel.js' | ||
14 | import { MAccountDefault, MActorDefault, MChannelDefault } from '@server/types/models/index.js' | ||
15 | import minBy from 'lodash-es/minBy.js' | ||
16 | import { join } from 'path' | ||
17 | |||
18 | run() | ||
19 | .then(() => process.exit(0)) | ||
20 | .catch(err => { | ||
21 | console.error(err) | ||
22 | process.exit(-1) | ||
23 | }) | ||
24 | |||
25 | async function run () { | ||
26 | console.log('Generate avatar miniatures from existing avatars.') | ||
27 | |||
28 | await initDatabaseModels(true) | ||
29 | JobQueue.Instance.init() | ||
30 | |||
31 | const accounts: AccountModel[] = await AccountModel.findAll({ | ||
32 | include: [ | ||
33 | { | ||
34 | model: ActorModel, | ||
35 | required: true, | ||
36 | where: { | ||
37 | serverId: null | ||
38 | } | ||
39 | }, | ||
40 | { | ||
41 | model: VideoChannelModel, | ||
42 | include: [ | ||
43 | { | ||
44 | model: AccountModel | ||
45 | } | ||
46 | ] | ||
47 | } | ||
48 | ] | ||
49 | }) | ||
50 | |||
51 | for (const account of accounts) { | ||
52 | try { | ||
53 | await fillAvatarSizeIfNeeded(account) | ||
54 | await generateSmallerAvatarIfNeeded(account) | ||
55 | } catch (err) { | ||
56 | console.error(`Cannot process account avatar ${account.name}`, err) | ||
57 | } | ||
58 | |||
59 | for (const videoChannel of account.VideoChannels) { | ||
60 | try { | ||
61 | await fillAvatarSizeIfNeeded(videoChannel) | ||
62 | await generateSmallerAvatarIfNeeded(videoChannel) | ||
63 | } catch (err) { | ||
64 | console.error(`Cannot process channel avatar ${videoChannel.name}`, err) | ||
65 | } | ||
66 | } | ||
67 | } | ||
68 | |||
69 | console.log('Generation finished!') | ||
70 | } | ||
71 | |||
72 | async function fillAvatarSizeIfNeeded (accountOrChannel: MAccountDefault | MChannelDefault) { | ||
73 | const avatars = accountOrChannel.Actor.Avatars | ||
74 | |||
75 | for (const avatar of avatars) { | ||
76 | if (avatar.width && avatar.height) continue | ||
77 | |||
78 | console.log('Filling size of avatars of %s.', accountOrChannel.name) | ||
79 | |||
80 | const { width, height } = await getImageSize(join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, avatar.filename)) | ||
81 | avatar.width = width | ||
82 | avatar.height = height | ||
83 | |||
84 | await avatar.save() | ||
85 | } | ||
86 | } | ||
87 | |||
88 | async function generateSmallerAvatarIfNeeded (accountOrChannel: MAccountDefault | MChannelDefault) { | ||
89 | const avatars = accountOrChannel.Actor.Avatars | ||
90 | if (avatars.length !== 1) { | ||
91 | return | ||
92 | } | ||
93 | |||
94 | console.log(`Processing ${accountOrChannel.name}.`) | ||
95 | |||
96 | await generateSmallerAvatar(accountOrChannel.Actor) | ||
97 | accountOrChannel.Actor = Object.assign(accountOrChannel.Actor, { Server: null }) | ||
98 | |||
99 | return sendUpdateActor(accountOrChannel, undefined) | ||
100 | } | ||
101 | |||
102 | async function generateSmallerAvatar (actor: MActorDefault) { | ||
103 | const bigAvatar = getBiggestActorImage(actor.Avatars) | ||
104 | |||
105 | const imageSize = minBy(ACTOR_IMAGES_SIZE[ActorImageType.AVATAR], 'width') | ||
106 | const sourceFilename = bigAvatar.filename | ||
107 | |||
108 | const newImageName = buildUUID() + getLowercaseExtension(sourceFilename) | ||
109 | const source = join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, sourceFilename) | ||
110 | const destination = join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, newImageName) | ||
111 | |||
112 | await processImage({ path: source, destination, newSize: imageSize, keepOriginal: true }) | ||
113 | |||
114 | const actorImageInfo = { | ||
115 | name: newImageName, | ||
116 | fileUrl: null, | ||
117 | height: imageSize.height, | ||
118 | width: imageSize.width, | ||
119 | onDisk: true | ||
120 | } | ||
121 | |||
122 | await updateActorImages(actor, ActorImageType.AVATAR, [ actorImageInfo ], undefined) | ||
123 | } | ||
diff --git a/server/scripts/migrations/peertube-5.0.ts b/server/scripts/migrations/peertube-5.0.ts new file mode 100644 index 000000000..6139abd08 --- /dev/null +++ b/server/scripts/migrations/peertube-5.0.ts | |||
@@ -0,0 +1,71 @@ | |||
1 | import { ensureDir } from 'fs-extra/esm' | ||
2 | import { Op } from 'sequelize' | ||
3 | import { updateTorrentMetadata } from '@server/helpers/webtorrent.js' | ||
4 | import { DIRECTORIES } from '@server/initializers/constants.js' | ||
5 | import { moveFilesIfPrivacyChanged } from '@server/lib/video-privacy.js' | ||
6 | import { VideoModel } from '@server/models/video/video.js' | ||
7 | import { MVideoFullLight } from '@server/types/models/index.js' | ||
8 | import { VideoPrivacy } from '@peertube/peertube-models' | ||
9 | import { initDatabaseModels } from '@server/initializers/database.js' | ||
10 | |||
11 | run() | ||
12 | .then(() => process.exit(0)) | ||
13 | .catch(err => { | ||
14 | console.error(err) | ||
15 | process.exit(-1) | ||
16 | }) | ||
17 | |||
18 | async function run () { | ||
19 | console.log('Moving private video files in dedicated folders.') | ||
20 | |||
21 | await ensureDir(DIRECTORIES.HLS_STREAMING_PLAYLIST.PRIVATE) | ||
22 | await ensureDir(DIRECTORIES.VIDEOS.PRIVATE) | ||
23 | |||
24 | await initDatabaseModels(true) | ||
25 | |||
26 | const videos = await VideoModel.unscoped().findAll({ | ||
27 | attributes: [ 'uuid' ], | ||
28 | where: { | ||
29 | privacy: { | ||
30 | [Op.in]: [ VideoPrivacy.PRIVATE, VideoPrivacy.INTERNAL ] | ||
31 | } | ||
32 | } | ||
33 | }) | ||
34 | |||
35 | for (const { uuid } of videos) { | ||
36 | try { | ||
37 | console.log('Moving files of video %s.', uuid) | ||
38 | |||
39 | const video = await VideoModel.loadFull(uuid) | ||
40 | |||
41 | try { | ||
42 | await moveFilesIfPrivacyChanged(video, VideoPrivacy.PUBLIC) | ||
43 | } catch (err) { | ||
44 | console.error('Cannot move files of video %s.', uuid, err) | ||
45 | } | ||
46 | |||
47 | try { | ||
48 | await updateTorrents(video) | ||
49 | } catch (err) { | ||
50 | console.error('Cannot regenerate torrents of video %s.', uuid, err) | ||
51 | } | ||
52 | } catch (err) { | ||
53 | console.error('Cannot process video %s.', uuid, err) | ||
54 | } | ||
55 | } | ||
56 | } | ||
57 | |||
58 | async function updateTorrents (video: MVideoFullLight) { | ||
59 | for (const file of video.VideoFiles) { | ||
60 | await updateTorrentMetadata(video, file) | ||
61 | |||
62 | await file.save() | ||
63 | } | ||
64 | |||
65 | const playlist = video.getHLSPlaylist() | ||
66 | for (const file of (playlist?.VideoFiles || [])) { | ||
67 | await updateTorrentMetadata(playlist, file) | ||
68 | |||
69 | await file.save() | ||
70 | } | ||
71 | } | ||
diff --git a/server/scripts/parse-log.ts b/server/scripts/parse-log.ts new file mode 100755 index 000000000..e80c0d927 --- /dev/null +++ b/server/scripts/parse-log.ts | |||
@@ -0,0 +1,161 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { createReadStream } from 'fs' | ||
3 | import { readdir } from 'fs/promises' | ||
4 | import { join } from 'path' | ||
5 | import { stdin } from 'process' | ||
6 | import { createInterface } from 'readline' | ||
7 | import { format as sqlFormat } from 'sql-formatter' | ||
8 | import { inspect } from 'util' | ||
9 | import * as winston from 'winston' | ||
10 | import { labelFormatter, mtimeSortFilesDesc } from '@server/helpers/logger.js' | ||
11 | import { CONFIG } from '@server/initializers/config.js' | ||
12 | |||
13 | program | ||
14 | .option('-l, --level [level]', 'Log level (debug/info/warn/error)') | ||
15 | .option('-f, --files [file...]', 'Files to parse. If not provided, the script will parse the latest log file from the config') | ||
16 | .option('-t, --tags [tags...]', 'Display only lines with these tags') | ||
17 | .option('-nt, --not-tags [tags...]', 'Do not display lines containing these tags') | ||
18 | .parse(process.argv) | ||
19 | |||
20 | const options = program.opts() | ||
21 | |||
22 | const excludedKeys = { | ||
23 | level: true, | ||
24 | message: true, | ||
25 | splat: true, | ||
26 | timestamp: true, | ||
27 | tags: true, | ||
28 | label: true, | ||
29 | sql: true | ||
30 | } | ||
31 | function keysExcluder (key, value) { | ||
32 | return excludedKeys[key] === true ? undefined : value | ||
33 | } | ||
34 | |||
35 | const loggerFormat = winston.format.printf((info) => { | ||
36 | let additionalInfos = JSON.stringify(info, keysExcluder, 2) | ||
37 | if (additionalInfos === '{}') additionalInfos = '' | ||
38 | else additionalInfos = ' ' + additionalInfos | ||
39 | |||
40 | if (info.sql) { | ||
41 | if (CONFIG.LOG.PRETTIFY_SQL) { | ||
42 | additionalInfos += '\n' + sqlFormat(info.sql, { | ||
43 | language: 'sql', | ||
44 | tabWidth: 2 | ||
45 | }) | ||
46 | } else { | ||
47 | additionalInfos += ' - ' + info.sql | ||
48 | } | ||
49 | } | ||
50 | |||
51 | return `[${info.label}] ${toTimeFormat(info.timestamp)} ${info.level}: ${info.message}${additionalInfos}` | ||
52 | }) | ||
53 | |||
54 | const logger = winston.createLogger({ | ||
55 | transports: [ | ||
56 | new winston.transports.Console({ | ||
57 | level: options.level || 'debug', | ||
58 | stderrLevels: [], | ||
59 | format: winston.format.combine( | ||
60 | winston.format.splat(), | ||
61 | labelFormatter(), | ||
62 | winston.format.colorize(), | ||
63 | loggerFormat | ||
64 | ) | ||
65 | }) | ||
66 | ], | ||
67 | exitOnError: true | ||
68 | }) | ||
69 | |||
70 | const logLevels = { | ||
71 | error: logger.error.bind(logger), | ||
72 | warn: logger.warn.bind(logger), | ||
73 | info: logger.info.bind(logger), | ||
74 | debug: logger.debug.bind(logger) | ||
75 | } | ||
76 | |||
77 | run() | ||
78 | .then(() => process.exit(0)) | ||
79 | .catch(err => console.error(err)) | ||
80 | |||
81 | async function run () { | ||
82 | const files = await getFiles() | ||
83 | |||
84 | for (const file of files) { | ||
85 | if (file === 'peertube-audit.log') continue | ||
86 | |||
87 | await readFile(file) | ||
88 | } | ||
89 | } | ||
90 | |||
91 | function readFile (file: string) { | ||
92 | console.log('Opening %s.', file) | ||
93 | |||
94 | const stream = file === '-' ? stdin : createReadStream(file) | ||
95 | |||
96 | const rl = createInterface({ | ||
97 | input: stream | ||
98 | }) | ||
99 | |||
100 | return new Promise<void>(res => { | ||
101 | rl.on('line', line => { | ||
102 | try { | ||
103 | const log = JSON.parse(line) | ||
104 | if (options.tags && !containsTags(log.tags, options.tags)) { | ||
105 | return | ||
106 | } | ||
107 | |||
108 | if (options.notTags && containsTags(log.tags, options.notTags)) { | ||
109 | return | ||
110 | } | ||
111 | |||
112 | // Don't know why but loggerFormat does not remove splat key | ||
113 | Object.assign(log, { splat: undefined }) | ||
114 | |||
115 | logLevels[log.level](log) | ||
116 | } catch (err) { | ||
117 | console.error('Cannot parse line.', inspect(line)) | ||
118 | throw err | ||
119 | } | ||
120 | }) | ||
121 | |||
122 | stream.once('end', () => res()) | ||
123 | }) | ||
124 | } | ||
125 | |||
126 | // Thanks: https://stackoverflow.com/a/37014317 | ||
127 | async function getNewestFile (files: string[], basePath: string) { | ||
128 | const sorted = await mtimeSortFilesDesc(files, basePath) | ||
129 | |||
130 | return (sorted.length > 0) ? sorted[0].file : '' | ||
131 | } | ||
132 | |||
133 | async function getFiles () { | ||
134 | if (options.files) return options.files | ||
135 | |||
136 | const logFiles = await readdir(CONFIG.STORAGE.LOG_DIR) | ||
137 | |||
138 | const filename = await getNewestFile(logFiles, CONFIG.STORAGE.LOG_DIR) | ||
139 | return [ join(CONFIG.STORAGE.LOG_DIR, filename) ] | ||
140 | } | ||
141 | |||
142 | function toTimeFormat (time: string) { | ||
143 | const timestamp = Date.parse(time) | ||
144 | |||
145 | if (isNaN(timestamp) === true) return 'Unknown date' | ||
146 | |||
147 | const d = new Date(timestamp) | ||
148 | return d.toLocaleString() + `.${d.getMilliseconds()}` | ||
149 | } | ||
150 | |||
151 | function containsTags (loggerTags: string[], optionsTags: string[]) { | ||
152 | if (!loggerTags) return false | ||
153 | |||
154 | for (const lt of loggerTags) { | ||
155 | for (const ot of optionsTags) { | ||
156 | if (lt === ot) return true | ||
157 | } | ||
158 | } | ||
159 | |||
160 | return false | ||
161 | } | ||
diff --git a/server/scripts/plugin/install.ts b/server/scripts/plugin/install.ts new file mode 100755 index 000000000..3b13120ff --- /dev/null +++ b/server/scripts/plugin/install.ts | |||
@@ -0,0 +1,41 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { isAbsolute } from 'path' | ||
3 | import { initDatabaseModels } from '../../server/initializers/database.js' | ||
4 | import { PluginManager } from '../../server/lib/plugins/plugin-manager.js' | ||
5 | |||
6 | program | ||
7 | .option('-n, --npm-name [npmName]', 'Plugin to install') | ||
8 | .option('-v, --plugin-version [pluginVersion]', 'Plugin version to install') | ||
9 | .option('-p, --plugin-path [pluginPath]', 'Path of the plugin you want to install') | ||
10 | .parse(process.argv) | ||
11 | |||
12 | const options = program.opts() | ||
13 | |||
14 | if (!options.npmName && !options.pluginPath) { | ||
15 | console.error('You need to specify a plugin name with the desired version, or a plugin path.') | ||
16 | process.exit(-1) | ||
17 | } | ||
18 | |||
19 | if (options.pluginPath && !isAbsolute(options.pluginPath)) { | ||
20 | console.error('Plugin path should be absolute.') | ||
21 | process.exit(-1) | ||
22 | } | ||
23 | |||
24 | run() | ||
25 | .then(() => process.exit(0)) | ||
26 | .catch(err => { | ||
27 | console.error(err) | ||
28 | process.exit(-1) | ||
29 | }) | ||
30 | |||
31 | async function run () { | ||
32 | await initDatabaseModels(true) | ||
33 | |||
34 | const toInstall = options.npmName || options.pluginPath | ||
35 | await PluginManager.Instance.install({ | ||
36 | toInstall, | ||
37 | version: options.pluginVersion, | ||
38 | fromDisk: !!options.pluginPath, | ||
39 | register: false | ||
40 | }) | ||
41 | } | ||
diff --git a/server/scripts/plugin/uninstall.ts b/server/scripts/plugin/uninstall.ts new file mode 100755 index 000000000..baf0422c4 --- /dev/null +++ b/server/scripts/plugin/uninstall.ts | |||
@@ -0,0 +1,29 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { initDatabaseModels } from '@server/initializers/database.js' | ||
3 | import { PluginManager } from '@server/lib/plugins/plugin-manager.js' | ||
4 | |||
5 | program | ||
6 | .option('-n, --npm-name [npmName]', 'Package name to uninstall') | ||
7 | .parse(process.argv) | ||
8 | |||
9 | const options = program.opts() | ||
10 | |||
11 | if (!options.npmName) { | ||
12 | console.error('You need to specify the plugin name.') | ||
13 | process.exit(-1) | ||
14 | } | ||
15 | |||
16 | run() | ||
17 | .then(() => process.exit(0)) | ||
18 | .catch(err => { | ||
19 | console.error(err) | ||
20 | process.exit(-1) | ||
21 | }) | ||
22 | |||
23 | async function run () { | ||
24 | |||
25 | await initDatabaseModels(true) | ||
26 | |||
27 | const toUninstall = options.npmName | ||
28 | await PluginManager.Instance.uninstall({ npmName: toUninstall, unregister: false }) | ||
29 | } | ||
diff --git a/server/scripts/prune-storage.ts b/server/scripts/prune-storage.ts new file mode 100755 index 000000000..9309724b9 --- /dev/null +++ b/server/scripts/prune-storage.ts | |||
@@ -0,0 +1,187 @@ | |||
1 | import Bluebird from 'bluebird' | ||
2 | import { remove } from 'fs-extra/esm' | ||
3 | import { readdir, stat } from 'fs/promises' | ||
4 | import { basename, join } from 'path' | ||
5 | import prompt from 'prompt' | ||
6 | import { uniqify } from '@peertube/peertube-core-utils' | ||
7 | import { ThumbnailType, ThumbnailType_Type } from '@peertube/peertube-models' | ||
8 | import { DIRECTORIES } from '@server/initializers/constants.js' | ||
9 | import { VideoFileModel } from '@server/models/video/video-file.js' | ||
10 | import { VideoStreamingPlaylistModel } from '@server/models/video/video-streaming-playlist.js' | ||
11 | import { getUUIDFromFilename } from '../server/helpers/utils.js' | ||
12 | import { CONFIG } from '../server/initializers/config.js' | ||
13 | import { initDatabaseModels } from '../server/initializers/database.js' | ||
14 | import { ActorImageModel } from '../server/models/actor/actor-image.js' | ||
15 | import { VideoRedundancyModel } from '../server/models/redundancy/video-redundancy.js' | ||
16 | import { ThumbnailModel } from '../server/models/video/thumbnail.js' | ||
17 | import { VideoModel } from '../server/models/video/video.js' | ||
18 | |||
19 | run() | ||
20 | .then(() => process.exit(0)) | ||
21 | .catch(err => { | ||
22 | console.error(err) | ||
23 | process.exit(-1) | ||
24 | }) | ||
25 | |||
26 | async function run () { | ||
27 | const dirs = Object.values(CONFIG.STORAGE) | ||
28 | |||
29 | if (uniqify(dirs).length !== dirs.length) { | ||
30 | console.error('Cannot prune storage because you put multiple storage keys in the same directory.') | ||
31 | process.exit(0) | ||
32 | } | ||
33 | |||
34 | await initDatabaseModels(true) | ||
35 | |||
36 | let toDelete: string[] = [] | ||
37 | |||
38 | console.log('Detecting files to remove, it could take a while...') | ||
39 | |||
40 | toDelete = toDelete.concat( | ||
41 | await pruneDirectory(DIRECTORIES.VIDEOS.PUBLIC, doesWebVideoFileExist()), | ||
42 | await pruneDirectory(DIRECTORIES.VIDEOS.PRIVATE, doesWebVideoFileExist()), | ||
43 | |||
44 | await pruneDirectory(DIRECTORIES.HLS_STREAMING_PLAYLIST.PRIVATE, doesHLSPlaylistExist()), | ||
45 | await pruneDirectory(DIRECTORIES.HLS_STREAMING_PLAYLIST.PUBLIC, doesHLSPlaylistExist()), | ||
46 | |||
47 | await pruneDirectory(CONFIG.STORAGE.TORRENTS_DIR, doesTorrentFileExist()), | ||
48 | |||
49 | await pruneDirectory(CONFIG.STORAGE.REDUNDANCY_DIR, doesRedundancyExist), | ||
50 | |||
51 | await pruneDirectory(CONFIG.STORAGE.PREVIEWS_DIR, doesThumbnailExist(true, ThumbnailType.PREVIEW)), | ||
52 | await pruneDirectory(CONFIG.STORAGE.THUMBNAILS_DIR, doesThumbnailExist(false, ThumbnailType.MINIATURE)), | ||
53 | |||
54 | await pruneDirectory(CONFIG.STORAGE.ACTOR_IMAGES_DIR, doesActorImageExist) | ||
55 | ) | ||
56 | |||
57 | const tmpFiles = await readdir(CONFIG.STORAGE.TMP_DIR) | ||
58 | toDelete = toDelete.concat(tmpFiles.map(t => join(CONFIG.STORAGE.TMP_DIR, t))) | ||
59 | |||
60 | if (toDelete.length === 0) { | ||
61 | console.log('No files to delete.') | ||
62 | return | ||
63 | } | ||
64 | |||
65 | console.log('Will delete %d files:\n\n%s\n\n', toDelete.length, toDelete.join('\n')) | ||
66 | |||
67 | const res = await askConfirmation() | ||
68 | if (res === true) { | ||
69 | console.log('Processing delete...\n') | ||
70 | |||
71 | for (const path of toDelete) { | ||
72 | await remove(path) | ||
73 | } | ||
74 | |||
75 | console.log('Done!') | ||
76 | } else { | ||
77 | console.log('Exiting without deleting files.') | ||
78 | } | ||
79 | } | ||
80 | |||
81 | type ExistFun = (file: string) => Promise<boolean> | boolean | ||
82 | async function pruneDirectory (directory: string, existFun: ExistFun) { | ||
83 | const files = await readdir(directory) | ||
84 | |||
85 | const toDelete: string[] = [] | ||
86 | await Bluebird.map(files, async file => { | ||
87 | const filePath = join(directory, file) | ||
88 | |||
89 | if (await existFun(filePath) !== true) { | ||
90 | toDelete.push(filePath) | ||
91 | } | ||
92 | }, { concurrency: 20 }) | ||
93 | |||
94 | return toDelete | ||
95 | } | ||
96 | |||
97 | function doesWebVideoFileExist () { | ||
98 | return (filePath: string) => { | ||
99 | // Don't delete private directory | ||
100 | if (filePath === DIRECTORIES.VIDEOS.PRIVATE) return true | ||
101 | |||
102 | return VideoFileModel.doesOwnedWebVideoFileExist(basename(filePath)) | ||
103 | } | ||
104 | } | ||
105 | |||
106 | function doesHLSPlaylistExist () { | ||
107 | return (hlsPath: string) => { | ||
108 | // Don't delete private directory | ||
109 | if (hlsPath === DIRECTORIES.HLS_STREAMING_PLAYLIST.PRIVATE) return true | ||
110 | |||
111 | return VideoStreamingPlaylistModel.doesOwnedHLSPlaylistExist(basename(hlsPath)) | ||
112 | } | ||
113 | } | ||
114 | |||
115 | function doesTorrentFileExist () { | ||
116 | return (filePath: string) => VideoFileModel.doesOwnedTorrentFileExist(basename(filePath)) | ||
117 | } | ||
118 | |||
119 | function doesThumbnailExist (keepOnlyOwned: boolean, type: ThumbnailType_Type) { | ||
120 | return async (filePath: string) => { | ||
121 | const thumbnail = await ThumbnailModel.loadByFilename(basename(filePath), type) | ||
122 | if (!thumbnail) return false | ||
123 | |||
124 | if (keepOnlyOwned) { | ||
125 | const video = await VideoModel.load(thumbnail.videoId) | ||
126 | if (video.isOwned() === false) return false | ||
127 | } | ||
128 | |||
129 | return true | ||
130 | } | ||
131 | } | ||
132 | |||
133 | async function doesActorImageExist (filePath: string) { | ||
134 | const image = await ActorImageModel.loadByName(basename(filePath)) | ||
135 | |||
136 | return !!image | ||
137 | } | ||
138 | |||
139 | async function doesRedundancyExist (filePath: string) { | ||
140 | const isPlaylist = (await stat(filePath)).isDirectory() | ||
141 | |||
142 | if (isPlaylist) { | ||
143 | // Don't delete HLS redundancy directory | ||
144 | if (filePath === DIRECTORIES.HLS_REDUNDANCY) return true | ||
145 | |||
146 | const uuid = getUUIDFromFilename(filePath) | ||
147 | const video = await VideoModel.loadWithFiles(uuid) | ||
148 | if (!video) return false | ||
149 | |||
150 | const p = video.getHLSPlaylist() | ||
151 | if (!p) return false | ||
152 | |||
153 | const redundancy = await VideoRedundancyModel.loadLocalByStreamingPlaylistId(p.id) | ||
154 | return !!redundancy | ||
155 | } | ||
156 | |||
157 | const file = await VideoFileModel.loadByFilename(basename(filePath)) | ||
158 | if (!file) return false | ||
159 | |||
160 | const redundancy = await VideoRedundancyModel.loadLocalByFileId(file.id) | ||
161 | return !!redundancy | ||
162 | } | ||
163 | |||
164 | async function askConfirmation () { | ||
165 | return new Promise((res, rej) => { | ||
166 | prompt.start() | ||
167 | |||
168 | const schema = { | ||
169 | properties: { | ||
170 | confirm: { | ||
171 | type: 'string', | ||
172 | description: 'The following unused files can be deleted, but please check your backups first (bugs happen).' + | ||
173 | ' Note that PeerTube must be stopped when you run this script.' + | ||
174 | ' Can we delete these files?', | ||
175 | default: 'n', | ||
176 | required: true | ||
177 | } | ||
178 | } | ||
179 | } | ||
180 | |||
181 | prompt.get(schema, function (err, result) { | ||
182 | if (err) return rej(err) | ||
183 | |||
184 | return res(result.confirm?.match(/y/) !== null) | ||
185 | }) | ||
186 | }) | ||
187 | } | ||
diff --git a/server/scripts/regenerate-thumbnails.ts b/server/scripts/regenerate-thumbnails.ts new file mode 100644 index 000000000..d4346ce40 --- /dev/null +++ b/server/scripts/regenerate-thumbnails.ts | |||
@@ -0,0 +1,64 @@ | |||
1 | import Bluebird from 'bluebird' | ||
2 | import { program } from 'commander' | ||
3 | import { pathExists, remove } from 'fs-extra/esm' | ||
4 | import { generateImageFilename, processImage } from '@server/helpers/image-utils.js' | ||
5 | import { THUMBNAILS_SIZE } from '@server/initializers/constants.js' | ||
6 | import { initDatabaseModels } from '@server/initializers/database.js' | ||
7 | import { VideoModel } from '@server/models/video/video.js' | ||
8 | |||
9 | program | ||
10 | .description('Regenerate local thumbnails using preview files') | ||
11 | .parse(process.argv) | ||
12 | |||
13 | run() | ||
14 | .then(() => process.exit(0)) | ||
15 | .catch(err => console.error(err)) | ||
16 | |||
17 | async function run () { | ||
18 | await initDatabaseModels(true) | ||
19 | |||
20 | const ids = await VideoModel.listLocalIds() | ||
21 | |||
22 | await Bluebird.map(ids, id => { | ||
23 | return processVideo(id) | ||
24 | .catch(err => console.error('Cannot process video %d.', id, err)) | ||
25 | }, { concurrency: 20 }) | ||
26 | } | ||
27 | |||
28 | async function processVideo (id: number) { | ||
29 | const video = await VideoModel.loadWithFiles(id) | ||
30 | |||
31 | console.log('Processing video %s.', video.name) | ||
32 | |||
33 | const thumbnail = video.getMiniature() | ||
34 | const preview = video.getPreview() | ||
35 | |||
36 | const previewPath = preview.getPath() | ||
37 | |||
38 | if (!await pathExists(previewPath)) { | ||
39 | throw new Error(`Preview ${previewPath} does not exist on disk`) | ||
40 | } | ||
41 | |||
42 | const size = { | ||
43 | width: THUMBNAILS_SIZE.width, | ||
44 | height: THUMBNAILS_SIZE.height | ||
45 | } | ||
46 | |||
47 | const oldPath = thumbnail.getPath() | ||
48 | |||
49 | // Update thumbnail | ||
50 | thumbnail.filename = generateImageFilename() | ||
51 | thumbnail.width = size.width | ||
52 | thumbnail.height = size.height | ||
53 | |||
54 | const thumbnailPath = thumbnail.getPath() | ||
55 | await processImage({ path: previewPath, destination: thumbnailPath, newSize: size, keepOriginal: true }) | ||
56 | |||
57 | // Save new attributes | ||
58 | await thumbnail.save() | ||
59 | |||
60 | // Remove old thumbnail | ||
61 | await remove(oldPath) | ||
62 | |||
63 | // Don't federate, remote instances will refresh the thumbnails after a while | ||
64 | } | ||
diff --git a/server/scripts/reset-password.ts b/server/scripts/reset-password.ts new file mode 100755 index 000000000..96e301ba9 --- /dev/null +++ b/server/scripts/reset-password.ts | |||
@@ -0,0 +1,58 @@ | |||
1 | import { program } from 'commander' | ||
2 | import readline from 'readline' | ||
3 | import { Writable } from 'stream' | ||
4 | import { isUserPasswordValid } from '@server/helpers/custom-validators/users.js' | ||
5 | import { initDatabaseModels } from '@server/initializers/database.js' | ||
6 | import { UserModel } from '@server/models/user/user.js' | ||
7 | |||
8 | program | ||
9 | .option('-u, --user [user]', 'User') | ||
10 | .parse(process.argv) | ||
11 | |||
12 | const options = program.opts() | ||
13 | |||
14 | if (options.user === undefined) { | ||
15 | console.error('All parameters are mandatory.') | ||
16 | process.exit(-1) | ||
17 | } | ||
18 | |||
19 | initDatabaseModels(true) | ||
20 | .then(() => { | ||
21 | return UserModel.loadByUsername(options.user) | ||
22 | }) | ||
23 | .then(user => { | ||
24 | if (!user) { | ||
25 | console.error('Unknown user.') | ||
26 | process.exit(-1) | ||
27 | } | ||
28 | |||
29 | const mutableStdout = new Writable({ | ||
30 | write: function (_chunk, _encoding, callback) { | ||
31 | callback() | ||
32 | } | ||
33 | }) | ||
34 | const rl = readline.createInterface({ | ||
35 | input: process.stdin, | ||
36 | output: mutableStdout, | ||
37 | terminal: true | ||
38 | }) | ||
39 | |||
40 | console.log('New password?') | ||
41 | rl.on('line', function (password) { | ||
42 | if (!isUserPasswordValid(password)) { | ||
43 | console.error('New password is invalid.') | ||
44 | process.exit(-1) | ||
45 | } | ||
46 | |||
47 | user.password = password | ||
48 | |||
49 | user.save() | ||
50 | .then(() => console.log('User password updated.')) | ||
51 | .catch(err => console.error(err)) | ||
52 | .finally(() => process.exit(0)) | ||
53 | }) | ||
54 | }) | ||
55 | .catch(err => { | ||
56 | console.error(err) | ||
57 | process.exit(-1) | ||
58 | }) | ||
diff --git a/server/scripts/update-host.ts b/server/scripts/update-host.ts new file mode 100755 index 000000000..52ac4947a --- /dev/null +++ b/server/scripts/update-host.ts | |||
@@ -0,0 +1,140 @@ | |||
1 | import { updateTorrentMetadata } from '@server/helpers/webtorrent.js' | ||
2 | import { getServerActor } from '@server/models/application/application.js' | ||
3 | import { WEBSERVER } from '@server/initializers/constants.js' | ||
4 | import { initDatabaseModels } from '@server/initializers/database.js' | ||
5 | import { | ||
6 | getLocalAccountActivityPubUrl, | ||
7 | getLocalVideoActivityPubUrl, | ||
8 | getLocalVideoAnnounceActivityPubUrl, | ||
9 | getLocalVideoChannelActivityPubUrl, | ||
10 | getLocalVideoCommentActivityPubUrl | ||
11 | } from '@server/lib/activitypub/url.js' | ||
12 | import { AccountModel } from '@server/models/account/account.js' | ||
13 | import { ActorFollowModel } from '@server/models/actor/actor-follow.js' | ||
14 | import { ActorModel } from '@server/models/actor/actor.js' | ||
15 | import { VideoChannelModel } from '@server/models/video/video-channel.js' | ||
16 | import { VideoCommentModel } from '@server/models/video/video-comment.js' | ||
17 | import { VideoShareModel } from '@server/models/video/video-share.js' | ||
18 | import { VideoModel } from '@server/models/video/video.js' | ||
19 | |||
20 | run() | ||
21 | .then(() => process.exit(0)) | ||
22 | .catch(err => { | ||
23 | console.error(err) | ||
24 | process.exit(-1) | ||
25 | }) | ||
26 | |||
27 | async function run () { | ||
28 | await initDatabaseModels(true) | ||
29 | |||
30 | const serverAccount = await getServerActor() | ||
31 | |||
32 | { | ||
33 | const res = await ActorFollowModel.listAcceptedFollowingUrlsForApi([ serverAccount.id ], undefined) | ||
34 | const hasFollowing = res.total > 0 | ||
35 | |||
36 | if (hasFollowing === true) { | ||
37 | throw new Error('Cannot update host because you follow other servers!') | ||
38 | } | ||
39 | } | ||
40 | |||
41 | console.log('Updating actors.') | ||
42 | |||
43 | const actors: ActorModel[] = await ActorModel.unscoped().findAll({ | ||
44 | include: [ | ||
45 | { | ||
46 | model: VideoChannelModel.unscoped(), | ||
47 | required: false | ||
48 | }, | ||
49 | { | ||
50 | model: AccountModel.unscoped(), | ||
51 | required: false | ||
52 | } | ||
53 | ] | ||
54 | }) | ||
55 | for (const actor of actors) { | ||
56 | if (actor.isOwned() === false) continue | ||
57 | |||
58 | console.log('Updating actor ' + actor.url) | ||
59 | |||
60 | const newUrl = actor.Account | ||
61 | ? getLocalAccountActivityPubUrl(actor.preferredUsername) | ||
62 | : getLocalVideoChannelActivityPubUrl(actor.preferredUsername) | ||
63 | |||
64 | actor.url = newUrl | ||
65 | actor.inboxUrl = newUrl + '/inbox' | ||
66 | actor.outboxUrl = newUrl + '/outbox' | ||
67 | actor.sharedInboxUrl = WEBSERVER.URL + '/inbox' | ||
68 | actor.followersUrl = newUrl + '/followers' | ||
69 | actor.followingUrl = newUrl + '/following' | ||
70 | |||
71 | await actor.save() | ||
72 | } | ||
73 | |||
74 | console.log('Updating video shares.') | ||
75 | |||
76 | const videoShares: VideoShareModel[] = await VideoShareModel.findAll({ | ||
77 | include: [ VideoModel.unscoped(), ActorModel.unscoped() ] | ||
78 | }) | ||
79 | for (const videoShare of videoShares) { | ||
80 | if (videoShare.Video.isOwned() === false) continue | ||
81 | |||
82 | console.log('Updating video share ' + videoShare.url) | ||
83 | |||
84 | videoShare.url = getLocalVideoAnnounceActivityPubUrl(videoShare.Actor, videoShare.Video) | ||
85 | await videoShare.save() | ||
86 | } | ||
87 | |||
88 | console.log('Updating video comments.') | ||
89 | const videoComments: VideoCommentModel[] = await VideoCommentModel.findAll({ | ||
90 | include: [ | ||
91 | { | ||
92 | model: VideoModel.unscoped() | ||
93 | }, | ||
94 | { | ||
95 | model: AccountModel.unscoped(), | ||
96 | include: [ | ||
97 | { | ||
98 | model: ActorModel.unscoped() | ||
99 | } | ||
100 | ] | ||
101 | } | ||
102 | ] | ||
103 | }) | ||
104 | for (const comment of videoComments) { | ||
105 | if (comment.isOwned() === false) continue | ||
106 | |||
107 | console.log('Updating comment ' + comment.url) | ||
108 | |||
109 | comment.url = getLocalVideoCommentActivityPubUrl(comment.Video, comment) | ||
110 | await comment.save() | ||
111 | } | ||
112 | |||
113 | console.log('Updating video and torrent files.') | ||
114 | |||
115 | const ids = await VideoModel.listLocalIds() | ||
116 | for (const id of ids) { | ||
117 | const video = await VideoModel.loadFull(id) | ||
118 | |||
119 | console.log('Updating video ' + video.uuid) | ||
120 | |||
121 | video.url = getLocalVideoActivityPubUrl(video) | ||
122 | await video.save() | ||
123 | |||
124 | for (const file of video.VideoFiles) { | ||
125 | console.log('Updating torrent file %s of video %s.', file.resolution, video.uuid) | ||
126 | await updateTorrentMetadata(video, file) | ||
127 | |||
128 | await file.save() | ||
129 | } | ||
130 | |||
131 | const playlist = video.getHLSPlaylist() | ||
132 | for (const file of (playlist?.VideoFiles || [])) { | ||
133 | console.log('Updating fragmented torrent file %s of video %s.', file.resolution, video.uuid) | ||
134 | |||
135 | await updateTorrentMetadata(playlist, file) | ||
136 | |||
137 | await file.save() | ||
138 | } | ||
139 | } | ||
140 | } | ||
diff --git a/server/scripts/upgrade.sh b/server/scripts/upgrade.sh new file mode 100755 index 000000000..64c7e1581 --- /dev/null +++ b/server/scripts/upgrade.sh | |||
@@ -0,0 +1,108 @@ | |||
1 | #!/bin/sh | ||
2 | |||
3 | set -eu | ||
4 | |||
5 | PEERTUBE_PATH=${1:-/var/www/peertube} | ||
6 | |||
7 | if [ ! -e "$PEERTUBE_PATH" ]; then | ||
8 | echo "Error - path \"$PEERTUBE_PATH\" wasn't found" | ||
9 | echo "" | ||
10 | echo "If peertube was installed in another path, you can specify it with" | ||
11 | echo " ./upgrade.sh <PATH>" | ||
12 | exit 1 | ||
13 | fi | ||
14 | |||
15 | if [ ! -e "$PEERTUBE_PATH/versions" -o ! -e "$PEERTUBE_PATH/config/production.yaml" ]; then | ||
16 | echo "Error - Couldn't find peertube installation in \"$PEERTUBE_PATH\"" | ||
17 | echo "" | ||
18 | echo "If peertube was installed in another path, you can specify it with" | ||
19 | echo " ./upgrade.sh <PATH>" | ||
20 | exit 1 | ||
21 | fi | ||
22 | |||
23 | if [ -x "$(command -v awk)" ] && [ -x "$(command -v sed)" ]; then | ||
24 | REMAINING=$(df -k $PEERTUBE_PATH | awk '{ print $4}' | sed -n 2p) | ||
25 | ONE_GB=$((1024 * 1024)) | ||
26 | |||
27 | if [ "$REMAINING" -lt "$ONE_GB" ]; then | ||
28 | echo "Error - not enough free space for upgrading" | ||
29 | echo "" | ||
30 | echo "Make sure you have at least 1 GB of free space in $PEERTUBE_PATH" | ||
31 | exit 1 | ||
32 | fi | ||
33 | fi | ||
34 | |||
35 | # Backup database | ||
36 | if [ -x "$(command -v pg_dump)" ]; then | ||
37 | mkdir -p $PEERTUBE_PATH/backup | ||
38 | |||
39 | SQL_BACKUP_PATH="$PEERTUBE_PATH/backup/sql-peertube_prod-$(date +"%Y%m%d-%H%M").bak" | ||
40 | |||
41 | echo "Backing up PostgreSQL database in $SQL_BACKUP_PATH" | ||
42 | |||
43 | DB_USER=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['username'])") | ||
44 | DB_PASS=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['password'])") | ||
45 | DB_HOST=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['hostname'])") | ||
46 | DB_PORT=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['port'])") | ||
47 | DB_SUFFIX=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['suffix'])") | ||
48 | DB_NAME=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['name'] || '')") | ||
49 | |||
50 | PGPASSWORD=$DB_PASS pg_dump -U $DB_USER -p $DB_PORT -h $DB_HOST -F c "${DB_NAME:-peertube${DB_SUFFIX}}" -f "$SQL_BACKUP_PATH" | ||
51 | else | ||
52 | echo "pg_dump not found. Cannot make a SQL backup!" | ||
53 | fi | ||
54 | |||
55 | # If there is a pre-release, give the user a choice which one to install. | ||
56 | RELEASE_VERSION=$(curl -s https://api.github.com/repos/chocobozzz/peertube/releases/latest | grep tag_name | cut -d '"' -f 4) | ||
57 | PRE_RELEASE_VERSION=$(curl -s https://api.github.com/repos/chocobozzz/peertube/releases | grep tag_name | head -1 | cut -d '"' -f 4) | ||
58 | |||
59 | if [ "$RELEASE_VERSION" != "$PRE_RELEASE_VERSION" ]; then | ||
60 | echo -e "Which version do you want to install?\n[1] $RELEASE_VERSION (stable) \n[2] $PRE_RELEASE_VERSION (pre-release)" | ||
61 | read choice | ||
62 | case $choice in | ||
63 | [1]* ) VERSION="$RELEASE_VERSION";; | ||
64 | [2]* ) VERSION="$PRE_RELEASE_VERSION";; | ||
65 | * ) exit; | ||
66 | esac | ||
67 | else | ||
68 | VERSION="$RELEASE_VERSION" | ||
69 | fi | ||
70 | |||
71 | echo "Installing Peertube version $VERSION" | ||
72 | wget -q "https://github.com/Chocobozzz/PeerTube/releases/download/${VERSION}/peertube-${VERSION}.zip" -O "$PEERTUBE_PATH/versions/peertube-${VERSION}.zip" | ||
73 | cd $PEERTUBE_PATH/versions | ||
74 | unzip -o "peertube-${VERSION}.zip" | ||
75 | rm -f "peertube-${VERSION}.zip" | ||
76 | |||
77 | RELEASE_PAGE_URL="https://github.com/Chocobozzz/PeerTube/releases/tag/${VERSION}" | ||
78 | LATEST_VERSION_DIRECTORY="$PEERTUBE_PATH/versions/peertube-${VERSION}" | ||
79 | cd "$LATEST_VERSION_DIRECTORY" | ||
80 | |||
81 | # Launch yarn to check if we have all required dependencies | ||
82 | NOCLIENT=1 yarn install --production --pure-lockfile | ||
83 | |||
84 | # Switch to latest code version | ||
85 | rm -rf $PEERTUBE_PATH/peertube-latest | ||
86 | ln -s "$LATEST_VERSION_DIRECTORY" $PEERTUBE_PATH/peertube-latest | ||
87 | cp $PEERTUBE_PATH/peertube-latest/config/default.yaml $PEERTUBE_PATH/config/default.yaml | ||
88 | |||
89 | echo "" | ||
90 | echo "==========================================================" | ||
91 | echo "" | ||
92 | |||
93 | if [ -x "$(command -v git)" ]; then | ||
94 | cd "$PEERTUBE_PATH" | ||
95 | |||
96 | git merge-file -p config/production.yaml "$LATEST_VERSION_DIRECTORY/config/production.yaml.example" "peertube-latest/config/production.yaml.example" | tee "config/production.yaml.new" > /dev/null | ||
97 | echo "/var/www/peertube/config/production.yaml.new generated" | ||
98 | echo "You can review it and replace your existing production.yaml configuration" | ||
99 | else | ||
100 | echo "git command not found: unable to generate config/production.yaml.new configuration file based on your existing production.yaml configuration" | ||
101 | fi | ||
102 | |||
103 | echo "" | ||
104 | echo "==========================================================" | ||
105 | echo "" | ||
106 | echo "Please read the IMPORTANT NOTES on $RELEASE_PAGE_URL" | ||
107 | echo "" | ||
108 | echo "Then restart PeerTube!" | ||