diff options
Diffstat (limited to 'scripts')
32 files changed, 158 insertions, 1421 deletions
diff --git a/scripts/benchmark.ts b/scripts/benchmark.ts index 92fbd5490..c4e4c7275 100644 --- a/scripts/benchmark.ts +++ b/scripts/benchmark.ts | |||
@@ -1,8 +1,14 @@ | |||
1 | import autocannon, { printResult } from 'autocannon' | 1 | import autocannon, { printResult } from 'autocannon' |
2 | import { program } from 'commander' | 2 | import { program } from 'commander' |
3 | import { writeJson } from 'fs-extra' | 3 | import { writeJson } from 'fs-extra/esm' |
4 | import { Video, VideoPrivacy } from '@shared/models' | 4 | import { Video, VideoPrivacy } from '@peertube/peertube-models' |
5 | import { createMultipleServers, doubleFollow, killallServers, PeerTubeServer, setAccessTokensToServers } from '@shared/server-commands' | 5 | import { |
6 | createMultipleServers, | ||
7 | doubleFollow, | ||
8 | killallServers, | ||
9 | PeerTubeServer, | ||
10 | setAccessTokensToServers | ||
11 | } from '@peertube/peertube-server-commands' | ||
6 | 12 | ||
7 | let servers: PeerTubeServer[] | 13 | let servers: PeerTubeServer[] |
8 | // First server | 14 | // First server |
diff --git a/scripts/build/peertube-cli.sh b/scripts/build/peertube-cli.sh new file mode 100644 index 000000000..51886a0db --- /dev/null +++ b/scripts/build/peertube-cli.sh | |||
@@ -0,0 +1,12 @@ | |||
1 | #!/bin/bash | ||
2 | |||
3 | set -eu | ||
4 | |||
5 | cd ./apps/peertube-cli | ||
6 | rm -rf ./dist | ||
7 | |||
8 | ../../node_modules/.bin/tsc -b --verbose | ||
9 | rm -rf ./dist | ||
10 | mkdir ./dist | ||
11 | |||
12 | node ./scripts/build.js | ||
diff --git a/scripts/build/peertube-runner.sh b/scripts/build/peertube-runner.sh index 7f6ad5ede..321d10b4b 100755 --- a/scripts/build/peertube-runner.sh +++ b/scripts/build/peertube-runner.sh | |||
@@ -2,12 +2,11 @@ | |||
2 | 2 | ||
3 | set -eu | 3 | set -eu |
4 | 4 | ||
5 | 5 | cd ./apps/peertube-runner | |
6 | cd ./packages/peertube-runner | ||
7 | rm -rf ./dist | 6 | rm -rf ./dist |
8 | 7 | ||
9 | ../../node_modules/.bin/tsc -b --verbose | 8 | ../../node_modules/.bin/tsc -b --verbose |
10 | rm -rf ./dist | 9 | rm -rf ./dist |
11 | mkdir ./dist | 10 | mkdir ./dist |
12 | 11 | ||
13 | ./node_modules/.bin/esbuild ./peertube-runner.ts --bundle --platform=node --target=node16 --external:"./lib-cov/fluent-ffmpeg" --external:pg-hstore --outfile=dist/peertube-runner.js | 12 | node ./scripts/build.js |
diff --git a/scripts/build/server.sh b/scripts/build/server.sh index a2dfc3dd9..bbab633a2 100755 --- a/scripts/build/server.sh +++ b/scripts/build/server.sh | |||
@@ -2,10 +2,11 @@ | |||
2 | 2 | ||
3 | set -eu | 3 | set -eu |
4 | 4 | ||
5 | rm -rf ./dist | 5 | rm -rf ./dist ./packages/*/dist |
6 | 6 | ||
7 | npm run tsc -- -b --verbose | 7 | npm run tsc -- -b --verbose server/tsconfig.json |
8 | npm run resolve-tspaths:server | 8 | npm run resolve-tspaths:server |
9 | 9 | ||
10 | cp -r "./server/static" "./server/assets" "./dist/server" | 10 | cp -r "./server/server/static" "./server/server/assets" ./dist/server |
11 | cp -r "./server/lib/emails" "./dist/server/lib" | 11 | cp -r "./server/server/lib/emails" "./dist/server/lib" |
12 | cp "./server/scripts/upgrade.sh" "./dist/scripts" | ||
diff --git a/scripts/build/tests.sh b/scripts/build/tests.sh new file mode 100755 index 000000000..f94dde2a4 --- /dev/null +++ b/scripts/build/tests.sh | |||
@@ -0,0 +1,9 @@ | |||
1 | #!/bin/bash | ||
2 | |||
3 | set -eu | ||
4 | |||
5 | rm -rf ./packages/tests/dist | ||
6 | |||
7 | npm run tsc -- -b --verbose ./packages/tests/tsconfig.json | ||
8 | npm run resolve-tspaths:server-lib | ||
9 | npm run resolve-tspaths:tests | ||
diff --git a/scripts/ci.sh b/scripts/ci.sh index 9fb67f634..64968cf94 100755 --- a/scripts/ci.sh +++ b/scripts/ci.sh | |||
@@ -10,7 +10,7 @@ fi | |||
10 | retries=3 | 10 | retries=3 |
11 | speedFactor="${2:-1}" | 11 | speedFactor="${2:-1}" |
12 | 12 | ||
13 | runTest () { | 13 | runJSTest () { |
14 | jobname=$1 | 14 | jobname=$1 |
15 | shift | 15 | shift |
16 | 16 | ||
@@ -24,7 +24,7 @@ runTest () { | |||
24 | joblog="$jobname-ci.log" | 24 | joblog="$jobname-ci.log" |
25 | 25 | ||
26 | parallel -j $jobs --retries $retries \ | 26 | parallel -j $jobs --retries $retries \ |
27 | "echo Trying {} >> $joblog; npm run mocha -- -c --timeout 30000 --exit --bail {}" \ | 27 | "echo Trying {} >> $joblog; npm run mocha -- --timeout 30000 --no-config -c --exit --bail {}" \ |
28 | ::: $files | 28 | ::: $files |
29 | 29 | ||
30 | cat "$joblog" | sort | uniq -c | 30 | cat "$joblog" | sort | uniq -c |
@@ -32,92 +32,116 @@ runTest () { | |||
32 | } | 32 | } |
33 | 33 | ||
34 | findTestFiles () { | 34 | findTestFiles () { |
35 | exception="-not -name index.js" | 35 | exception="-not -name index.js -not -name index.ts -not -name *.d.ts" |
36 | 36 | ||
37 | if [ ! -z ${2+x} ]; then | 37 | if [ ! -z ${2+x} ]; then |
38 | exception="$exception -not -name $2" | 38 | exception="$exception -not -name $2" |
39 | fi | 39 | fi |
40 | 40 | ||
41 | find $1 -type f -name "*.js" $exception | xargs echo | 41 | find $1 -type f \( -name "*.js" -o -name "*.ts" \) $exception | xargs echo |
42 | } | 42 | } |
43 | 43 | ||
44 | if [ "$1" = "types-package" ]; then | 44 | if [ "$1" = "types-package" ]; then |
45 | npm run generate-types-package 0.0.0 | 45 | npm run generate-types-package 0.0.0 |
46 | npm run tsc -- --noEmit --esModuleInterop packages/types/tests/test.ts | 46 | |
47 | # Test on in independent directory | ||
48 | rm -fr /tmp/types-generator | ||
49 | mkdir -p /tmp/types-generator | ||
50 | cp -r packages/types-generator/tests /tmp/types-generator/tests | ||
51 | cp -r packages/types-generator/dist /tmp/types-generator/dist | ||
52 | (cd /tmp/types-generator/dist && npm install) | ||
53 | |||
54 | npm run tsc -- --noEmit --esModuleInterop --moduleResolution node16 /tmp/types-generator/tests/test.ts | ||
55 | rm -r /tmp/types-generator | ||
47 | elif [ "$1" = "client" ]; then | 56 | elif [ "$1" = "client" ]; then |
48 | npm run build | 57 | npm run build |
58 | npm run build:tests | ||
49 | 59 | ||
50 | feedsFiles=$(findTestFiles ./dist/server/tests/feeds) | 60 | feedsFiles=$(findTestFiles ./packages/tests/dist/feeds) |
51 | helperFiles=$(findTestFiles ./dist/server/tests/helpers) | 61 | miscFiles="./packages/tests/dist/client.js ./packages/tests/dist/misc-endpoints.js" |
52 | libFiles=$(findTestFiles ./dist/server/tests/lib) | ||
53 | miscFiles="./dist/server/tests/client.js ./dist/server/tests/misc-endpoints.js" | ||
54 | # Not in their own task, they need an index.html | 62 | # Not in their own task, they need an index.html |
55 | pluginFiles="./dist/server/tests/plugins/html-injection.js ./dist/server/tests/api/server/plugins.js" | 63 | pluginFiles="./packages/tests/dist/plugins/html-injection.js ./packages/tests/dist/api/server/plugins.js" |
64 | |||
65 | MOCHA_PARALLEL=true runJSTest "$1" $((2*$speedFactor)) $feedsFiles $miscFiles $pluginFiles | ||
56 | 66 | ||
57 | MOCHA_PARALLEL=true runTest "$1" $((2*$speedFactor)) $feedsFiles $helperFiles $miscFiles $pluginFiles $libFiles | 67 | # Use TS tests directly because we import server files |
68 | helperFiles=$(findTestFiles ./packages/tests/src/server-helpers) | ||
69 | libFiles=$(findTestFiles ./packages/tests/src/server-lib) | ||
70 | |||
71 | npm run mocha -- --timeout 30000 -c --exit --bail $libFiles $helperFiles | ||
58 | elif [ "$1" = "cli-plugin" ]; then | 72 | elif [ "$1" = "cli-plugin" ]; then |
59 | # Simulate HTML | 73 | # Simulate HTML |
60 | mkdir -p "./client/dist/en-US/" | 74 | mkdir -p "./client/dist/en-US/" |
61 | cp "./client/src/index.html" "./client/dist/en-US/index.html" | 75 | cp "./client/src/index.html" "./client/dist/en-US/index.html" |
62 | 76 | ||
63 | npm run build:server | 77 | npm run build:server |
64 | npm run setup:cli | 78 | npm run build:tests |
79 | npm run build:peertube-cli | ||
65 | 80 | ||
66 | pluginsFiles=$(findTestFiles ./dist/server/tests/plugins html-injection.js) | 81 | # html-injection test needs an HTML file |
67 | cliFiles=$(findTestFiles ./dist/server/tests/cli) | 82 | pluginsFiles=$(findTestFiles ./packages/tests/dist/plugins html-injection.js) |
83 | cliFiles=$(findTestFiles ./packages/tests/dist/cli) | ||
68 | 84 | ||
69 | MOCHA_PARALLEL=true runTest "$1" $((2*$speedFactor)) $pluginsFiles | 85 | MOCHA_PARALLEL=true runJSTest "$1" $((2*$speedFactor)) $pluginsFiles |
70 | runTest "$1" 1 $cliFiles | 86 | runJSTest "$1" 1 $cliFiles |
71 | elif [ "$1" = "api-1" ]; then | 87 | elif [ "$1" = "api-1" ]; then |
72 | npm run build:server | 88 | npm run build:server |
89 | npm run build:tests | ||
73 | 90 | ||
74 | checkParamFiles=$(findTestFiles ./dist/server/tests/api/check-params) | 91 | checkParamFiles=$(findTestFiles ./packages/tests/dist/api/check-params) |
75 | notificationsFiles=$(findTestFiles ./dist/server/tests/api/notifications) | 92 | notificationsFiles=$(findTestFiles ./packages/tests/dist/api/notifications) |
76 | searchFiles=$(findTestFiles ./dist/server/tests/api/search) | 93 | searchFiles=$(findTestFiles ./packages/tests/dist/api/search) |
77 | 94 | ||
78 | MOCHA_PARALLEL=true runTest "$1" $((3*$speedFactor)) $notificationsFiles $searchFiles $checkParamFiles | 95 | MOCHA_PARALLEL=true runJSTest "$1" $((3*$speedFactor)) $notificationsFiles $searchFiles $checkParamFiles |
79 | elif [ "$1" = "api-2" ]; then | 96 | elif [ "$1" = "api-2" ]; then |
80 | npm run build:server | 97 | npm run build:server |
98 | npm run build:tests | ||
81 | 99 | ||
82 | liveFiles=$(findTestFiles ./dist/server/tests/api/live) | 100 | liveFiles=$(findTestFiles ./packages/tests/dist/api/live) |
83 | serverFiles=$(findTestFiles ./dist/server/tests/api/server plugins.js) | 101 | # plugins test needs an HTML file |
84 | usersFiles=$(findTestFiles ./dist/server/tests/api/users) | 102 | serverFiles=$(findTestFiles ./packages/tests/dist/api/server plugins.js) |
103 | usersFiles=$(findTestFiles ./packages/tests/dist/api/users) | ||
85 | 104 | ||
86 | MOCHA_PARALLEL=true runTest "$1" $((3*$speedFactor)) $liveFiles $serverFiles $usersFiles | 105 | MOCHA_PARALLEL=true runJSTest "$1" $((3*$speedFactor)) $liveFiles $serverFiles $usersFiles |
87 | elif [ "$1" = "api-3" ]; then | 106 | elif [ "$1" = "api-3" ]; then |
88 | npm run build:server | 107 | npm run build:server |
108 | npm run build:tests | ||
89 | 109 | ||
90 | videosFiles=$(findTestFiles ./dist/server/tests/api/videos) | 110 | videosFiles=$(findTestFiles ./packages/tests/dist/api/videos) |
91 | viewsFiles=$(findTestFiles ./dist/server/tests/api/views) | 111 | viewsFiles=$(findTestFiles ./packages/tests/dist/api/views) |
92 | 112 | ||
93 | MOCHA_PARALLEL=true runTest "$1" $((3*$speedFactor)) $viewsFiles $videosFiles | 113 | MOCHA_PARALLEL=true runJSTest "$1" $((3*$speedFactor)) $viewsFiles $videosFiles |
94 | elif [ "$1" = "api-4" ]; then | 114 | elif [ "$1" = "api-4" ]; then |
95 | npm run build:server | 115 | npm run build:server |
116 | npm run build:tests | ||
96 | 117 | ||
97 | moderationFiles=$(findTestFiles ./dist/server/tests/api/moderation) | 118 | moderationFiles=$(findTestFiles ./packages/tests/dist/api/moderation) |
98 | redundancyFiles=$(findTestFiles ./dist/server/tests/api/redundancy) | 119 | redundancyFiles=$(findTestFiles ./packages/tests/dist/api/redundancy) |
99 | objectStorageFiles=$(findTestFiles ./dist/server/tests/api/object-storage) | 120 | objectStorageFiles=$(findTestFiles ./packages/tests/dist/api/object-storage) |
100 | activitypubFiles=$(findTestFiles ./dist/server/tests/api/activitypub) | 121 | activitypubFiles=$(findTestFiles ./packages/tests/dist/api/activitypub) |
101 | 122 | ||
102 | MOCHA_PARALLEL=true runTest "$1" $((2*$speedFactor)) $moderationFiles $redundancyFiles $activitypubFiles $objectStorageFiles | 123 | MOCHA_PARALLEL=true runJSTest "$1" $((2*$speedFactor)) $moderationFiles $redundancyFiles $activitypubFiles $objectStorageFiles |
103 | elif [ "$1" = "api-5" ]; then | 124 | elif [ "$1" = "api-5" ]; then |
104 | npm run build:server | 125 | npm run build:server |
126 | npm run build:tests | ||
105 | 127 | ||
106 | transcodingFiles=$(findTestFiles ./dist/server/tests/api/transcoding) | 128 | transcodingFiles=$(findTestFiles ./packages/tests/dist/api/transcoding) |
107 | runnersFiles=$(findTestFiles ./dist/server/tests/api/runners) | 129 | runnersFiles=$(findTestFiles ./packages/tests/dist/api/runners) |
108 | 130 | ||
109 | MOCHA_PARALLEL=true runTest "$1" $((2*$speedFactor)) $transcodingFiles $runnersFiles | 131 | MOCHA_PARALLEL=true runJSTest "$1" $((2*$speedFactor)) $transcodingFiles $runnersFiles |
110 | elif [ "$1" = "external-plugins" ]; then | 132 | elif [ "$1" = "external-plugins" ]; then |
111 | npm run build:server | 133 | npm run build:server |
134 | npm run build:tests | ||
112 | npm run build:peertube-runner | 135 | npm run build:peertube-runner |
113 | 136 | ||
114 | externalPluginsFiles=$(findTestFiles ./dist/server/tests/external-plugins) | 137 | externalPluginsFiles=$(findTestFiles ./packages/tests/dist/external-plugins) |
115 | peertubeRunnerFiles=$(findTestFiles ./dist/server/tests/peertube-runner) | 138 | peertubeRunnerFiles=$(findTestFiles ./packages/tests/dist/peertube-runner) |
116 | 139 | ||
117 | runTest "$1" 1 $externalPluginsFiles | 140 | runJSTest "$1" 1 $externalPluginsFiles |
118 | MOCHA_PARALLEL=true runTest "$1" $((2*$speedFactor)) $peertubeRunnerFiles | 141 | MOCHA_PARALLEL=true runJSTest "$1" $((2*$speedFactor)) $peertubeRunnerFiles |
119 | elif [ "$1" = "lint" ]; then | 142 | elif [ "$1" = "lint" ]; then |
120 | npm run eslint -- --ext .ts "./server/**/*.ts" "shared/**/*.ts" "scripts/**/*.ts" | 143 | npm run eslint -- --ext .ts "server/**/*.ts" "scripts/**/*.ts" "packages/**/*.ts" "apps/**/*.ts" |
144 | |||
121 | npm run swagger-cli -- validate support/doc/api/openapi.yaml | 145 | npm run swagger-cli -- validate support/doc/api/openapi.yaml |
122 | 146 | ||
123 | ( cd client | 147 | ( cd client |
diff --git a/scripts/client-build-stats.ts b/scripts/client-build-stats.ts index d5ecd5fea..3b26aa647 100644 --- a/scripts/client-build-stats.ts +++ b/scripts/client-build-stats.ts | |||
@@ -1,6 +1,6 @@ | |||
1 | import { readdir, stat } from 'fs-extra' | 1 | import { readdir, stat } from 'fs/promises' |
2 | import { join } from 'path' | 2 | import { join } from 'path' |
3 | import { root } from '@shared/core-utils' | 3 | import { root } from '@peertube/peertube-node-utils' |
4 | 4 | ||
5 | async function run () { | 5 | async function run () { |
6 | const result = { | 6 | const result = { |
diff --git a/scripts/create-generate-storyboard-job.ts b/scripts/create-generate-storyboard-job.ts deleted file mode 100644 index 47c08edac..000000000 --- a/scripts/create-generate-storyboard-job.ts +++ /dev/null | |||
@@ -1,85 +0,0 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { toCompleteUUID } from '@server/helpers/custom-validators/misc' | ||
3 | import { initDatabaseModels } from '@server/initializers/database' | ||
4 | import { JobQueue } from '@server/lib/job-queue' | ||
5 | import { VideoModel } from '@server/models/video/video' | ||
6 | import { StoryboardModel } from '@server/models/video/storyboard' | ||
7 | |||
8 | program | ||
9 | .description('Generate videos storyboard') | ||
10 | .option('-v, --video [videoUUID]', 'Generate the storyboard of a specific video') | ||
11 | .option('-a, --all-videos', 'Generate missing storyboards of local videos') | ||
12 | .parse(process.argv) | ||
13 | |||
14 | const options = program.opts() | ||
15 | |||
16 | if (!options['video'] && !options['allVideos']) { | ||
17 | console.error('You need to choose videos for storyboard generation.') | ||
18 | process.exit(-1) | ||
19 | } | ||
20 | |||
21 | run() | ||
22 | .then(() => process.exit(0)) | ||
23 | .catch(err => { | ||
24 | console.error(err) | ||
25 | process.exit(-1) | ||
26 | }) | ||
27 | |||
28 | async function run () { | ||
29 | await initDatabaseModels(true) | ||
30 | |||
31 | JobQueue.Instance.init() | ||
32 | |||
33 | let ids: number[] = [] | ||
34 | |||
35 | if (options['video']) { | ||
36 | const video = await VideoModel.load(toCompleteUUID(options['video'])) | ||
37 | |||
38 | if (!video) { | ||
39 | console.error('Unknown video ' + options['video']) | ||
40 | process.exit(-1) | ||
41 | } | ||
42 | |||
43 | if (video.remote === true) { | ||
44 | console.error('Cannot process a remote video') | ||
45 | process.exit(-1) | ||
46 | } | ||
47 | |||
48 | if (video.isLive) { | ||
49 | console.error('Cannot process live video') | ||
50 | process.exit(-1) | ||
51 | } | ||
52 | |||
53 | ids.push(video.id) | ||
54 | } else { | ||
55 | ids = await listLocalMissingStoryboards() | ||
56 | } | ||
57 | |||
58 | for (const id of ids) { | ||
59 | const videoFull = await VideoModel.load(id) | ||
60 | |||
61 | if (videoFull.isLive) continue | ||
62 | |||
63 | await JobQueue.Instance.createJob({ | ||
64 | type: 'generate-video-storyboard', | ||
65 | payload: { | ||
66 | videoUUID: videoFull.uuid, | ||
67 | federate: true | ||
68 | } | ||
69 | }) | ||
70 | |||
71 | console.log(`Created generate-storyboard job for ${videoFull.name}.`) | ||
72 | } | ||
73 | } | ||
74 | |||
75 | async function listLocalMissingStoryboards () { | ||
76 | const ids = await VideoModel.listLocalIds() | ||
77 | const results: number[] = [] | ||
78 | |||
79 | for (const id of ids) { | ||
80 | const storyboard = await StoryboardModel.loadByVideo(id) | ||
81 | if (!storyboard) results.push(id) | ||
82 | } | ||
83 | |||
84 | return results | ||
85 | } | ||
diff --git a/scripts/create-import-video-file-job.ts b/scripts/create-import-video-file-job.ts deleted file mode 100644 index 9cb387d2e..000000000 --- a/scripts/create-import-video-file-job.ts +++ /dev/null | |||
@@ -1,50 +0,0 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { resolve } from 'path' | ||
3 | import { isUUIDValid, toCompleteUUID } from '@server/helpers/custom-validators/misc' | ||
4 | import { initDatabaseModels } from '../server/initializers/database' | ||
5 | import { JobQueue } from '../server/lib/job-queue' | ||
6 | import { VideoModel } from '../server/models/video/video' | ||
7 | |||
8 | program | ||
9 | .option('-v, --video [videoUUID]', 'Video UUID') | ||
10 | .option('-i, --import [videoFile]', 'Video file') | ||
11 | .description('Import a video file to replace an already uploaded file or to add a new resolution') | ||
12 | .parse(process.argv) | ||
13 | |||
14 | const options = program.opts() | ||
15 | |||
16 | if (options.video === undefined || options.import === undefined) { | ||
17 | console.error('All parameters are mandatory.') | ||
18 | process.exit(-1) | ||
19 | } | ||
20 | |||
21 | run() | ||
22 | .then(() => process.exit(0)) | ||
23 | .catch(err => { | ||
24 | console.error(err) | ||
25 | process.exit(-1) | ||
26 | }) | ||
27 | |||
28 | async function run () { | ||
29 | await initDatabaseModels(true) | ||
30 | |||
31 | const uuid = toCompleteUUID(options.video) | ||
32 | |||
33 | if (isUUIDValid(uuid) === false) { | ||
34 | console.error('%s is not a valid video UUID.', options.video) | ||
35 | return | ||
36 | } | ||
37 | |||
38 | const video = await VideoModel.load(uuid) | ||
39 | if (!video) throw new Error('Video not found.') | ||
40 | if (video.isOwned() === false) throw new Error('Cannot import files of a non owned video.') | ||
41 | |||
42 | const dataInput = { | ||
43 | videoUUID: video.uuid, | ||
44 | filePath: resolve(options.import) | ||
45 | } | ||
46 | |||
47 | JobQueue.Instance.init() | ||
48 | await JobQueue.Instance.createJob({ type: 'video-file-import', payload: dataInput }) | ||
49 | console.log('Import job for video %s created.', video.uuid) | ||
50 | } | ||
diff --git a/scripts/create-move-video-storage-job.ts b/scripts/create-move-video-storage-job.ts deleted file mode 100644 index 8537114eb..000000000 --- a/scripts/create-move-video-storage-job.ts +++ /dev/null | |||
@@ -1,99 +0,0 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { toCompleteUUID } from '@server/helpers/custom-validators/misc' | ||
3 | import { CONFIG } from '@server/initializers/config' | ||
4 | import { initDatabaseModels } from '@server/initializers/database' | ||
5 | import { JobQueue } from '@server/lib/job-queue' | ||
6 | import { moveToExternalStorageState } from '@server/lib/video-state' | ||
7 | import { VideoModel } from '@server/models/video/video' | ||
8 | import { VideoState, VideoStorage } from '@shared/models' | ||
9 | |||
10 | program | ||
11 | .description('Move videos to another storage.') | ||
12 | .option('-o, --to-object-storage', 'Move videos in object storage') | ||
13 | .option('-v, --video [videoUUID]', 'Move a specific video') | ||
14 | .option('-a, --all-videos', 'Migrate all videos') | ||
15 | .parse(process.argv) | ||
16 | |||
17 | const options = program.opts() | ||
18 | |||
19 | if (!options['toObjectStorage']) { | ||
20 | console.error('You need to choose where to send video files.') | ||
21 | process.exit(-1) | ||
22 | } | ||
23 | |||
24 | if (!options['video'] && !options['allVideos']) { | ||
25 | console.error('You need to choose which videos to move.') | ||
26 | process.exit(-1) | ||
27 | } | ||
28 | |||
29 | if (options['toObjectStorage'] && !CONFIG.OBJECT_STORAGE.ENABLED) { | ||
30 | console.error('Object storage is not enabled on this instance.') | ||
31 | process.exit(-1) | ||
32 | } | ||
33 | |||
34 | run() | ||
35 | .then(() => process.exit(0)) | ||
36 | .catch(err => { | ||
37 | console.error(err) | ||
38 | process.exit(-1) | ||
39 | }) | ||
40 | |||
41 | async function run () { | ||
42 | await initDatabaseModels(true) | ||
43 | |||
44 | JobQueue.Instance.init() | ||
45 | |||
46 | let ids: number[] = [] | ||
47 | |||
48 | if (options['video']) { | ||
49 | const video = await VideoModel.load(toCompleteUUID(options['video'])) | ||
50 | |||
51 | if (!video) { | ||
52 | console.error('Unknown video ' + options['video']) | ||
53 | process.exit(-1) | ||
54 | } | ||
55 | |||
56 | if (video.remote === true) { | ||
57 | console.error('Cannot process a remote video') | ||
58 | process.exit(-1) | ||
59 | } | ||
60 | |||
61 | if (video.isLive) { | ||
62 | console.error('Cannot process live video') | ||
63 | process.exit(-1) | ||
64 | } | ||
65 | |||
66 | if (video.state === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) { | ||
67 | console.error('This video is already being moved to external storage') | ||
68 | process.exit(-1) | ||
69 | } | ||
70 | |||
71 | ids.push(video.id) | ||
72 | } else { | ||
73 | ids = await VideoModel.listLocalIds() | ||
74 | } | ||
75 | |||
76 | for (const id of ids) { | ||
77 | const videoFull = await VideoModel.loadFull(id) | ||
78 | |||
79 | if (videoFull.isLive) continue | ||
80 | |||
81 | const files = videoFull.VideoFiles || [] | ||
82 | const hls = videoFull.getHLSPlaylist() | ||
83 | |||
84 | if (files.some(f => f.storage === VideoStorage.FILE_SYSTEM) || hls?.storage === VideoStorage.FILE_SYSTEM) { | ||
85 | console.log('Processing video %s.', videoFull.name) | ||
86 | |||
87 | const success = await moveToExternalStorageState({ video: videoFull, isNewVideo: false, transaction: undefined }) | ||
88 | |||
89 | if (!success) { | ||
90 | console.error( | ||
91 | 'Cannot create move job for %s: job creation may have failed or there may be pending transcoding jobs for this video', | ||
92 | videoFull.name | ||
93 | ) | ||
94 | } | ||
95 | } | ||
96 | |||
97 | console.log(`Created move-to-object-storage job for ${videoFull.name}.`) | ||
98 | } | ||
99 | } | ||
diff --git a/scripts/dev/cli.sh b/scripts/dev/cli.sh deleted file mode 100755 index 39ecaad94..000000000 --- a/scripts/dev/cli.sh +++ /dev/null | |||
@@ -1,16 +0,0 @@ | |||
1 | #!/bin/bash | ||
2 | |||
3 | set -eu | ||
4 | |||
5 | rm -rf ./dist/server/tools/ | ||
6 | |||
7 | ( | ||
8 | cd ./server/tools | ||
9 | yarn install --pure-lockfile | ||
10 | ) | ||
11 | |||
12 | mkdir -p "./dist/server/tools" | ||
13 | cp -r "./server/tools/node_modules" "./dist/server/tools" | ||
14 | |||
15 | cd ./server/tools | ||
16 | ../../node_modules/.bin/tsc-watch --build --verbose --onSuccess 'sh -c "cd ../../ && npm run resolve-tspaths:server"' | ||
diff --git a/scripts/dev/peertube-cli.sh b/scripts/dev/peertube-cli.sh new file mode 100755 index 000000000..172bf038e --- /dev/null +++ b/scripts/dev/peertube-cli.sh | |||
@@ -0,0 +1,11 @@ | |||
1 | #!/bin/bash | ||
2 | |||
3 | set -eu | ||
4 | |||
5 | rm -rf ./apps/peertube-cli/dist | ||
6 | |||
7 | cd ./apps/peertube-cli | ||
8 | |||
9 | ../../node_modules/.bin/concurrently -k \ | ||
10 | "../../node_modules/.bin/tsc -w --noEmit" \ | ||
11 | "node ./scripts/watch.js" | ||
diff --git a/scripts/dev/peertube-runner.sh b/scripts/dev/peertube-runner.sh index e39259372..7bd756123 100755 --- a/scripts/dev/peertube-runner.sh +++ b/scripts/dev/peertube-runner.sh | |||
@@ -2,10 +2,10 @@ | |||
2 | 2 | ||
3 | set -eu | 3 | set -eu |
4 | 4 | ||
5 | rm -rf ./packages/peertube-runner/dist | 5 | rm -rf ./apps/peertube-runner/dist |
6 | 6 | ||
7 | cd ./packages/peertube-runner | 7 | cd ./apps/peertube-runner |
8 | 8 | ||
9 | ../../node_modules/.bin/concurrently -k \ | 9 | ../../node_modules/.bin/concurrently -k \ |
10 | "../../node_modules/.bin/tsc -w --noEmit" \ | 10 | "../../node_modules/.bin/tsc -w --noEmit" \ |
11 | "./node_modules/.bin/esbuild ./peertube-runner.ts --bundle --sourcemap --platform=node --external:"./lib-cov/fluent-ffmpeg" --external:pg-hstore --watch --outfile=dist/peertube-runner.js" | 11 | "node ./scripts/watch.js" |
diff --git a/scripts/dev/server.sh b/scripts/dev/server.sh index c52c5124c..4112cb2f8 100755 --- a/scripts/dev/server.sh +++ b/scripts/dev/server.sh | |||
@@ -16,10 +16,10 @@ cp -r "./client/src/locale" "./client/dist/locale" | |||
16 | 16 | ||
17 | mkdir -p "./dist/server/lib" | 17 | mkdir -p "./dist/server/lib" |
18 | 18 | ||
19 | npm run tsc -- -b -v --incremental | 19 | npm run tsc -- -b -v --incremental server/tsconfig.json |
20 | npm run resolve-tspaths:server | 20 | npm run resolve-tspaths:server |
21 | 21 | ||
22 | cp -r ./server/static ./server/assets ./dist/server | 22 | cp -r ./server/server/static ./server/server/assets ./dist/server |
23 | cp -r "./server/lib/emails" "./dist/server/lib" | 23 | cp -r "./server/server/lib/emails" "./dist/server/lib" |
24 | 24 | ||
25 | ./node_modules/.bin/tsc-watch --build --preserveWatchOutput --verbose --onSuccess 'sh -c "npm run resolve-tspaths:server && NODE_ENV=dev node dist/server"' | 25 | ./node_modules/.bin/tsc-watch --build --preserveWatchOutput --verbose --onSuccess 'sh -c "npm run resolve-tspaths:server && NODE_ENV=dev node dist/server"' server/tsconfig.json |
diff --git a/scripts/generate-code-contributors.ts b/scripts/generate-code-contributors.ts index 2fd0ecdf3..408bbec5d 100755 --- a/scripts/generate-code-contributors.ts +++ b/scripts/generate-code-contributors.ts | |||
@@ -1,4 +1,4 @@ | |||
1 | import { CLICommand } from '@shared/server-commands' | 1 | import { CLICommand } from '@peertube/peertube-server-commands' |
2 | 2 | ||
3 | run() | 3 | run() |
4 | .then(() => process.exit(0)) | 4 | .then(() => process.exit(0)) |
diff --git a/scripts/i18n/create-custom-files.ts b/scripts/i18n/create-custom-files.ts index 696a097b1..887ab86af 100755 --- a/scripts/i18n/create-custom-files.ts +++ b/scripts/i18n/create-custom-files.ts | |||
@@ -1,6 +1,7 @@ | |||
1 | import { writeJSON } from 'fs-extra' | 1 | import { readJsonSync, writeJSON } from 'fs-extra/esm' |
2 | import { join } from 'path' | 2 | import { join } from 'path' |
3 | import { root, USER_ROLE_LABELS } from '@shared/core-utils' | 3 | import { I18N_LOCALES, USER_ROLE_LABELS } from '@peertube/peertube-core-utils' |
4 | import { root } from '@peertube/peertube-node-utils' | ||
4 | import { | 5 | import { |
5 | ABUSE_STATES, | 6 | ABUSE_STATES, |
6 | buildLanguages, | 7 | buildLanguages, |
@@ -14,10 +15,9 @@ import { | |||
14 | VIDEO_PLAYLIST_TYPES, | 15 | VIDEO_PLAYLIST_TYPES, |
15 | VIDEO_PRIVACIES, | 16 | VIDEO_PRIVACIES, |
16 | VIDEO_STATES | 17 | VIDEO_STATES |
17 | } from '../../server/initializers/constants' | 18 | } from '../../server/initializers/constants.js' |
18 | import { I18N_LOCALES } from '../../shared/core-utils/i18n' | ||
19 | 19 | ||
20 | const videojs = require(join(root(), 'client', 'src', 'locale', 'videojs.en-US.json')) | 20 | const videojs = readJsonSync(join(root(), 'client', 'src', 'locale', 'videojs.en-US.json')) |
21 | const playerKeys = { | 21 | const playerKeys = { |
22 | 'Quality': 'Quality', | 22 | 'Quality': 'Quality', |
23 | 'Auto': 'Auto', | 23 | 'Auto': 'Auto', |
@@ -131,13 +131,13 @@ async function writeAll () { | |||
131 | 131 | ||
132 | for (const key of Object.keys(I18N_LOCALES)) { | 132 | for (const key of Object.keys(I18N_LOCALES)) { |
133 | const playerJsonPath = join(localePath, `player.${key}.json`) | 133 | const playerJsonPath = join(localePath, `player.${key}.json`) |
134 | const translatedPlayer = require(playerJsonPath) | 134 | const translatedPlayer = readJsonSync(playerJsonPath) |
135 | 135 | ||
136 | const newTranslatedPlayer = Object.assign({}, playerKeys, translatedPlayer) | 136 | const newTranslatedPlayer = Object.assign({}, playerKeys, translatedPlayer) |
137 | await writeJSON(playerJsonPath, newTranslatedPlayer, { spaces: 4 }) | 137 | await writeJSON(playerJsonPath, newTranslatedPlayer, { spaces: 4 }) |
138 | 138 | ||
139 | const serverJsonPath = join(localePath, `server.${key}.json`) | 139 | const serverJsonPath = join(localePath, `server.${key}.json`) |
140 | const translatedServer = require(serverJsonPath) | 140 | const translatedServer = readJsonSync(serverJsonPath) |
141 | 141 | ||
142 | const newTranslatedServer = Object.assign({}, serverKeys, translatedServer) | 142 | const newTranslatedServer = Object.assign({}, serverKeys, translatedServer) |
143 | await writeJSON(serverJsonPath, newTranslatedServer, { spaces: 4 }) | 143 | await writeJSON(serverJsonPath, newTranslatedServer, { spaces: 4 }) |
diff --git a/scripts/migrations/peertube-4.0.ts b/scripts/migrations/peertube-4.0.ts deleted file mode 100644 index b0891c2e6..000000000 --- a/scripts/migrations/peertube-4.0.ts +++ /dev/null | |||
@@ -1,104 +0,0 @@ | |||
1 | import Bluebird from 'bluebird' | ||
2 | import { move, readFile, writeFile } from 'fs-extra' | ||
3 | import { join } from 'path' | ||
4 | import { federateVideoIfNeeded } from '@server/lib/activitypub/videos' | ||
5 | import { JobQueue } from '@server/lib/job-queue' | ||
6 | import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename, getHlsResolutionPlaylistFilename } from '@server/lib/paths' | ||
7 | import { VideoPathManager } from '@server/lib/video-path-manager' | ||
8 | import { VideoModel } from '@server/models/video/video' | ||
9 | import { VideoStreamingPlaylistModel } from '@server/models/video/video-streaming-playlist' | ||
10 | import { initDatabaseModels } from '../../server/initializers/database' | ||
11 | |||
12 | run() | ||
13 | .then(() => process.exit(0)) | ||
14 | .catch(err => { | ||
15 | console.error(err) | ||
16 | process.exit(-1) | ||
17 | }) | ||
18 | |||
19 | async function run () { | ||
20 | console.log('Migrate old HLS paths to new format.') | ||
21 | |||
22 | await initDatabaseModels(true) | ||
23 | |||
24 | JobQueue.Instance.init() | ||
25 | |||
26 | const ids = await VideoModel.listLocalIds() | ||
27 | |||
28 | await Bluebird.map(ids, async id => { | ||
29 | try { | ||
30 | await processVideo(id) | ||
31 | } catch (err) { | ||
32 | console.error('Cannot process video %s.', { err }) | ||
33 | } | ||
34 | }, { concurrency: 5 }) | ||
35 | |||
36 | console.log('Migration finished!') | ||
37 | } | ||
38 | |||
39 | async function processVideo (videoId: number) { | ||
40 | const video = await VideoModel.loadWithFiles(videoId) | ||
41 | |||
42 | const hls = video.getHLSPlaylist() | ||
43 | if (video.isLive || !hls || hls.playlistFilename !== 'master.m3u8' || hls.VideoFiles.length === 0) { | ||
44 | return | ||
45 | } | ||
46 | |||
47 | console.log(`Renaming HLS playlist files of video ${video.name}.`) | ||
48 | |||
49 | const playlist = await VideoStreamingPlaylistModel.loadHLSPlaylistByVideo(video.id) | ||
50 | const hlsDirPath = VideoPathManager.Instance.getFSHLSOutputPath(video) | ||
51 | |||
52 | const masterPlaylistPath = join(hlsDirPath, playlist.playlistFilename) | ||
53 | let masterPlaylistContent = await readFile(masterPlaylistPath, 'utf8') | ||
54 | |||
55 | for (const videoFile of hls.VideoFiles) { | ||
56 | const srcName = `${videoFile.resolution}.m3u8` | ||
57 | const dstName = getHlsResolutionPlaylistFilename(videoFile.filename) | ||
58 | |||
59 | const src = join(hlsDirPath, srcName) | ||
60 | const dst = join(hlsDirPath, dstName) | ||
61 | |||
62 | try { | ||
63 | await move(src, dst) | ||
64 | |||
65 | masterPlaylistContent = masterPlaylistContent.replace(new RegExp('^' + srcName + '$', 'm'), dstName) | ||
66 | } catch (err) { | ||
67 | console.error('Cannot move video file %s to %s.', src, dst, err) | ||
68 | } | ||
69 | } | ||
70 | |||
71 | await writeFile(masterPlaylistPath, masterPlaylistContent) | ||
72 | |||
73 | if (playlist.segmentsSha256Filename === 'segments-sha256.json') { | ||
74 | try { | ||
75 | const newName = generateHlsSha256SegmentsFilename(video.isLive) | ||
76 | |||
77 | const dst = join(hlsDirPath, newName) | ||
78 | await move(join(hlsDirPath, playlist.segmentsSha256Filename), dst) | ||
79 | playlist.segmentsSha256Filename = newName | ||
80 | } catch (err) { | ||
81 | console.error(`Cannot rename ${video.name} segments-sha256.json file to a new name`, err) | ||
82 | } | ||
83 | } | ||
84 | |||
85 | if (playlist.playlistFilename === 'master.m3u8') { | ||
86 | try { | ||
87 | const newName = generateHLSMasterPlaylistFilename(video.isLive) | ||
88 | |||
89 | const dst = join(hlsDirPath, newName) | ||
90 | await move(join(hlsDirPath, playlist.playlistFilename), dst) | ||
91 | playlist.playlistFilename = newName | ||
92 | } catch (err) { | ||
93 | console.error(`Cannot rename ${video.name} master.m3u8 file to a new name`, err) | ||
94 | } | ||
95 | } | ||
96 | |||
97 | // Everything worked, we can save the playlist now | ||
98 | await playlist.save() | ||
99 | |||
100 | const allVideo = await VideoModel.loadFull(video.id) | ||
101 | await federateVideoIfNeeded(allVideo, false) | ||
102 | |||
103 | console.log(`Successfully moved HLS files of ${video.name}.`) | ||
104 | } | ||
diff --git a/scripts/migrations/peertube-4.2.ts b/scripts/migrations/peertube-4.2.ts deleted file mode 100644 index d8929692b..000000000 --- a/scripts/migrations/peertube-4.2.ts +++ /dev/null | |||
@@ -1,124 +0,0 @@ | |||
1 | import { minBy } from 'lodash' | ||
2 | import { join } from 'path' | ||
3 | import { getImageSize, processImage } from '@server/helpers/image-utils' | ||
4 | import { CONFIG } from '@server/initializers/config' | ||
5 | import { ACTOR_IMAGES_SIZE } from '@server/initializers/constants' | ||
6 | import { updateActorImages } from '@server/lib/activitypub/actors' | ||
7 | import { sendUpdateActor } from '@server/lib/activitypub/send' | ||
8 | import { getBiggestActorImage } from '@server/lib/actor-image' | ||
9 | import { JobQueue } from '@server/lib/job-queue' | ||
10 | import { AccountModel } from '@server/models/account/account' | ||
11 | import { ActorModel } from '@server/models/actor/actor' | ||
12 | import { VideoChannelModel } from '@server/models/video/video-channel' | ||
13 | import { MAccountDefault, MActorDefault, MChannelDefault } from '@server/types/models' | ||
14 | import { getLowercaseExtension } from '@shared/core-utils' | ||
15 | import { buildUUID } from '@shared/extra-utils' | ||
16 | import { ActorImageType } from '@shared/models' | ||
17 | import { initDatabaseModels } from '../../server/initializers/database' | ||
18 | |||
19 | run() | ||
20 | .then(() => process.exit(0)) | ||
21 | .catch(err => { | ||
22 | console.error(err) | ||
23 | process.exit(-1) | ||
24 | }) | ||
25 | |||
26 | async function run () { | ||
27 | console.log('Generate avatar miniatures from existing avatars.') | ||
28 | |||
29 | await initDatabaseModels(true) | ||
30 | JobQueue.Instance.init() | ||
31 | |||
32 | const accounts: AccountModel[] = await AccountModel.findAll({ | ||
33 | include: [ | ||
34 | { | ||
35 | model: ActorModel, | ||
36 | required: true, | ||
37 | where: { | ||
38 | serverId: null | ||
39 | } | ||
40 | }, | ||
41 | { | ||
42 | model: VideoChannelModel, | ||
43 | include: [ | ||
44 | { | ||
45 | model: AccountModel | ||
46 | } | ||
47 | ] | ||
48 | } | ||
49 | ] | ||
50 | }) | ||
51 | |||
52 | for (const account of accounts) { | ||
53 | try { | ||
54 | await fillAvatarSizeIfNeeded(account) | ||
55 | await generateSmallerAvatarIfNeeded(account) | ||
56 | } catch (err) { | ||
57 | console.error(`Cannot process account avatar ${account.name}`, err) | ||
58 | } | ||
59 | |||
60 | for (const videoChannel of account.VideoChannels) { | ||
61 | try { | ||
62 | await fillAvatarSizeIfNeeded(videoChannel) | ||
63 | await generateSmallerAvatarIfNeeded(videoChannel) | ||
64 | } catch (err) { | ||
65 | console.error(`Cannot process channel avatar ${videoChannel.name}`, err) | ||
66 | } | ||
67 | } | ||
68 | } | ||
69 | |||
70 | console.log('Generation finished!') | ||
71 | } | ||
72 | |||
73 | async function fillAvatarSizeIfNeeded (accountOrChannel: MAccountDefault | MChannelDefault) { | ||
74 | const avatars = accountOrChannel.Actor.Avatars | ||
75 | |||
76 | for (const avatar of avatars) { | ||
77 | if (avatar.width && avatar.height) continue | ||
78 | |||
79 | console.log('Filling size of avatars of %s.', accountOrChannel.name) | ||
80 | |||
81 | const { width, height } = await getImageSize(join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, avatar.filename)) | ||
82 | avatar.width = width | ||
83 | avatar.height = height | ||
84 | |||
85 | await avatar.save() | ||
86 | } | ||
87 | } | ||
88 | |||
89 | async function generateSmallerAvatarIfNeeded (accountOrChannel: MAccountDefault | MChannelDefault) { | ||
90 | const avatars = accountOrChannel.Actor.Avatars | ||
91 | if (avatars.length !== 1) { | ||
92 | return | ||
93 | } | ||
94 | |||
95 | console.log(`Processing ${accountOrChannel.name}.`) | ||
96 | |||
97 | await generateSmallerAvatar(accountOrChannel.Actor) | ||
98 | accountOrChannel.Actor = Object.assign(accountOrChannel.Actor, { Server: null }) | ||
99 | |||
100 | return sendUpdateActor(accountOrChannel, undefined) | ||
101 | } | ||
102 | |||
103 | async function generateSmallerAvatar (actor: MActorDefault) { | ||
104 | const bigAvatar = getBiggestActorImage(actor.Avatars) | ||
105 | |||
106 | const imageSize = minBy(ACTOR_IMAGES_SIZE[ActorImageType.AVATAR], 'width') | ||
107 | const sourceFilename = bigAvatar.filename | ||
108 | |||
109 | const newImageName = buildUUID() + getLowercaseExtension(sourceFilename) | ||
110 | const source = join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, sourceFilename) | ||
111 | const destination = join(CONFIG.STORAGE.ACTOR_IMAGES_DIR, newImageName) | ||
112 | |||
113 | await processImage({ path: source, destination, newSize: imageSize, keepOriginal: true }) | ||
114 | |||
115 | const actorImageInfo = { | ||
116 | name: newImageName, | ||
117 | fileUrl: null, | ||
118 | height: imageSize.height, | ||
119 | width: imageSize.width, | ||
120 | onDisk: true | ||
121 | } | ||
122 | |||
123 | await updateActorImages(actor, ActorImageType.AVATAR, [ actorImageInfo ], undefined) | ||
124 | } | ||
diff --git a/scripts/migrations/peertube-5.0.ts b/scripts/migrations/peertube-5.0.ts deleted file mode 100644 index a0f51a64c..000000000 --- a/scripts/migrations/peertube-5.0.ts +++ /dev/null | |||
@@ -1,71 +0,0 @@ | |||
1 | import { ensureDir } from 'fs-extra' | ||
2 | import { Op } from 'sequelize' | ||
3 | import { updateTorrentMetadata } from '@server/helpers/webtorrent' | ||
4 | import { DIRECTORIES } from '@server/initializers/constants' | ||
5 | import { moveFilesIfPrivacyChanged } from '@server/lib/video-privacy' | ||
6 | import { VideoModel } from '@server/models/video/video' | ||
7 | import { MVideoFullLight } from '@server/types/models' | ||
8 | import { VideoPrivacy } from '@shared/models' | ||
9 | import { initDatabaseModels } from '../../server/initializers/database' | ||
10 | |||
11 | run() | ||
12 | .then(() => process.exit(0)) | ||
13 | .catch(err => { | ||
14 | console.error(err) | ||
15 | process.exit(-1) | ||
16 | }) | ||
17 | |||
18 | async function run () { | ||
19 | console.log('Moving private video files in dedicated folders.') | ||
20 | |||
21 | await ensureDir(DIRECTORIES.HLS_STREAMING_PLAYLIST.PRIVATE) | ||
22 | await ensureDir(DIRECTORIES.VIDEOS.PRIVATE) | ||
23 | |||
24 | await initDatabaseModels(true) | ||
25 | |||
26 | const videos = await VideoModel.unscoped().findAll({ | ||
27 | attributes: [ 'uuid' ], | ||
28 | where: { | ||
29 | privacy: { | ||
30 | [Op.in]: [ VideoPrivacy.PRIVATE, VideoPrivacy.INTERNAL ] | ||
31 | } | ||
32 | } | ||
33 | }) | ||
34 | |||
35 | for (const { uuid } of videos) { | ||
36 | try { | ||
37 | console.log('Moving files of video %s.', uuid) | ||
38 | |||
39 | const video = await VideoModel.loadFull(uuid) | ||
40 | |||
41 | try { | ||
42 | await moveFilesIfPrivacyChanged(video, VideoPrivacy.PUBLIC) | ||
43 | } catch (err) { | ||
44 | console.error('Cannot move files of video %s.', uuid, err) | ||
45 | } | ||
46 | |||
47 | try { | ||
48 | await updateTorrents(video) | ||
49 | } catch (err) { | ||
50 | console.error('Cannot regenerate torrents of video %s.', uuid, err) | ||
51 | } | ||
52 | } catch (err) { | ||
53 | console.error('Cannot process video %s.', uuid, err) | ||
54 | } | ||
55 | } | ||
56 | } | ||
57 | |||
58 | async function updateTorrents (video: MVideoFullLight) { | ||
59 | for (const file of video.VideoFiles) { | ||
60 | await updateTorrentMetadata(video, file) | ||
61 | |||
62 | await file.save() | ||
63 | } | ||
64 | |||
65 | const playlist = video.getHLSPlaylist() | ||
66 | for (const file of (playlist?.VideoFiles || [])) { | ||
67 | await updateTorrentMetadata(playlist, file) | ||
68 | |||
69 | await file.save() | ||
70 | } | ||
71 | } | ||
diff --git a/scripts/nightly.sh b/scripts/nightly.sh index 572277f9d..e911c549d 100755 --- a/scripts/nightly.sh +++ b/scripts/nightly.sh | |||
@@ -20,6 +20,13 @@ tar_name="peertube-nightly-$today.tar.xz" | |||
20 | 20 | ||
21 | npm run build -- --source-map | 21 | npm run build -- --source-map |
22 | 22 | ||
23 | # Clean up declaration files | ||
24 | find dist/ packages/core-utils/dist/ \ | ||
25 | packages/ffmpeg/dist/ \ | ||
26 | packages/node-utils/dist/ \ | ||
27 | packages/models/dist/ \ | ||
28 | \( -name '*.d.ts' -o -name '*.d.ts.map' \) -type f -delete | ||
29 | |||
23 | nightly_version="nightly-$today" | 30 | nightly_version="nightly-$today" |
24 | sed -i 's/"version": "\([^"]\+\)"/"version": "\1-'"$nightly_version"'"/' ./package.json | 31 | sed -i 's/"version": "\([^"]\+\)"/"version": "\1-'"$nightly_version"'"/' ./package.json |
25 | 32 | ||
@@ -28,6 +35,10 @@ sed -i 's/"version": "\([^"]\+\)"/"version": "\1-'"$nightly_version"'"/' ./packa | |||
28 | # local variables | 35 | # local variables |
29 | directories_to_archive=("$directory_name/CREDITS.md" "$directory_name/FAQ.md" \ | 36 | directories_to_archive=("$directory_name/CREDITS.md" "$directory_name/FAQ.md" \ |
30 | "$directory_name/LICENSE" "$directory_name/README.md" \ | 37 | "$directory_name/LICENSE" "$directory_name/README.md" \ |
38 | "$directory_name/packages/core-utils/dist/" "$directory_name/packages/core-utils/package.json" \ | ||
39 | "$directory_name/packages/ffmpeg/dist/" "$directory_name/packages/ffmpeg/package.json" \ | ||
40 | "$directory_name/packages/node-utils/dist/" "$directory_name/packages/node-utils/package.json" \ | ||
41 | "$directory_name/packages/models/dist/" "$directory_name/packages/models/package.json" \ | ||
31 | "$directory_name/client/dist/" "$directory_name/client/yarn.lock" \ | 42 | "$directory_name/client/dist/" "$directory_name/client/yarn.lock" \ |
32 | "$directory_name/client/package.json" "$directory_name/config" \ | 43 | "$directory_name/client/package.json" "$directory_name/config" \ |
33 | "$directory_name/dist" "$directory_name/package.json" \ | 44 | "$directory_name/dist" "$directory_name/package.json" \ |
diff --git a/scripts/parse-log.ts b/scripts/parse-log.ts deleted file mode 100755 index 6770f090b..000000000 --- a/scripts/parse-log.ts +++ /dev/null | |||
@@ -1,160 +0,0 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { createReadStream, readdir } from 'fs-extra' | ||
3 | import { join } from 'path' | ||
4 | import { stdin } from 'process' | ||
5 | import { createInterface } from 'readline' | ||
6 | import { format as sqlFormat } from 'sql-formatter' | ||
7 | import { inspect } from 'util' | ||
8 | import * as winston from 'winston' | ||
9 | import { labelFormatter, mtimeSortFilesDesc } from '../server/helpers/logger' | ||
10 | import { CONFIG } from '../server/initializers/config' | ||
11 | |||
12 | program | ||
13 | .option('-l, --level [level]', 'Level log (debug/info/warn/error)') | ||
14 | .option('-f, --files [file...]', 'Files to parse. If not provided, the script will parse the latest log file from config)') | ||
15 | .option('-t, --tags [tags...]', 'Display only lines with these tags') | ||
16 | .option('-nt, --not-tags [tags...]', 'Donrt display lines containing these tags') | ||
17 | .parse(process.argv) | ||
18 | |||
19 | const options = program.opts() | ||
20 | |||
21 | const excludedKeys = { | ||
22 | level: true, | ||
23 | message: true, | ||
24 | splat: true, | ||
25 | timestamp: true, | ||
26 | tags: true, | ||
27 | label: true, | ||
28 | sql: true | ||
29 | } | ||
30 | function keysExcluder (key, value) { | ||
31 | return excludedKeys[key] === true ? undefined : value | ||
32 | } | ||
33 | |||
34 | const loggerFormat = winston.format.printf((info) => { | ||
35 | let additionalInfos = JSON.stringify(info, keysExcluder, 2) | ||
36 | if (additionalInfos === '{}') additionalInfos = '' | ||
37 | else additionalInfos = ' ' + additionalInfos | ||
38 | |||
39 | if (info.sql) { | ||
40 | if (CONFIG.LOG.PRETTIFY_SQL) { | ||
41 | additionalInfos += '\n' + sqlFormat(info.sql, { | ||
42 | language: 'sql', | ||
43 | tabWidth: 2 | ||
44 | }) | ||
45 | } else { | ||
46 | additionalInfos += ' - ' + info.sql | ||
47 | } | ||
48 | } | ||
49 | |||
50 | return `[${info.label}] ${toTimeFormat(info.timestamp)} ${info.level}: ${info.message}${additionalInfos}` | ||
51 | }) | ||
52 | |||
53 | const logger = winston.createLogger({ | ||
54 | transports: [ | ||
55 | new winston.transports.Console({ | ||
56 | level: options.level || 'debug', | ||
57 | stderrLevels: [], | ||
58 | format: winston.format.combine( | ||
59 | winston.format.splat(), | ||
60 | labelFormatter(), | ||
61 | winston.format.colorize(), | ||
62 | loggerFormat | ||
63 | ) | ||
64 | }) | ||
65 | ], | ||
66 | exitOnError: true | ||
67 | }) | ||
68 | |||
69 | const logLevels = { | ||
70 | error: logger.error.bind(logger), | ||
71 | warn: logger.warn.bind(logger), | ||
72 | info: logger.info.bind(logger), | ||
73 | debug: logger.debug.bind(logger) | ||
74 | } | ||
75 | |||
76 | run() | ||
77 | .then(() => process.exit(0)) | ||
78 | .catch(err => console.error(err)) | ||
79 | |||
80 | async function run () { | ||
81 | const files = await getFiles() | ||
82 | |||
83 | for (const file of files) { | ||
84 | if (file === 'peertube-audit.log') continue | ||
85 | |||
86 | await readFile(file) | ||
87 | } | ||
88 | } | ||
89 | |||
90 | function readFile (file: string) { | ||
91 | console.log('Opening %s.', file) | ||
92 | |||
93 | const stream = file === '-' ? stdin : createReadStream(file) | ||
94 | |||
95 | const rl = createInterface({ | ||
96 | input: stream | ||
97 | }) | ||
98 | |||
99 | return new Promise<void>(res => { | ||
100 | rl.on('line', line => { | ||
101 | try { | ||
102 | const log = JSON.parse(line) | ||
103 | if (options.tags && !containsTags(log.tags, options.tags)) { | ||
104 | return | ||
105 | } | ||
106 | |||
107 | if (options.notTags && containsTags(log.tags, options.notTags)) { | ||
108 | return | ||
109 | } | ||
110 | |||
111 | // Don't know why but loggerFormat does not remove splat key | ||
112 | Object.assign(log, { splat: undefined }) | ||
113 | |||
114 | logLevels[log.level](log) | ||
115 | } catch (err) { | ||
116 | console.error('Cannot parse line.', inspect(line)) | ||
117 | throw err | ||
118 | } | ||
119 | }) | ||
120 | |||
121 | stream.once('end', () => res()) | ||
122 | }) | ||
123 | } | ||
124 | |||
125 | // Thanks: https://stackoverflow.com/a/37014317 | ||
126 | async function getNewestFile (files: string[], basePath: string) { | ||
127 | const sorted = await mtimeSortFilesDesc(files, basePath) | ||
128 | |||
129 | return (sorted.length > 0) ? sorted[0].file : '' | ||
130 | } | ||
131 | |||
132 | async function getFiles () { | ||
133 | if (options.files) return options.files | ||
134 | |||
135 | const logFiles = await readdir(CONFIG.STORAGE.LOG_DIR) | ||
136 | |||
137 | const filename = await getNewestFile(logFiles, CONFIG.STORAGE.LOG_DIR) | ||
138 | return [ join(CONFIG.STORAGE.LOG_DIR, filename) ] | ||
139 | } | ||
140 | |||
141 | function toTimeFormat (time: string) { | ||
142 | const timestamp = Date.parse(time) | ||
143 | |||
144 | if (isNaN(timestamp) === true) return 'Unknown date' | ||
145 | |||
146 | const d = new Date(timestamp) | ||
147 | return d.toLocaleString() + `.${d.getMilliseconds()}` | ||
148 | } | ||
149 | |||
150 | function containsTags (loggerTags: string[], optionsTags: string[]) { | ||
151 | if (!loggerTags) return false | ||
152 | |||
153 | for (const lt of loggerTags) { | ||
154 | for (const ot of optionsTags) { | ||
155 | if (lt === ot) return true | ||
156 | } | ||
157 | } | ||
158 | |||
159 | return false | ||
160 | } | ||
diff --git a/scripts/plugin/install.ts b/scripts/plugin/install.ts deleted file mode 100755 index 138f34446..000000000 --- a/scripts/plugin/install.ts +++ /dev/null | |||
@@ -1,41 +0,0 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { isAbsolute } from 'path' | ||
3 | import { initDatabaseModels } from '../../server/initializers/database' | ||
4 | import { PluginManager } from '../../server/lib/plugins/plugin-manager' | ||
5 | |||
6 | program | ||
7 | .option('-n, --npm-name [npmName]', 'Plugin to install') | ||
8 | .option('-v, --plugin-version [pluginVersion]', 'Plugin version to install') | ||
9 | .option('-p, --plugin-path [pluginPath]', 'Path of the plugin you want to install') | ||
10 | .parse(process.argv) | ||
11 | |||
12 | const options = program.opts() | ||
13 | |||
14 | if (!options.npmName && !options.pluginPath) { | ||
15 | console.error('You need to specify a plugin name with the desired version, or a plugin path.') | ||
16 | process.exit(-1) | ||
17 | } | ||
18 | |||
19 | if (options.pluginPath && !isAbsolute(options.pluginPath)) { | ||
20 | console.error('Plugin path should be absolute.') | ||
21 | process.exit(-1) | ||
22 | } | ||
23 | |||
24 | run() | ||
25 | .then(() => process.exit(0)) | ||
26 | .catch(err => { | ||
27 | console.error(err) | ||
28 | process.exit(-1) | ||
29 | }) | ||
30 | |||
31 | async function run () { | ||
32 | await initDatabaseModels(true) | ||
33 | |||
34 | const toInstall = options.npmName || options.pluginPath | ||
35 | await PluginManager.Instance.install({ | ||
36 | toInstall, | ||
37 | version: options.pluginVersion, | ||
38 | fromDisk: !!options.pluginPath, | ||
39 | register: false | ||
40 | }) | ||
41 | } | ||
diff --git a/scripts/plugin/uninstall.ts b/scripts/plugin/uninstall.ts deleted file mode 100755 index 770594685..000000000 --- a/scripts/plugin/uninstall.ts +++ /dev/null | |||
@@ -1,29 +0,0 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { initDatabaseModels } from '../../server/initializers/database' | ||
3 | import { PluginManager } from '../../server/lib/plugins/plugin-manager' | ||
4 | |||
5 | program | ||
6 | .option('-n, --npm-name [npmName]', 'Package name to install') | ||
7 | .parse(process.argv) | ||
8 | |||
9 | const options = program.opts() | ||
10 | |||
11 | if (!options.npmName) { | ||
12 | console.error('You need to specify the plugin name.') | ||
13 | process.exit(-1) | ||
14 | } | ||
15 | |||
16 | run() | ||
17 | .then(() => process.exit(0)) | ||
18 | .catch(err => { | ||
19 | console.error(err) | ||
20 | process.exit(-1) | ||
21 | }) | ||
22 | |||
23 | async function run () { | ||
24 | |||
25 | await initDatabaseModels(true) | ||
26 | |||
27 | const toUninstall = options.npmName | ||
28 | await PluginManager.Instance.uninstall({ npmName: toUninstall, unregister: false }) | ||
29 | } | ||
diff --git a/scripts/prune-storage.ts b/scripts/prune-storage.ts deleted file mode 100755 index 9a73a8600..000000000 --- a/scripts/prune-storage.ts +++ /dev/null | |||
@@ -1,184 +0,0 @@ | |||
1 | import { map } from 'bluebird' | ||
2 | import { readdir, remove, stat } from 'fs-extra' | ||
3 | import { basename, join } from 'path' | ||
4 | import { get, start } from 'prompt' | ||
5 | import { DIRECTORIES } from '@server/initializers/constants' | ||
6 | import { VideoFileModel } from '@server/models/video/video-file' | ||
7 | import { VideoStreamingPlaylistModel } from '@server/models/video/video-streaming-playlist' | ||
8 | import { uniqify } from '@shared/core-utils' | ||
9 | import { ThumbnailType } from '@shared/models' | ||
10 | import { getUUIDFromFilename } from '../server/helpers/utils' | ||
11 | import { CONFIG } from '../server/initializers/config' | ||
12 | import { initDatabaseModels } from '../server/initializers/database' | ||
13 | import { ActorImageModel } from '../server/models/actor/actor-image' | ||
14 | import { VideoRedundancyModel } from '../server/models/redundancy/video-redundancy' | ||
15 | import { ThumbnailModel } from '../server/models/video/thumbnail' | ||
16 | import { VideoModel } from '../server/models/video/video' | ||
17 | |||
18 | run() | ||
19 | .then(() => process.exit(0)) | ||
20 | .catch(err => { | ||
21 | console.error(err) | ||
22 | process.exit(-1) | ||
23 | }) | ||
24 | |||
25 | async function run () { | ||
26 | const dirs = Object.values(CONFIG.STORAGE) | ||
27 | |||
28 | if (uniqify(dirs).length !== dirs.length) { | ||
29 | console.error('Cannot prune storage because you put multiple storage keys in the same directory.') | ||
30 | process.exit(0) | ||
31 | } | ||
32 | |||
33 | await initDatabaseModels(true) | ||
34 | |||
35 | let toDelete: string[] = [] | ||
36 | |||
37 | console.log('Detecting files to remove, it could take a while...') | ||
38 | |||
39 | toDelete = toDelete.concat( | ||
40 | await pruneDirectory(DIRECTORIES.VIDEOS.PUBLIC, doesWebVideoFileExist()), | ||
41 | await pruneDirectory(DIRECTORIES.VIDEOS.PRIVATE, doesWebVideoFileExist()), | ||
42 | |||
43 | await pruneDirectory(DIRECTORIES.HLS_STREAMING_PLAYLIST.PRIVATE, doesHLSPlaylistExist()), | ||
44 | await pruneDirectory(DIRECTORIES.HLS_STREAMING_PLAYLIST.PUBLIC, doesHLSPlaylistExist()), | ||
45 | |||
46 | await pruneDirectory(CONFIG.STORAGE.TORRENTS_DIR, doesTorrentFileExist()), | ||
47 | |||
48 | await pruneDirectory(CONFIG.STORAGE.REDUNDANCY_DIR, doesRedundancyExist), | ||
49 | |||
50 | await pruneDirectory(CONFIG.STORAGE.PREVIEWS_DIR, doesThumbnailExist(true, ThumbnailType.PREVIEW)), | ||
51 | await pruneDirectory(CONFIG.STORAGE.THUMBNAILS_DIR, doesThumbnailExist(false, ThumbnailType.MINIATURE)), | ||
52 | |||
53 | await pruneDirectory(CONFIG.STORAGE.ACTOR_IMAGES_DIR, doesActorImageExist) | ||
54 | ) | ||
55 | |||
56 | const tmpFiles = await readdir(CONFIG.STORAGE.TMP_DIR) | ||
57 | toDelete = toDelete.concat(tmpFiles.map(t => join(CONFIG.STORAGE.TMP_DIR, t))) | ||
58 | |||
59 | if (toDelete.length === 0) { | ||
60 | console.log('No files to delete.') | ||
61 | return | ||
62 | } | ||
63 | |||
64 | console.log('Will delete %d files:\n\n%s\n\n', toDelete.length, toDelete.join('\n')) | ||
65 | |||
66 | const res = await askConfirmation() | ||
67 | if (res === true) { | ||
68 | console.log('Processing delete...\n') | ||
69 | |||
70 | for (const path of toDelete) { | ||
71 | await remove(path) | ||
72 | } | ||
73 | |||
74 | console.log('Done!') | ||
75 | } else { | ||
76 | console.log('Exiting without deleting files.') | ||
77 | } | ||
78 | } | ||
79 | |||
80 | type ExistFun = (file: string) => Promise<boolean> | boolean | ||
81 | async function pruneDirectory (directory: string, existFun: ExistFun) { | ||
82 | const files = await readdir(directory) | ||
83 | |||
84 | const toDelete: string[] = [] | ||
85 | await map(files, async file => { | ||
86 | const filePath = join(directory, file) | ||
87 | |||
88 | if (await existFun(filePath) !== true) { | ||
89 | toDelete.push(filePath) | ||
90 | } | ||
91 | }, { concurrency: 20 }) | ||
92 | |||
93 | return toDelete | ||
94 | } | ||
95 | |||
96 | function doesWebVideoFileExist () { | ||
97 | return (filePath: string) => { | ||
98 | // Don't delete private directory | ||
99 | if (filePath === DIRECTORIES.VIDEOS.PRIVATE) return true | ||
100 | |||
101 | return VideoFileModel.doesOwnedWebVideoFileExist(basename(filePath)) | ||
102 | } | ||
103 | } | ||
104 | |||
105 | function doesHLSPlaylistExist () { | ||
106 | return (hlsPath: string) => { | ||
107 | // Don't delete private directory | ||
108 | if (hlsPath === DIRECTORIES.HLS_STREAMING_PLAYLIST.PRIVATE) return true | ||
109 | |||
110 | return VideoStreamingPlaylistModel.doesOwnedHLSPlaylistExist(basename(hlsPath)) | ||
111 | } | ||
112 | } | ||
113 | |||
114 | function doesTorrentFileExist () { | ||
115 | return (filePath: string) => VideoFileModel.doesOwnedTorrentFileExist(basename(filePath)) | ||
116 | } | ||
117 | |||
118 | function doesThumbnailExist (keepOnlyOwned: boolean, type: ThumbnailType) { | ||
119 | return async (filePath: string) => { | ||
120 | const thumbnail = await ThumbnailModel.loadByFilename(basename(filePath), type) | ||
121 | if (!thumbnail) return false | ||
122 | |||
123 | if (keepOnlyOwned) { | ||
124 | const video = await VideoModel.load(thumbnail.videoId) | ||
125 | if (video.isOwned() === false) return false | ||
126 | } | ||
127 | |||
128 | return true | ||
129 | } | ||
130 | } | ||
131 | |||
132 | async function doesActorImageExist (filePath: string) { | ||
133 | const image = await ActorImageModel.loadByName(basename(filePath)) | ||
134 | |||
135 | return !!image | ||
136 | } | ||
137 | |||
138 | async function doesRedundancyExist (filePath: string) { | ||
139 | const isPlaylist = (await stat(filePath)).isDirectory() | ||
140 | |||
141 | if (isPlaylist) { | ||
142 | // Don't delete HLS redundancy directory | ||
143 | if (filePath === DIRECTORIES.HLS_REDUNDANCY) return true | ||
144 | |||
145 | const uuid = getUUIDFromFilename(filePath) | ||
146 | const video = await VideoModel.loadWithFiles(uuid) | ||
147 | if (!video) return false | ||
148 | |||
149 | const p = video.getHLSPlaylist() | ||
150 | if (!p) return false | ||
151 | |||
152 | const redundancy = await VideoRedundancyModel.loadLocalByStreamingPlaylistId(p.id) | ||
153 | return !!redundancy | ||
154 | } | ||
155 | |||
156 | const file = await VideoFileModel.loadByFilename(basename(filePath)) | ||
157 | if (!file) return false | ||
158 | |||
159 | const redundancy = await VideoRedundancyModel.loadLocalByFileId(file.id) | ||
160 | return !!redundancy | ||
161 | } | ||
162 | |||
163 | async function askConfirmation () { | ||
164 | return new Promise((res, rej) => { | ||
165 | start() | ||
166 | const schema = { | ||
167 | properties: { | ||
168 | confirm: { | ||
169 | type: 'string', | ||
170 | description: 'The following unused files can be deleted, but please check your backups first (bugs happen).' + | ||
171 | ' Note that PeerTube must have been stopped when you ran this script.' + | ||
172 | ' Can we delete these files?', | ||
173 | default: 'n', | ||
174 | required: true | ||
175 | } | ||
176 | } | ||
177 | } | ||
178 | get(schema, function (err, result) { | ||
179 | if (err) return rej(err) | ||
180 | |||
181 | return res(result.confirm?.match(/y/) !== null) | ||
182 | }) | ||
183 | }) | ||
184 | } | ||
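The pruning logic above follows one pattern per storage directory: list every file, ask the database whether it is still referenced, and queue anything unknown for deletion. Below is a minimal standalone sketch of that pattern using only Node built-ins instead of bluebird's map and PeerTube's models; findUnreferencedFiles and the isStillReferenced callback are hypothetical stand-ins for pruneDirectory and the does*Exist checks.

import { readdir } from 'node:fs/promises'
import { join } from 'node:path'

type ExistCheck = (filePath: string) => Promise<boolean> | boolean

// List a directory and keep every entry the exist check does not recognise
async function findUnreferencedFiles (directory: string, isStillReferenced: ExistCheck): Promise<string[]> {
  const entries = await readdir(directory)
  const toDelete: string[] = []

  for (const entry of entries) {
    const filePath = join(directory, entry)

    if (await isStillReferenced(filePath) !== true) {
      toDelete.push(filePath)
    }
  }

  return toDelete
}

The real script additionally whitelists the private web video and HLS directories in its exist checks so they are never reported as orphans.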
diff --git a/scripts/regenerate-thumbnails.ts b/scripts/regenerate-thumbnails.ts deleted file mode 100644 index 061819387..000000000 --- a/scripts/regenerate-thumbnails.ts +++ /dev/null | |||
@@ -1,64 +0,0 @@ | |||
1 | import { map } from 'bluebird' | ||
2 | import { program } from 'commander' | ||
3 | import { pathExists, remove } from 'fs-extra' | ||
4 | import { generateImageFilename, processImage } from '@server/helpers/image-utils' | ||
5 | import { THUMBNAILS_SIZE } from '@server/initializers/constants' | ||
6 | import { initDatabaseModels } from '@server/initializers/database' | ||
7 | import { VideoModel } from '@server/models/video/video' | ||
8 | |||
9 | program | ||
10 | .description('Regenerate local thumbnails using preview files') | ||
11 | .parse(process.argv) | ||
12 | |||
13 | run() | ||
14 | .then(() => process.exit(0)) | ||
15 | .catch(err => console.error(err)) | ||
16 | |||
17 | async function run () { | ||
18 | await initDatabaseModels(true) | ||
19 | |||
20 | const ids = await VideoModel.listLocalIds() | ||
21 | |||
22 | await map(ids, id => { | ||
23 | return processVideo(id) | ||
24 | .catch(err => console.error('Cannot process video %d.', id, err)) | ||
25 | }, { concurrency: 20 }) | ||
26 | } | ||
27 | |||
28 | async function processVideo (id: number) { | ||
29 | const video = await VideoModel.loadWithFiles(id) | ||
30 | |||
31 | console.log('Processing video %s.', video.name) | ||
32 | |||
33 | const thumbnail = video.getMiniature() | ||
34 | const preview = video.getPreview() | ||
35 | |||
36 | const previewPath = preview.getPath() | ||
37 | |||
38 | if (!await pathExists(previewPath)) { | ||
39 | throw new Error(`Preview ${previewPath} does not exist on disk`) | ||
40 | } | ||
41 | |||
42 | const size = { | ||
43 | width: THUMBNAILS_SIZE.width, | ||
44 | height: THUMBNAILS_SIZE.height | ||
45 | } | ||
46 | |||
47 | const oldPath = thumbnail.getPath() | ||
48 | |||
49 | // Update thumbnail | ||
50 | thumbnail.filename = generateImageFilename() | ||
51 | thumbnail.width = size.width | ||
52 | thumbnail.height = size.height | ||
53 | |||
54 | const thumbnailPath = thumbnail.getPath() | ||
55 | await processImage({ path: previewPath, destination: thumbnailPath, newSize: size, keepOriginal: true }) | ||
56 | |||
57 | // Save new attributes | ||
58 | await thumbnail.save() | ||
59 | |||
60 | // Remove old thumbnail | ||
61 | await remove(oldPath) | ||
62 | |||
63 | // Don't federate, remote instances will refresh the thumbnails after a while | ||
64 | } | ||
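regenerate-thumbnails.ts walks every local video id with bluebird's map at a concurrency of 20 and logs per-video failures instead of aborting the whole run. A hedged sketch of the same bounded-concurrency pattern without bluebird follows; mapWithConcurrency is an illustrative helper, not part of PeerTube.

// Run an async worker over a list of items, at most `limit` at a time,
// logging failures instead of aborting the whole batch (illustrative helper)
async function mapWithConcurrency<T> (items: T[], limit: number, worker: (item: T) => Promise<void>): Promise<void> {
  const queue = [ ...items ]

  const runners = Array.from({ length: Math.min(limit, queue.length) }, async () => {
    while (queue.length > 0) {
      const item = queue.shift()
      if (item === undefined) break

      try {
        await worker(item)
      } catch (err) {
        console.error('Cannot process item.', err)
      }
    }
  })

  await Promise.all(runners)
}

Called as mapWithConcurrency(ids, 20, processVideo), it behaves like the bluebird call in the deleted script.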
diff --git a/scripts/release.sh b/scripts/release.sh index 2b922a749..0df9efa1d 100755 --- a/scripts/release.sh +++ b/scripts/release.sh | |||
@@ -69,11 +69,22 @@ npm run build -- --source-map | |||
69 | rm -f "./client/dist/en-US/stats.json" | 69 | rm -f "./client/dist/en-US/stats.json" |
70 | rm -f "./client/dist/embed-stats.json" | 70 | rm -f "./client/dist/embed-stats.json" |
71 | 71 | ||
72 | # Clean up declaration files | ||
73 | find dist/ packages/core-utils/dist/ \ | ||
74 | packages/ffmpeg/dist/ \ | ||
75 | packages/node-utils/dist/ \ | ||
76 | packages/models/dist/ \ | ||
77 | \( -name '*.d.ts' -o -name '*.d.ts.map' \) -type f -delete | ||
78 | |||
72 | # Creating the archives | 79 | # Creating the archives |
73 | ( | 80 | ( |
74 | # local variables | 81 | # local variables |
75 | directories_to_archive=("$directory_name/CREDITS.md" "$directory_name/FAQ.md" \ | 82 | directories_to_archive=("$directory_name/CREDITS.md" "$directory_name/FAQ.md" \ |
76 | "$directory_name/LICENSE" "$directory_name/README.md" \ | 83 | "$directory_name/LICENSE" "$directory_name/README.md" \ |
84 | "$directory_name/packages/core-utils/dist/" "$directory_name/packages/core-utils/package.json" \ | ||
85 | "$directory_name/packages/ffmpeg/dist/" "$directory_name/packages/ffmpeg/package.json" \ | ||
86 | "$directory_name/packages/node-utils/dist/" "$directory_name/packages/node-utils/package.json" \ | ||
87 | "$directory_name/packages/models/dist/" "$directory_name/packages/models/package.json" \ | ||
77 | "$directory_name/client/dist/" "$directory_name/client/yarn.lock" \ | 88 | "$directory_name/client/dist/" "$directory_name/client/yarn.lock" \ |
78 | "$directory_name/client/package.json" "$directory_name/config" \ | 89 | "$directory_name/client/package.json" "$directory_name/config" \ |
79 | "$directory_name/dist" "$directory_name/package.json" \ | 90 | "$directory_name/dist" "$directory_name/package.json" \ |
@@ -124,7 +135,7 @@ rm -f "./client/dist/embed-stats.json" | |||
124 | 135 | ||
125 | # Release types package | 136 | # Release types package |
126 | npm run generate-types-package "$version" | 137 | npm run generate-types-package "$version" |
127 | cd packages/types/dist | 138 | cd packages/types-generator/dist |
128 | npm publish --access public | 139 | npm publish --access public |
129 | fi | 140 | fi |
130 | ) | 141 | ) |
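The new cleanup step in release.sh relies on find to drop *.d.ts and *.d.ts.map artifacts from the built packages before archiving. For reference, a rough TypeScript equivalent of that deletion; deleteDeclarationFiles is illustrative and not part of the release tooling.

import { readdir, rm } from 'node:fs/promises'
import { join } from 'node:path'

// Recursively delete TypeScript declaration artifacts (*.d.ts, *.d.ts.map),
// roughly what the `find ... -delete` call in release.sh does
async function deleteDeclarationFiles (directory: string): Promise<void> {
  const entries = await readdir(directory, { withFileTypes: true })

  for (const entry of entries) {
    const entryPath = join(directory, entry.name)

    if (entry.isDirectory()) {
      await deleteDeclarationFiles(entryPath)
    } else if (entry.name.endsWith('.d.ts') || entry.name.endsWith('.d.ts.map')) {
      await rm(entryPath)
    }
  }
}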
diff --git a/scripts/reset-password.ts b/scripts/reset-password.ts deleted file mode 100755 index b2e5639fb..000000000 --- a/scripts/reset-password.ts +++ /dev/null | |||
@@ -1,58 +0,0 @@ | |||
1 | import { program } from 'commander' | ||
2 | import { isUserPasswordValid } from '../server/helpers/custom-validators/users' | ||
3 | import { initDatabaseModels } from '../server/initializers/database' | ||
4 | import { UserModel } from '../server/models/user/user' | ||
5 | |||
6 | program | ||
7 | .option('-u, --user [user]', 'User') | ||
8 | .parse(process.argv) | ||
9 | |||
10 | const options = program.opts() | ||
11 | |||
12 | if (options.user === undefined) { | ||
13 | console.error('The --user parameter is mandatory.') | ||
14 | process.exit(-1) | ||
15 | } | ||
16 | |||
17 | initDatabaseModels(true) | ||
18 | .then(() => { | ||
19 | return UserModel.loadByUsername(options.user) | ||
20 | }) | ||
21 | .then(user => { | ||
22 | if (!user) { | ||
23 | console.error('Unknown user.') | ||
24 | process.exit(-1) | ||
25 | } | ||
26 | |||
27 | const readline = require('readline') | ||
28 | const Writable = require('stream').Writable | ||
29 | const mutableStdout = new Writable({ | ||
30 | write: function (_chunk, _encoding, callback) { | ||
31 | callback() | ||
32 | } | ||
33 | }) | ||
34 | const rl = readline.createInterface({ | ||
35 | input: process.stdin, | ||
36 | output: mutableStdout, | ||
37 | terminal: true | ||
38 | }) | ||
39 | |||
40 | console.log('New password?') | ||
41 | rl.on('line', function (password) { | ||
42 | if (!isUserPasswordValid(password)) { | ||
43 | console.error('New password is invalid.') | ||
44 | process.exit(-1) | ||
45 | } | ||
46 | |||
47 | user.password = password | ||
48 | |||
49 | user.save() | ||
50 | .then(() => console.log('User password updated.')) | ||
51 | .catch(err => console.error(err)) | ||
52 | .finally(() => process.exit(0)) | ||
53 | }) | ||
54 | }) | ||
55 | .catch(err => { | ||
56 | console.error(err) | ||
57 | process.exit(-1) | ||
58 | }) | ||
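The deleted reset-password.ts hides the typed password by giving readline a Writable output that discards every chunk, so nothing is echoed back to the terminal. A minimal sketch of just that trick with Node built-ins:

import { createInterface } from 'node:readline'
import { Writable } from 'node:stream'

// A sink that swallows whatever readline tries to echo, keeping the password hidden
const mutedStdout = new Writable({
  write (_chunk, _encoding, callback) {
    callback()
  }
})

const rl = createInterface({ input: process.stdin, output: mutedStdout, terminal: true })

console.log('New password?')
rl.on('line', password => {
  rl.close()
  console.log('Read a password of %d characters.', password.length)
})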
diff --git a/scripts/setup/cli.sh b/scripts/setup/cli.sh deleted file mode 100755 index 2e9b8a505..000000000 --- a/scripts/setup/cli.sh +++ /dev/null | |||
@@ -1,17 +0,0 @@ | |||
1 | #!/bin/sh | ||
2 | |||
3 | set -eu | ||
4 | |||
5 | NOCLIENT=1 yarn install --pure-lockfile | ||
6 | |||
7 | rm -rf ./dist/server/tools/ | ||
8 | |||
9 | ( | ||
10 | cd ./server/tools | ||
11 | yarn install --pure-lockfile | ||
12 | ../../node_modules/.bin/tsc --build --verbose | ||
13 | ) | ||
14 | |||
15 | cp -r "./server/tools/node_modules" "./dist/server/tools" | ||
16 | |||
17 | npm run resolve-tspaths:server | ||
diff --git a/scripts/simulate-many-viewers.ts b/scripts/simulate-many-viewers.ts index a993e175a..d6d9fd69e 100644 --- a/scripts/simulate-many-viewers.ts +++ b/scripts/simulate-many-viewers.ts | |||
@@ -1,5 +1,5 @@ | |||
1 | import Bluebird from 'bluebird' | 1 | import Bluebird from 'bluebird' |
2 | import { wait } from '@shared/core-utils' | 2 | import { wait } from '@peertube/peertube-core-utils' |
3 | import { | 3 | import { |
4 | createSingleServer, | 4 | createSingleServer, |
5 | doubleFollow, | 5 | doubleFollow, |
@@ -7,7 +7,7 @@ import { | |||
7 | PeerTubeServer, | 7 | PeerTubeServer, |
8 | setAccessTokensToServers, | 8 | setAccessTokensToServers, |
9 | waitJobs | 9 | waitJobs |
10 | } from '@shared/server-commands' | 10 | } from '@peertube/peertube-server-commands' |
11 | 11 | ||
12 | let servers: PeerTubeServer[] | 12 | let servers: PeerTubeServer[] |
13 | const viewers: { xForwardedFor: string }[] = [] | 13 | const viewers: { xForwardedFor: string }[] = [] |
diff --git a/scripts/tsconfig.json b/scripts/tsconfig.json index 0cfd927a6..0a7e07599 100644 --- a/scripts/tsconfig.json +++ b/scripts/tsconfig.json | |||
@@ -4,7 +4,10 @@ | |||
4 | "outDir": "../dist/scripts" | 4 | "outDir": "../dist/scripts" |
5 | }, | 5 | }, |
6 | "references": [ | 6 | "references": [ |
7 | { "path": "../shared" }, | 7 | { "path": "../packages/core-utils" }, |
8 | { "path": "../packages/models" }, | ||
9 | { "path": "../packages/node-utils" }, | ||
10 | { "path": "../packages/server-commands" }, | ||
8 | { "path": "../server" } | 11 | { "path": "../server" } |
9 | ] | 12 | ] |
10 | } | 13 | } |
diff --git a/scripts/update-host.ts b/scripts/update-host.ts deleted file mode 100755 index 1d17ce152..000000000 --- a/scripts/update-host.ts +++ /dev/null | |||
@@ -1,140 +0,0 @@ | |||
1 | import { updateTorrentMetadata } from '@server/helpers/webtorrent' | ||
2 | import { getServerActor } from '@server/models/application/application' | ||
3 | import { WEBSERVER } from '../server/initializers/constants' | ||
4 | import { initDatabaseModels } from '../server/initializers/database' | ||
5 | import { | ||
6 | getLocalAccountActivityPubUrl, | ||
7 | getLocalVideoActivityPubUrl, | ||
8 | getLocalVideoAnnounceActivityPubUrl, | ||
9 | getLocalVideoChannelActivityPubUrl, | ||
10 | getLocalVideoCommentActivityPubUrl | ||
11 | } from '../server/lib/activitypub/url' | ||
12 | import { AccountModel } from '../server/models/account/account' | ||
13 | import { ActorModel } from '../server/models/actor/actor' | ||
14 | import { ActorFollowModel } from '../server/models/actor/actor-follow' | ||
15 | import { VideoModel } from '../server/models/video/video' | ||
16 | import { VideoChannelModel } from '../server/models/video/video-channel' | ||
17 | import { VideoCommentModel } from '../server/models/video/video-comment' | ||
18 | import { VideoShareModel } from '../server/models/video/video-share' | ||
19 | |||
20 | run() | ||
21 | .then(() => process.exit(0)) | ||
22 | .catch(err => { | ||
23 | console.error(err) | ||
24 | process.exit(-1) | ||
25 | }) | ||
26 | |||
27 | async function run () { | ||
28 | await initDatabaseModels(true) | ||
29 | |||
30 | const serverAccount = await getServerActor() | ||
31 | |||
32 | { | ||
33 | const res = await ActorFollowModel.listAcceptedFollowingUrlsForApi([ serverAccount.id ], undefined) | ||
34 | const hasFollowing = res.total > 0 | ||
35 | |||
36 | if (hasFollowing === true) { | ||
37 | throw new Error('Cannot update host because you follow other servers!') | ||
38 | } | ||
39 | } | ||
40 | |||
41 | console.log('Updating actors.') | ||
42 | |||
43 | const actors: ActorModel[] = await ActorModel.unscoped().findAll({ | ||
44 | include: [ | ||
45 | { | ||
46 | model: VideoChannelModel.unscoped(), | ||
47 | required: false | ||
48 | }, | ||
49 | { | ||
50 | model: AccountModel.unscoped(), | ||
51 | required: false | ||
52 | } | ||
53 | ] | ||
54 | }) | ||
55 | for (const actor of actors) { | ||
56 | if (actor.isOwned() === false) continue | ||
57 | |||
58 | console.log('Updating actor ' + actor.url) | ||
59 | |||
60 | const newUrl = actor.Account | ||
61 | ? getLocalAccountActivityPubUrl(actor.preferredUsername) | ||
62 | : getLocalVideoChannelActivityPubUrl(actor.preferredUsername) | ||
63 | |||
64 | actor.url = newUrl | ||
65 | actor.inboxUrl = newUrl + '/inbox' | ||
66 | actor.outboxUrl = newUrl + '/outbox' | ||
67 | actor.sharedInboxUrl = WEBSERVER.URL + '/inbox' | ||
68 | actor.followersUrl = newUrl + '/followers' | ||
69 | actor.followingUrl = newUrl + '/following' | ||
70 | |||
71 | await actor.save() | ||
72 | } | ||
73 | |||
74 | console.log('Updating video shares.') | ||
75 | |||
76 | const videoShares: VideoShareModel[] = await VideoShareModel.findAll({ | ||
77 | include: [ VideoModel.unscoped(), ActorModel.unscoped() ] | ||
78 | }) | ||
79 | for (const videoShare of videoShares) { | ||
80 | if (videoShare.Video.isOwned() === false) continue | ||
81 | |||
82 | console.log('Updating video share ' + videoShare.url) | ||
83 | |||
84 | videoShare.url = getLocalVideoAnnounceActivityPubUrl(videoShare.Actor, videoShare.Video) | ||
85 | await videoShare.save() | ||
86 | } | ||
87 | |||
88 | console.log('Updating video comments.') | ||
89 | const videoComments: VideoCommentModel[] = await VideoCommentModel.findAll({ | ||
90 | include: [ | ||
91 | { | ||
92 | model: VideoModel.unscoped() | ||
93 | }, | ||
94 | { | ||
95 | model: AccountModel.unscoped(), | ||
96 | include: [ | ||
97 | { | ||
98 | model: ActorModel.unscoped() | ||
99 | } | ||
100 | ] | ||
101 | } | ||
102 | ] | ||
103 | }) | ||
104 | for (const comment of videoComments) { | ||
105 | if (comment.isOwned() === false) continue | ||
106 | |||
107 | console.log('Updating comment ' + comment.url) | ||
108 | |||
109 | comment.url = getLocalVideoCommentActivityPubUrl(comment.Video, comment) | ||
110 | await comment.save() | ||
111 | } | ||
112 | |||
113 | console.log('Updating video and torrent files.') | ||
114 | |||
115 | const ids = await VideoModel.listLocalIds() | ||
116 | for (const id of ids) { | ||
117 | const video = await VideoModel.loadFull(id) | ||
118 | |||
119 | console.log('Updating video ' + video.uuid) | ||
120 | |||
121 | video.url = getLocalVideoActivityPubUrl(video) | ||
122 | await video.save() | ||
123 | |||
124 | for (const file of video.VideoFiles) { | ||
125 | console.log('Updating torrent file %s of video %s.', file.resolution, video.uuid) | ||
126 | await updateTorrentMetadata(video, file) | ||
127 | |||
128 | await file.save() | ||
129 | } | ||
130 | |||
131 | const playlist = video.getHLSPlaylist() | ||
132 | for (const file of (playlist?.VideoFiles || [])) { | ||
133 | console.log('Updating fragmented torrent file %s of video %s.', file.resolution, video.uuid) | ||
134 | |||
135 | await updateTorrentMetadata(playlist, file) | ||
136 | |||
137 | await file.save() | ||
138 | } | ||
139 | } | ||
140 | } | ||
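update-host.ts rebuilt every locally owned ActivityPub URL from the configured webserver URL. A simplified sketch of how the account endpoints derive from a single base URL, assuming the WEBSERVER.URL + '/accounts/' + username form used by the account helper; buildActorUrls itself is illustrative, not a PeerTube function.

// Derive the ActivityPub endpoints of a local account from the instance URL,
// mirroring how update-host.ts rebuilt actor.url, inboxUrl, outboxUrl, etc.
function buildActorUrls (webserverUrl: string, preferredUsername: string) {
  const base = `${webserverUrl}/accounts/${preferredUsername}`

  return {
    url: base,
    inboxUrl: `${base}/inbox`,
    outboxUrl: `${base}/outbox`,
    sharedInboxUrl: `${webserverUrl}/inbox`,
    followersUrl: `${base}/followers`,
    followingUrl: `${base}/following`
  }
}

// Example: buildActorUrls('https://peertube.example.com', 'alice')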
diff --git a/scripts/upgrade.sh b/scripts/upgrade.sh deleted file mode 100755 index 64c7e1581..000000000 --- a/scripts/upgrade.sh +++ /dev/null | |||
@@ -1,108 +0,0 @@ | |||
1 | #!/bin/sh | ||
2 | |||
3 | set -eu | ||
4 | |||
5 | PEERTUBE_PATH=${1:-/var/www/peertube} | ||
6 | |||
7 | if [ ! -e "$PEERTUBE_PATH" ]; then | ||
8 | echo "Error - path \"$PEERTUBE_PATH\" wasn't found" | ||
9 | echo "" | ||
10 | echo "If peertube was installed in another path, you can specify it with" | ||
11 | echo " ./upgrade.sh <PATH>" | ||
12 | exit 1 | ||
13 | fi | ||
14 | |||
15 | if [ ! -e "$PEERTUBE_PATH/versions" -o ! -e "$PEERTUBE_PATH/config/production.yaml" ]; then | ||
16 | echo "Error - Couldn't find peertube installation in \"$PEERTUBE_PATH\"" | ||
17 | echo "" | ||
18 | echo "If peertube was installed in another path, you can specify it with" | ||
19 | echo " ./upgrade.sh <PATH>" | ||
20 | exit 1 | ||
21 | fi | ||
22 | |||
23 | if [ -x "$(command -v awk)" ] && [ -x "$(command -v sed)" ]; then | ||
24 | REMAINING=$(df -k $PEERTUBE_PATH | awk '{ print $4}' | sed -n 2p) | ||
25 | ONE_GB=$((1024 * 1024)) | ||
26 | |||
27 | if [ "$REMAINING" -lt "$ONE_GB" ]; then | ||
28 | echo "Error - not enough free space for upgrading" | ||
29 | echo "" | ||
30 | echo "Make sure you have at least 1 GB of free space in $PEERTUBE_PATH" | ||
31 | exit 1 | ||
32 | fi | ||
33 | fi | ||
34 | |||
35 | # Backup database | ||
36 | if [ -x "$(command -v pg_dump)" ]; then | ||
37 | mkdir -p $PEERTUBE_PATH/backup | ||
38 | |||
39 | SQL_BACKUP_PATH="$PEERTUBE_PATH/backup/sql-peertube_prod-$(date +"%Y%m%d-%H%M").bak" | ||
40 | |||
41 | echo "Backing up PostgreSQL database in $SQL_BACKUP_PATH" | ||
42 | |||
43 | DB_USER=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['username'])") | ||
44 | DB_PASS=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['password'])") | ||
45 | DB_HOST=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['hostname'])") | ||
46 | DB_PORT=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['port'])") | ||
47 | DB_SUFFIX=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['suffix'])") | ||
48 | DB_NAME=$(node -e "console.log(require('js-yaml').load(fs.readFileSync('$PEERTUBE_PATH/config/production.yaml', 'utf8'))['database']['name'] || '')") | ||
49 | |||
50 | PGPASSWORD=$DB_PASS pg_dump -U $DB_USER -p $DB_PORT -h $DB_HOST -F c "${DB_NAME:-peertube${DB_SUFFIX}}" -f "$SQL_BACKUP_PATH" | ||
51 | else | ||
52 | echo "pg_dump not found. Cannot make a SQL backup!" | ||
53 | fi | ||
54 | |||
55 | # If there is a pre-release, give the user a choice which one to install. | ||
56 | RELEASE_VERSION=$(curl -s https://api.github.com/repos/chocobozzz/peertube/releases/latest | grep tag_name | cut -d '"' -f 4) | ||
57 | PRE_RELEASE_VERSION=$(curl -s https://api.github.com/repos/chocobozzz/peertube/releases | grep tag_name | head -1 | cut -d '"' -f 4) | ||
58 | |||
59 | if [ "$RELEASE_VERSION" != "$PRE_RELEASE_VERSION" ]; then | ||
60 | echo -e "Which version do you want to install?\n[1] $RELEASE_VERSION (stable) \n[2] $PRE_RELEASE_VERSION (pre-release)" | ||
61 | read choice | ||
62 | case $choice in | ||
63 | [1]* ) VERSION="$RELEASE_VERSION";; | ||
64 | [2]* ) VERSION="$PRE_RELEASE_VERSION";; | ||
65 | * ) exit; | ||
66 | esac | ||
67 | else | ||
68 | VERSION="$RELEASE_VERSION" | ||
69 | fi | ||
70 | |||
71 | echo "Installing Peertube version $VERSION" | ||
72 | wget -q "https://github.com/Chocobozzz/PeerTube/releases/download/${VERSION}/peertube-${VERSION}.zip" -O "$PEERTUBE_PATH/versions/peertube-${VERSION}.zip" | ||
73 | cd $PEERTUBE_PATH/versions | ||
74 | unzip -o "peertube-${VERSION}.zip" | ||
75 | rm -f "peertube-${VERSION}.zip" | ||
76 | |||
77 | RELEASE_PAGE_URL="https://github.com/Chocobozzz/PeerTube/releases/tag/${VERSION}" | ||
78 | LATEST_VERSION_DIRECTORY="$PEERTUBE_PATH/versions/peertube-${VERSION}" | ||
79 | cd "$LATEST_VERSION_DIRECTORY" | ||
80 | |||
81 | # Launch yarn to check if we have all required dependencies | ||
82 | NOCLIENT=1 yarn install --production --pure-lockfile | ||
83 | |||
84 | # Switch to latest code version | ||
85 | rm -rf $PEERTUBE_PATH/peertube-latest | ||
86 | ln -s "$LATEST_VERSION_DIRECTORY" $PEERTUBE_PATH/peertube-latest | ||
87 | cp $PEERTUBE_PATH/peertube-latest/config/default.yaml $PEERTUBE_PATH/config/default.yaml | ||
88 | |||
89 | echo "" | ||
90 | echo "==========================================================" | ||
91 | echo "" | ||
92 | |||
93 | if [ -x "$(command -v git)" ]; then | ||
94 | cd /var/www/peertube | ||
95 | |||
96 | git merge-file -p config/production.yaml "$LATEST_VERSION_DIRECTORY/config/production.yaml.example" "peertube-latest/config/production.yaml.example" | tee "config/production.yaml.new" > /dev/null | ||
97 | echo "/var/www/peertube/config/production.yaml.new generated" | ||
98 | echo "You can review it and replace your existing production.yaml configuration" | ||
99 | else | ||
100 | echo "git command not found: unable to generate config/production.yaml.new configuration file based on your existing production.yaml configuration" | ||
101 | fi | ||
102 | |||
103 | echo "" | ||
104 | echo "==========================================================" | ||
105 | echo "" | ||
106 | echo "Please read the IMPORTANT NOTES on $RELEASE_PAGE_URL" | ||
107 | echo "" | ||
108 | echo "Then restart PeerTube!" | ||