path: root/server/lib/activitypub/crawl.ts
author     Chocobozzz <me@florianbigard.com>  2023-07-31 14:34:36 +0200
committer  Chocobozzz <me@florianbigard.com>  2023-08-11 15:02:33 +0200
commit     3a4992633ee62d5edfbb484d9c6bcb3cf158489d (patch)
tree       e4510b39bdac9c318fdb4b47018d08f15368b8f0 /server/lib/activitypub/crawl.ts
parent     04d1da5621d25d59bd5fa1543b725c497bf5d9a8 (diff)
Migrate server to ESM
Sorry for the very big commit that may lead to git log issues and merge conflicts, but it's a major step forward:

* Server can be faster at startup because import() is async and we can easily lazy import big modules
* Angular doesn't seem to support ES imports (with .js extension), so we had to correctly organize PeerTube into a monorepo:
  * Use the yarn workspace feature
  * Use TypeScript project references for dependencies
  * Shared projects have been moved into "packages"; each one is now a node module (with a dedicated package.json/tsconfig.json)
  * server/tools has been moved into apps/ and is now a dedicated app, bundled and published on NPM so users don't have to build the PeerTube CLI tools manually
  * server/tests has been moved into packages/ so we don't compile the tests every time we want to run the server
* Use the isolatedModules option:
  * Had to move from const enum to const objects (https://www.typescriptlang.org/docs/handbook/enums.html#objects-vs-enums)
  * Had to explicitly specify "type" imports when used in decorators
* Prefer tsx (which uses esbuild under the hood) over ts-node to load TypeScript files (tests with mocha, or scripts):
  * To reduce test complexity, as esbuild doesn't support decorator metadata, we only test server files that do not import server models
  * We still build test files into JS files for a faster CI
* Remove the unmaintained PeerTube CLI import script
* Removed some barrels to speed up execution (fewer imports)
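
The isolatedModules points above follow the pattern described on the linked TypeScript handbook page. Below is a minimal sketch of the const enum to `as const` object move; the VideoState name and the commented import are illustrative assumptions, not code taken from this commit.

// Before: a const enum. Under isolatedModules this cannot be compiled,
// because its members are inlined across file boundaries at build time:
//
//   const enum VideoState { PUBLISHED = 1, TO_TRANSCODE = 2 }

// After: a plain object narrowed with `as const`, plus a type derived
// from its values. Each file compiles in isolation, and the values
// still exist at runtime. TypeScript allows a value and a type to
// share the same name, so call sites stay unchanged.
export const VideoState = {
  PUBLISHED: 1,
  TO_TRANSCODE: 2
} as const

export type VideoState = typeof VideoState[keyof typeof VideoState]

// Similarly, a type referenced only in a decorator signature needs an
// explicit type-only import so the compiler can erase it safely, e.g.:
//
//   import type { SomeModel } from './some-module'
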
Diffstat (limited to 'server/lib/activitypub/crawl.ts')
-rw-r--r--  server/lib/activitypub/crawl.ts  |  58
 1 file changed, 0 insertions(+), 58 deletions(-)
diff --git a/server/lib/activitypub/crawl.ts b/server/lib/activitypub/crawl.ts
deleted file mode 100644
index b8348e8cf..000000000
--- a/server/lib/activitypub/crawl.ts
+++ /dev/null
@@ -1,58 +0,0 @@
-import Bluebird from 'bluebird'
-import { URL } from 'url'
-import { retryTransactionWrapper } from '@server/helpers/database-utils'
-import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub'
-import { logger } from '../../helpers/logger'
-import { ACTIVITY_PUB, WEBSERVER } from '../../initializers/constants'
-import { fetchAP } from './activity'
-
-type HandlerFunction<T> = (items: T[]) => (Promise<any> | Bluebird<any>)
-type CleanerFunction = (startedDate: Date) => Promise<any>
-
-async function crawlCollectionPage <T> (argUrl: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) {
-  let url = argUrl
-
-  logger.info('Crawling ActivityPub data on %s.', url)
-
-  const startDate = new Date()
-
-  const response = await fetchAP<ActivityPubOrderedCollection<T>>(url)
-  const firstBody = response.body
-
-  const limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
-  let i = 0
-  let nextLink = firstBody.first
-  while (nextLink && i < limit) {
-    let body: any
-
-    if (typeof nextLink === 'string') {
-      // Don't crawl ourselves
-      const remoteHost = new URL(nextLink).host
-      if (remoteHost === WEBSERVER.HOST) continue
-
-      url = nextLink
-
-      const res = await fetchAP<ActivityPubOrderedCollection<T>>(url)
-      body = res.body
-    } else {
-      // nextLink is already the object we want
-      body = nextLink
-    }
-
-    nextLink = body.next
-    i++
-
-    if (Array.isArray(body.orderedItems)) {
-      const items = body.orderedItems
-      logger.info('Processing %i ActivityPub items for %s.', items.length, url)
-
-      await handler(items)
-    }
-  }
-
-  if (cleaner) await retryTransactionWrapper(cleaner, startDate)
-}
-
-export {
-  crawlCollectionPage
-}
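
For reference, a call site for the removed helper would have looked roughly like the sketch below. The outbox URL, the handler body, and the cleaner are illustrative assumptions, not code from this commit.

import { crawlCollectionPage } from './crawl'

// Hypothetical usage: walk a remote actor's outbox, one page at a time,
// up to ACTIVITY_PUB.FETCH_PAGE_LIMIT pages.
async function crawlRemoteOutbox () {
  await crawlCollectionPage<string>(
    'https://peertube.example.com/accounts/alice/outbox',

    // Handler: called once per crawled page with that page's orderedItems
    async activityUrls => {
      for (const activityUrl of activityUrls) {
        console.log('Would process activity %s', activityUrl)
      }
    },

    // Optional cleaner: invoked once at the end through
    // retryTransactionWrapper, e.g. to prune rows not refreshed since
    // the crawl started
    async startedDate => {
      console.log('Pruning entries older than %s', startedDate.toISOString())
    }
  )
}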