// Source: server/lib/activitypub/crawl.ts (PeerTube), via git.immae.eu gitweb mirror
// Branch context: "Merge branch 'master' into release/3.3.0"
1 import * as Bluebird from 'bluebird'
2 import { URL } from 'url'
3 import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub'
4 import { logger } from '../../helpers/logger'
5 import { doJSONRequest } from '../../helpers/requests'
6 import { ACTIVITY_PUB, WEBSERVER } from '../../initializers/constants'
7
// Callback invoked with each crawled page's items; may return a native or Bluebird promise.
type HandlerFunction<T> = (items: T[]) => (Promise<any> | Bluebird<any>)
// Optional post-crawl cleanup hook; receives the Date at which the crawl started.
type CleanerFunction = (startedDate: Date) => (Promise<any> | Bluebird<any>)
10
11 async function crawlCollectionPage <T> (argUrl: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) {
12 let url = argUrl
13
14 logger.info('Crawling ActivityPub data on %s.', url)
15
16 const options = { activityPub: true }
17
18 const startDate = new Date()
19
20 const response = await doJSONRequest<ActivityPubOrderedCollection<T>>(url, options)
21 const firstBody = response.body
22
23 const limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
24 let i = 0
25 let nextLink = firstBody.first
26 while (nextLink && i < limit) {
27 let body: any
28
29 if (typeof nextLink === 'string') {
30 // Don't crawl ourselves
31 const remoteHost = new URL(nextLink).host
32 if (remoteHost === WEBSERVER.HOST) continue
33
34 url = nextLink
35
36 const res = await doJSONRequest<ActivityPubOrderedCollection<T>>(url, options)
37 body = res.body
38 } else {
39 // nextLink is already the object we want
40 body = nextLink
41 }
42
43 nextLink = body.next
44 i++
45
46 if (Array.isArray(body.orderedItems)) {
47 const items = body.orderedItems
48 logger.info('Processing %i ActivityPub items for %s.', items.length, url)
49
50 await handler(items)
51 }
52 }
53
54 if (cleaner) await cleaner(startDate)
55 }
56
// ---------------------------------------------------------------------------

export {
  crawlCollectionPage
}