server/lib/activitypub/crawl.ts @ 686eef04d9d0f4b77c5cdae05debe5c878320463
import { ACTIVITY_PUB, JOB_REQUEST_TIMEOUT, WEBSERVER } from '../../initializers/constants'
import { doRequest } from '../../helpers/requests'
import { logger } from '../../helpers/logger'
import * as Bluebird from 'bluebird'
import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub'
import { parse } from 'url'

type HandlerFunction<T> = (items: T[]) => (Promise<any> | Bluebird<any>)
type CleanerFunction = (startedDate: Date) => (Promise<any> | Bluebird<any>)

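// Crawls an ActivityPub OrderedCollection: fetches the collection at `uri`, then follows
// its `first`/`next` page links (up to ACTIVITY_PUB.FETCH_PAGE_LIMIT pages), passing each
// page's `orderedItems` to `handler`. If a `cleaner` is provided, it is called afterwards
// with the date the crawl started.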
async function crawlCollectionPage <T> (uri: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) {
  logger.info('Crawling ActivityPub data on %s.', uri)

  const options = {
    method: 'GET',
    uri,
    json: true,
    activityPub: true,
    timeout: JOB_REQUEST_TIMEOUT
  }

  const startDate = new Date()

  const response = await doRequest<ActivityPubOrderedCollection<T>>(options)
  const firstBody = response.body

  let limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
  let i = 0
  let nextLink = firstBody.first
  while (nextLink && i < limit) {
    // Don't crawl ourselves. Break (not continue): neither nextLink nor i changes at this
    // point, so `continue` would spin on the same URL forever.
    const remoteHost = parse(nextLink).host
    if (remoteHost === WEBSERVER.HOST) break

    options.uri = nextLink

    const { body } = await doRequest<ActivityPubOrderedCollection<T>>(options)
    nextLink = body.next
    i++

    if (Array.isArray(body.orderedItems)) {
      const items = body.orderedItems
      logger.info('Processing %i ActivityPub items for %s.', items.length, options.uri)

      await handler(items)
    }
  }

  if (cleaner) await cleaner(startDate)
}

export {
  crawlCollectionPage
}
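
// Illustrative usage (a sketch, not part of this file): a fetcher job could crawl a remote
// actor's outbox and hand each page of activities to a processing function. The URL and the
// `processActivities`/`cleanOutdatedActivities` names below are placeholders for whatever
// handler and cleaner the caller actually provides.
//
//   await crawlCollectionPage<Activity>(
//     'https://remote.example/accounts/alice/outbox',
//     items => processActivities(items),
//     startedDate => cleanOutdatedActivities(startedDate)
//   )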