1 | import { ACTIVITY_PUB, JOB_REQUEST_TIMEOUT, WEBSERVER } from '../../initializers/constants' | |
2 | import { doRequest } from '../../helpers/requests' | |
3 | import { logger } from '../../helpers/logger' | |
4 | import * as Bluebird from 'bluebird' | |
5 | import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub' | |
6 | import { URL } from 'url' | |
7 | ||
// Callback invoked once per crawled page with that page's ordered items.
// May return either a native Promise or a Bluebird promise.
type HandlerFunction<T> = (items: T[]) => (Promise<any> | Bluebird<any>)
// Optional post-crawl cleanup callback; receives the date the crawl started
// so it can remove data that was not refreshed during this crawl.
type CleanerFunction = (startedDate: Date) => (Promise<any> | Bluebird<any>)
10 | ||
11 | async function crawlCollectionPage <T> (uri: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) { | |
12 | logger.info('Crawling ActivityPub data on %s.', uri) | |
13 | ||
14 | const options = { | |
15 | method: 'GET', | |
16 | uri, | |
17 | json: true, | |
18 | activityPub: true, | |
19 | timeout: JOB_REQUEST_TIMEOUT | |
20 | } | |
21 | ||
22 | const startDate = new Date() | |
23 | ||
24 | const response = await doRequest<ActivityPubOrderedCollection<T>>(options) | |
25 | const firstBody = response.body | |
26 | ||
27 | const limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT | |
28 | let i = 0 | |
29 | let nextLink = firstBody.first | |
30 | while (nextLink && i < limit) { | |
31 | let body: any | |
32 | ||
33 | if (typeof nextLink === 'string') { | |
34 | // Don't crawl ourselves | |
35 | const remoteHost = new URL(nextLink).host | |
36 | if (remoteHost === WEBSERVER.HOST) continue | |
37 | ||
38 | options.uri = nextLink | |
39 | ||
40 | const res = await doRequest<ActivityPubOrderedCollection<T>>(options) | |
41 | body = res.body | |
42 | } else { | |
43 | // nextLink is already the object we want | |
44 | body = nextLink | |
45 | } | |
46 | ||
47 | nextLink = body.next | |
48 | i++ | |
49 | ||
50 | if (Array.isArray(body.orderedItems)) { | |
51 | const items = body.orderedItems | |
52 | logger.info('Processing %i ActivityPub items for %s.', items.length, options.uri) | |
53 | ||
54 | await handler(items) | |
55 | } | |
56 | } | |
57 | ||
58 | if (cleaner) await cleaner(startDate) | |
59 | } | |
60 | ||
// ---------------------------------------------------------------------------

export {
  crawlCollectionPage
}