import { ACTIVITY_PUB, JOB_REQUEST_TIMEOUT, WEBSERVER } from '../../initializers/constants'
import { doRequest } from '../../helpers/requests'
import { logger } from '../../helpers/logger'
import * as Bluebird from 'bluebird'
import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub'
import { parse } from 'url'

type HandlerFunction<T> = (items: T[]) => (Promise<any> | Bluebird<any>)
type CleanerFunction = (startedDate: Date) => (Promise<any> | Bluebird<any>)
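
// Fetch an ActivityPub OrderedCollection and walk its pages, handing each
// page's items to `handler`. The optional `cleaner` runs once the crawl is
// done, so callers can prune data that predates it.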
async function crawlCollectionPage <T> (uri: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) {
  logger.info('Crawling ActivityPub data on %s.', uri)
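
  // Request options reused for every page; `activityPub: true` asks the
  // doRequest helper to use ActivityPub content negotiation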
  const options = {
    method: 'GET',
    uri,
    json: true,
    activityPub: true,
    timeout: JOB_REQUEST_TIMEOUT
  }
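
  // Capture the crawl start so the cleaner can prune anything not refreshed
  // during this run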
  const startDate = new Date()

  const response = await doRequest<ActivityPubOrderedCollection<T>>(options)
  const firstBody = response.body
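
  // The collection root links its first page via `first`, and each page links
  // the following one via `next`. Cap how many pages we follow so a remote
  // server cannot keep us crawling forever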
  const limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
  let i = 0
  let nextLink = firstBody.first
  while (nextLink && i < limit) {
    let body: any

    if (typeof nextLink === 'string') {
      // Don't crawl ourselves
      const remoteHost = parse(nextLink).host
      if (remoteHost === WEBSERVER.HOST) break

      options.uri = nextLink

      const res = await doRequest<ActivityPubOrderedCollection<T>>(options)
      body = res.body
    } else {
      // nextLink is already the object we want
      body = nextLink
    }

    nextLink = body.next
    i++

    if (Array.isArray(body.orderedItems)) {
      const items = body.orderedItems
      logger.info('Processing %i ActivityPub items for %s.', items.length, options.uri)

      await handler(items)
    }
  }

  if (cleaner) await cleaner(startDate)
}

export { crawlCollectionPage }
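
// A minimal usage sketch (all caller names below are hypothetical): crawl a
// remote actor's outbox and handle its activities page by page.
//
//   await crawlCollectionPage<Activity>(
//     'https://peertube.example/accounts/root/outbox',   // hypothetical URI
//     items => processActivities(items),                 // hypothetical handler
//     startedDate => removeOutdatedEntries(startedDate)  // hypothetical cleaner
//   )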