X-Git-Url: https://git.immae.eu/?a=blobdiff_plain;f=server%2Flib%2Factivitypub%2Fcrawl.ts;h=336129b822013a0e3e01786069d6923005d1de27;hb=e1a570abff3ebf375433e58e7362d56bd32d4cd8;hp=55912341cb82aaba55c43fa27ad8d502d2771543;hpb=f6eebcb336c067e160a62020a5140d8d992ba384;p=github%2FChocobozzz%2FPeerTube.git

diff --git a/server/lib/activitypub/crawl.ts b/server/lib/activitypub/crawl.ts
index 55912341c..336129b82 100644
--- a/server/lib/activitypub/crawl.ts
+++ b/server/lib/activitypub/crawl.ts
@@ -1,39 +1,58 @@
-import { ACTIVITY_PUB, JOB_REQUEST_TIMEOUT } from '../../initializers'
-import { doRequest } from '../../helpers/requests'
+import Bluebird from 'bluebird'
+import { URL } from 'url'
+import { retryTransactionWrapper } from '@server/helpers/database-utils'
+import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub'
 import { logger } from '../../helpers/logger'
-import Bluebird = require('bluebird')
+import { doJSONRequest } from '../../helpers/requests'
+import { ACTIVITY_PUB, WEBSERVER } from '../../initializers/constants'
 
-async function crawlCollectionPage <T> (uri: string, handler: (items: T[]) => Promise<any> | Bluebird<any>) {
-  logger.info('Crawling ActivityPub data on %s.', uri)
+type HandlerFunction<T> = (items: T[]) => (Promise<any> | Bluebird<any>)
+type CleanerFunction = (startedDate: Date) => Promise<any>
 
-  const options = {
-    method: 'GET',
-    uri,
-    json: true,
-    activityPub: true,
-    timeout: JOB_REQUEST_TIMEOUT
-  }
+async function crawlCollectionPage <T> (argUrl: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) {
+  let url = argUrl
+
+  logger.info('Crawling ActivityPub data on %s.', url)
+
+  const options = { activityPub: true }
 
-  const response = await doRequest(options)
+  const startDate = new Date()
+
+  const response = await doJSONRequest<ActivityPubOrderedCollection<T>>(url, options)
   const firstBody = response.body
 
-  let limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
+  const limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
   let i = 0
   let nextLink = firstBody.first
   while (nextLink && i < limit) {
-    options.uri = nextLink
+    let body: any
+
+    if (typeof nextLink === 'string') {
+      // Don't crawl ourselves
+      const remoteHost = new URL(nextLink).host
+      if (remoteHost === WEBSERVER.HOST) continue
+
+      url = nextLink
+
+      const res = await doJSONRequest<ActivityPubOrderedCollection<T>>(url, options)
+      body = res.body
+    } else {
+      // nextLink is already the object we want
+      body = nextLink
+    }
 
-    const { body } = await doRequest(options)
     nextLink = body.next
     i++
 
     if (Array.isArray(body.orderedItems)) {
       const items = body.orderedItems
-      logger.info('Processing %i ActivityPub items for %s.', items.length, options.uri)
+      logger.info('Processing %i ActivityPub items for %s.', items.length, url)
 
       await handler(items)
     }
   }
+
+  if (cleaner) await retryTransactionWrapper(cleaner, startDate)
 }
 
 export {
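
Below is a minimal usage sketch of the crawlCollectionPage signature introduced by this change, for readers following the diff. It is not part of the commit: the function crawlRemoteOutbox and the handler/cleaner bodies are hypothetical, and the sketch assumes it sits next to server/lib/activitypub/crawl.ts so the relative imports resolve; only the crawlCollectionPage export, the logger helper and the shared Activity model path are taken from the PeerTube tree.

// Usage sketch only (hypothetical caller), written against the new
// crawlCollectionPage(argUrl, handler, cleaner?) signature from this diff.
import { Activity } from '../../../shared/models/activitypub'
import { logger } from '../../helpers/logger'
import { crawlCollectionPage } from './crawl'

async function crawlRemoteOutbox (outboxUrl: string) {
  // Handler: invoked once per fetched collection page with that page's orderedItems
  const handler = async (activities: Activity[]) => {
    logger.info('Handling %d activities fetched from %s.', activities.length, outboxUrl)
    // ...dispatch or persist the activities here (hypothetical)
  }

  // Optional cleaner: invoked once after the crawl with the date recorded
  // before the first request; crawlCollectionPage runs it through retryTransactionWrapper
  const cleaner = async (startedDate: Date) => {
    logger.info('Finished crawling %s (crawl started at %s).', outboxUrl, startedDate.toISOString())
  }

  return crawlCollectionPage<Activity>(outboxUrl, handler, cleaner)
}

A plausible use of the new optional cleaner is pruning rows that were not refreshed since the crawl began, with the start date as the cutoff; wrapping it in retryTransactionWrapper lets it be retried on transient database errors.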