X-Git-Url: https://git.immae.eu/?a=blobdiff_plain;f=server%2Flib%2Factivitypub%2Fcrawl.ts;h=278abf7de0b3a9637b380521dd9f78158535686f;hb=fd1b2d695320be5d86456c939b371b2e9b4f457b;hp=eeafdf4ba8d04848242020af795f8fb4388cda8b;hpb=a30a136c9896c656cab98d2c92cde32c534dc098;p=github%2FChocobozzz%2FPeerTube.git

diff --git a/server/lib/activitypub/crawl.ts b/server/lib/activitypub/crawl.ts
index eeafdf4ba..278abf7de 100644
--- a/server/lib/activitypub/crawl.ts
+++ b/server/lib/activitypub/crawl.ts
@@ -1,27 +1,26 @@
-import { ACTIVITY_PUB, JOB_REQUEST_TIMEOUT, WEBSERVER } from '../../initializers/constants'
-import { doRequest } from '../../helpers/requests'
-import { logger } from '../../helpers/logger'
 import * as Bluebird from 'bluebird'
-import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub'
 import { URL } from 'url'
+import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub'
+import { logger } from '../../helpers/logger'
+import { doJSONRequest } from '../../helpers/requests'
+import { ACTIVITY_PUB, REQUEST_TIMEOUT, WEBSERVER } from '../../initializers/constants'
 
 type HandlerFunction<T> = (items: T[]) => (Promise<any> | Bluebird<any>)
 type CleanerFunction = (startedDate: Date) => (Promise<any> | Bluebird<any>)
 
-async function crawlCollectionPage <T> (uri: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) {
-  logger.info('Crawling ActivityPub data on %s.', uri)
+async function crawlCollectionPage <T> (argUrl: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) {
+  let url = argUrl
+
+  logger.info('Crawling ActivityPub data on %s.', url)
 
   const options = {
-    method: 'GET',
-    uri,
-    json: true,
     activityPub: true,
-    timeout: JOB_REQUEST_TIMEOUT
+    timeout: REQUEST_TIMEOUT
   }
 
   const startDate = new Date()
 
-  const response = await doRequest<ActivityPubOrderedCollection<T>>(options)
+  const response = await doJSONRequest<ActivityPubOrderedCollection<T>>(url, options)
   const firstBody = response.body
 
   const limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
@@ -35,9 +34,9 @@ async function crawlCollectionPage <T> (uri: string, handler: HandlerFunction<T
       const remoteHost = new URL(nextLink).host
       if (remoteHost === WEBSERVER.HOST) continue
 
-      options.uri = nextLink
+      url = nextLink
 
-      const res = await doRequest<ActivityPubOrderedCollection<T>>(options)
+      const res = await doJSONRequest<ActivityPubOrderedCollection<T>>(url, options)
       body = res.body
     } else {
       // nextLink is already the object we want
@@ -49,7 +48,7 @@ async function crawlCollectionPage <T> (uri: string, handler: HandlerFunction<T
   if (Array.isArray(body.orderedItems)) {
     const items = body.orderedItems
 
-    logger.info('Processing %i ActivityPub items for %s.', items.length, options.uri)
+    logger.info('Processing %i ActivityPub items for %s.', items.length, url)
     await handler(items)
   }
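
Review note: after this change the crawler receives the collection URL as a plain string, keeps it in a local url variable while following next links, and passes it to doJSONRequest together with a small options object (activityPub, timeout: REQUEST_TIMEOUT), instead of mutating a request-options object carrying uri/json/method. The sketch below shows a minimal, hypothetical caller of the refactored helper; the export of crawlCollectionPage, the outbox URL and the handler/cleaner bodies are assumptions made for illustration, while the crawlCollectionPage<T>(url, handler, cleaner?) signature and the HandlerFunction/CleanerFunction shapes come from the diff above.

// Hypothetical caller (illustrative sketch only): crawl a remote actor's outbox page by page.
// Only the crawlCollectionPage<T>(url, handler, cleaner?) signature comes from the diff above;
// the export, the URL parameter and the handler/cleaner bodies are assumptions for this example.
import { crawlCollectionPage } from './crawl'

async function crawlRemoteOutbox (outboxUrl: string) {
  const handler = async (activities: any[]) => {
    // Invoked once per fetched page with that page's orderedItems array
    for (const activity of activities) {
      // ... dispatch or persist each activity here
    }
  }

  const cleaner = async (startedDate: Date) => {
    // Optionally remove local data that this crawl (started at startedDate) did not refresh
  }

  await crawlCollectionPage<any>(outboxUrl, handler, cleaner)
}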