// PeerTube — server/lib/activitypub/crawl.ts

import { ACTIVITY_PUB, JOB_REQUEST_TIMEOUT } from '../../initializers'
import { doRequest } from '../../helpers/requests'
import { logger } from '../../helpers/logger'
import * as Bluebird from 'bluebird'
import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub'
type HandlerFunction<T> = (items: T[]) => (Promise<any> | Bluebird<any>)
type CleanerFunction = (startedDate: Date) => (Promise<any> | Bluebird<any>)
async function crawlCollectionPage <T> (uri: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) {
logger.info('Crawling ActivityPub data on %s.', uri)
const options = {
method: 'GET',
uri,
json: true,
activityPub: true,
timeout: JOB_REQUEST_TIMEOUT
}
2019-03-19 10:23:02 -05:00
const startDate = new Date()
2018-11-14 08:01:28 -06:00
const response = await doRequest<ActivityPubOrderedCollection<T>>(options)
const firstBody = response.body
let limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
let i = 0
let nextLink = firstBody.first
while (nextLink && i < limit) {
options.uri = nextLink
2018-11-14 08:01:28 -06:00
const { body } = await doRequest<ActivityPubOrderedCollection<T>>(options)
nextLink = body.next
i++
if (Array.isArray(body.orderedItems)) {
const items = body.orderedItems
logger.info('Processing %i ActivityPub items for %s.', items.length, options.uri)
await handler(items)
}
}
2019-03-19 10:23:02 -05:00
if (cleaner) await cleaner(startDate)
}
export {
crawlCollectionPage
}