PeerTube_original/server/lib/activitypub/crawl.ts
Felix Ableitner edb4ffc7e0 Set bitrate limits for transcoding (fixes #638) (#1135)
* Set bitrate limits for transcoding (fixes #638)

* added optimization script and test, changed stuff

* fix test, improve docs

* re-add optimize-old-videos script

* added documentation

* Don't optimize videos without valid UUID, or redundancy videos

* move getUUIDFromFilename

* fix tests?

* update torrent and file size, some more fixes/improvements

* use higher bitrate for high fps video, adjust bitrates

* add test video

* don't throw error if resolution is undefined

* generate test fixture on the fly

* use random noise video for bitrate test, add promise

* shorten test video to avoid timeout

* use existing function to optimize video

* various fixes

* increase test timeout

* limit test fixture size, add link

* test fixes

* add await

* more test fixes, add -b:v parameter

* replace ffmpeg wiki link

* fix ffmpeg params

* fix unit test

* add test fixture to .gitignore

* add video transcoding fps model

* add missing file
2018-10-08 16:26:04 +02:00
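
The commit above caps transcoding bitrate based on resolution and frame rate and passes the cap to ffmpeg via -b:v. The snippet below is a hypothetical sketch of that idea, not the actual PeerTube implementation: the table of per-resolution bitrates, the fps factor, and the function name are illustrative assumptions.

// Hypothetical sketch: derive a bitrate ceiling from resolution and frame rate,
// then hand it to ffmpeg through the -b:v / -maxrate arguments.

// Rough kbps budgets per output resolution (assumed values, not PeerTube's).
const BASE_KBPS_BY_RESOLUTION: { [resolution: number]: number } = {
  240: 600,
  360: 900,
  480: 1400,
  720: 2800,
  1080: 5000
}

function getTargetBitrateKbps (resolution: number, fps: number): number {
  const base = BASE_KBPS_BY_RESOLUTION[resolution] || 5000
  // Give high frame rate videos (e.g. 60 fps) a larger budget than 30 fps ones.
  const fpsFactor = fps > 30 ? 1.5 : 1
  return Math.round(base * fpsFactor)
}

// Example: arguments for a 720p 60 fps transcode -> [ '-b:v', '4200k', '-maxrate', '4200k' ]
const kbps = getTargetBitrateKbps(720, 60)
const ffmpegArgs = [ '-b:v', `${kbps}k`, '-maxrate', `${kbps}k` ]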


import { ACTIVITY_PUB, JOB_REQUEST_TIMEOUT } from '../../initializers'
import { doRequest } from '../../helpers/requests'
import { logger } from '../../helpers/logger'
import * as Bluebird from 'bluebird'

// Fetch an ActivityPub collection page by page, following the "first"/"next"
// links and feeding each batch of orderedItems to the handler, up to
// ACTIVITY_PUB.FETCH_PAGE_LIMIT pages.
async function crawlCollectionPage <T> (uri: string, handler: (items: T[]) => Promise<any> | Bluebird<any>) {
  logger.info('Crawling ActivityPub data on %s.', uri)

  const options = {
    method: 'GET',
    uri,
    json: true,
    activityPub: true,
    timeout: JOB_REQUEST_TIMEOUT
  }

  const response = await doRequest(options)
  const firstBody = response.body

  let limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
  let i = 0
  let nextLink = firstBody.first
  while (nextLink && i < limit) {
    options.uri = nextLink

    const { body } = await doRequest(options)
    nextLink = body.next
    i++

    if (Array.isArray(body.orderedItems)) {
      const items = body.orderedItems
      logger.info('Processing %i ActivityPub items for %s.', items.length, options.uri)

      await handler(items)
    }
  }
}

// ---------------------------------------------------------------------------

export {
  crawlCollectionPage
}
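
A hypothetical usage sketch of crawlCollectionPage follows; the outbox URL and the handler body are illustrative assumptions, not taken from the PeerTube sources.

import { crawlCollectionPage } from './crawl'

async function fetchRemoteOutbox () {
  await crawlCollectionPage<any>(
    'https://peertube.example.com/accounts/alice/outbox',
    async items => {
      // Each call receives one page of "orderedItems"; process them here.
      console.log('Received %d items.', items.length)
    }
  )
}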