Safely remove webtorrent files
Only remove them on max quality HLS playlist generation
parent 77d7e851dc
commit 6939cbac48
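In short: `onHlsPlaylistGeneration` now receives the resolution of the HLS playlist that was just generated, and the WebTorrent files are deleted only once the max quality rendition exists (and only if the admin disabled WebTorrent transcoding). Below is a minimal sketch of that guard, condensed from the hunks that follow; the interfaces are simplified stand-ins for PeerTube's Sequelize model and `CONFIG` object, not the real types.

```ts
// Sketch only: condensed from the diff below, with simplified stand-in types.
interface VideoFile {
  resolution: number
  destroy (): Promise<void>
}

interface VideoLike {
  VideoFiles: VideoFile[]
  getMaxQualityFile (): VideoFile
  hasWebTorrentFiles (): boolean
  removeFile (file: VideoFile): Promise<void>
}

const CONFIG = { TRANSCODING: { WEBTORRENT: { ENABLED: false } } } // assumed shape

async function onHlsPlaylistGeneration (video: VideoLike | undefined, resolution: number) {
  if (video === undefined) return undefined

  const maxQualityFile = video.getMaxQualityFile()

  // Delete the WebTorrent files only when:
  //  * WebTorrent transcoding is disabled by the admin,
  //  * there are still WebTorrent files to delete,
  //  * the playlist we just generated is the max quality one.
  if (CONFIG.TRANSCODING.WEBTORRENT.ENABLED === false &&
      video.hasWebTorrentFiles() &&
      maxQualityFile.resolution === resolution) {
    for (const file of video.VideoFiles) {
      await video.removeFile(file) // remove the file from storage
      await file.destroy()         // then remove its database row
    }
  }
}
```

Waiting for the max quality playlist is what makes the removal "safe": lower resolution HLS jobs finishing first no longer trigger the cleanup, which is the behaviour the commit title describes.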
@@ -73,11 +73,11 @@ export class JobsComponent extends RestTable implements OnInit {
   }
 
   getColspan () {
-    if (this.jobState === 'all' && this.hasProgress()) return 6
+    if (this.jobState === 'all' && this.hasProgress()) return 7
 
-    if (this.jobState === 'all' || this.hasProgress()) return 5
+    if (this.jobState === 'all' || this.hasProgress()) return 6
 
-    return 4
+    return 5
   }
 
   onJobStateOrTypeChanged () {
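Every `getColspan()` branch is bumped by one, which is equivalent to raising the base column count of the admin jobs table from 4 to 5 while still adding one column for the "all states" view and one for progress. A hypothetical restatement of the new logic (the template change that adds the extra column is not part of the extracted hunks):

```ts
// Hypothetical restatement of the new getColspan() logic: a base of 5 columns,
// plus one when listing jobs of every state, plus one when progress is shown.
function getColspan (jobState: string, hasProgress: boolean): number {
  let columns = 5

  if (jobState === 'all') columns++ // extra column shown only in the "all states" view
  if (hasProgress) columns++        // extra column for the job progress

  return columns
}

// Same results as the diff above:
// getColspan('all', true)      -> 7
// getColspan('all', false)     -> 6
// getColspan('pending', true)  -> 6
// getColspan('pending', false) -> 5
```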
@@ -93,7 +93,7 @@ async function handleHLSJob (job: Bull.Job, payload: HLSTranscodingPayload, vide
     job
   })
 
-  await retryTransactionWrapper(onHlsPlaylistGeneration, video)
+  await retryTransactionWrapper(onHlsPlaylistGeneration, video, payload.resolution)
 }
 
 async function handleNewWebTorrentResolutionJob (
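The only change in `handleHLSJob` is that `payload.resolution` is now passed as an extra argument, which `retryTransactionWrapper` forwards to `onHlsPlaylistGeneration`. As an illustration only (this is not PeerTube's actual helper, whose retry and transaction handling are not shown here), a wrapper that forwards its remaining arguments could look like this:

```ts
// Illustrative sketch of an argument-forwarding retry wrapper.
// The retry budget and lack of backoff are assumptions for the example.
async function retryWrapper<A extends unknown[], R> (
  fn: (...args: A) => Promise<R>,
  ...args: A
): Promise<R> {
  const maxAttempts = 5 // assumed

  for (let attempt = 1; ; attempt++) {
    try {
      return await fn(...args) // every extra argument reaches the wrapped function
    } catch (err) {
      if (attempt >= maxAttempts) throw err
    }
  }
}

// Mirrors the call in the hunk above: the third argument becomes the
// "resolution" parameter of onHlsPlaylistGeneration.
// await retryWrapper(onHlsPlaylistGeneration, video, payload.resolution)
```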
@@ -121,11 +121,13 @@ async function handleWebTorrentOptimizeJob (job: Bull.Job, payload: OptimizeTran
 
 // ---------------------------------------------------------------------------
 
-async function onHlsPlaylistGeneration (video: MVideoFullLight) {
+async function onHlsPlaylistGeneration (video: MVideoFullLight, resolution: number) {
   if (video === undefined) return undefined
 
-  // We generated the HLS playlist, we don't need the webtorrent files anymore if the admin disabled it
-  if (CONFIG.TRANSCODING.WEBTORRENT.ENABLED === false) {
+  const maxQualityFile = video.getMaxQualityFile()
+
+  // We generated the max quality HLS playlist, we don't need the webtorrent files anymore if the admin disabled it
+  if (CONFIG.TRANSCODING.WEBTORRENT.ENABLED === false && video.hasWebTorrentFiles() && maxQualityFile.resolution === resolution) {
     for (const file of video.VideoFiles) {
       await video.removeFile(file)
       await file.destroy()
@@ -1804,6 +1804,10 @@ export class VideoModel extends Model {
     return Object.assign(file, { Video: this })
   }
 
+  hasWebTorrentFiles () {
+    return Array.isArray(this.VideoFiles) === true && this.VideoFiles.length !== 0
+  }
+
   async addAndSaveThumbnail (thumbnail: MThumbnail, transaction: Transaction) {
     thumbnail.videoId = this.id
 
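The new `hasWebTorrentFiles()` helper on the video model only inspects the lazily loaded `VideoFiles` association: it returns false both when the association was not loaded and when it is empty (an HLS-only video). A standalone restatement of the check, outside the Sequelize model, to show what it treats as "no WebTorrent files":

```ts
// Standalone restatement of the check added to VideoModel above
// (the model writes `Array.isArray(...) === true` explicitly; behaviour is identical).
function hasWebTorrentFiles (videoFiles: { resolution: number }[] | undefined): boolean {
  return Array.isArray(videoFiles) && videoFiles.length !== 0
}

console.log(hasWebTorrentFiles(undefined))                // false: association not loaded
console.log(hasWebTorrentFiles([]))                       // false: HLS-only video, nothing to delete
console.log(hasWebTorrentFiles([ { resolution: 2160 } ])) // true: there are files to clean up
```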
@@ -5,7 +5,9 @@ import * as chai from 'chai'
 import { FfprobeData } from 'fluent-ffmpeg'
 import { omit } from 'lodash'
 import { join } from 'path'
+import { Job } from '@shared/models'
 import { VIDEO_TRANSCODING_FPS } from '../../../../server/initializers/constants'
+import { HttpStatusCode } from '../../../../shared/core-utils/miscs/http-error-codes'
 import {
   buildAbsoluteFixturePath,
   buildServerDirectory,
@@ -14,6 +16,7 @@ import {
   flushAndRunMultipleServers,
   generateHighBitrateVideo,
   generateVideoWithFramerate,
+  getJobsListPaginationAndSort,
   getMyVideos,
   getServerFileSize,
   getVideo,
@@ -37,12 +40,12 @@ import {
   getVideoFileFPS,
   getVideoFileResolution
 } from '../../../helpers/ffprobe-utils'
-import { HttpStatusCode } from '../../../../shared/core-utils/miscs/http-error-codes'
 
 const expect = chai.expect
 
 describe('Test video transcoding', function () {
   let servers: ServerInfo[] = []
+  let video4k: string
 
   before(async function () {
     this.timeout(30_000)
@@ -578,14 +581,14 @@ describe('Test video transcoding', function () {
     }
 
     const resUpload = await uploadVideo(servers[1].url, servers[1].accessToken, videoAttributes)
-    const videoUUID = resUpload.body.video.uuid
+    video4k = resUpload.body.video.uuid
 
     await waitJobs(servers)
 
     const resolutions = [ 240, 360, 480, 720, 1080, 1440, 2160 ]
 
     for (const server of servers) {
-      const res = await getVideo(server.url, videoUUID)
+      const res = await getVideo(server.url, video4k)
       const videoDetails: VideoDetails = res.body
 
       expect(videoDetails.files).to.have.lengthOf(resolutions.length)
@@ -597,6 +600,41 @@ describe('Test video transcoding', function () {
     }
   })
 
+  it('Should have the appropriate priorities for transcoding jobs', async function () {
+    const res = await getJobsListPaginationAndSort({
+      url: servers[1].url,
+      accessToken: servers[1].accessToken,
+      start: 0,
+      count: 100,
+      sort: '-createdAt',
+      jobType: 'video-transcoding'
+    })
+
+    const jobs = res.body.data as Job[]
+
+    const transcodingJobs = jobs.filter(j => j.data.videoUUID === video4k)
+
+    expect(transcodingJobs).to.have.lengthOf(14)
+
+    const hlsJobs = transcodingJobs.filter(j => j.data.type === 'new-resolution-to-hls')
+    const webtorrentJobs = transcodingJobs.filter(j => j.data.type === 'new-resolution-to-webtorrent')
+    const optimizeJobs = transcodingJobs.filter(j => j.data.type === 'optimize-to-webtorrent')
+
+    expect(hlsJobs).to.have.lengthOf(7)
+    expect(webtorrentJobs).to.have.lengthOf(6)
+    expect(optimizeJobs).to.have.lengthOf(1)
+
+    for (const j of optimizeJobs) {
+      expect(j.priority).to.be.greaterThan(11)
+      expect(j.priority).to.be.lessThan(50)
+    }
+
+    for (const j of hlsJobs.concat(webtorrentJobs)) {
+      expect(j.priority).to.be.greaterThan(100)
+      expect(j.priority).to.be.lessThan(150)
+    }
+  })
+
   after(async function () {
     await cleanupTests(servers)
   })
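The counts asserted by the new test follow from the 4K upload used earlier: the source file gets one `optimize-to-webtorrent` job, each of the six lower resolutions gets a `new-resolution-to-webtorrent` job, and every one of the seven resolutions (max quality included) gets a `new-resolution-to-hls` job, for 1 + 6 + 7 = 14 transcoding jobs in total. A short sketch of that arithmetic:

```ts
// Where the expected counts in the test above come from.
const resolutions = [ 240, 360, 480, 720, 1080, 1440, 2160 ] // same list as the test

const optimizeJobs = 1                         // the uploaded max quality file itself
const webtorrentJobs = resolutions.length - 1  // one job per lower resolution
const hlsJobs = resolutions.length             // one HLS job per resolution, 2160p included

console.log(hlsJobs, webtorrentJobs, optimizeJobs)   // 7 6 1
console.log(hlsJobs + webtorrentJobs + optimizeJobs) // 14, as asserted above
```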