Split ffprobe utils from ffmpeg utils

This commit is contained in:
parent 123f619336
commit daf6e48010
@@ -5,7 +5,7 @@ import * as program from 'commander'
import { VideoModel } from '../server/models/video/video'
import { initDatabaseModels } from '../server/initializers/database'
import { JobQueue } from '../server/lib/job-queue'
import { computeResolutionsToTranscode } from '@server/helpers/ffmpeg-utils'
import { computeResolutionsToTranscode } from '@server/helpers/ffprobe-utils'
import { VideoTranscodingPayload } from '@shared/models'

program
@@ -2,7 +2,7 @@ import { registerTSPaths } from '../server/helpers/register-ts-paths'
registerTSPaths()

import { VIDEO_TRANSCODING_FPS } from '../server/initializers/constants'
import { getDurationFromVideoFile, getVideoFileBitrate, getVideoFileFPS, getVideoFileResolution } from '../server/helpers/ffmpeg-utils'
import { getDurationFromVideoFile, getVideoFileBitrate, getVideoFileFPS, getVideoFileResolution } from '../server/helpers/ffprobe-utils'
import { getMaxBitrate } from '../shared/models/videos'
import { VideoModel } from '../server/models/video/video'
import { optimizeOriginalVideofile } from '../server/lib/video-transcoding'
@@ -16,7 +16,7 @@ import { VideoFilter } from '../../../../shared/models/videos/video-query.type'
import { auditLoggerFactory, getAuditIdFromRes, VideoAuditView } from '../../../helpers/audit-logger'
import { resetSequelizeInstance } from '../../../helpers/database-utils'
import { buildNSFWFilter, createReqFiles, getCountVideos } from '../../../helpers/express-utils'
import { getMetadataFromFile, getVideoFileFPS, getVideoFileResolution } from '../../../helpers/ffmpeg-utils'
import { getMetadataFromFile, getVideoFileFPS, getVideoFileResolution } from '../../../helpers/ffprobe-utils'
import { logger } from '../../../helpers/logger'
import { getFormattedObjects } from '../../../helpers/utils'
import { CONFIG } from '../../../initializers/config'
@@ -195,7 +195,7 @@ async function addVideo (req: express.Request, res: express.Response) {
extname: extname(videoPhysicalFile.filename),
size: videoPhysicalFile.size,
videoStreamingPlaylistId: null,
metadata: await getMetadataFromFile<any>(videoPhysicalFile.path)
metadata: await getMetadataFromFile(videoPhysicalFile.path)
})

if (videoFile.isAudio()) {
@@ -1,201 +1,14 @@
import * as ffmpeg from 'fluent-ffmpeg'
import { readFile, remove, writeFile } from 'fs-extra'
import { dirname, join } from 'path'
import { VideoFileMetadata } from '@shared/models/videos/video-file-metadata'
import { getMaxBitrate, getTargetBitrate, VideoResolution } from '../../shared/models/videos'
import { getTargetBitrate, VideoResolution } from '../../shared/models/videos'
import { checkFFmpegEncoders } from '../initializers/checker-before-init'
import { CONFIG } from '../initializers/config'
import { FFMPEG_NICE, VIDEO_LIVE, VIDEO_TRANSCODING_FPS } from '../initializers/constants'
import { getAudioStream, getClosestFramerateStandard, getMaxAudioBitrate, getVideoFileFPS } from './ffprobe-utils'
import { processImage } from './image-utils'
import { logger } from './logger'

/**
 * A toolbox to play with audio
 */
namespace audio {
export const get = (videoPath: string) => {
// without position, ffprobe considers the last input only
// we make it consider the first input only
// if you pass a file path to pos, then ffprobe acts on that file directly
return new Promise<{ absolutePath: string, audioStream?: any }>((res, rej) => {

function parseFfprobe (err: any, data: ffmpeg.FfprobeData) {
if (err) return rej(err)

if ('streams' in data) {
const audioStream = data.streams.find(stream => stream['codec_type'] === 'audio')
if (audioStream) {
return res({
absolutePath: data.format.filename,
audioStream
})
}
}

return res({ absolutePath: data.format.filename })
}

return ffmpeg.ffprobe(videoPath, parseFfprobe)
})
}

export namespace bitrate {
const baseKbitrate = 384

const toBits = (kbits: number) => kbits * 8000

export const aac = (bitrate: number): number => {
switch (true) {
case bitrate > toBits(baseKbitrate):
return baseKbitrate

default:
return -1 // we interpret it as a signal to copy the audio stream as is
}
}

export const mp3 = (bitrate: number): number => {
/*
a 192kbit/sec mp3 doesn't hold as much information as a 192kbit/sec aac.
That's why, when using aac, we can go to lower kbit/sec. The equivalences
made here are not made to be accurate, especially with good mp3 encoders.
*/
switch (true) {
case bitrate <= toBits(192):
return 128

case bitrate <= toBits(384):
return 256

default:
return baseKbitrate
}
}
}
}

function computeResolutionsToTranscode (videoFileResolution: number, type: 'vod' | 'live') {
const configResolutions = type === 'vod'
? CONFIG.TRANSCODING.RESOLUTIONS
: CONFIG.LIVE.TRANSCODING.RESOLUTIONS

const resolutionsEnabled: number[] = []

// Put in the order we want to proceed jobs
const resolutions = [
VideoResolution.H_NOVIDEO,
VideoResolution.H_480P,
VideoResolution.H_360P,
VideoResolution.H_720P,
VideoResolution.H_240P,
VideoResolution.H_1080P,
VideoResolution.H_4K
]

for (const resolution of resolutions) {
if (configResolutions[resolution + 'p'] === true && videoFileResolution > resolution) {
resolutionsEnabled.push(resolution)
}
}

return resolutionsEnabled
}

async function getVideoStreamSize (path: string) {
const videoStream = await getVideoStreamFromFile(path)

return videoStream === null
? { width: 0, height: 0 }
: { width: videoStream.width, height: videoStream.height }
}

async function getVideoStreamCodec (path: string) {
const videoStream = await getVideoStreamFromFile(path)

if (!videoStream) return ''

const videoCodec = videoStream.codec_tag_string

const baseProfileMatrix = {
High: '6400',
Main: '4D40',
Baseline: '42E0'
}

let baseProfile = baseProfileMatrix[videoStream.profile]
if (!baseProfile) {
logger.warn('Cannot get video profile codec of %s.', path, { videoStream })
baseProfile = baseProfileMatrix['High'] // Fallback
}

let level = videoStream.level.toString(16)
if (level.length === 1) level = `0${level}`

return `${videoCodec}.${baseProfile}${level}`
}

async function getAudioStreamCodec (path: string) {
const { audioStream } = await audio.get(path)

if (!audioStream) return ''

const audioCodec = audioStream.codec_name
if (audioCodec === 'aac') return 'mp4a.40.2'

logger.warn('Cannot get audio codec of %s.', path, { audioStream })

return 'mp4a.40.2' // Fallback
}

async function getVideoFileResolution (path: string) {
const size = await getVideoStreamSize(path)

return {
videoFileResolution: Math.min(size.height, size.width),
isPortraitMode: size.height > size.width
}
}

async function getVideoFileFPS (path: string) {
const videoStream = await getVideoStreamFromFile(path)
if (videoStream === null) return 0

for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) {
const valuesText: string = videoStream[key]
if (!valuesText) continue

const [ frames, seconds ] = valuesText.split('/')
if (!frames || !seconds) continue

const result = parseInt(frames, 10) / parseInt(seconds, 10)
if (result > 0) return Math.round(result)
}

return 0
}

async function getMetadataFromFile <T> (path: string, cb = metadata => metadata) {
return new Promise<T>((res, rej) => {
ffmpeg.ffprobe(path, (err, metadata) => {
if (err) return rej(err)

return res(cb(new VideoFileMetadata(metadata)))
})
})
}

async function getVideoFileBitrate (path: string) {
return getMetadataFromFile<number>(path, metadata => metadata.format.bit_rate)
}

function getDurationFromVideoFile (path: string) {
return getMetadataFromFile<number>(path, metadata => Math.floor(metadata.format.duration))
}

function getVideoStreamFromFile (path: string) {
return getMetadataFromFile<any>(path, metadata => metadata.streams.find(s => s.codec_type === 'video') || null)
}

async function generateImageFromVideoFile (fromPath: string, folder: string, imageName: string, size: { width: number, height: number }) {
const pendingImageName = 'pending-' + imageName
@@ -228,6 +41,10 @@ async function generateImageFromVideoFile (fromPath: string, folder: string, ima
}
}

// ---------------------------------------------------------------------------
// Transcode meta function
// ---------------------------------------------------------------------------

type TranscodeOptionsType = 'hls' | 'quick-transcode' | 'video' | 'merge-audio' | 'only-audio'

interface BaseTranscodeOptions {
@@ -270,72 +87,27 @@ type TranscodeOptions =
| OnlyAudioTranscodeOptions
| QuickTranscodeOptions

function transcode (options: TranscodeOptions) {
const builders: {
[ type in TranscodeOptionsType ]: (c: ffmpeg.FfmpegCommand, o?: TranscodeOptions) => Promise<ffmpeg.FfmpegCommand> | ffmpeg.FfmpegCommand
} = {
'quick-transcode': buildQuickTranscodeCommand,
'hls': buildHLSVODCommand,
'merge-audio': buildAudioMergeCommand,
'only-audio': buildOnlyAudioCommand,
'video': buildx264Command
}

async function transcode (options: TranscodeOptions) {
logger.debug('Will run transcode.', { options })

return new Promise<void>(async (res, rej) => {
try {
let command = getFFmpeg(options.inputPath)
.output(options.outputPath)
let command = getFFmpeg(options.inputPath)
.output(options.outputPath)

if (options.type === 'quick-transcode') {
command = buildQuickTranscodeCommand(command)
} else if (options.type === 'hls') {
command = await buildHLSVODCommand(command, options)
} else if (options.type === 'merge-audio') {
command = await buildAudioMergeCommand(command, options)
} else if (options.type === 'only-audio') {
command = buildOnlyAudioCommand(command, options)
} else {
command = await buildx264Command(command, options)
}
command = await builders[options.type](command, options)

command
.on('error', (err, stdout, stderr) => {
logger.error('Error in transcoding job.', { stdout, stderr })
return rej(err)
})
.on('end', () => {
return fixHLSPlaylistIfNeeded(options)
.then(() => res())
.catch(err => rej(err))
})
.run()
} catch (err) {
return rej(err)
}
})
}
await runCommand(command)

async function canDoQuickTranscode (path: string): Promise<boolean> {
// NOTE: This could be optimized by running ffprobe only once (but it runs fast anyway)
const videoStream = await getVideoStreamFromFile(path)
const parsedAudio = await audio.get(path)
const fps = await getVideoFileFPS(path)
const bitRate = await getVideoFileBitrate(path)
const resolution = await getVideoFileResolution(path)

// check video params
if (videoStream == null) return false
if (videoStream['codec_name'] !== 'h264') return false
if (videoStream['pix_fmt'] !== 'yuv420p') return false
if (fps < VIDEO_TRANSCODING_FPS.MIN || fps > VIDEO_TRANSCODING_FPS.MAX) return false
if (bitRate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)) return false

// check audio params (if audio stream exists)
if (parsedAudio.audioStream) {
if (parsedAudio.audioStream['codec_name'] !== 'aac') return false

const maxAudioBitrate = audio.bitrate['aac'](parsedAudio.audioStream['bit_rate'])
if (maxAudioBitrate !== -1 && parsedAudio.audioStream['bit_rate'] > maxAudioBitrate) return false
}

return true
}

function getClosestFramerateStandard (fps: number, type: 'HD_STANDARD' | 'STANDARD'): number {
return VIDEO_TRANSCODING_FPS[type].slice(0)
.sort((a, b) => fps % a - fps % b)[0]
await fixHLSPlaylistIfNeeded(options)
}

function convertWebPToJPG (path: string, destination: string): Promise<void> {
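The refactored transcode function above replaces the long if/else chain with a lookup table keyed by TranscodeOptionsType and then awaits a shared runCommand helper. Below is a minimal, hedged sketch of that dispatch-table pattern in isolation; the Options shape and the builder bodies are simplified placeholders, not PeerTube's real builders.

// Sketch only: placeholder types and builders, illustrating the dispatch-table shape used above
type OptionsType = 'hls' | 'quick-transcode' | 'video' | 'merge-audio' | 'only-audio'

interface Options { type: OptionsType, inputPath: string, outputPath: string }

// A builder may be sync or async, mirroring Promise<FfmpegCommand> | FfmpegCommand in the diff
type Builder = (args: string[], options: Options) => Promise<string[]> | string[]

const builders: { [type in OptionsType]: Builder } = {
  'quick-transcode': args => [ ...args, '-c', 'copy' ],
  'hls': async args => [ ...args, '-f', 'hls' ],
  'merge-audio': async args => args,
  'only-audio': args => [ ...args, '-vn' ],
  'video': async args => args
}

async function buildArgs (options: Options) {
  // One await handles both sync and async builders, then the caller runs the command once
  const args = await builders[options.type]([ '-i', options.inputPath ], options)
  return [ ...args, options.outputPath ]
}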
@@ -484,12 +256,11 @@ async function hlsPlaylistToFragmentedMP4 (hlsDirectory: string, segmentFiles: s
}

async function runCommand (command: ffmpeg.FfmpegCommand, onEnd?: Function) {
command.run()

return new Promise<string>((res, rej) => {
command.on('error', err => {
command.on('error', (err, stdout, stderr) => {
if (onEnd) onEnd()

logger.error('Error in transcoding job.', { stdout, stderr })
rej(err)
})

@@ -498,32 +269,23 @@ async function runCommand (command: ffmpeg.FfmpegCommand, onEnd?: Function) {

res()
})

command.run()
})
}

// ---------------------------------------------------------------------------

export {
getVideoStreamCodec,
getAudioStreamCodec,
runLiveMuxing,
convertWebPToJPG,
processGIF,
getVideoStreamSize,
getVideoFileResolution,
getMetadataFromFile,
getDurationFromVideoFile,
runLiveTranscoding,
generateImageFromVideoFile,
TranscodeOptions,
TranscodeOptionsType,
transcode,
getVideoFileFPS,
computeResolutionsToTranscode,
audio,
hlsPlaylistToFragmentedMP4,
getVideoFileBitrate,
canDoQuickTranscode
hlsPlaylistToFragmentedMP4

}

// ---------------------------------------------------------------------------
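The runCommand change above moves command.run() to the end of the Promise body and widens the 'error' handler to (err, stdout, stderr), so the error and end listeners are attached before the job is started and ffmpeg's output is logged on failure. A small sketch of that promisified event pattern with fluent-ffmpeg, using console.error in place of the project logger:

import * as ffmpeg from 'fluent-ffmpeg'

function runCommandSketch (command: ffmpeg.FfmpegCommand, onEnd?: () => void) {
  return new Promise<void>((res, rej) => {
    command.on('error', (err, stdout, stderr) => {
      if (onEnd) onEnd()

      // Keep ffmpeg's stdout/stderr for debugging, as the diff does
      console.error('Error in transcoding job.', { stdout, stderr })
      rej(err)
    })

    command.on('end', () => {
      if (onEnd) onEnd()
      res()
    })

    // Start the job only after the listeners are registered
    command.run()
  })
}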
@@ -595,7 +357,7 @@ async function buildAudioMergeCommand (command: ffmpeg.FfmpegCommand, options: M
return command
}

function buildOnlyAudioCommand (command: ffmpeg.FfmpegCommand, options: OnlyAudioTranscodeOptions) {
function buildOnlyAudioCommand (command: ffmpeg.FfmpegCommand, _options: OnlyAudioTranscodeOptions) {
command = presetOnlyAudio(command)

return command
@@ -684,7 +446,7 @@ async function presetH264 (command: ffmpeg.FfmpegCommand, input: string, resolut

addDefaultX264Params(localCommand)

const parsedAudio = await audio.get(input)
const parsedAudio = await getAudioStream(input)

if (!parsedAudio.audioStream) {
localCommand = localCommand.noAudio()
@@ -699,22 +461,16 @@ async function presetH264 (command: ffmpeg.FfmpegCommand, input: string, resolut

const audioCodecName = parsedAudio.audioStream['codec_name']

if (audio.bitrate[audioCodecName]) {
const bitrate = audio.bitrate[audioCodecName](parsedAudio.audioStream['bit_rate'])
if (bitrate !== undefined && bitrate !== -1) localCommand = localCommand.audioBitrate(bitrate)
}
const bitrate = getMaxAudioBitrate(audioCodecName, parsedAudio.bitrate)

if (bitrate !== undefined && bitrate !== -1) localCommand = localCommand.audioBitrate(bitrate)
}

if (fps) {
// Constrained Encoding (VBV)
// https://slhck.info/video/2017/03/01/rate-control.html
// https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate
let targetBitrate = getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS)

// Don't transcode to an higher bitrate than the original file
const fileBitrate = await getVideoFileBitrate(input)
targetBitrate = Math.min(targetBitrate, fileBitrate)

const targetBitrate = getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS)
localCommand = localCommand.outputOptions([ `-maxrate ${targetBitrate}`, `-bufsize ${targetBitrate * 2}` ])

// Keyframe interval of 2 seconds for faster seeking and resolution switching.
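For context on the constrained-encoding lines above: -maxrate and -bufsize implement VBV rate control, and this hunk also appears to replace the old "clamp to the source file bitrate" step with a direct getTargetBitrate call. A hedged sketch of how those options land on a fluent-ffmpeg command; the 2 Mbit/s value is an arbitrary example, not PeerTube's computed target.

import * as ffmpeg from 'fluent-ffmpeg'

// Example target of 2 Mbit/s; the real code derives this from resolution and FPS via getTargetBitrate()
const targetBitrate = 2_000_000

// Build (but do not run) a command with VBV constrained encoding:
// cap the instantaneous bitrate and give the rate controller a 2x buffer, as in the diff above
const command = ffmpeg('input.mp4')
  .output('output.mp4')
  .outputOptions([ `-maxrate ${targetBitrate}`, `-bufsize ${targetBitrate * 2}` ])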
@@ -0,0 +1,249 @@
import * as ffmpeg from 'fluent-ffmpeg'
import { VideoFileMetadata } from '@shared/models/videos/video-file-metadata'
import { getMaxBitrate, VideoResolution } from '../../shared/models/videos'
import { CONFIG } from '../initializers/config'
import { VIDEO_TRANSCODING_FPS } from '../initializers/constants'
import { logger } from './logger'

function ffprobePromise (path: string) {
return new Promise<ffmpeg.FfprobeData>((res, rej) => {
ffmpeg.ffprobe(path, (err, data) => {
if (err) return rej(err)

return res(data)
})
})
}

async function getAudioStream (videoPath: string, existingProbe?: ffmpeg.FfprobeData) {
// without position, ffprobe considers the last input only
// we make it consider the first input only
// if you pass a file path to pos, then ffprobe acts on that file directly
const data = existingProbe || await ffprobePromise(videoPath)

if (Array.isArray(data.streams)) {
const audioStream = data.streams.find(stream => stream['codec_type'] === 'audio')

if (audioStream) {
return {
absolutePath: data.format.filename,
audioStream,
bitrate: parseInt(audioStream['bit_rate'] + '', 10)
}
}
}

return { absolutePath: data.format.filename }
}

function getMaxAudioBitrate (type: 'aac' | 'mp3' | string, bitrate: number) {
const baseKbitrate = 384
const toBits = (kbits: number) => kbits * 8000

if (type === 'aac') {
switch (true) {
case bitrate > toBits(baseKbitrate):
return baseKbitrate

default:
return -1 // we interpret it as a signal to copy the audio stream as is
}
}

if (type === 'mp3') {
/*
a 192kbit/sec mp3 doesn't hold as much information as a 192kbit/sec aac.
That's why, when using aac, we can go to lower kbit/sec. The equivalences
made here are not made to be accurate, especially with good mp3 encoders.
*/
switch (true) {
case bitrate <= toBits(192):
return 128

case bitrate <= toBits(384):
return 256

default:
return baseKbitrate
}
}

return undefined
}

async function getVideoStreamSize (path: string, existingProbe?: ffmpeg.FfprobeData) {
const videoStream = await getVideoStreamFromFile(path, existingProbe)

return videoStream === null
? { width: 0, height: 0 }
: { width: videoStream.width, height: videoStream.height }
}

async function getVideoStreamCodec (path: string) {
const videoStream = await getVideoStreamFromFile(path)

if (!videoStream) return ''

const videoCodec = videoStream.codec_tag_string

const baseProfileMatrix = {
High: '6400',
Main: '4D40',
Baseline: '42E0'
}

let baseProfile = baseProfileMatrix[videoStream.profile]
if (!baseProfile) {
logger.warn('Cannot get video profile codec of %s.', path, { videoStream })
baseProfile = baseProfileMatrix['High'] // Fallback
}

let level = videoStream.level.toString(16)
if (level.length === 1) level = `0${level}`

return `${videoCodec}.${baseProfile}${level}`
}

async function getAudioStreamCodec (path: string, existingProbe?: ffmpeg.FfprobeData) {
const { audioStream } = await getAudioStream(path, existingProbe)

if (!audioStream) return ''

const audioCodec = audioStream.codec_name
if (audioCodec === 'aac') return 'mp4a.40.2'

logger.warn('Cannot get audio codec of %s.', path, { audioStream })

return 'mp4a.40.2' // Fallback
}

async function getVideoFileResolution (path: string, existingProbe?: ffmpeg.FfprobeData) {
const size = await getVideoStreamSize(path, existingProbe)

return {
videoFileResolution: Math.min(size.height, size.width),
isPortraitMode: size.height > size.width
}
}

async function getVideoFileFPS (path: string, existingProbe?: ffmpeg.FfprobeData) {
const videoStream = await getVideoStreamFromFile(path, existingProbe)
if (videoStream === null) return 0

for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) {
const valuesText: string = videoStream[key]
if (!valuesText) continue

const [ frames, seconds ] = valuesText.split('/')
if (!frames || !seconds) continue

const result = parseInt(frames, 10) / parseInt(seconds, 10)
if (result > 0) return Math.round(result)
}

return 0
}

async function getMetadataFromFile (path: string, existingProbe?: ffmpeg.FfprobeData) {
const metadata = existingProbe || await ffprobePromise(path)

return new VideoFileMetadata(metadata)
}

async function getVideoFileBitrate (path: string, existingProbe?: ffmpeg.FfprobeData) {
const metadata = await getMetadataFromFile(path, existingProbe)

return metadata.format.bit_rate as number
}

async function getDurationFromVideoFile (path: string, existingProbe?: ffmpeg.FfprobeData) {
const metadata = await getMetadataFromFile(path, existingProbe)

return Math.floor(metadata.format.duration)
}

async function getVideoStreamFromFile (path: string, existingProbe?: ffmpeg.FfprobeData) {
const metadata = await getMetadataFromFile(path, existingProbe)

return metadata.streams.find(s => s.codec_type === 'video') || null
}

function computeResolutionsToTranscode (videoFileResolution: number, type: 'vod' | 'live') {
const configResolutions = type === 'vod'
? CONFIG.TRANSCODING.RESOLUTIONS
: CONFIG.LIVE.TRANSCODING.RESOLUTIONS

const resolutionsEnabled: number[] = []

// Put in the order we want to proceed jobs
const resolutions = [
VideoResolution.H_NOVIDEO,
VideoResolution.H_480P,
VideoResolution.H_360P,
VideoResolution.H_720P,
VideoResolution.H_240P,
VideoResolution.H_1080P,
VideoResolution.H_4K
]

for (const resolution of resolutions) {
if (configResolutions[resolution + 'p'] === true && videoFileResolution > resolution) {
resolutionsEnabled.push(resolution)
}
}

return resolutionsEnabled
}

async function canDoQuickTranscode (path: string): Promise<boolean> {
const probe = await ffprobePromise(path)

// NOTE: This could be optimized by running ffprobe only once (but it runs fast anyway)
const videoStream = await getVideoStreamFromFile(path, probe)
const parsedAudio = await getAudioStream(path, probe)
const fps = await getVideoFileFPS(path, probe)
const bitRate = await getVideoFileBitrate(path, probe)
const resolution = await getVideoFileResolution(path, probe)

// check video params
if (videoStream == null) return false
if (videoStream['codec_name'] !== 'h264') return false
if (videoStream['pix_fmt'] !== 'yuv420p') return false
if (fps < VIDEO_TRANSCODING_FPS.MIN || fps > VIDEO_TRANSCODING_FPS.MAX) return false
if (bitRate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)) return false

// check audio params (if audio stream exists)
if (parsedAudio.audioStream) {
if (parsedAudio.audioStream['codec_name'] !== 'aac') return false

const audioBitrate = parsedAudio.bitrate

const maxAudioBitrate = getMaxAudioBitrate('aac', audioBitrate)
if (maxAudioBitrate !== -1 && audioBitrate > maxAudioBitrate) return false
}

return true
}

function getClosestFramerateStandard (fps: number, type: 'HD_STANDARD' | 'STANDARD'): number {
return VIDEO_TRANSCODING_FPS[type].slice(0)
.sort((a, b) => fps % a - fps % b)[0]
}

// ---------------------------------------------------------------------------

export {
getVideoStreamCodec,
getAudioStreamCodec,
getVideoStreamSize,
getVideoFileResolution,
getMetadataFromFile,
getMaxAudioBitrate,
getDurationFromVideoFile,
getAudioStream,
getVideoFileFPS,
getClosestFramerateStandard,
computeResolutionsToTranscode,
getVideoFileBitrate,
canDoQuickTranscode
}
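The new ffprobe-utils module above wraps ffprobe in a single ffprobePromise call and threads an optional existingProbe argument through every getter, so a caller like canDoQuickTranscode can probe a file once and reuse the result. Below is a small usage sketch written as if it sat inside ffprobe-utils.ts, since ffprobePromise itself is not in the export list of this diff.

// Sketch assumed to live inside ffprobe-utils.ts (ffprobePromise is not exported above)
async function inspectOnce (path: string) {
  // One ffprobe run...
  const probe = await ffprobePromise(path)

  // ...shared by every getter through the optional existingProbe parameter,
  // the same pattern canDoQuickTranscode uses above
  const fps = await getVideoFileFPS(path, probe)
  const { videoFileResolution, isPortraitMode } = await getVideoFileResolution(path, probe)
  const bitrate = await getVideoFileBitrate(path, probe)

  return { fps, videoFileResolution, isPortraitMode, bitrate }
}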
@@ -1,7 +1,7 @@
import * as Sequelize from 'sequelize'
import { join } from 'path'
import { CONFIG } from '../../initializers/config'
import { getVideoFileResolution } from '../../helpers/ffmpeg-utils'
import { getVideoFileResolution } from '../../helpers/ffprobe-utils'
import { readdir, rename } from 'fs-extra'

function up (utils: {
@@ -1,17 +1,17 @@
import { basename, dirname, join } from 'path'
import { HLS_STREAMING_PLAYLIST_DIRECTORY, P2P_MEDIA_LOADER_PEER_VERSION } from '../initializers/constants'
import { close, ensureDir, move, open, outputJSON, pathExists, read, readFile, remove, writeFile } from 'fs-extra'
import { getVideoStreamSize, getAudioStreamCodec, getVideoStreamCodec } from '../helpers/ffmpeg-utils'
import { flatten, uniq } from 'lodash'
import { basename, dirname, join } from 'path'
import { MVideoWithFile } from '@server/types/models'
import { sha256 } from '../helpers/core-utils'
import { VideoStreamingPlaylistModel } from '../models/video/video-streaming-playlist'
import { getAudioStreamCodec, getVideoStreamCodec, getVideoStreamSize } from '../helpers/ffprobe-utils'
import { logger } from '../helpers/logger'
import { doRequest, doRequestAndSaveToFile } from '../helpers/requests'
import { generateRandomString } from '../helpers/utils'
import { flatten, uniq } from 'lodash'
import { VideoFileModel } from '../models/video/video-file'
import { CONFIG } from '../initializers/config'
import { HLS_STREAMING_PLAYLIST_DIRECTORY, P2P_MEDIA_LOADER_PEER_VERSION } from '../initializers/constants'
import { sequelizeTypescript } from '../initializers/database'
import { MVideoWithFile } from '@server/types/models'
import { VideoFileModel } from '../models/video/video-file'
import { VideoStreamingPlaylistModel } from '../models/video/video-streaming-playlist'
import { getVideoFilename, getVideoFilePath } from './video-paths'

async function updateStreamingPlaylistsInfohashesIfNeeded () {
@@ -1,15 +1,15 @@
import * as Bull from 'bull'
import { logger } from '../../../helpers/logger'
import { VideoModel } from '../../../models/video/video'
import { publishNewResolutionIfNeeded } from './video-transcoding'
import { getVideoFileFPS, getVideoFileResolution } from '../../../helpers/ffmpeg-utils'
import { copy, stat } from 'fs-extra'
import { VideoFileModel } from '../../../models/video/video-file'
import { extname } from 'path'
import { MVideoFile, MVideoWithFile } from '@server/types/models'
import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent'
import { getVideoFilePath } from '@server/lib/video-paths'
import { MVideoFile, MVideoWithFile } from '@server/types/models'
import { VideoFileImportPayload } from '@shared/models'
import { getVideoFileFPS, getVideoFileResolution } from '../../../helpers/ffprobe-utils'
import { logger } from '../../../helpers/logger'
import { VideoModel } from '../../../models/video/video'
import { VideoFileModel } from '../../../models/video/video-file'
import { publishNewResolutionIfNeeded } from './video-transcoding'

async function processVideoFileImport (job: Bull.Job) {
const payload = job.data as VideoFileImportPayload
@@ -17,7 +17,7 @@ import {
} from '../../../../shared'
import { VideoImportState } from '../../../../shared/models/videos'
import { ThumbnailType } from '../../../../shared/models/videos/thumbnail.type'
import { getDurationFromVideoFile, getVideoFileFPS, getVideoFileResolution } from '../../../helpers/ffmpeg-utils'
import { getDurationFromVideoFile, getVideoFileFPS, getVideoFileResolution } from '../../../helpers/ffprobe-utils'
import { logger } from '../../../helpers/logger'
import { getSecureTorrentName } from '../../../helpers/utils'
import { createTorrentAndSetInfoHash, downloadWebTorrentVideo } from '../../../helpers/webtorrent'
@@ -1,7 +1,9 @@
import * as Bull from 'bull'
import { readdir, remove } from 'fs-extra'
import { join } from 'path'
import { getDurationFromVideoFile, getVideoFileResolution, hlsPlaylistToFragmentedMP4 } from '@server/helpers/ffmpeg-utils'
import { hlsPlaylistToFragmentedMP4 } from '@server/helpers/ffmpeg-utils'
import { getDurationFromVideoFile, getVideoFileResolution } from '@server/helpers/ffprobe-utils'
import { generateVideoMiniature } from '@server/lib/thumbnail'
import { publishAndFederateIfNeeded } from '@server/lib/video'
import { getHLSDirectory } from '@server/lib/video-paths'
import { generateHlsPlaylist } from '@server/lib/video-transcoding'
@@ -12,7 +14,6 @@ import { VideoStreamingPlaylistModel } from '@server/models/video/video-streamin
import { MStreamingPlaylist, MVideo, MVideoLive } from '@server/types/models'
import { ThumbnailType, VideoLiveEndingPayload, VideoState } from '@shared/models'
import { logger } from '../../../helpers/logger'
import { generateVideoMiniature } from '@server/lib/thumbnail'

async function processVideoLiveEnding (job: Bull.Job) {
const payload = job.data as VideoLiveEndingPayload
@@ -9,7 +9,7 @@ import {
VideoTranscodingPayload
} from '../../../../shared'
import { retryTransactionWrapper } from '../../../helpers/database-utils'
import { computeResolutionsToTranscode } from '../../../helpers/ffmpeg-utils'
import { computeResolutionsToTranscode } from '../../../helpers/ffprobe-utils'
import { logger } from '../../../helpers/logger'
import { CONFIG } from '../../../initializers/config'
import { sequelizeTypescript } from '../../../initializers/database'
@@ -4,13 +4,8 @@ import { FfmpegCommand } from 'fluent-ffmpeg'
import { ensureDir, stat } from 'fs-extra'
import { basename } from 'path'
import { isTestInstance } from '@server/helpers/core-utils'
import {
computeResolutionsToTranscode,
getVideoFileFPS,
getVideoFileResolution,
runLiveMuxing,
runLiveTranscoding
} from '@server/helpers/ffmpeg-utils'
import { runLiveMuxing, runLiveTranscoding } from '@server/helpers/ffmpeg-utils'
import { computeResolutionsToTranscode, getVideoFileFPS, getVideoFileResolution } from '@server/helpers/ffprobe-utils'
import { logger } from '@server/helpers/logger'
import { CONFIG, registerConfigChangedHandler } from '@server/initializers/config'
import { MEMOIZE_TTL, P2P_MEDIA_LOADER_PEER_VERSION, VIDEO_LIVE, VIEW_LIFETIME, WEBSERVER } from '@server/initializers/constants'
@@ -4,15 +4,8 @@ import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent'
import { MStreamingPlaylistFilesVideo, MVideoFile, MVideoWithAllFiles, MVideoWithFile } from '@server/types/models'
import { VideoResolution } from '../../shared/models/videos'
import { VideoStreamingPlaylistType } from '../../shared/models/videos/video-streaming-playlist.type'
import {
canDoQuickTranscode,
getDurationFromVideoFile,
getMetadataFromFile,
getVideoFileFPS,
transcode,
TranscodeOptions,
TranscodeOptionsType
} from '../helpers/ffmpeg-utils'
import { transcode, TranscodeOptions, TranscodeOptionsType } from '../helpers/ffmpeg-utils'
import { canDoQuickTranscode, getDurationFromVideoFile, getMetadataFromFile, getVideoFileFPS } from '../helpers/ffprobe-utils'
import { logger } from '../helpers/logger'
import { CONFIG } from '../initializers/config'
import { HLS_STREAMING_PLAYLIST_DIRECTORY, P2P_MEDIA_LOADER_PEER_VERSION, WEBSERVER } from '../initializers/constants'
@@ -34,7 +34,7 @@ import {
isVideoTagsValid
} from '../../../helpers/custom-validators/videos'
import { cleanUpReqFiles } from '../../../helpers/express-utils'
import { getDurationFromVideoFile } from '../../../helpers/ffmpeg-utils'
import { getDurationFromVideoFile } from '../../../helpers/ffprobe-utils'
import { logger } from '../../../helpers/logger'
import {
checkUserCanManageVideo,
@@ -51,7 +51,7 @@ import {
isVideoStateValid,
isVideoSupportValid
} from '../../helpers/custom-validators/videos'
import { getVideoFileResolution } from '../../helpers/ffmpeg-utils'
import { getVideoFileResolution } from '../../helpers/ffprobe-utils'
import { logger } from '../../helpers/logger'
import { CONFIG } from '../../initializers/config'
import {
@@ -1,7 +1,9 @@
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import * as chai from 'chai'
import 'mocha'
import * as chai from 'chai'
import { join } from 'path'
import { getAudioStream, getVideoStreamSize } from '@server/helpers/ffprobe-utils'
import {
cleanupTests,
doubleFollow,
@@ -14,8 +16,6 @@ import {
waitJobs
} from '../../../../shared/extra-utils'
import { VideoDetails } from '../../../../shared/models/videos'
import { join } from 'path'
import { audio, getVideoStreamSize } from '@server/helpers/ffmpeg-utils'

const expect = chai.expect
@@ -85,7 +85,7 @@ describe('Test audio only video transcoding', function () {
]

for (const path of paths) {
const { audioStream } = await audio.get(path)
const { audioStream } = await getAudioStream(path)
expect(audioStream['codec_name']).to.be.equal('aac')
expect(audioStream['bit_rate']).to.be.at.most(384 * 8000)
@@ -1,17 +1,12 @@
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import * as chai from 'chai'
import 'mocha'
import * as chai from 'chai'
import { FfprobeData } from 'fluent-ffmpeg'
import { omit } from 'lodash'
import { getMaxBitrate, VideoDetails, VideoResolution, VideoState } from '../../../../shared/models/videos'
import {
audio,
canDoQuickTranscode,
getVideoFileBitrate,
getVideoFileFPS,
getVideoFileResolution,
getMetadataFromFile
} from '../../../helpers/ffmpeg-utils'
import { join } from 'path'

import { VIDEO_TRANSCODING_FPS } from '../../../../server/initializers/constants'
import {
buildAbsoluteFixturePath,
cleanupTests,
@@ -29,14 +24,20 @@ import {
ServerInfo,
setAccessTokensToServers,
updateCustomSubConfig,
uploadVideo, uploadVideoAndGetId,
uploadVideo,
uploadVideoAndGetId,
waitJobs,
webtorrentAdd
} from '../../../../shared/extra-utils'
import { join } from 'path'
import { VIDEO_TRANSCODING_FPS } from '../../../../server/initializers/constants'
import { FfprobeData } from 'fluent-ffmpeg'
import { VideoFileMetadata } from '@shared/models/videos/video-file-metadata'
import { getMaxBitrate, VideoDetails, VideoResolution, VideoState } from '../../../../shared/models/videos'
import {
canDoQuickTranscode,
getAudioStream,
getMetadataFromFile,
getVideoFileBitrate,
getVideoFileFPS,
getVideoFileResolution
} from '../../../helpers/ffprobe-utils'

const expect = chai.expect
@@ -136,7 +137,7 @@ describe('Test video transcoding', function () {
expect(videoDetails.files).to.have.lengthOf(4)

const path = join(root(), 'test' + servers[1].internalServerNumber, 'videos', video.uuid + '-240.mp4')
const probe = await audio.get(path)
const probe = await getAudioStream(path)

if (probe.audioStream) {
expect(probe.audioStream['codec_name']).to.be.equal('aac')
@@ -167,7 +168,7 @@ describe('Test video transcoding', function () {

expect(videoDetails.files).to.have.lengthOf(4)
const path = join(root(), 'test' + servers[1].internalServerNumber, 'videos', video.uuid + '-240.mp4')
const probe = await audio.get(path)
const probe = await getAudioStream(path)
expect(probe).to.not.have.property('audioStream')
}
})
@@ -192,9 +193,9 @@ describe('Test video transcoding', function () {

expect(videoDetails.files).to.have.lengthOf(4)
const fixturePath = buildAbsoluteFixturePath(videoAttributes.fixture)
const fixtureVideoProbe = await audio.get(fixturePath)
const fixtureVideoProbe = await getAudioStream(fixturePath)
const path = join(root(), 'test' + servers[1].internalServerNumber, 'videos', video.uuid + '-240.mp4')
const videoProbe = await audio.get(path)
const videoProbe = await getAudioStream(path)
if (videoProbe.audioStream && fixtureVideoProbe.audioStream) {
const toOmit = [ 'max_bit_rate', 'duration', 'duration_ts', 'nb_frames', 'start_time', 'start_pts' ]
expect(omit(videoProbe.audioStream, toOmit)).to.be.deep.equal(omit(fixtureVideoProbe.audioStream, toOmit))
@@ -513,7 +514,7 @@ describe('Test video transcoding', function () {

{
const path = join(root(), 'test' + servers[1].internalServerNumber, 'videos', videoUUID + '-240.mp4')
const metadata = await getMetadataFromFile<VideoFileMetadata>(path)
const metadata = await getMetadataFromFile(path)

// expected format properties
for (const p of [
@@ -2,7 +2,7 @@

import 'mocha'
import * as chai from 'chai'
import { getMaxBitrate, Video, VideoDetails, VideoResolution } from '../../../shared/models/videos'
import { join } from 'path'
import {
cleanupTests,
doubleFollow,
@@ -20,9 +20,9 @@ import {
wait
} from '../../../shared/extra-utils'
import { waitJobs } from '../../../shared/extra-utils/server/jobs'
import { getVideoFileBitrate, getVideoFileFPS, getVideoFileResolution } from '../../helpers/ffmpeg-utils'
import { getMaxBitrate, Video, VideoDetails, VideoResolution } from '../../../shared/models/videos'
import { getVideoFileBitrate, getVideoFileFPS, getVideoFileResolution } from '../../helpers/ffprobe-utils'
import { VIDEO_TRANSCODING_FPS } from '../../initializers/constants'
import { join } from 'path'

const expect = chai.expect