2017-10-09 04:06:13 -05:00
|
|
|
import * as ffmpeg from 'fluent-ffmpeg'
|
2019-01-29 01:37:25 -06:00
|
|
|
import { dirname, join } from 'path'
|
2020-01-31 09:56:52 -06:00
|
|
|
import { getMaxBitrate, getTargetBitrate, VideoResolution } from '../../shared/models/videos'
|
2019-04-11 04:33:44 -05:00
|
|
|
import { FFMPEG_NICE, VIDEO_TRANSCODING_FPS } from '../initializers/constants'
|
2018-02-27 04:29:24 -06:00
|
|
|
import { processImage } from './image-utils'
|
2018-02-27 06:46:56 -06:00
|
|
|
import { logger } from './logger'
|
2018-09-24 06:07:33 -05:00
|
|
|
import { checkFFmpegEncoders } from '../initializers/checker-before-init'
|
2019-04-11 04:33:44 -05:00
|
|
|
import { readFile, remove, writeFile } from 'fs-extra'
|
|
|
|
import { CONFIG } from '../initializers/config'
|
2020-03-10 08:39:40 -05:00
|
|
|
import { VideoFileMetadata } from '@shared/models/videos/video-file-metadata'
|
2017-10-09 04:06:13 -05:00
|
|
|
|
2020-01-31 09:56:52 -06:00
|
|
|
/**
|
|
|
|
* A toolbox to play with audio
|
|
|
|
*/
|
|
|
|
namespace audio {
|
|
|
|
export const get = (videoPath: string) => {
|
|
|
|
// without position, ffprobe considers the last input only
|
|
|
|
// we make it consider the first input only
|
|
|
|
// if you pass a file path to pos, then ffprobe acts on that file directly
|
|
|
|
return new Promise<{ absolutePath: string, audioStream?: any }>((res, rej) => {
|
|
|
|
|
|
|
|
function parseFfprobe (err: any, data: ffmpeg.FfprobeData) {
|
|
|
|
if (err) return rej(err)
|
|
|
|
|
|
|
|
if ('streams' in data) {
|
|
|
|
const audioStream = data.streams.find(stream => stream['codec_type'] === 'audio')
|
|
|
|
if (audioStream) {
|
|
|
|
return res({
|
|
|
|
absolutePath: data.format.filename,
|
|
|
|
audioStream
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return res({ absolutePath: data.format.filename })
|
|
|
|
}
|
|
|
|
|
|
|
|
return ffmpeg.ffprobe(videoPath, parseFfprobe)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
export namespace bitrate {
|
|
|
|
const baseKbitrate = 384
|
|
|
|
|
|
|
|
const toBits = (kbits: number) => kbits * 8000
|
|
|
|
|
|
|
|
export const aac = (bitrate: number): number => {
|
|
|
|
switch (true) {
|
|
|
|
case bitrate > toBits(baseKbitrate):
|
|
|
|
return baseKbitrate
|
|
|
|
|
|
|
|
default:
|
|
|
|
return -1 // we interpret it as a signal to copy the audio stream as is
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
export const mp3 = (bitrate: number): number => {
|
|
|
|
/*
|
|
|
|
a 192kbit/sec mp3 doesn't hold as much information as a 192kbit/sec aac.
|
|
|
|
That's why, when using aac, we can go to lower kbit/sec. The equivalences
|
|
|
|
made here are not made to be accurate, especially with good mp3 encoders.
|
|
|
|
*/
|
|
|
|
switch (true) {
|
|
|
|
case bitrate <= toBits(192):
|
|
|
|
return 128
|
|
|
|
|
|
|
|
case bitrate <= toBits(384):
|
|
|
|
return 256
|
|
|
|
|
|
|
|
default:
|
|
|
|
return baseKbitrate
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-08-14 08:28:30 -05:00
|
|
|
function computeResolutionsToTranscode (videoFileHeight: number) {
|
|
|
|
const resolutionsEnabled: number[] = []
|
|
|
|
const configResolutions = CONFIG.TRANSCODING.RESOLUTIONS
|
|
|
|
|
|
|
|
// Put in the order we want to proceed jobs
|
|
|
|
const resolutions = [
|
2019-10-31 20:06:19 -05:00
|
|
|
VideoResolution.H_NOVIDEO,
|
2018-08-14 08:28:30 -05:00
|
|
|
VideoResolution.H_480P,
|
|
|
|
VideoResolution.H_360P,
|
|
|
|
VideoResolution.H_720P,
|
|
|
|
VideoResolution.H_240P,
|
2019-06-06 07:45:57 -05:00
|
|
|
VideoResolution.H_1080P,
|
|
|
|
VideoResolution.H_4K
|
2018-08-14 08:28:30 -05:00
|
|
|
]
|
|
|
|
|
|
|
|
for (const resolution of resolutions) {
|
2020-01-31 09:56:52 -06:00
|
|
|
if (configResolutions[resolution + 'p'] === true && videoFileHeight > resolution) {
|
2018-08-14 08:28:30 -05:00
|
|
|
resolutionsEnabled.push(resolution)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return resolutionsEnabled
|
|
|
|
}
|
|
|
|
|
2019-11-26 09:25:36 -06:00
|
|
|
async function getVideoStreamSize (path: string) {
|
2019-04-24 13:27:05 -05:00
|
|
|
const videoStream = await getVideoStreamFromFile(path)
|
2018-02-27 08:57:28 -06:00
|
|
|
|
2019-11-22 04:43:17 -06:00
|
|
|
return videoStream === null
|
|
|
|
? { width: 0, height: 0 }
|
|
|
|
: { width: videoStream.width, height: videoStream.height }
|
2019-01-29 01:37:25 -06:00
|
|
|
}
|
|
|
|
|
2019-11-26 09:25:36 -06:00
|
|
|
async function getVideoStreamCodec (path: string) {
|
|
|
|
const videoStream = await getVideoStreamFromFile(path)
|
|
|
|
|
|
|
|
if (!videoStream) return ''
|
|
|
|
|
|
|
|
const videoCodec = videoStream.codec_tag_string
|
|
|
|
|
|
|
|
const baseProfileMatrix = {
|
2020-01-31 09:56:52 -06:00
|
|
|
High: '6400',
|
|
|
|
Main: '4D40',
|
|
|
|
Baseline: '42E0'
|
2019-11-26 09:25:36 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
let baseProfile = baseProfileMatrix[videoStream.profile]
|
|
|
|
if (!baseProfile) {
|
|
|
|
logger.warn('Cannot get video profile codec of %s.', path, { videoStream })
|
|
|
|
baseProfile = baseProfileMatrix['High'] // Fallback
|
|
|
|
}
|
|
|
|
|
2020-04-03 07:06:31 -05:00
|
|
|
let level = videoStream.level.toString(16)
|
|
|
|
if (level.length === 1) level = `0${level}`
|
2019-11-26 09:25:36 -06:00
|
|
|
|
|
|
|
return `${videoCodec}.${baseProfile}${level}`
|
|
|
|
}
|
|
|
|
|
|
|
|
async function getAudioStreamCodec (path: string) {
|
|
|
|
const { audioStream } = await audio.get(path)
|
|
|
|
|
|
|
|
if (!audioStream) return ''
|
|
|
|
|
|
|
|
const audioCodec = audioStream.codec_name
|
2019-11-26 09:36:48 -06:00
|
|
|
if (audioCodec === 'aac') return 'mp4a.40.2'
|
2019-11-26 09:25:36 -06:00
|
|
|
|
|
|
|
logger.warn('Cannot get audio codec of %s.', path, { audioStream })
|
|
|
|
|
|
|
|
return 'mp4a.40.2' // Fallback
|
|
|
|
}
|
|
|
|
|
2019-01-29 01:37:25 -06:00
|
|
|
async function getVideoFileResolution (path: string) {
|
2019-11-26 09:25:36 -06:00
|
|
|
const size = await getVideoStreamSize(path)
|
2019-01-29 01:37:25 -06:00
|
|
|
|
|
|
|
return {
|
|
|
|
videoFileResolution: Math.min(size.height, size.width),
|
|
|
|
isPortraitMode: size.height > size.width
|
2018-02-27 08:57:28 -06:00
|
|
|
}
|
2018-02-26 03:48:53 -06:00
|
|
|
}
|
2017-10-09 04:06:13 -05:00
|
|
|
|
2018-02-26 03:48:53 -06:00
|
|
|
async function getVideoFileFPS (path: string) {
|
2019-04-24 13:27:05 -05:00
|
|
|
const videoStream = await getVideoStreamFromFile(path)
|
2019-11-22 04:43:17 -06:00
|
|
|
if (videoStream === null) return 0
|
2019-10-31 20:06:19 -05:00
|
|
|
|
2019-01-17 07:03:32 -06:00
|
|
|
for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) {
|
2020-01-31 09:56:52 -06:00
|
|
|
const valuesText: string = videoStream[key]
|
2018-02-26 03:48:53 -06:00
|
|
|
if (!valuesText) continue
|
|
|
|
|
|
|
|
const [ frames, seconds ] = valuesText.split('/')
|
|
|
|
if (!frames || !seconds) continue
|
|
|
|
|
|
|
|
const result = parseInt(frames, 10) / parseInt(seconds, 10)
|
2018-06-29 09:41:29 -05:00
|
|
|
if (result > 0) return Math.round(result)
|
2018-02-26 03:48:53 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
return 0
|
2017-10-09 04:06:13 -05:00
|
|
|
}
|
|
|
|
|
2020-03-10 08:49:02 -05:00
|
|
|
async function getMetadataFromFile <T> (path: string, cb = metadata => metadata) {
|
2020-03-10 08:39:40 -05:00
|
|
|
return new Promise<T>((res, rej) => {
|
2018-10-08 09:26:04 -05:00
|
|
|
ffmpeg.ffprobe(path, (err, metadata) => {
|
|
|
|
if (err) return rej(err)
|
|
|
|
|
2020-03-10 08:39:40 -05:00
|
|
|
return res(cb(new VideoFileMetadata(metadata)))
|
2018-10-08 09:26:04 -05:00
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2020-03-10 08:39:40 -05:00
|
|
|
async function getVideoFileBitrate (path: string) {
|
|
|
|
return getMetadataFromFile<number>(path, metadata => metadata.format.bit_rate)
|
|
|
|
}
|
|
|
|
|
2017-10-09 04:06:13 -05:00
|
|
|
function getDurationFromVideoFile (path: string) {
|
2020-03-10 08:39:40 -05:00
|
|
|
return getMetadataFromFile<number>(path, metadata => Math.floor(metadata.format.duration))
|
|
|
|
}
|
2017-10-09 04:06:13 -05:00
|
|
|
|
2020-03-10 08:39:40 -05:00
|
|
|
function getVideoStreamFromFile (path: string) {
|
|
|
|
return getMetadataFromFile<any>(path, metadata => metadata.streams.find(s => s.codec_type === 'video') || null)
|
2017-10-09 04:06:13 -05:00
|
|
|
}
|
|
|
|
|
2018-02-27 04:29:24 -06:00
|
|
|
async function generateImageFromVideoFile (fromPath: string, folder: string, imageName: string, size: { width: number, height: number }) {
|
|
|
|
const pendingImageName = 'pending-' + imageName
|
|
|
|
|
2017-10-09 04:06:13 -05:00
|
|
|
const options = {
|
2018-02-27 04:29:24 -06:00
|
|
|
filename: pendingImageName,
|
2017-10-09 04:06:13 -05:00
|
|
|
count: 1,
|
|
|
|
folder
|
|
|
|
}
|
|
|
|
|
2018-02-27 04:29:24 -06:00
|
|
|
const pendingImagePath = join(folder, pendingImageName)
|
2018-02-27 06:46:56 -06:00
|
|
|
|
|
|
|
try {
|
|
|
|
await new Promise<string>((res, rej) => {
|
2018-07-27 19:59:01 -05:00
|
|
|
ffmpeg(fromPath, { niceness: FFMPEG_NICE.THUMBNAIL })
|
2018-02-27 06:46:56 -06:00
|
|
|
.on('error', rej)
|
|
|
|
.on('end', () => res(imageName))
|
|
|
|
.thumbnail(options)
|
|
|
|
})
|
|
|
|
|
|
|
|
const destination = join(folder, imageName)
|
2019-04-24 02:56:25 -05:00
|
|
|
await processImage(pendingImagePath, destination, size)
|
2018-02-27 06:46:56 -06:00
|
|
|
} catch (err) {
|
2018-03-26 08:54:13 -05:00
|
|
|
logger.error('Cannot generate image from video %s.', fromPath, { err })
|
2018-02-27 06:46:56 -06:00
|
|
|
|
|
|
|
try {
|
2018-08-27 09:23:34 -05:00
|
|
|
await remove(pendingImagePath)
|
2018-02-27 06:46:56 -06:00
|
|
|
} catch (err) {
|
2018-03-26 08:54:13 -05:00
|
|
|
logger.debug('Cannot remove pending image path after generation error.', { err })
|
2018-02-27 06:46:56 -06:00
|
|
|
}
|
|
|
|
}
|
2017-10-09 04:06:13 -05:00
|
|
|
}
|
|
|
|
|
2019-11-22 04:43:17 -06:00
|
|
|
// Discriminant used to tell the transcode job variants apart
type TranscodeOptionsType = 'hls' | 'quick-transcode' | 'video' | 'merge-audio' | 'only-audio'

// Options common to every transcode variant
interface BaseTranscodeOptions {
  type: TranscodeOptionsType
  inputPath: string
  outputPath: string
  // Target resolution of the output
  resolution: VideoResolution
  // When true, the resolution constrains the width instead of the height
  // (see the size computation in buildx264Command)
  isPortraitMode?: boolean
}

// Produce an HLS playlist with an fMP4 segment file
interface HLSTranscodeOptions extends BaseTranscodeOptions {
  type: 'hls'
  // When true, streams are copied instead of re-encoded
  copyCodecs: boolean
  hlsPlaylist: {
    videoFilename: string
  }
}

// Remux into mp4 without re-encoding (see buildQuickTranscodeCommand)
interface QuickTranscodeOptions extends BaseTranscodeOptions {
  type: 'quick-transcode'
}

// Standard x264 re-encode
interface VideoTranscodeOptions extends BaseTranscodeOptions {
  type: 'video'
}

// Build a video from an audio file plus a (looped) image input
interface MergeAudioTranscodeOptions extends BaseTranscodeOptions {
  type: 'merge-audio'
  audioPath: string
}

// Keep only the audio stream of the input
interface OnlyAudioTranscodeOptions extends BaseTranscodeOptions {
  type: 'only-audio'
}

// Discriminated union consumed by transcode()
type TranscodeOptions =
  HLSTranscodeOptions
  | VideoTranscodeOptions
  | MergeAudioTranscodeOptions
  | OnlyAudioTranscodeOptions
  | QuickTranscodeOptions
|
2019-05-16 09:55:34 -05:00
|
|
|
|
2017-10-09 04:06:13 -05:00
|
|
|
function transcode (options: TranscodeOptions) {
|
2018-02-26 03:48:53 -06:00
|
|
|
return new Promise<void>(async (res, rej) => {
|
2018-10-18 02:44:43 -05:00
|
|
|
try {
|
|
|
|
let command = ffmpeg(options.inputPath, { niceness: FFMPEG_NICE.TRANSCODING })
|
|
|
|
.output(options.outputPath)
|
2019-04-08 03:03:23 -05:00
|
|
|
|
2019-05-16 09:55:34 -05:00
|
|
|
if (options.type === 'quick-transcode') {
|
2020-01-31 09:56:52 -06:00
|
|
|
command = buildQuickTranscodeCommand(command)
|
2019-05-16 09:55:34 -05:00
|
|
|
} else if (options.type === 'hls') {
|
2019-04-08 03:03:23 -05:00
|
|
|
command = await buildHLSCommand(command, options)
|
2019-05-16 09:55:34 -05:00
|
|
|
} else if (options.type === 'merge-audio') {
|
|
|
|
command = await buildAudioMergeCommand(command, options)
|
2019-11-22 04:43:17 -06:00
|
|
|
} else if (options.type === 'only-audio') {
|
2020-01-31 09:56:52 -06:00
|
|
|
command = buildOnlyAudioCommand(command, options)
|
2019-04-08 03:03:23 -05:00
|
|
|
} else {
|
|
|
|
command = await buildx264Command(command, options)
|
|
|
|
}
|
2018-07-27 19:59:01 -05:00
|
|
|
|
2018-10-18 02:44:43 -05:00
|
|
|
if (CONFIG.TRANSCODING.THREADS > 0) {
|
|
|
|
// if we don't set any threads ffmpeg will chose automatically
|
|
|
|
command = command.outputOption('-threads ' + CONFIG.TRANSCODING.THREADS)
|
|
|
|
}
|
2017-10-09 04:06:13 -05:00
|
|
|
|
2018-10-18 02:44:43 -05:00
|
|
|
command
|
|
|
|
.on('error', (err, stdout, stderr) => {
|
|
|
|
logger.error('Error in transcoding job.', { stdout, stderr })
|
|
|
|
return rej(err)
|
|
|
|
})
|
2019-02-11 10:20:28 -06:00
|
|
|
.on('end', () => {
|
2019-05-16 09:55:34 -05:00
|
|
|
return fixHLSPlaylistIfNeeded(options)
|
2019-02-11 10:20:28 -06:00
|
|
|
.then(() => res())
|
|
|
|
.catch(err => rej(err))
|
|
|
|
})
|
2018-10-18 02:44:43 -05:00
|
|
|
.run()
|
|
|
|
} catch (err) {
|
|
|
|
return rej(err)
|
|
|
|
}
|
2017-10-09 04:06:13 -05:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2019-05-03 20:18:32 -05:00
|
|
|
async function canDoQuickTranscode (path: string): Promise<boolean> {
|
2019-04-24 13:27:05 -05:00
|
|
|
// NOTE: This could be optimized by running ffprobe only once (but it runs fast anyway)
|
|
|
|
const videoStream = await getVideoStreamFromFile(path)
|
|
|
|
const parsedAudio = await audio.get(path)
|
|
|
|
const fps = await getVideoFileFPS(path)
|
|
|
|
const bitRate = await getVideoFileBitrate(path)
|
|
|
|
const resolution = await getVideoFileResolution(path)
|
|
|
|
|
|
|
|
// check video params
|
2019-10-31 20:06:19 -05:00
|
|
|
if (videoStream == null) return false
|
2020-01-31 09:56:52 -06:00
|
|
|
if (videoStream['codec_name'] !== 'h264') return false
|
|
|
|
if (videoStream['pix_fmt'] !== 'yuv420p') return false
|
2019-05-16 01:58:39 -05:00
|
|
|
if (fps < VIDEO_TRANSCODING_FPS.MIN || fps > VIDEO_TRANSCODING_FPS.MAX) return false
|
|
|
|
if (bitRate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)) return false
|
2019-04-24 13:27:05 -05:00
|
|
|
|
2019-11-22 04:43:17 -06:00
|
|
|
// check audio params (if audio stream exists)
|
2019-04-24 13:27:05 -05:00
|
|
|
if (parsedAudio.audioStream) {
|
2020-01-31 09:56:52 -06:00
|
|
|
if (parsedAudio.audioStream['codec_name'] !== 'aac') return false
|
2019-05-16 01:58:39 -05:00
|
|
|
|
2020-01-31 09:56:52 -06:00
|
|
|
const maxAudioBitrate = audio.bitrate['aac'](parsedAudio.audioStream['bit_rate'])
|
|
|
|
if (maxAudioBitrate !== -1 && parsedAudio.audioStream['bit_rate'] > maxAudioBitrate) return false
|
2019-04-24 13:27:05 -05:00
|
|
|
}
|
2019-05-03 20:18:32 -05:00
|
|
|
|
2019-04-24 13:27:05 -05:00
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
2020-01-29 09:54:03 -06:00
|
|
|
function getClosestFramerateStandard (fps: number, type: 'HD_STANDARD' | 'STANDARD'): number {
|
|
|
|
return VIDEO_TRANSCODING_FPS[type].slice(0)
|
|
|
|
.sort((a, b) => fps % a - fps % b)[0]
|
2020-01-20 13:40:30 -06:00
|
|
|
}
|
|
|
|
|
2017-10-09 04:06:13 -05:00
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
// Public API of this module
export {
  getVideoStreamCodec,
  getAudioStreamCodec,
  getVideoStreamSize,
  getVideoFileResolution,
  getMetadataFromFile,
  getDurationFromVideoFile,
  generateImageFromVideoFile,
  TranscodeOptions,
  TranscodeOptionsType,
  transcode,
  getVideoFileFPS,
  computeResolutionsToTranscode,
  audio,
  getVideoFileBitrate,
  canDoQuickTranscode
}
|
|
|
|
|
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
2019-11-15 08:06:03 -06:00
|
|
|
async function buildx264Command (command: ffmpeg.FfmpegCommand, options: TranscodeOptions) {
|
2019-04-08 03:03:23 -05:00
|
|
|
let fps = await getVideoFileFPS(options.inputPath)
|
|
|
|
if (
|
2020-01-14 16:34:03 -06:00
|
|
|
// On small/medium resolutions, limit FPS
|
2019-04-08 03:03:23 -05:00
|
|
|
options.resolution !== undefined &&
|
|
|
|
options.resolution < VIDEO_TRANSCODING_FPS.KEEP_ORIGIN_FPS_RESOLUTION_MIN &&
|
2020-01-29 09:54:03 -06:00
|
|
|
fps > VIDEO_TRANSCODING_FPS.AVERAGE
|
2019-04-08 03:03:23 -05:00
|
|
|
) {
|
2020-01-14 16:34:03 -06:00
|
|
|
// Get closest standard framerate by modulo: downsampling has to be done to a divisor of the nominal fps value
|
2020-01-29 09:54:03 -06:00
|
|
|
fps = getClosestFramerateStandard(fps, 'STANDARD')
|
2019-04-08 03:03:23 -05:00
|
|
|
}
|
|
|
|
|
2019-05-16 09:55:34 -05:00
|
|
|
command = await presetH264(command, options.inputPath, options.resolution, fps)
|
2019-04-08 03:03:23 -05:00
|
|
|
|
|
|
|
if (options.resolution !== undefined) {
|
|
|
|
// '?x720' or '720x?' for example
|
|
|
|
const size = options.isPortraitMode === true ? `${options.resolution}x?` : `?x${options.resolution}`
|
|
|
|
command = command.size(size)
|
|
|
|
}
|
|
|
|
|
|
|
|
if (fps) {
|
|
|
|
// Hard FPS limits
|
2020-01-29 09:54:03 -06:00
|
|
|
if (fps > VIDEO_TRANSCODING_FPS.MAX) fps = getClosestFramerateStandard(fps, 'HD_STANDARD')
|
2019-04-08 03:03:23 -05:00
|
|
|
else if (fps < VIDEO_TRANSCODING_FPS.MIN) fps = VIDEO_TRANSCODING_FPS.MIN
|
|
|
|
|
|
|
|
command = command.withFPS(fps)
|
|
|
|
}
|
|
|
|
|
|
|
|
return command
|
|
|
|
}
|
|
|
|
|
2019-05-16 09:55:34 -05:00
|
|
|
async function buildAudioMergeCommand (command: ffmpeg.FfmpegCommand, options: MergeAudioTranscodeOptions) {
|
|
|
|
command = command.loop(undefined)
|
|
|
|
|
|
|
|
command = await presetH264VeryFast(command, options.audioPath, options.resolution)
|
|
|
|
|
|
|
|
command = command.input(options.audioPath)
|
|
|
|
.videoFilter('scale=trunc(iw/2)*2:trunc(ih/2)*2') // Avoid "height not divisible by 2" error
|
|
|
|
.outputOption('-tune stillimage')
|
|
|
|
.outputOption('-shortest')
|
|
|
|
|
|
|
|
return command
|
|
|
|
}
|
|
|
|
|
2020-01-31 09:56:52 -06:00
|
|
|
function buildOnlyAudioCommand (command: ffmpeg.FfmpegCommand, options: OnlyAudioTranscodeOptions) {
|
|
|
|
command = presetOnlyAudio(command)
|
2019-10-31 20:06:19 -05:00
|
|
|
|
|
|
|
return command
|
|
|
|
}
|
|
|
|
|
2020-01-31 09:56:52 -06:00
|
|
|
function buildQuickTranscodeCommand (command: ffmpeg.FfmpegCommand) {
|
|
|
|
command = presetCopy(command)
|
2019-05-16 09:55:34 -05:00
|
|
|
|
|
|
|
command = command.outputOption('-map_metadata -1') // strip all metadata
|
|
|
|
.outputOption('-movflags faststart')
|
|
|
|
|
|
|
|
return command
|
|
|
|
}
|
|
|
|
|
|
|
|
async function buildHLSCommand (command: ffmpeg.FfmpegCommand, options: HLSTranscodeOptions) {
|
2019-04-08 03:03:23 -05:00
|
|
|
const videoPath = getHLSVideoPath(options)
|
|
|
|
|
2020-01-31 09:56:52 -06:00
|
|
|
if (options.copyCodecs) command = presetCopy(command)
|
2020-05-05 09:27:46 -05:00
|
|
|
else if (options.resolution === VideoResolution.H_NOVIDEO) command = presetOnlyAudio(command)
|
2019-11-15 08:06:03 -06:00
|
|
|
else command = await buildx264Command(command, options)
|
2019-04-08 03:03:23 -05:00
|
|
|
|
|
|
|
command = command.outputOption('-hls_time 4')
|
|
|
|
.outputOption('-hls_list_size 0')
|
|
|
|
.outputOption('-hls_playlist_type vod')
|
|
|
|
.outputOption('-hls_segment_filename ' + videoPath)
|
|
|
|
.outputOption('-hls_segment_type fmp4')
|
|
|
|
.outputOption('-f hls')
|
|
|
|
.outputOption('-hls_flags single_file')
|
|
|
|
|
|
|
|
return command
|
|
|
|
}
|
|
|
|
|
2019-05-16 09:55:34 -05:00
|
|
|
function getHLSVideoPath (options: HLSTranscodeOptions) {
|
2019-02-11 10:20:28 -06:00
|
|
|
return `${dirname(options.outputPath)}/${options.hlsPlaylist.videoFilename}`
|
|
|
|
}
|
|
|
|
|
2019-05-16 09:55:34 -05:00
|
|
|
async function fixHLSPlaylistIfNeeded (options: TranscodeOptions) {
|
|
|
|
if (options.type !== 'hls') return
|
2019-02-11 10:20:28 -06:00
|
|
|
|
|
|
|
const fileContent = await readFile(options.outputPath)
|
|
|
|
|
|
|
|
const videoFileName = options.hlsPlaylist.videoFilename
|
|
|
|
const videoFilePath = getHLSVideoPath(options)
|
|
|
|
|
2019-05-16 09:55:34 -05:00
|
|
|
// Fix wrong mapping with some ffmpeg versions
|
2019-02-11 10:20:28 -06:00
|
|
|
const newContent = fileContent.toString()
|
|
|
|
.replace(`#EXT-X-MAP:URI="${videoFilePath}",`, `#EXT-X-MAP:URI="${videoFileName}",`)
|
|
|
|
|
|
|
|
await writeFile(options.outputPath, newContent)
|
|
|
|
}
|
|
|
|
|
2018-05-21 06:14:29 -05:00
|
|
|
/**
|
|
|
|
* A slightly customised version of the 'veryfast' x264 preset
|
|
|
|
*
|
|
|
|
* The veryfast preset is right in the sweet spot of performance
|
|
|
|
* and quality. Superfast and ultrafast will give you better
|
|
|
|
* performance, but then quality is noticeably worse.
|
|
|
|
*/
|
2019-05-16 09:55:34 -05:00
|
|
|
async function presetH264VeryFast (command: ffmpeg.FfmpegCommand, input: string, resolution: VideoResolution, fps?: number) {
|
|
|
|
let localCommand = await presetH264(command, input, resolution, fps)
|
|
|
|
|
2018-10-18 02:44:43 -05:00
|
|
|
localCommand = localCommand.outputOption('-preset:v veryfast')
|
2019-05-16 09:55:34 -05:00
|
|
|
|
2018-05-21 06:14:29 -05:00
|
|
|
/*
|
|
|
|
MAIN reference: https://slhck.info/video/2017/03/01/rate-control.html
|
|
|
|
Our target situation is closer to a livestream than a stream,
|
|
|
|
since we want to reduce as much a possible the encoding burden,
|
2019-05-16 09:55:34 -05:00
|
|
|
although not to the point of a livestream where there is a hard
|
2018-05-21 06:14:29 -05:00
|
|
|
constraint on the frames per second to be encoded.
|
|
|
|
*/
|
2018-10-18 02:44:43 -05:00
|
|
|
|
|
|
|
return localCommand
|
2018-05-21 06:14:29 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Standard profile, with variable bitrate audio and faststart.
 *
 * As for the audio, quality '5' is the highest and ensures 96-112kbps/channel
 * See https://trac.ffmpeg.org/wiki/Encode/AAC#fdk_vbr
 *
 * @param command fluent-ffmpeg command to configure
 * @param input path of the source file, probed to decide how to handle audio
 * @param resolution target resolution, used to compute the video bitrate ceiling
 * @param fps when set, enables VBV rate constraints and a 2-second keyframe interval
 */
async function presetH264 (command: ffmpeg.FfmpegCommand, input: string, resolution: VideoResolution, fps?: number) {
  let localCommand = command
    .format('mp4')
    .videoCodec('libx264')
    .outputOption('-level 3.1') // 3.1 is the minimal resource allocation for our highest supported resolution
    .outputOption('-b_strategy 1') // NOTE: b-strategy 1 - heuristic algorithm, 16 is optimal B-frames for it
    .outputOption('-bf 16') // NOTE: Why 16: https://github.com/Chocobozzz/PeerTube/pull/774. b-strategy 2 -> B-frames<16
    .outputOption('-pix_fmt yuv420p') // allows import of source material with incompatible pixel formats (e.g. MJPEG video)
    .outputOption('-map_metadata -1') // strip all metadata
    .outputOption('-movflags faststart')

  // Probe the input to decide the audio handling below
  const parsedAudio = await audio.get(input)

  if (!parsedAudio.audioStream) {
    localCommand = localCommand.noAudio()
  } else if ((await checkFFmpegEncoders()).get('libfdk_aac')) { // we favor VBR, if a good AAC encoder is available
    localCommand = localCommand
      .audioCodec('libfdk_aac')
      .audioQuality(5)
  } else {
    // we try to reduce the ceiling bitrate by making rough matches of bitrates
    // of course this is far from perfect, but it might save some space in the end
    localCommand = localCommand.audioCodec('aac')

    const audioCodecName = parsedAudio.audioStream['codec_name']

    // Only cap the bitrate when we know how to compute a ceiling for this codec
    if (audio.bitrate[audioCodecName]) {
      const bitrate = audio.bitrate[audioCodecName](parsedAudio.audioStream['bit_rate'])
      // -1 means "copy the audio stream as is", so no explicit bitrate is set
      if (bitrate !== undefined && bitrate !== -1) localCommand = localCommand.audioBitrate(bitrate)
    }
  }

  if (fps) {
    // Constrained Encoding (VBV)
    // https://slhck.info/video/2017/03/01/rate-control.html
    // https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate
    const targetBitrate = getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS)
    localCommand = localCommand.outputOptions([ `-maxrate ${targetBitrate}`, `-bufsize ${targetBitrate * 2}` ])

    // Keyframe interval of 2 seconds for faster seeking and resolution switching.
    // https://streaminglearningcenter.com/blogs/whats-the-right-keyframe-interval.html
    // https://superuser.com/a/908325
    localCommand = localCommand.outputOption(`-g ${fps * 2}`)
  }

  return localCommand
}
|
2019-04-08 03:03:23 -05:00
|
|
|
|
2020-01-31 09:56:52 -06:00
|
|
|
function presetCopy (command: ffmpeg.FfmpegCommand): ffmpeg.FfmpegCommand {
|
2019-04-08 03:03:23 -05:00
|
|
|
return command
|
|
|
|
.format('mp4')
|
|
|
|
.videoCodec('copy')
|
|
|
|
.audioCodec('copy')
|
|
|
|
}
|
2019-10-31 20:06:19 -05:00
|
|
|
|
2020-01-31 09:56:52 -06:00
|
|
|
function presetOnlyAudio (command: ffmpeg.FfmpegCommand): ffmpeg.FfmpegCommand {
|
2019-10-31 20:06:19 -05:00
|
|
|
return command
|
|
|
|
.format('mp4')
|
|
|
|
.audioCodec('copy')
|
|
|
|
.noVideo()
|
|
|
|
}
|