import * as ffmpeg from 'fluent-ffmpeg'
import { readFile, remove, writeFile } from 'fs-extra'
import { dirname, join } from 'path'
import { VideoFileMetadata } from '@shared/models/videos/video-file-metadata'
import { getMaxBitrate, getTargetBitrate, VideoResolution } from '../../shared/models/videos'
import { checkFFmpegEncoders } from '../initializers/checker-before-init'
import { CONFIG } from '../initializers/config'
import { FFMPEG_NICE, VIDEO_LIVE, VIDEO_TRANSCODING_FPS } from '../initializers/constants'
import { processImage } from './image-utils'
import { logger } from './logger'

/**
 * A toolbox to play with audio
 */
namespace audio {
  export const get = (videoPath: string) => {
    // without position, ffprobe considers the last input only
    // we make it consider the first input only
    // if you pass a file path to pos, then ffprobe acts on that file directly
    return new Promise<{ absolutePath: string, audioStream?: any }>((res, rej) => {

      function parseFfprobe (err: any, data: ffmpeg.FfprobeData) {
        if (err) return rej(err)

        if ('streams' in data) {
          const audioStream = data.streams.find(stream => stream['codec_type'] === 'audio')
          if (audioStream) {
            return res({
              absolutePath: data.format.filename,
              audioStream
            })
          }
        }

        return res({ absolutePath: data.format.filename })
      }

      return ffmpeg.ffprobe(videoPath, parseFfprobe)
    })
  }

  export namespace bitrate {
    const baseKbitrate = 384

    const toBits = (kbits: number) => kbits * 8000

    export const aac = (bitrate: number): number => {
      switch (true) {
        case bitrate > toBits(baseKbitrate):
          return baseKbitrate

        default:
          return -1 // we interpret it as a signal to copy the audio stream as is
      }
    }

    export const mp3 = (bitrate: number): number => {
      /*
      a 192kbit/sec mp3 doesn't hold as much information as a 192kbit/sec aac.
      That's why, when using aac, we can go to lower kbit/sec. The equivalences
      made here are not made to be accurate, especially with good mp3 encoders.
      */
      switch (true) {
        case bitrate <= toBits(192):
          return 128

        case bitrate <= toBits(384):
          return 256

        default:
          return baseKbitrate
      }
    }
  }
}

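/**
 * Return the resolutions to generate for the given input: only resolutions
 * enabled in the configuration and strictly lower than the source resolution
 * are kept, in the order jobs should be processed.
 */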
function computeResolutionsToTranscode (videoFileResolution: number, type: 'vod' | 'live') {
  const configResolutions = type === 'vod'
    ? CONFIG.TRANSCODING.RESOLUTIONS
    : CONFIG.LIVE.TRANSCODING.RESOLUTIONS

  const resolutionsEnabled: number[] = []

  // Put in the order we want to process jobs
  const resolutions = [
    VideoResolution.H_NOVIDEO,
    VideoResolution.H_480P,
    VideoResolution.H_360P,
    VideoResolution.H_720P,
    VideoResolution.H_240P,
    VideoResolution.H_1080P,
    VideoResolution.H_4K
  ]

  for (const resolution of resolutions) {
    if (configResolutions[resolution + 'p'] === true && videoFileResolution > resolution) {
      resolutionsEnabled.push(resolution)
    }
  }

  return resolutionsEnabled
}

async function getVideoStreamSize (path: string) {
  const videoStream = await getVideoStreamFromFile(path)

  return videoStream === null
    ? { width: 0, height: 0 }
    : { width: videoStream.width, height: videoStream.height }
}

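/**
 * Build an RFC 6381-style codec string (for example 'avc1.64001f') from the
 * ffprobe codec tag, the H.264 profile bytes and the level encoded in hex.
 */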
async function getVideoStreamCodec (path: string) {
  const videoStream = await getVideoStreamFromFile(path)

  if (!videoStream) return ''

  const videoCodec = videoStream.codec_tag_string

  const baseProfileMatrix = {
    High: '6400',
    Main: '4D40',
    Baseline: '42E0'
  }

  let baseProfile = baseProfileMatrix[videoStream.profile]
  if (!baseProfile) {
    logger.warn('Cannot get video profile codec of %s.', path, { videoStream })
    baseProfile = baseProfileMatrix['High'] // Fallback
  }

  let level = videoStream.level.toString(16)
  if (level.length === 1) level = `0${level}`

  return `${videoCodec}.${baseProfile}${level}`
}

async function getAudioStreamCodec (path: string) {
  const { audioStream } = await audio.get(path)

  if (!audioStream) return ''

  const audioCodec = audioStream.codec_name
  if (audioCodec === 'aac') return 'mp4a.40.2'

  logger.warn('Cannot get audio codec of %s.', path, { audioStream })

  return 'mp4a.40.2' // Fallback
}

async function getVideoFileResolution (path: string) {
  const size = await getVideoStreamSize(path)

  return {
    videoFileResolution: Math.min(size.height, size.width),
    isPortraitMode: size.height > size.width
  }
}

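/**
 * Read the framerate from ffprobe's 'avg_frame_rate' / 'r_frame_rate'
 * fractions (e.g. '30000/1001'), returning 0 when it cannot be determined.
 */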
async function getVideoFileFPS (path: string) {
  const videoStream = await getVideoStreamFromFile(path)
  if (videoStream === null) return 0

  for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) {
    const valuesText: string = videoStream[key]
    if (!valuesText) continue

    const [ frames, seconds ] = valuesText.split('/')
    if (!frames || !seconds) continue

    const result = parseInt(frames, 10) / parseInt(seconds, 10)
    if (result > 0) return Math.round(result)
  }

  return 0
}

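/**
 * Run ffprobe on the file and map the raw metadata (wrapped in a
 * VideoFileMetadata instance) through the optional callback.
 */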
async function getMetadataFromFile <T> (path: string, cb = metadata => metadata) {
  return new Promise<T>((res, rej) => {
    ffmpeg.ffprobe(path, (err, metadata) => {
      if (err) return rej(err)

      return res(cb(new VideoFileMetadata(metadata)))
    })
  })
}

async function getVideoFileBitrate (path: string) {
  return getMetadataFromFile<number>(path, metadata => metadata.format.bit_rate)
}

function getDurationFromVideoFile (path: string) {
  return getMetadataFromFile<number>(path, metadata => Math.floor(metadata.format.duration))
}

function getVideoStreamFromFile (path: string) {
  return getMetadataFromFile<any>(path, metadata => metadata.streams.find(s => s.codec_type === 'video') || null)
}

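/**
 * Extract a thumbnail with ffmpeg into a temporary 'pending-' file, then
 * process/resize it to its final name. The pending file is removed if
 * anything fails.
 */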
async function generateImageFromVideoFile (fromPath: string, folder: string, imageName: string, size: { width: number, height: number }) {
  const pendingImageName = 'pending-' + imageName

  const options = {
    filename: pendingImageName,
    count: 1,
    folder
  }

  const pendingImagePath = join(folder, pendingImageName)

  try {
    await new Promise<string>((res, rej) => {
      ffmpeg(fromPath, { niceness: FFMPEG_NICE.THUMBNAIL })
        .on('error', rej)
        .on('end', () => res(imageName))
        .thumbnail(options)
    })

    const destination = join(folder, imageName)
    await processImage(pendingImagePath, destination, size)
  } catch (err) {
    logger.error('Cannot generate image from video %s.', fromPath, { err })

    try {
      await remove(pendingImagePath)
    } catch (err) {
      logger.debug('Cannot remove pending image path after generation error.', { err })
    }
  }
}

type TranscodeOptionsType = 'hls' | 'quick-transcode' | 'video' | 'merge-audio' | 'only-audio'

interface BaseTranscodeOptions {
  type: TranscodeOptionsType
  inputPath: string
  outputPath: string
  resolution: VideoResolution
  isPortraitMode?: boolean
}

interface HLSTranscodeOptions extends BaseTranscodeOptions {
  type: 'hls'
  copyCodecs: boolean
  hlsPlaylist: {
    videoFilename: string
  }
}

interface QuickTranscodeOptions extends BaseTranscodeOptions {
  type: 'quick-transcode'
}

interface VideoTranscodeOptions extends BaseTranscodeOptions {
  type: 'video'
}

interface MergeAudioTranscodeOptions extends BaseTranscodeOptions {
  type: 'merge-audio'
  audioPath: string
}

interface OnlyAudioTranscodeOptions extends BaseTranscodeOptions {
  type: 'only-audio'
}

type TranscodeOptions =
  HLSTranscodeOptions
  | VideoTranscodeOptions
  | MergeAudioTranscodeOptions
  | OnlyAudioTranscodeOptions
  | QuickTranscodeOptions

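/**
 * Run a VOD transcoding job: build the ffmpeg command matching the requested
 * transcode type, then resolve once ffmpeg has finished (fixing the generated
 * HLS playlist when needed).
 *
 * Example usage (hypothetical paths):
 *   await transcode({ type: 'video', inputPath: '/tmp/in.webm', outputPath: '/tmp/out.mp4', resolution: VideoResolution.H_720P })
 */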
function transcode (options: TranscodeOptions) {
  logger.debug('Will run transcode.', { options })

  return new Promise<void>(async (res, rej) => {
    try {
      let command = getFFmpeg(options.inputPath)
        .output(options.outputPath)

      if (options.type === 'quick-transcode') {
        command = buildQuickTranscodeCommand(command)
      } else if (options.type === 'hls') {
        command = await buildHLSVODCommand(command, options)
      } else if (options.type === 'merge-audio') {
        command = await buildAudioMergeCommand(command, options)
      } else if (options.type === 'only-audio') {
        command = buildOnlyAudioCommand(command, options)
      } else {
        command = await buildx264Command(command, options)
      }

      command
        .on('error', (err, stdout, stderr) => {
          logger.error('Error in transcoding job.', { stdout, stderr })
          return rej(err)
        })
        .on('end', () => {
          return fixHLSPlaylistIfNeeded(options)
            .then(() => res())
            .catch(err => rej(err))
        })
        .run()
    } catch (err) {
      return rej(err)
    }
  })
}

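/**
 * Check whether the file can be "quick transcoded", i.e. repackaged with a
 * plain stream copy because it is already H.264 + AAC within our FPS and
 * bitrate limits.
 */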
async function canDoQuickTranscode (path: string): Promise<boolean> {
  // NOTE: This could be optimized by running ffprobe only once (but it runs fast anyway)
  const videoStream = await getVideoStreamFromFile(path)
  const parsedAudio = await audio.get(path)
  const fps = await getVideoFileFPS(path)
  const bitRate = await getVideoFileBitrate(path)
  const resolution = await getVideoFileResolution(path)

  // check video params
  if (videoStream == null) return false
  if (videoStream['codec_name'] !== 'h264') return false
  if (videoStream['pix_fmt'] !== 'yuv420p') return false
  if (fps < VIDEO_TRANSCODING_FPS.MIN || fps > VIDEO_TRANSCODING_FPS.MAX) return false
  if (bitRate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)) return false

  // check audio params (if audio stream exists)
  if (parsedAudio.audioStream) {
    if (parsedAudio.audioStream['codec_name'] !== 'aac') return false

    const maxAudioBitrate = audio.bitrate['aac'](parsedAudio.audioStream['bit_rate'])
    if (maxAudioBitrate !== -1 && parsedAudio.audioStream['bit_rate'] > maxAudioBitrate) return false
  }

  return true
}

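/**
 * Among the standard framerates of the given type, return the one with the
 * smallest remainder against the input FPS (i.e. the closest divisor to
 * downsample to).
 */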
function getClosestFramerateStandard (fps: number, type: 'HD_STANDARD' | 'STANDARD'): number {
  return VIDEO_TRANSCODING_FPS[type].slice(0)
    .sort((a, b) => fps % a - fps % b)[0]
}

function convertWebPToJPG (path: string, destination: string): Promise<void> {
  return new Promise<void>(async (res, rej) => {
    try {
      const command = ffmpeg(path).output(destination)

      command.on('error', (err, stdout, stderr) => {
        logger.error('Error in ffmpeg webp convert process.', { stdout, stderr })
        return rej(err)
      })
        .on('end', () => res())
        .run()
    } catch (err) {
      return rej(err)
    }
  })
}

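/**
 * Transcode an incoming RTMP stream to live HLS with one variant per requested
 * resolution: a complex filter graph splits and scales the input video, each
 * variant is encoded with libx264 at its target bitrate, and '-var_stream_map'
 * groups each video/audio pair into its own HLS variant.
 */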
function runLiveTranscoding (rtmpUrl: string, outPath: string, resolutions: number[], fps, deleteSegments: boolean) {
  const command = getFFmpeg(rtmpUrl)
  command.inputOption('-fflags nobuffer')

  const varStreamMap: string[] = []

  command.complexFilter([
    {
      inputs: '[v:0]',
      filter: 'split',
      options: resolutions.length,
      outputs: resolutions.map(r => `vtemp${r}`)
    },

    ...resolutions.map(r => ({
      inputs: `vtemp${r}`,
      filter: 'scale',
      options: `w=-2:h=${r}`,
      outputs: `vout${r}`
    }))
  ])

  command.outputOption('-b_strategy 1')
  command.outputOption('-bf 16')
  command.outputOption('-preset superfast')
  command.outputOption('-level 3.1')
  command.outputOption('-map_metadata -1')
  command.outputOption('-pix_fmt yuv420p')
  command.outputOption('-max_muxing_queue_size 1024')
  command.outputOption('-g ' + (fps * 2))

  for (let i = 0; i < resolutions.length; i++) {
    const resolution = resolutions[i]

    command.outputOption(`-map [vout${resolution}]`)
    command.outputOption(`-c:v:${i} libx264`)
    command.outputOption(`-b:v:${i} ${getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS)}`)

    command.outputOption(`-map a:0`)
    command.outputOption(`-c:a:${i} aac`)

    varStreamMap.push(`v:${i},a:${i}`)
  }

  addDefaultLiveHLSParams(command, outPath, deleteSegments)

  command.outputOption('-var_stream_map', varStreamMap.join(' '))

  command.run()

  return command
}

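/**
 * Mux an incoming RTMP stream to live HLS without re-encoding: video and
 * audio are stream-copied and mapped only if present.
 */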
function runLiveMuxing (rtmpUrl: string, outPath: string, deleteSegments: boolean) {
  const command = getFFmpeg(rtmpUrl)
  command.inputOption('-fflags nobuffer')

  command.outputOption('-c:v copy')
  command.outputOption('-c:a copy')
  command.outputOption('-map 0:a?')
  command.outputOption('-map 0:v?')

  addDefaultLiveHLSParams(command, outPath, deleteSegments)

  command.run()

  return command
}

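/**
 * Concatenate the recorded HLS TS segments into a single fragmented MP4 using
 * the ffmpeg concat demuxer (fed with a generated concat.txt listing the
 * segments). The video stream is copied and the audio is resampled to keep
 * timestamps consistent; the concat file is removed once ffmpeg is done.
 */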
async function hlsPlaylistToFragmentedMP4 (hlsDirectory: string, segmentFiles: string[], outputPath: string) {
  const concatFilePath = join(hlsDirectory, 'concat.txt')

  function cleaner () {
    remove(concatFilePath)
      .catch(err => logger.error('Cannot remove concat file in %s.', hlsDirectory, { err }))
  }

  // First concat the ts files to an mp4 file
  const content = segmentFiles.map(f => 'file ' + f)
    .join('\n')

  await writeFile(concatFilePath, content + '\n')

  const command = getFFmpeg(concatFilePath)
  command.inputOption('-safe 0')
  command.inputOption('-f concat')

  command.outputOption('-c:v copy')
  command.audioFilter('aresample=async=1:first_pts=0')
  command.output(outputPath)

  return runCommand(command, cleaner)
}

async function runCommand (command: ffmpeg.FfmpegCommand, onEnd?: Function) {
  command.run()

  return new Promise<string>((res, rej) => {
    command.on('error', err => {
      if (onEnd) onEnd()

      rej(err)
    })

    command.on('end', () => {
      if (onEnd) onEnd()

      res()
    })
  })
}

// ---------------------------------------------------------------------------

export {
  getVideoStreamCodec,
  getAudioStreamCodec,
  runLiveMuxing,
  convertWebPToJPG,
  getVideoStreamSize,
  getVideoFileResolution,
  getMetadataFromFile,
  getDurationFromVideoFile,
  runLiveTranscoding,
  generateImageFromVideoFile,
  TranscodeOptions,
  TranscodeOptionsType,
  transcode,
  getVideoFileFPS,
  computeResolutionsToTranscode,
  audio,
  hlsPlaylistToFragmentedMP4,
  getVideoFileBitrate,
  canDoQuickTranscode
}

// ---------------------------------------------------------------------------

function addDefaultX264Params (command: ffmpeg.FfmpegCommand) {
  command.outputOption('-level 3.1') // 3.1 is the minimal resource allocation for our highest supported resolution
    .outputOption('-b_strategy 1') // NOTE: b-strategy 1 - heuristic algorithm, 16 is optimal B-frames for it
    .outputOption('-bf 16') // NOTE: Why 16: https://github.com/Chocobozzz/PeerTube/pull/774. b-strategy 2 -> B-frames<16
    .outputOption('-pix_fmt yuv420p') // allows import of source material with incompatible pixel formats (e.g. MJPEG video)
    .outputOption('-map_metadata -1') // strip all metadata
}

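/**
 * Common HLS output options shared by live muxing and live transcoding:
 * segment duration, playlist size, optional segment deletion, and '%v'
 * placeholders in the playlist/segment names (one per variant stream).
 */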
function addDefaultLiveHLSParams (command: ffmpeg.FfmpegCommand, outPath: string, deleteSegments: boolean) {
  command.outputOption('-hls_time ' + VIDEO_LIVE.SEGMENT_TIME_SECONDS)
  command.outputOption('-hls_list_size ' + VIDEO_LIVE.SEGMENTS_LIST_SIZE)

  if (deleteSegments === true) {
    command.outputOption('-hls_flags delete_segments')
  }

  command.outputOption(`-hls_segment_filename ${join(outPath, '%v-%04d.ts')}`)
  command.outputOption('-master_pl_name master.m3u8')
  command.outputOption(`-f hls`)

  command.output(join(outPath, '%v.m3u8'))
}

async function buildx264Command (command: ffmpeg.FfmpegCommand, options: TranscodeOptions) {
  let fps = await getVideoFileFPS(options.inputPath)
  if (
    // On small/medium resolutions, limit FPS
    options.resolution !== undefined &&
    options.resolution < VIDEO_TRANSCODING_FPS.KEEP_ORIGIN_FPS_RESOLUTION_MIN &&
    fps > VIDEO_TRANSCODING_FPS.AVERAGE
  ) {
    // Get closest standard framerate by modulo: downsampling has to be done to a divisor of the nominal fps value
    fps = getClosestFramerateStandard(fps, 'STANDARD')
  }

  command = await presetH264(command, options.inputPath, options.resolution, fps)

  if (options.resolution !== undefined) {
    // '?x720' or '720x?' for example
    const size = options.isPortraitMode === true ? `${options.resolution}x?` : `?x${options.resolution}`
    command = command.size(size)
  }

  if (fps) {
    // Hard FPS limits
    if (fps > VIDEO_TRANSCODING_FPS.MAX) fps = getClosestFramerateStandard(fps, 'HD_STANDARD')
    else if (fps < VIDEO_TRANSCODING_FPS.MIN) fps = VIDEO_TRANSCODING_FPS.MIN

    command = command.withFPS(fps)
  }

  return command
}

async function buildAudioMergeCommand (command: ffmpeg.FfmpegCommand, options: MergeAudioTranscodeOptions) {
  command = command.loop(undefined)

  command = await presetH264VeryFast(command, options.audioPath, options.resolution)

  command = command.input(options.audioPath)
    .videoFilter('scale=trunc(iw/2)*2:trunc(ih/2)*2') // Avoid "height not divisible by 2" error
    .outputOption('-tune stillimage')
    .outputOption('-shortest')

  return command
}

function buildOnlyAudioCommand (command: ffmpeg.FfmpegCommand, options: OnlyAudioTranscodeOptions) {
  command = presetOnlyAudio(command)

  return command
}

function buildQuickTranscodeCommand (command: ffmpeg.FfmpegCommand) {
  command = presetCopy(command)

  command = command.outputOption('-map_metadata -1') // strip all metadata
    .outputOption('-movflags faststart')

  return command
}

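/**
 * Build the ffmpeg command for an HLS VOD variant: stream copy, audio-only or
 * a full x264 encode depending on the options, written as a single
 * fragmented-MP4 segment file referenced by the playlist.
 */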
async function buildHLSVODCommand (command: ffmpeg.FfmpegCommand, options: HLSTranscodeOptions) {
  const videoPath = getHLSVideoPath(options)

  if (options.copyCodecs) command = presetCopy(command)
  else if (options.resolution === VideoResolution.H_NOVIDEO) command = presetOnlyAudio(command)
  else command = await buildx264Command(command, options)

  command = command.outputOption('-hls_time 4')
    .outputOption('-hls_list_size 0')
    .outputOption('-hls_playlist_type vod')
    .outputOption('-hls_segment_filename ' + videoPath)
    .outputOption('-hls_segment_type fmp4')
    .outputOption('-f hls')
    .outputOption('-hls_flags single_file')

  return command
}

function getHLSVideoPath (options: HLSTranscodeOptions) {
  return `${dirname(options.outputPath)}/${options.hlsPlaylist.videoFilename}`
}

async function fixHLSPlaylistIfNeeded (options: TranscodeOptions) {
  if (options.type !== 'hls') return

  const fileContent = await readFile(options.outputPath)

  const videoFileName = options.hlsPlaylist.videoFilename
  const videoFilePath = getHLSVideoPath(options)

  // Fix wrong mapping with some ffmpeg versions
  const newContent = fileContent.toString()
    .replace(`#EXT-X-MAP:URI="${videoFilePath}",`, `#EXT-X-MAP:URI="${videoFileName}",`)

  await writeFile(options.outputPath, newContent)
}

/**
 * A slightly customised version of the 'veryfast' x264 preset
 *
 * The veryfast preset is right in the sweet spot of performance
 * and quality. Superfast and ultrafast will give you better
 * performance, but then quality is noticeably worse.
 */
async function presetH264VeryFast (command: ffmpeg.FfmpegCommand, input: string, resolution: VideoResolution, fps?: number) {
  let localCommand = await presetH264(command, input, resolution, fps)

  localCommand = localCommand.outputOption('-preset:v veryfast')

  /*
  MAIN reference: https://slhck.info/video/2017/03/01/rate-control.html
  Our target situation is closer to a livestream than a stream,
  since we want to reduce as much as possible the encoding burden,
  although not to the point of a livestream where there is a hard
  constraint on the frames per second to be encoded.
  */

  return localCommand
}

/**
 * Standard profile, with variable bitrate audio and faststart.
 *
 * As for the audio, quality '5' is the highest and ensures 96-112kbps/channel
 * See https://trac.ffmpeg.org/wiki/Encode/AAC#fdk_vbr
 */
async function presetH264 (command: ffmpeg.FfmpegCommand, input: string, resolution: VideoResolution, fps?: number) {
  let localCommand = command
    .format('mp4')
    .videoCodec('libx264')
    .outputOption('-movflags faststart')

  addDefaultX264Params(localCommand)

  const parsedAudio = await audio.get(input)

  if (!parsedAudio.audioStream) {
    localCommand = localCommand.noAudio()
  } else if ((await checkFFmpegEncoders()).get('libfdk_aac')) { // we favor VBR, if a good AAC encoder is available
    localCommand = localCommand
      .audioCodec('libfdk_aac')
      .audioQuality(5)
  } else {
    // we try to reduce the ceiling bitrate by making rough matches of bitrates
    // of course this is far from perfect, but it might save some space in the end
    localCommand = localCommand.audioCodec('aac')

    const audioCodecName = parsedAudio.audioStream['codec_name']

    if (audio.bitrate[audioCodecName]) {
      const bitrate = audio.bitrate[audioCodecName](parsedAudio.audioStream['bit_rate'])
      if (bitrate !== undefined && bitrate !== -1) localCommand = localCommand.audioBitrate(bitrate)
    }
  }

  if (fps) {
    // Constrained Encoding (VBV)
    // https://slhck.info/video/2017/03/01/rate-control.html
    // https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate
    const targetBitrate = getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS)
    localCommand = localCommand.outputOptions([ `-maxrate ${targetBitrate}`, `-bufsize ${targetBitrate * 2}` ])

    // Keyframe interval of 2 seconds for faster seeking and resolution switching.
    // https://streaminglearningcenter.com/blogs/whats-the-right-keyframe-interval.html
    // https://superuser.com/a/908325
    localCommand = localCommand.outputOption(`-g ${fps * 2}`)
  }

  return localCommand
}

function presetCopy (command: ffmpeg.FfmpegCommand): ffmpeg.FfmpegCommand {
  return command
    .format('mp4')
    .videoCodec('copy')
    .audioCodec('copy')
}

function presetOnlyAudio (command: ffmpeg.FfmpegCommand): ffmpeg.FfmpegCommand {
  return command
    .format('mp4')
    .audioCodec('copy')
    .noVideo()
}

function getFFmpeg (input: string) {
  // We set cwd explicitly because ffmpeg appears to create temporary files when transcoding, which fails on read-only file systems
  const command = ffmpeg(input, { niceness: FFMPEG_NICE.TRANSCODING, cwd: CONFIG.STORAGE.TMP_DIR })

  if (CONFIG.TRANSCODING.THREADS > 0) {
    // If we don't set any threads ffmpeg will choose automatically
    command.outputOption('-threads ' + CONFIG.TRANSCODING.THREADS)
  }

  return command
}