import * as ffmpeg from 'fluent-ffmpeg'
import { getMaxBitrate, VideoFileMetadata, VideoResolution } from '../../shared/models/videos'
import { CONFIG } from '../initializers/config'
import { VIDEO_TRANSCODING_FPS } from '../initializers/constants'
import { logger } from './logger'

/**
 *
 * Helpers to run ffprobe and extract data from the JSON output
 *
 */

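// Promisified wrapper around ffmpeg.ffprobe()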
function ffprobePromise (path: string) {
  return new Promise<ffmpeg.FfprobeData>((res, rej) => {
    ffmpeg.ffprobe(path, (err, data) => {
      if (err) return rej(err)

      return res(data)
    })
  })
}

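// Returns the probed file path, the first audio stream (if any) and its bitrate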
async function getAudioStream (videoPath: string, existingProbe?: ffmpeg.FfprobeData) {
  // without position, ffprobe considers the last input only
  // we make it consider the first input only
  // if you pass a file path to pos, then ffprobe acts on that file directly
  const data = existingProbe || await ffprobePromise(videoPath)

  if (Array.isArray(data.streams)) {
    const audioStream = data.streams.find(stream => stream['codec_type'] === 'audio')

    if (audioStream) {
      return {
        absolutePath: data.format.filename,
        audioStream,
        bitrate: parseInt(audioStream['bit_rate'] + '', 10)
      }
    }
  }

  return { absolutePath: data.format.filename }
}

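// Maximum audio bitrate (in kbit/s) we allow for the given codec and input bitrate, or -1 to copy the audio stream as is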
function getMaxAudioBitrate (type: 'aac' | 'mp3' | string, bitrate: number) {
  const maxKBitrate = 384
  const kToBits = (kbits: number) => kbits * 1000

  // If we did not manage to get the bitrate, use an average value
  if (!bitrate) return 256

  if (type === 'aac') {
    switch (true) {
      case bitrate > kToBits(maxKBitrate):
        return maxKBitrate

      default:
        return -1 // we interpret it as a signal to copy the audio stream as is
    }
  }

  /*
    A 192 kbit/s mp3 doesn't hold as much information as a 192 kbit/s aac.
    That's why, when using aac, we can go to a lower kbit/s. The equivalences
    made here are not meant to be accurate, especially with good mp3 encoders.
  */
  switch (true) {
    case bitrate <= kToBits(192):
      return 128

    case bitrate <= kToBits(384):
      return 256

    default:
      return maxKBitrate
  }
}

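// Dimensions of the video stream, or 0x0 if the file has no video stream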
async function getVideoStreamSize (path: string, existingProbe?: ffmpeg.FfprobeData) {
  const videoStream = await getVideoStreamFromFile(path, existingProbe)

  return videoStream === null
    ? { width: 0, height: 0 }
    : { width: videoStream.width, height: videoStream.height }
}

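// Build the codec string of the video stream (e.g. avc1.640028) from its codec tag, profile and level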
async function getVideoStreamCodec (path: string) {
  const videoStream = await getVideoStreamFromFile(path)

  if (!videoStream) return ''

  const videoCodec = videoStream.codec_tag_string

  if (videoCodec === 'vp09') return 'vp09.00.50.08'
  if (videoCodec === 'hev1') return 'hev1.1.6.L93.B0'

  const baseProfileMatrix = {
    avc1: {
      High: '6400',
      Main: '4D40',
      Baseline: '42E0'
    },
    av01: {
      High: '1',
      Main: '0',
      Professional: '2'
    }
  }

  let baseProfile = baseProfileMatrix[videoCodec][videoStream.profile]
  if (!baseProfile) {
    logger.warn('Cannot get video profile codec of %s.', path, { videoStream })
    baseProfile = baseProfileMatrix[videoCodec]['High'] // Fallback
  }

  if (videoCodec === 'av01') {
    const level = videoStream.level

    // Guess the tier indicator and bit depth
    return `${videoCodec}.${baseProfile}.${level}M.08`
  }

  // Default, h264 codec
  let level = videoStream.level.toString(16)
  if (level.length === 1) level = `0${level}`

  return `${videoCodec}.${baseProfile}${level}`
}

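// Codec string of the audio stream: opus, vorbis or mp4a.40.2 (also used as the fallback)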
async function getAudioStreamCodec (path: string, existingProbe?: ffmpeg.FfprobeData) {
  const { audioStream } = await getAudioStream(path, existingProbe)

  if (!audioStream) return ''

  const audioCodecName = audioStream.codec_name

  if (audioCodecName === 'opus') return 'opus'
  if (audioCodecName === 'vorbis') return 'vorbis'
  if (audioCodecName === 'aac') return 'mp4a.40.2'

  logger.warn('Cannot get audio codec of %s.', path, { audioStream })

  return 'mp4a.40.2' // Fallback
}

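// Resolution (smallest dimension) of the video and whether it is in portrait mode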
async function getVideoFileResolution (path: string, existingProbe?: ffmpeg.FfprobeData) {
  const size = await getVideoStreamSize(path, existingProbe)

  return {
    videoFileResolution: Math.min(size.height, size.width),
    isPortraitMode: size.height > size.width
  }
}

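// Framerate of the video stream rounded to the nearest integer, or 0 if it cannot be determined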
async function getVideoFileFPS (path: string, existingProbe?: ffmpeg.FfprobeData) {
  const videoStream = await getVideoStreamFromFile(path, existingProbe)
  if (videoStream === null) return 0

  for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) {
    const valuesText: string = videoStream[key]
    if (!valuesText) continue

    const [ frames, seconds ] = valuesText.split('/')
    if (!frames || !seconds) continue

    const result = parseInt(frames, 10) / parseInt(seconds, 10)
    if (result > 0) return Math.round(result)
  }

  return 0
}

async function getMetadataFromFile (path: string, existingProbe?: ffmpeg.FfprobeData) {
  const metadata = existingProbe || await ffprobePromise(path)

  return new VideoFileMetadata(metadata)
}

async function getVideoFileBitrate (path: string, existingProbe?: ffmpeg.FfprobeData) {
  const metadata = await getMetadataFromFile(path, existingProbe)

  return metadata.format.bit_rate as number
}

async function getDurationFromVideoFile (path: string, existingProbe?: ffmpeg.FfprobeData) {
  const metadata = await getMetadataFromFile(path, existingProbe)

  return Math.round(metadata.format.duration)
}

async function getVideoStreamFromFile (path: string, existingProbe?: ffmpeg.FfprobeData) {
  const metadata = await getMetadataFromFile(path, existingProbe)

  return metadata.streams.find(s => s.codec_type === 'video') || null
}

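// Resolutions enabled in the configuration that are strictly lower than the input resolution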
function computeResolutionsToTranscode (videoFileResolution: number, type: 'vod' | 'live') {
  const configResolutions = type === 'vod'
    ? CONFIG.TRANSCODING.RESOLUTIONS
    : CONFIG.LIVE.TRANSCODING.RESOLUTIONS

  const resolutionsEnabled: number[] = []

  // Put in the order we want to proceed jobs
  const resolutions = [
    VideoResolution.H_NOVIDEO,
    VideoResolution.H_480P,
    VideoResolution.H_360P,
    VideoResolution.H_720P,
    VideoResolution.H_240P,
    VideoResolution.H_1080P,
    VideoResolution.H_1440P,
    VideoResolution.H_4K
  ]

  for (const resolution of resolutions) {
    if (configResolutions[resolution + 'p'] === true && videoFileResolution > resolution) {
      resolutionsEnabled.push(resolution)
    }
  }

  return resolutionsEnabled
}

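// Whether a quick transcode is possible: default profile, h264/yuv420p video within our bitrate and FPS limits, and compatible aac audio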
async function canDoQuickTranscode (path: string): Promise<boolean> {
  if (CONFIG.TRANSCODING.PROFILE !== 'default') return false

  const probe = await ffprobePromise(path)

  return await canDoQuickVideoTranscode(path, probe) &&
         await canDoQuickAudioTranscode(path, probe)
}

async function canDoQuickVideoTranscode (path: string, probe?: ffmpeg.FfprobeData): Promise<boolean> {
  const videoStream = await getVideoStreamFromFile(path, probe)
  const fps = await getVideoFileFPS(path, probe)
  const bitRate = await getVideoFileBitrate(path, probe)
  const resolution = await getVideoFileResolution(path, probe)

  // If ffprobe did not manage to guess the bitrate
  if (!bitRate) return false

  // check video params
  if (videoStream == null) return false
  if (videoStream['codec_name'] !== 'h264') return false
  if (videoStream['pix_fmt'] !== 'yuv420p') return false
  if (fps < VIDEO_TRANSCODING_FPS.MIN || fps > VIDEO_TRANSCODING_FPS.MAX) return false
  if (bitRate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)) return false

  return true
}

async function canDoQuickAudioTranscode (path: string, probe?: ffmpeg.FfprobeData): Promise<boolean> {
  const parsedAudio = await getAudioStream(path, probe)

  if (!parsedAudio.audioStream) return true

  if (parsedAudio.audioStream['codec_name'] !== 'aac') return false

  const audioBitrate = parsedAudio.bitrate
  if (!audioBitrate) return false

  const maxAudioBitrate = getMaxAudioBitrate('aac', audioBitrate)
  if (maxAudioBitrate !== -1 && audioBitrate > maxAudioBitrate) return false

  const channelLayout = parsedAudio.audioStream['channel_layout']
  // Causes playback issues with Chrome
  if (!channelLayout || channelLayout === 'unknown') return false

  return true
}

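// Standard framerate (from VIDEO_TRANSCODING_FPS) with the smallest fps % value, i.e. the one that most evenly divides the input fps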
function getClosestFramerateStandard (fps: number, type: 'HD_STANDARD' | 'STANDARD'): number {
  return VIDEO_TRANSCODING_FPS[type].slice(0)
    .sort((a, b) => fps % a - fps % b)[0]
}

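// Target FPS for transcoding: cap FPS on small/medium resolutions and enforce the hard MIN/MAX limits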
function computeFPS (fpsArg: number, resolution: VideoResolution) {
  let fps = fpsArg

  if (
    // On small/medium resolutions, limit FPS
    resolution !== undefined &&
    resolution < VIDEO_TRANSCODING_FPS.KEEP_ORIGIN_FPS_RESOLUTION_MIN &&
    fps > VIDEO_TRANSCODING_FPS.AVERAGE
  ) {
    // Get closest standard framerate by modulo: downsampling has to be done to a divisor of the nominal fps value
    fps = getClosestFramerateStandard(fps, 'STANDARD')
  }

  // Hard FPS limits
  if (fps > VIDEO_TRANSCODING_FPS.MAX) fps = getClosestFramerateStandard(fps, 'HD_STANDARD')
  else if (fps < VIDEO_TRANSCODING_FPS.MIN) fps = VIDEO_TRANSCODING_FPS.MIN

  return fps
}

// ---------------------------------------------------------------------------

export {
  getVideoStreamCodec,
  getAudioStreamCodec,
  getVideoStreamSize,
  getVideoFileResolution,
  getMetadataFromFile,
  getMaxAudioBitrate,
  getVideoStreamFromFile,
  getDurationFromVideoFile,
  getAudioStream,
  computeFPS,
  getVideoFileFPS,
  ffprobePromise,
  getClosestFramerateStandard,
  computeResolutionsToTranscode,
  getVideoFileBitrate,
  canDoQuickTranscode,
  canDoQuickVideoTranscode,
  canDoQuickAudioTranscode
}