// PeerTube — server/lib/job-queue/handlers/video-live-ending.ts
import { Job } from 'bull'
import { pathExists, readdir, remove } from 'fs-extra'
import { join } from 'path'
import { ffprobePromise, getAudioStream, getVideoStreamDimensionsInfo, getVideoStreamDuration } from '@server/helpers/ffmpeg'
import { getLocalVideoActivityPubUrl } from '@server/lib/activitypub/url'
import { federateVideoIfNeeded } from '@server/lib/activitypub/videos'
import { cleanupLive, LiveSegmentShaStore } from '@server/lib/live'
import {
generateHLSMasterPlaylistFilename,
generateHlsSha256SegmentsFilename,
getLiveDirectory,
getLiveReplayBaseDirectory
} from '@server/lib/paths'
import { generateVideoMiniature } from '@server/lib/thumbnail'
import { generateHlsPlaylistResolutionFromTS } from '@server/lib/transcoding/transcoding'
import { moveToNextState } from '@server/lib/video-state'
import { VideoModel } from '@server/models/video/video'
import { VideoFileModel } from '@server/models/video/video-file'
import { VideoLiveModel } from '@server/models/video/video-live'
import { VideoStreamingPlaylistModel } from '@server/models/video/video-streaming-playlist'
import { MVideo, MVideoLive, MVideoWithAllFiles } from '@server/types/models'
import { ThumbnailType, VideoLiveEndingPayload, VideoState } from '@shared/models'
import { logger } from '../../../helpers/logger'
import { VideoBlacklistModel } from '@server/models/video/video-blacklist'
async function processVideoLiveEnding (job: Job) {
const payload = job.data as VideoLiveEndingPayload
logger.info('Processing video live ending for %s.', payload.videoId, { payload })
2020-11-04 14:16:57 +01:00
function logError () {
logger.warn('Video live %d does not exist anymore. Cannot process live ending.', payload.videoId)
}
2020-10-26 16:44:23 +01:00
const video = await VideoModel.load(payload.videoId)
const live = await VideoLiveModel.loadByVideoId(payload.videoId)
2020-11-04 14:16:57 +01:00
if (!video || !live) {
logError()
return
}
2021-06-16 15:14:41 +02:00
LiveSegmentShaStore.Instance.cleanupShaSegments(video.uuid)
2020-12-03 14:10:54 +01:00
2020-10-26 16:44:23 +01:00
if (live.saveReplay !== true) {
return cleanupLiveAndFederate(video)
2020-10-26 16:44:23 +01:00
}
if (live.permanentLive) {
await saveReplayToExternalVideo(video, payload.publishedAt, payload.replayDirectory)
return cleanupLiveAndFederate(video)
}
return replaceLiveByReplay(video, live, payload.replayDirectory)
2020-10-26 16:44:23 +01:00
}
// ---------------------------------------------------------------------------
export {
2021-06-16 15:14:41 +02:00
processVideoLiveEnding
2020-10-26 16:44:23 +01:00
}
// ---------------------------------------------------------------------------
/**
 * Create a brand new video from the live replay (used for permanent lives).
 *
 * Copies the live video attributes into a fresh VideoModel, mirrors an
 * eventual blacklist entry, attaches the generated replay playlists, then
 * generates thumbnails and moves the new video to its next state.
 */
async function saveReplayToExternalVideo (liveVideo: MVideo, publishedAt: string, replayDirectory: string) {
  await cleanupTMPLiveFiles(getLiveDirectory(liveVideo))

  // The replay is a separate, non-live video built from the live attributes
  const replayVideo = new VideoModel({
    name: `${liveVideo.name} - ${new Date(publishedAt).toLocaleString()}`,
    isLive: false,
    state: VideoState.TO_TRANSCODE,
    duration: 0,
    remote: liveVideo.remote,
    category: liveVideo.category,
    licence: liveVideo.licence,
    language: liveVideo.language,
    commentsEnabled: liveVideo.commentsEnabled,
    downloadEnabled: liveVideo.downloadEnabled,
    waitTranscoding: liveVideo.waitTranscoding,
    nsfw: liveVideo.nsfw,
    description: liveVideo.description,
    support: liveVideo.support,
    privacy: liveVideo.privacy,
    channelId: liveVideo.channelId
  }) as MVideoWithAllFiles

  // Start with empty associations so later code can safely push into them
  replayVideo.Thumbnails = []
  replayVideo.VideoFiles = []
  replayVideo.VideoStreamingPlaylists = []

  replayVideo.url = getLocalVideoActivityPubUrl(replayVideo)

  await replayVideo.save()

  // If the live is blacklisted, also blacklist the replay with the same settings
  const blacklist = await VideoBlacklistModel.loadByVideoId(liveVideo.id)
  if (blacklist) {
    await VideoBlacklistModel.create({
      videoId: replayVideo.id,
      unfederated: blacklist.unfederated,
      reason: blacklist.reason,
      type: blacklist.type
    })
  }

  await assignReplaysToVideo(replayVideo, replayDirectory)
  await remove(replayDirectory)

  // Generate both the miniature and the preview from the replay content
  for (const type of [ ThumbnailType.MINIATURE, ThumbnailType.PREVIEW ]) {
    const image = await generateVideoMiniature({ video: replayVideo, videoFile: replayVideo.getMaxQualityFile(), type })
    await replayVideo.addAndSaveThumbnail(image)
  }

  await moveToNextState({ video: replayVideo, isNewVideo: true })
}
async function replaceLiveByReplay (video: MVideo, live: MVideoLive, replayDirectory: string) {
Add support for saving video files to object storage (#4290) * Add support for saving video files to object storage * Add support for custom url generation on s3 stored files Uses two config keys to support url generation that doesn't directly go to (compatible s3). Can be used to generate urls to any cache server or CDN. * Upload files to s3 concurrently and delete originals afterwards * Only publish after move to object storage is complete * Use base url instead of url template * Fix mistyped config field * Add rudenmentary way to download before transcode * Implement Chocobozzz suggestions https://github.com/Chocobozzz/PeerTube/pull/4290#issuecomment-891670478 The remarks in question: Try to use objectStorage prefix instead of s3 prefix for your function/variables/config names Prefer to use a tree for the config: s3.streaming_playlists_bucket -> object_storage.streaming_playlists.bucket Use uppercase for config: S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket -> OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET (maybe BUCKET_NAME instead of BUCKET) I suggest to rename moveJobsRunning to pendingMovingJobs (or better, create a dedicated videoJobInfo table with a pendingMove & videoId columns so we could also use this table to track pending transcoding jobs) https://github.com/Chocobozzz/PeerTube/pull/4290/files#diff-3e26d41ca4bda1de8e1747af70ca2af642abcc1e9e0bfb94239ff2165acfbde5R19 uses a string instead of an integer I think we should store the origin object storage URL in fileUrl, without base_url injection. 
Instead, inject the base_url at "runtime" so admins can easily change this configuration without running a script to update DB URLs * Import correct function * Support multipart upload * Remove import of node 15.0 module stream/promises * Extend maximum upload job length Using the same value as for redundancy downloading seems logical * Use dynamic part size for really large uploads Also adds very small part size for local testing * Fix decreasePendingMove query * Resolve various PR comments * Move to object storage after optimize * Make upload size configurable and increase default * Prune webtorrent files that are stored in object storage * Move files after transcoding jobs * Fix federation * Add video path manager * Support move to external storage job in client * Fix live object storage tests Co-authored-by: Chocobozzz <me@florianbigard.com>
2021-08-17 08:26:20 +02:00
await cleanupTMPLiveFiles(getLiveDirectory(video))
2020-10-26 16:44:23 +01:00
2020-10-27 16:06:24 +01:00
await live.destroy()
2020-10-26 16:44:23 +01:00
video.isLive = false
video.state = VideoState.TO_TRANSCODE
2020-10-28 10:49:20 +01:00
2020-10-26 16:44:23 +01:00
await video.save()
2020-11-03 15:33:30 +01:00
// Remove old HLS playlist video files
const videoWithFiles = await VideoModel.loadAndPopulateAccountAndServerAndTags(video.id)
2020-10-26 16:44:23 +01:00
2020-11-03 15:33:30 +01:00
const hlsPlaylist = videoWithFiles.getHLSPlaylist()
await VideoFileModel.removeHLSFilesOfVideoId(hlsPlaylist.id)
2021-07-23 11:20:00 +02:00
// Reset playlist
2020-11-03 15:33:30 +01:00
hlsPlaylist.VideoFiles = []
2021-07-23 11:20:00 +02:00
hlsPlaylist.playlistFilename = generateHLSMasterPlaylistFilename()
hlsPlaylist.segmentsSha256Filename = generateHlsSha256SegmentsFilename()
await hlsPlaylist.save()
2020-11-03 15:33:30 +01:00
await assignReplaysToVideo(videoWithFiles, replayDirectory)
await remove(getLiveReplayBaseDirectory(videoWithFiles))
// Regenerate the thumbnail & preview?
if (videoWithFiles.getMiniature().automaticallyGenerated === true) {
const miniature = await generateVideoMiniature({
video: videoWithFiles,
videoFile: videoWithFiles.getMaxQualityFile(),
type: ThumbnailType.MINIATURE
})
await video.addAndSaveThumbnail(miniature)
}
if (videoWithFiles.getPreview().automaticallyGenerated === true) {
const preview = await generateVideoMiniature({
video: videoWithFiles,
videoFile: videoWithFiles.getMaxQualityFile(),
type: ThumbnailType.PREVIEW
})
await video.addAndSaveThumbnail(preview)
}
await moveToNextState({ video: videoWithFiles, isNewVideo: false })
}
async function assignReplaysToVideo (video: MVideo, replayDirectory: string) {
2020-12-09 15:00:02 +01:00
let durationDone = false
2020-10-26 16:44:23 +01:00
const concatenatedTsFiles = await readdir(replayDirectory)
for (const concatenatedTsFile of concatenatedTsFiles) {
2020-12-04 15:10:13 +01:00
const concatenatedTsFilePath = join(replayDirectory, concatenatedTsFile)
2020-12-02 10:07:26 +01:00
const probe = await ffprobePromise(concatenatedTsFilePath)
const { audioStream } = await getAudioStream(concatenatedTsFilePath, probe)
2022-02-11 10:51:33 +01:00
const { resolution, isPortraitMode } = await getVideoStreamDimensionsInfo(concatenatedTsFilePath, probe)
2020-12-02 10:07:26 +01:00
Add support for saving video files to object storage (#4290) * Add support for saving video files to object storage * Add support for custom url generation on s3 stored files Uses two config keys to support url generation that doesn't directly go to (compatible s3). Can be used to generate urls to any cache server or CDN. * Upload files to s3 concurrently and delete originals afterwards * Only publish after move to object storage is complete * Use base url instead of url template * Fix mistyped config field * Add rudenmentary way to download before transcode * Implement Chocobozzz suggestions https://github.com/Chocobozzz/PeerTube/pull/4290#issuecomment-891670478 The remarks in question: Try to use objectStorage prefix instead of s3 prefix for your function/variables/config names Prefer to use a tree for the config: s3.streaming_playlists_bucket -> object_storage.streaming_playlists.bucket Use uppercase for config: S3.STREAMING_PLAYLISTS_BUCKETINFO.bucket -> OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET (maybe BUCKET_NAME instead of BUCKET) I suggest to rename moveJobsRunning to pendingMovingJobs (or better, create a dedicated videoJobInfo table with a pendingMove & videoId columns so we could also use this table to track pending transcoding jobs) https://github.com/Chocobozzz/PeerTube/pull/4290/files#diff-3e26d41ca4bda1de8e1747af70ca2af642abcc1e9e0bfb94239ff2165acfbde5R19 uses a string instead of an integer I think we should store the origin object storage URL in fileUrl, without base_url injection. 
Instead, inject the base_url at "runtime" so admins can easily change this configuration without running a script to update DB URLs * Import correct function * Support multipart upload * Remove import of node 15.0 module stream/promises * Extend maximum upload job length Using the same value as for redundancy downloading seems logical * Use dynamic part size for really large uploads Also adds very small part size for local testing * Fix decreasePendingMove query * Resolve various PR comments * Move to object storage after optimize * Make upload size configurable and increase default * Prune webtorrent files that are stored in object storage * Move files after transcoding jobs * Fix federation * Add video path manager * Support move to external storage job in client * Fix live object storage tests Co-authored-by: Chocobozzz <me@florianbigard.com>
2021-08-17 08:26:20 +02:00
const { resolutionPlaylistPath: outputPath } = await generateHlsPlaylistResolutionFromTS({
video,
2020-12-04 15:10:13 +01:00
concatenatedTsFilePath,
2021-08-06 13:35:25 +02:00
resolution,
isPortraitMode,
isAAC: audioStream?.codec_name === 'aac'
2020-10-26 16:44:23 +01:00
})
2020-12-03 09:38:24 +01:00
if (!durationDone) {
video.duration = await getVideoStreamDuration(outputPath)
await video.save()
2020-12-03 09:38:24 +01:00
durationDone = true
2020-12-02 10:07:26 +01:00
}
2020-11-03 15:33:30 +01:00
}
2020-10-28 10:49:20 +01:00
return video
}
async function cleanupLiveAndFederate (video: MVideo) {
const streamingPlaylist = await VideoStreamingPlaylistModel.loadHLSPlaylistByVideo(video.id)
await cleanupLive(video, streamingPlaylist)
2020-11-06 10:57:40 +01:00
const fullVideo = await VideoModel.loadAndPopulateAccountAndServerAndTags(video.id)
return federateVideoIfNeeded(fullVideo, false, undefined)
2020-10-26 16:44:23 +01:00
}
async function cleanupTMPLiveFiles (hlsDirectory: string) {
2020-12-03 14:10:54 +01:00
if (!await pathExists(hlsDirectory)) return
const files = await readdir(hlsDirectory)
for (const filename of files) {
if (
filename.endsWith('.ts') ||
filename.endsWith('.m3u8') ||
filename.endsWith('.mpd') ||
filename.endsWith('.m4s') ||
2020-12-02 10:07:26 +01:00
filename.endsWith('.tmp')
) {
const p = join(hlsDirectory, filename)
remove(p)
.catch(err => logger.error('Cannot remove %s.', p, { err }))
}
}
}