diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index c5bbd9e2c..35c91bf85 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -31,6 +31,11 @@ jobs:
ports:
- 10389:10389
+ s3ninja:
+ image: scireum/s3-ninja
+ ports:
+ - 9444:9000
+
strategy:
fail-fast: false
matrix:
@@ -40,6 +45,7 @@ jobs:
PGUSER: peertube
PGHOST: localhost
NODE_PENDING_JOB_WAIT: 250
+ ENABLE_OBJECT_STORAGE_TESTS: true
steps:
- uses: actions/checkout@v2
diff --git a/client/src/app/+admin/system/jobs/jobs.component.ts b/client/src/app/+admin/system/jobs/jobs.component.ts
index 29ba95c5c..4b02e1bc1 100644
--- a/client/src/app/+admin/system/jobs/jobs.component.ts
+++ b/client/src/app/+admin/system/jobs/jobs.component.ts
@@ -36,7 +36,8 @@ export class JobsComponent extends RestTable implements OnInit {
'video-live-ending',
'video-redundancy',
'video-transcoding',
- 'videos-views'
+ 'videos-views',
+ 'move-to-object-storage'
]
jobs: Job[] = []
diff --git a/client/src/app/+videos/+video-watch/shared/information/video-alert.component.html b/client/src/app/+videos/+video-watch/shared/information/video-alert.component.html
index 3480d3656..e2dd44bf7 100644
--- a/client/src/app/+videos/+video-watch/shared/information/video-alert.component.html
+++ b/client/src/app/+videos/+video-watch/shared/information/video-alert.component.html
@@ -6,6 +6,10 @@
The video is being transcoded, it may not work properly.
+
+ The video is being moved to an external server, it may not work properly.
+
+
This video will be published on {{ video.scheduledUpdate.updateAt | date: 'full' }}.
diff --git a/client/src/app/+videos/+video-watch/shared/information/video-alert.component.ts b/client/src/app/+videos/+video-watch/shared/information/video-alert.component.ts
index 8a46ba0d5..0072492ac 100644
--- a/client/src/app/+videos/+video-watch/shared/information/video-alert.component.ts
+++ b/client/src/app/+videos/+video-watch/shared/information/video-alert.component.ts
@@ -18,6 +18,10 @@ export class VideoAlertComponent {
return this.video && this.video.state.id === VideoState.TO_IMPORT
}
+ isVideoToMoveToExternalStorage () {
+ return this.video && this.video.state.id === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE
+ }
+
hasVideoScheduledPublication () {
return this.video && this.video.scheduledUpdate !== undefined
}
diff --git a/config/default.yaml b/config/default.yaml
index 3d0ae6e87..3865ab5cf 100644
--- a/config/default.yaml
+++ b/config/default.yaml
@@ -95,6 +95,39 @@ storage:
# If not, peertube will fallback to the default file
client_overrides: 'storage/client-overrides/'
+object_storage:
+  enabled: false
+
+  # Without protocol, will default to HTTPS
+  endpoint: '' # 's3.amazonaws.com' or 's3.fr-par.scw.cloud' for example
+
+  region: 'us-east-1'
+
+  credentials:
+    # You can also use AWS_ACCESS_KEY_ID env variable
+    access_key_id: ''
+    # You can also use AWS_SECRET_ACCESS_KEY env variable
+    secret_access_key: ''
+
+  # Maximum amount to upload in one request to the object storage
+  max_upload_part: 2GB
+
+  streaming_playlists:
+    bucket_name: 'streaming-playlists'
+
+    # Allows using the same bucket for both types of video, as long as a different prefix is set
+    prefix: '' # Example: 'streaming-playlists:'
+
+    # Base URL for object URL generation: the scheme and host of generated URLs are replaced by this value
+    # Useful when you want to use a CDN/external proxy
+    base_url: '' # Example: 'https://mirror.example.com'
+
+  # Same settings but for webtorrent videos
+  videos:
+    bucket_name: 'videos'
+    prefix: ''
+    base_url: ''
+
log:
level: 'info' # 'debug' | 'info' | 'warn' | 'error'
rotation:
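# Illustrative sketch, not part of the patch: one way the new object_storage keys
# could be filled in against the s3ninja service added to the CI workflow above.
# The endpoint, port, part size and credentials below are assumptions for a local
# test setup, not values required by PeerTube.
#
#   object_storage:
#     enabled: true
#     endpoint: 'http://localhost:9444'   # s3ninja container mapped in test.yml
#     region: 'us-east-1'
#     credentials:
#       access_key_id: 'AKIA-TEST-KEY'    # placeholder for local testing
#       secret_access_key: 'test-secret'  # placeholder for local testing
#     max_upload_part: 100MB
#     streaming_playlists:
#       bucket_name: 'streaming-playlists'
#     videos:
#       bucket_name: 'videos'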
diff --git a/config/production.yaml.example b/config/production.yaml.example
index 514ab99a4..94238fad0 100644
--- a/config/production.yaml.example
+++ b/config/production.yaml.example
@@ -93,6 +93,39 @@ storage:
# If not, peertube will fallback to the default file
client_overrides: '/var/www/peertube/storage/client-overrides/'
+object_storage:
+  enabled: false
+
+  # Without protocol, will default to HTTPS
+  endpoint: '' # 's3.amazonaws.com' or 's3.fr-par.scw.cloud' for example
+
+  region: 'us-east-1'
+
+  credentials:
+    # You can also use AWS_ACCESS_KEY_ID env variable
+    access_key_id: ''
+    # You can also use AWS_SECRET_ACCESS_KEY env variable
+    secret_access_key: ''
+
+  # Maximum amount to upload in one request to the object storage
+  max_upload_part: 2GB
+
+  streaming_playlists:
+    bucket_name: 'streaming-playlists'
+
+    # Allows using the same bucket for both types of video, as long as a different prefix is set
+    prefix: '' # Example: 'streaming-playlists:'
+
+    # Base URL for object URL generation: the scheme and host of generated URLs are replaced by this value
+    # Useful when you want to use a CDN/external proxy
+    base_url: '' # Example: 'https://mirror.example.com'
+
+  # Same settings but for webtorrent videos
+  videos:
+    bucket_name: 'videos'
+    prefix: ''
+    base_url: ''
+
log:
level: 'info' # 'debug' | 'info' | 'warn' | 'error'
rotation:
diff --git a/package.json b/package.json
index 807a90742..d2b1e1245 100644
--- a/package.json
+++ b/package.json
@@ -73,6 +73,7 @@
"swagger-cli": "swagger-cli"
},
"dependencies": {
+ "@aws-sdk/client-s3": "^3.23.0",
"@uploadx/core": "^4.4.0",
"async": "^3.0.1",
"async-lru": "^1.1.1",
diff --git a/scripts/ci.sh b/scripts/ci.sh
index 71b1be53b..f49dbe6ad 100755
--- a/scripts/ci.sh
+++ b/scripts/ci.sh
@@ -89,9 +89,10 @@ elif [ "$1" = "api-4" ]; then
moderationFiles=$(findTestFiles ./dist/server/tests/api/moderation)
redundancyFiles=$(findTestFiles ./dist/server/tests/api/redundancy)
+ objectStorageFiles=$(findTestFiles ./dist/server/tests/api/object-storage)
activitypubFiles=$(findTestFiles ./dist/server/tests/api/activitypub)
- MOCHA_PARALLEL=true TS_NODE_FILES=true runTest "$1" 2 $moderationFiles $redundancyFiles $activitypubFiles
+ MOCHA_PARALLEL=true TS_NODE_FILES=true runTest "$1" 2 $moderationFiles $redundancyFiles $activitypubFiles $objectStorageFiles
elif [ "$1" = "external-plugins" ]; then
npm run build:server
diff --git a/scripts/create-transcoding-job.ts b/scripts/create-transcoding-job.ts
index 3a552c19a..0bb9bfeab 100755
--- a/scripts/create-transcoding-job.ts
+++ b/scripts/create-transcoding-job.ts
@@ -6,9 +6,10 @@ import { VideoModel } from '../server/models/video/video'
import { initDatabaseModels } from '../server/initializers/database'
import { JobQueue } from '../server/lib/job-queue'
import { computeResolutionsToTranscode } from '@server/helpers/ffprobe-utils'
-import { VideoTranscodingPayload } from '@shared/models'
+import { VideoState, VideoTranscodingPayload } from '@shared/models'
import { CONFIG } from '@server/initializers/config'
import { isUUIDValid } from '@server/helpers/custom-validators/misc'
+import { addTranscodingJob } from '@server/lib/video'
program
.option('-v, --video [videoUUID]', 'Video UUID')
@@ -47,7 +48,7 @@ async function run () {
if (!video) throw new Error('Video not found.')
const dataInput: VideoTranscodingPayload[] = []
- const { resolution } = await video.getMaxQualityResolution()
+ const resolution = video.getMaxQualityFile().resolution
// Generate HLS files
if (options.generateHls || CONFIG.TRANSCODING.WEBTORRENT.ENABLED === false) {
@@ -62,6 +63,7 @@ async function run () {
resolution,
isPortraitMode: false,
copyCodecs: false,
+ isNewVideo: false,
isMaxQuality: false
})
}
@@ -87,10 +89,13 @@ async function run () {
}
}
- await JobQueue.Instance.init()
+ JobQueue.Instance.init()
+
+ video.state = VideoState.TO_TRANSCODE
+ await video.save()
for (const d of dataInput) {
- await JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload: d })
+ await addTranscodingJob(d, {})
console.log('Transcoding job for video %s created.', video.uuid)
}
}
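// Illustrative sketch, not part of the patch: addTranscodingJob, imported above from
// @server/lib/video, is not shown in this excerpt. Judging from its call sites here and
// in the transcoding handler further down, it plausibly bumps the new pendingTranscode
// counter before queueing the job; VideoJobInfoModel.increase is an assumed counterpart
// of the decrease() calls visible below, and passing options to createJobWithPromise is
// also an assumption.
import { JobQueue } from '@server/lib/job-queue'
import { VideoJobInfoModel } from '@server/models/video/video-job-info'
import { VideoTranscodingPayload } from '@shared/models'

async function addTranscodingJob (payload: VideoTranscodingPayload, options: { priority?: number, delay?: number } = {}) {
  await VideoJobInfoModel.increase(payload.videoUUID, 'pendingTranscode') // assumed helper

  return JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload }, options)
}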
diff --git a/scripts/optimize-old-videos.ts b/scripts/optimize-old-videos.ts
index 9e66105dd..245e4cf28 100644
--- a/scripts/optimize-old-videos.ts
+++ b/scripts/optimize-old-videos.ts
@@ -1,15 +1,18 @@
import { registerTSPaths } from '../server/helpers/register-ts-paths'
registerTSPaths()
-import { getDurationFromVideoFile, getVideoFileBitrate, getVideoFileFPS, getVideoFileResolution } from '../server/helpers/ffprobe-utils'
-import { VideoModel } from '../server/models/video/video'
-import { optimizeOriginalVideofile } from '../server/lib/transcoding/video-transcoding'
-import { initDatabaseModels } from '../server/initializers/database'
-import { basename, dirname } from 'path'
import { copy, move, remove } from 'fs-extra'
+import { basename, dirname } from 'path'
import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent'
-import { getVideoFilePath } from '@server/lib/video-paths'
+import { CONFIG } from '@server/initializers/config'
+import { processMoveToObjectStorage } from '@server/lib/job-queue/handlers/move-to-object-storage'
+import { VideoPathManager } from '@server/lib/video-path-manager'
import { getMaxBitrate } from '@shared/core-utils'
+import { MoveObjectStoragePayload } from '@shared/models'
+import { getDurationFromVideoFile, getVideoFileBitrate, getVideoFileFPS, getVideoFileResolution } from '../server/helpers/ffprobe-utils'
+import { initDatabaseModels } from '../server/initializers/database'
+import { optimizeOriginalVideofile } from '../server/lib/transcoding/video-transcoding'
+import { VideoModel } from '../server/models/video/video'
run()
.then(() => process.exit(0))
@@ -39,43 +42,49 @@ async function run () {
currentVideoId = video.id
for (const file of video.VideoFiles) {
- currentFilePath = getVideoFilePath(video, file)
+ await VideoPathManager.Instance.makeAvailableVideoFile(video, file, async path => {
+ currentFilePath = path
- const [ videoBitrate, fps, dataResolution ] = await Promise.all([
- getVideoFileBitrate(currentFilePath),
- getVideoFileFPS(currentFilePath),
- getVideoFileResolution(currentFilePath)
- ])
+ const [ videoBitrate, fps, dataResolution ] = await Promise.all([
+ getVideoFileBitrate(currentFilePath),
+ getVideoFileFPS(currentFilePath),
+ getVideoFileResolution(currentFilePath)
+ ])
- const maxBitrate = getMaxBitrate({ ...dataResolution, fps })
- const isMaxBitrateExceeded = videoBitrate > maxBitrate
- if (isMaxBitrateExceeded) {
- console.log(
- 'Optimizing video file %s with bitrate %s kbps (max: %s kbps)',
- basename(currentFilePath), videoBitrate / 1000, maxBitrate / 1000
- )
+ const maxBitrate = getMaxBitrate({ ...dataResolution, fps })
+ const isMaxBitrateExceeded = videoBitrate > maxBitrate
+ if (isMaxBitrateExceeded) {
+ console.log(
+ 'Optimizing video file %s with bitrate %s kbps (max: %s kbps)',
+ basename(currentFilePath), videoBitrate / 1000, maxBitrate / 1000
+ )
- const backupFile = `${currentFilePath}_backup`
- await copy(currentFilePath, backupFile)
+ const backupFile = `${currentFilePath}_backup`
+ await copy(currentFilePath, backupFile)
- await optimizeOriginalVideofile(video, file)
- // Update file path, the video filename changed
- currentFilePath = getVideoFilePath(video, file)
+ await optimizeOriginalVideofile(video, file)
+ // Update file path, the video filename changed
+ currentFilePath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, file)
- const originalDuration = await getDurationFromVideoFile(backupFile)
- const newDuration = await getDurationFromVideoFile(currentFilePath)
+ const originalDuration = await getDurationFromVideoFile(backupFile)
+ const newDuration = await getDurationFromVideoFile(currentFilePath)
- if (originalDuration === newDuration) {
- console.log('Finished optimizing %s', basename(currentFilePath))
- await remove(backupFile)
- continue
+ if (originalDuration === newDuration) {
+ console.log('Finished optimizing %s', basename(currentFilePath))
+ await remove(backupFile)
+ return
+ }
+
+ console.log('Failed to optimize %s, restoring original', basename(currentFilePath))
+ await move(backupFile, currentFilePath, { overwrite: true })
+ await createTorrentAndSetInfoHash(video, file)
+ await file.save()
}
+ })
+ }
- console.log('Failed to optimize %s, restoring original', basename(currentFilePath))
- await move(backupFile, currentFilePath, { overwrite: true })
- await createTorrentAndSetInfoHash(video, file)
- await file.save()
- }
+ if (CONFIG.OBJECT_STORAGE.ENABLED === true) {
+ await processMoveToObjectStorage({ data: { videoUUID: video.uuid } as MoveObjectStoragePayload } as any)
}
}
diff --git a/server/controllers/api/videos/upload.ts b/server/controllers/api/videos/upload.ts
index 89f50714d..5c740c041 100644
--- a/server/controllers/api/videos/upload.ts
+++ b/server/controllers/api/videos/upload.ts
@@ -1,12 +1,21 @@
import * as express from 'express'
import { move } from 'fs-extra'
+import { basename } from 'path'
import { getLowercaseExtension } from '@server/helpers/core-utils'
import { deleteResumableUploadMetaFile, getResumableUploadPath } from '@server/helpers/upload'
import { uuidToShort } from '@server/helpers/uuid'
import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent'
import { getLocalVideoActivityPubUrl } from '@server/lib/activitypub/url'
-import { addOptimizeOrMergeAudioJob, buildLocalVideoFromReq, buildVideoThumbnailsFromReq, setVideoTags } from '@server/lib/video'
-import { generateWebTorrentVideoFilename, getVideoFilePath } from '@server/lib/video-paths'
+import { generateWebTorrentVideoFilename } from '@server/lib/paths'
+import {
+ addMoveToObjectStorageJob,
+ addOptimizeOrMergeAudioJob,
+ buildLocalVideoFromReq,
+ buildVideoThumbnailsFromReq,
+ setVideoTags
+} from '@server/lib/video'
+import { VideoPathManager } from '@server/lib/video-path-manager'
+import { buildNextVideoState } from '@server/lib/video-state'
import { openapiOperationDoc } from '@server/middlewares/doc'
import { MVideo, MVideoFile, MVideoFullLight } from '@server/types/models'
import { uploadx } from '@uploadx/core'
@@ -139,23 +148,20 @@ async function addVideo (options: {
const videoData = buildLocalVideoFromReq(videoInfo, videoChannel.id)
- videoData.state = CONFIG.TRANSCODING.ENABLED
- ? VideoState.TO_TRANSCODE
- : VideoState.PUBLISHED
-
+ videoData.state = buildNextVideoState()
videoData.duration = videoPhysicalFile.duration // duration was added by a previous middleware
const video = new VideoModel(videoData) as MVideoFullLight
video.VideoChannel = videoChannel
video.url = getLocalVideoActivityPubUrl(video) // We use the UUID, so set the URL after building the object
- const videoFile = await buildNewFile(video, videoPhysicalFile)
+ const videoFile = await buildNewFile(videoPhysicalFile)
// Move physical file
- const destination = getVideoFilePath(video, videoFile)
+ const destination = VideoPathManager.Instance.getFSVideoFileOutputPath(video, videoFile)
await move(videoPhysicalFile.path, destination)
// This is important in case if there is another attempt in the retry process
- videoPhysicalFile.filename = getVideoFilePath(video, videoFile)
+ videoPhysicalFile.filename = basename(destination)
videoPhysicalFile.path = destination
const [ thumbnailModel, previewModel ] = await buildVideoThumbnailsFromReq({
@@ -210,9 +216,13 @@ async function addVideo (options: {
createTorrentFederate(video, videoFile)
.then(() => {
- if (video.state !== VideoState.TO_TRANSCODE) return
+ if (video.state === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) {
+ return addMoveToObjectStorageJob(video)
+ }
- return addOptimizeOrMergeAudioJob(videoCreated, videoFile, user)
+ if (video.state === VideoState.TO_TRANSCODE) {
+ return addOptimizeOrMergeAudioJob(videoCreated, videoFile, user)
+ }
})
.catch(err => logger.error('Cannot add optimize/merge audio job for %s.', videoCreated.uuid, { err, ...lTags(videoCreated.uuid) }))
@@ -227,7 +237,7 @@ async function addVideo (options: {
})
}
-async function buildNewFile (video: MVideo, videoPhysicalFile: express.VideoUploadFile) {
+async function buildNewFile (videoPhysicalFile: express.VideoUploadFile) {
const videoFile = new VideoFileModel({
extname: getLowercaseExtension(videoPhysicalFile.filename),
size: videoPhysicalFile.size,
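// Illustrative sketch, not part of the patch: buildNextVideoState comes from the new
// @server/lib/video-state module, which is not included in this excerpt. From the way
// upload, import and the job handlers use it, the decision is roughly "transcode first
// if enabled, then move to object storage if enabled, otherwise publish":
import { CONFIG } from '@server/initializers/config'
import { VideoState } from '@shared/models'

function buildNextVideoState (currentState?: VideoState): VideoState {
  if (currentState !== VideoState.TO_TRANSCODE && CONFIG.TRANSCODING.ENABLED) {
    return VideoState.TO_TRANSCODE
  }

  if (currentState !== VideoState.TO_MOVE_TO_EXTERNAL_STORAGE && CONFIG.OBJECT_STORAGE.ENABLED) {
    return VideoState.TO_MOVE_TO_EXTERNAL_STORAGE
  }

  return VideoState.PUBLISHED
}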
diff --git a/server/controllers/download.ts b/server/controllers/download.ts
index ddacc1b68..ffe40d57e 100644
--- a/server/controllers/download.ts
+++ b/server/controllers/download.ts
@@ -3,9 +3,9 @@ import * as express from 'express'
import { logger } from '@server/helpers/logger'
import { VideosTorrentCache } from '@server/lib/files-cache/videos-torrent-cache'
import { Hooks } from '@server/lib/plugins/hooks'
-import { getVideoFilePath } from '@server/lib/video-paths'
+import { VideoPathManager } from '@server/lib/video-path-manager'
import { MStreamingPlaylist, MVideo, MVideoFile, MVideoFullLight } from '@server/types/models'
-import { HttpStatusCode, VideoStreamingPlaylistType } from '@shared/models'
+import { HttpStatusCode, VideoStorage, VideoStreamingPlaylistType } from '@shared/models'
import { STATIC_DOWNLOAD_PATHS } from '../initializers/constants'
import { asyncMiddleware, videosDownloadValidator } from '../middlewares'
@@ -81,7 +81,15 @@ async function downloadVideoFile (req: express.Request, res: express.Response) {
if (!checkAllowResult(res, allowParameters, allowedResult)) return
- return res.download(getVideoFilePath(video, videoFile), `${video.name}-${videoFile.resolution}p${videoFile.extname}`)
+ if (videoFile.storage === VideoStorage.OBJECT_STORAGE) {
+ return res.redirect(videoFile.getObjectStorageUrl())
+ }
+
+ await VideoPathManager.Instance.makeAvailableVideoFile(video, videoFile, path => {
+ const filename = `${video.name}-${videoFile.resolution}p${videoFile.extname}`
+
+ return res.download(path, filename)
+ })
}
async function downloadHLSVideoFile (req: express.Request, res: express.Response) {
@@ -107,8 +115,15 @@ async function downloadHLSVideoFile (req: express.Request, res: express.Response
if (!checkAllowResult(res, allowParameters, allowedResult)) return
- const filename = `${video.name}-${videoFile.resolution}p-${streamingPlaylist.getStringType()}${videoFile.extname}`
- return res.download(getVideoFilePath(streamingPlaylist, videoFile), filename)
+ if (videoFile.storage === VideoStorage.OBJECT_STORAGE) {
+ return res.redirect(videoFile.getObjectStorageUrl())
+ }
+
+ await VideoPathManager.Instance.makeAvailableVideoFile(streamingPlaylist, videoFile, path => {
+ const filename = `${video.name}-${videoFile.resolution}p-${streamingPlaylist.getStringType()}${videoFile.extname}`
+
+ return res.download(path, filename)
+ })
}
function getVideoFile (req: express.Request, files: MVideoFile[]) {
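// Illustrative sketch, not part of the patch: VideoPathManager is a new module that this
// excerpt does not include. From its call sites, makeAvailableVideoFile hands the callback
// a readable local path: the regular on-disk path when the file lives on the file system,
// or a temporary download when it only exists in object storage, removed once the callback
// finishes. The two declared helpers below are stand-ins, not real PeerTube functions.
import { remove } from 'fs-extra'
import { VideoStorage } from '@shared/models'

declare function getLocalFilePath (videoOrPlaylist: any, file: any): string
declare function downloadToTmpFromObjectStorage (file: any): Promise<string>

async function makeAvailableVideoFile <T> (videoOrPlaylist: any, file: any, cb: (path: string) => T | Promise<T>): Promise<T> {
  if (file.storage === VideoStorage.FILE_SYSTEM) {
    // Already on disk: pass the regular path through, nothing to clean up
    return cb(getLocalFilePath(videoOrPlaylist, file))
  }

  // Only in the bucket: fetch a temporary copy for the duration of the callback
  const tmpDestination = await downloadToTmpFromObjectStorage(file)

  try {
    return await cb(tmpDestination)
  } finally {
    await remove(tmpDestination)
  }
}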
diff --git a/server/helpers/webtorrent.ts b/server/helpers/webtorrent.ts
index ecf63e93e..c84376304 100644
--- a/server/helpers/webtorrent.ts
+++ b/server/helpers/webtorrent.ts
@@ -6,7 +6,8 @@ import { dirname, join } from 'path'
import * as WebTorrent from 'webtorrent'
import { isArray } from '@server/helpers/custom-validators/misc'
import { WEBSERVER } from '@server/initializers/constants'
-import { generateTorrentFileName, getVideoFilePath } from '@server/lib/video-paths'
+import { generateTorrentFileName } from '@server/lib/paths'
+import { VideoPathManager } from '@server/lib/video-path-manager'
import { MVideo } from '@server/types/models/video/video'
import { MVideoFile, MVideoFileRedundanciesOpt } from '@server/types/models/video/video-file'
import { MStreamingPlaylistVideo } from '@server/types/models/video/video-streaming-playlist'
@@ -78,7 +79,7 @@ async function downloadWebTorrentVideo (target: { magnetUri: string, torrentName
})
}
-async function createTorrentAndSetInfoHash (
+function createTorrentAndSetInfoHash (
videoOrPlaylist: MVideo | MStreamingPlaylistVideo,
videoFile: MVideoFile
) {
@@ -95,22 +96,24 @@ async function createTorrentAndSetInfoHash (
urlList: [ videoFile.getFileUrl(video) ]
}
- const torrent = await createTorrentPromise(getVideoFilePath(videoOrPlaylist, videoFile), options)
+ return VideoPathManager.Instance.makeAvailableVideoFile(videoOrPlaylist, videoFile, async videoPath => {
+ const torrent = await createTorrentPromise(videoPath, options)
- const torrentFilename = generateTorrentFileName(videoOrPlaylist, videoFile.resolution)
- const torrentPath = join(CONFIG.STORAGE.TORRENTS_DIR, torrentFilename)
- logger.info('Creating torrent %s.', torrentPath)
+ const torrentFilename = generateTorrentFileName(videoOrPlaylist, videoFile.resolution)
+ const torrentPath = join(CONFIG.STORAGE.TORRENTS_DIR, torrentFilename)
+ logger.info('Creating torrent %s.', torrentPath)
- await writeFile(torrentPath, torrent)
+ await writeFile(torrentPath, torrent)
- // Remove old torrent file if it existed
- if (videoFile.hasTorrent()) {
- await remove(join(CONFIG.STORAGE.TORRENTS_DIR, videoFile.torrentFilename))
- }
+ // Remove old torrent file if it existed
+ if (videoFile.hasTorrent()) {
+ await remove(join(CONFIG.STORAGE.TORRENTS_DIR, videoFile.torrentFilename))
+ }
- const parsedTorrent = parseTorrent(torrent)
- videoFile.infoHash = parsedTorrent.infoHash
- videoFile.torrentFilename = torrentFilename
+ const parsedTorrent = parseTorrent(torrent)
+ videoFile.infoHash = parsedTorrent.infoHash
+ videoFile.torrentFilename = torrentFilename
+ })
}
function generateMagnetUri (
diff --git a/server/initializers/checker-after-init.ts b/server/initializers/checker-after-init.ts
index 911734fa0..09f587274 100644
--- a/server/initializers/checker-after-init.ts
+++ b/server/initializers/checker-after-init.ts
@@ -153,6 +153,29 @@ function checkConfig () {
}
}
+ // Object storage
+ if (CONFIG.OBJECT_STORAGE.ENABLED === true) {
+
+ if (!CONFIG.OBJECT_STORAGE.VIDEOS.BUCKET_NAME) {
+ return 'object_storage.videos.bucket_name should be set when object storage support is enabled.'
+ }
+
+ if (!CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET_NAME) {
+ return 'object_storage.streaming_playlists.bucket_name should be set when object storage support is enabled.'
+ }
+
+ if (
+ CONFIG.OBJECT_STORAGE.VIDEOS.BUCKET_NAME === CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BUCKET_NAME &&
+ CONFIG.OBJECT_STORAGE.VIDEOS.PREFIX === CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.PREFIX
+ ) {
+ if (CONFIG.OBJECT_STORAGE.VIDEOS.PREFIX === '') {
+ return 'Object storage bucket prefixes should be set when the same bucket is used for both types of video.'
+ } else {
+ return 'Object storage bucket prefixes should be set to different values when the same bucket is used for both types of video.'
+ }
+ }
+ }
+
return null
}
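# Illustrative sketch, not part of the patch: a configuration accepted by the check above
# when a single bucket holds both kinds of files, i.e. same bucket_name with distinct
# non-empty prefixes (bucket and prefix values are examples, not required names):
#
#   object_storage:
#     enabled: true
#     streaming_playlists:
#       bucket_name: 'peertube'
#       prefix: 'streaming-playlists:'
#     videos:
#       bucket_name: 'peertube'
#       prefix: 'videos:'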
diff --git a/server/initializers/config.ts b/server/initializers/config.ts
index 30a9823b9..0e684eef8 100644
--- a/server/initializers/config.ts
+++ b/server/initializers/config.ts
@@ -73,6 +73,26 @@ const CONFIG = {
PLUGINS_DIR: buildPath(config.get('storage.plugins')),
CLIENT_OVERRIDES_DIR: buildPath(config.get('storage.client_overrides'))
},
+ OBJECT_STORAGE: {
+ ENABLED: config.get('object_storage.enabled'),
+ MAX_UPLOAD_PART: bytes.parse(config.get('object_storage.max_upload_part')),
+ ENDPOINT: config.get('object_storage.endpoint'),
+ REGION: config.get('object_storage.region'),
+ CREDENTIALS: {
+ ACCESS_KEY_ID: config.get('object_storage.credentials.access_key_id'),
+ SECRET_ACCESS_KEY: config.get('object_storage.credentials.secret_access_key')
+ },
+ VIDEOS: {
+ BUCKET_NAME: config.get('object_storage.videos.bucket_name'),
+ PREFIX: config.get('object_storage.videos.prefix'),
+ BASE_URL: config.get('object_storage.videos.base_url')
+ },
+ STREAMING_PLAYLISTS: {
+ BUCKET_NAME: config.get('object_storage.streaming_playlists.bucket_name'),
+ PREFIX: config.get('object_storage.streaming_playlists.prefix'),
+ BASE_URL: config.get('object_storage.streaming_playlists.base_url')
+ }
+ },
WEBSERVER: {
SCHEME: config.get('webserver.https') === true ? 'https' : 'http',
WS: config.get('webserver.https') === true ? 'wss' : 'ws',
diff --git a/server/initializers/constants.ts b/server/initializers/constants.ts
index 5f121d9a4..8a1526ae8 100644
--- a/server/initializers/constants.ts
+++ b/server/initializers/constants.ts
@@ -24,7 +24,7 @@ import { CONFIG, registerConfigChangedHandler } from './config'
// ---------------------------------------------------------------------------
-const LAST_MIGRATION_VERSION = 655
+const LAST_MIGRATION_VERSION = 660
// ---------------------------------------------------------------------------
@@ -147,7 +147,8 @@ const JOB_ATTEMPTS: { [id in JobType]: number } = {
'videos-views': 1,
'activitypub-refresher': 1,
'video-redundancy': 1,
- 'video-live-ending': 1
+ 'video-live-ending': 1,
+ 'move-to-object-storage': 3
}
// Excluded keys are jobs that can be configured by admins
const JOB_CONCURRENCY: { [id in Exclude<JobType, 'video-transcoding' | 'video-import'>]: number } = {
@@ -162,7 +163,8 @@ const JOB_CONCURRENCY: { [id in Exclude {
- return utils.db.Video.listOwnedAndPopulateAuthorAndTags()
- .then((videos: VideoModel[]) => {
- const tasks: Promise[] = []
-
- videos.forEach(video => {
- video.VideoFiles.forEach(videoFile => {
- const p = new Promise((res, rej) => {
- stat(getVideoFilePath(video, videoFile), (err, stats) => {
- if (err) return rej(err)
-
- videoFile.size = stats.size
- videoFile.save().then(res).catch(rej)
- })
- })
-
- tasks.push(p)
- })
- })
-
- return tasks
- })
- .then((tasks: Promise[]) => {
- return Promise.all(tasks)
- })
+ throw new Error('Removed, please upgrade from a previous version first.')
}
function down (options) {
diff --git a/server/initializers/migrations/0660-object-storage.ts b/server/initializers/migrations/0660-object-storage.ts
new file mode 100644
index 000000000..c815c71c6
--- /dev/null
+++ b/server/initializers/migrations/0660-object-storage.ts
@@ -0,0 +1,58 @@
+import * as Sequelize from 'sequelize'
+import { VideoStorage } from '@shared/models'
+
+async function up (utils: {
+ transaction: Sequelize.Transaction
+ queryInterface: Sequelize.QueryInterface
+ sequelize: Sequelize.Sequelize
+ db: any
+}): Promise<void> {
+ {
+ const query = `
+ CREATE TABLE IF NOT EXISTS "videoJobInfo" (
+ "id" serial,
+ "pendingMove" INTEGER NOT NULL,
+ "pendingTranscode" INTEGER NOT NULL,
+ "videoId" serial UNIQUE NOT NULL REFERENCES "video" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
+ "createdAt" timestamp WITH time zone NOT NULL,
+ "updatedAt" timestamp WITH time zone NOT NULL,
+ PRIMARY KEY ("id")
+ );
+ `
+
+ await utils.sequelize.query(query)
+ }
+
+ {
+ await utils.queryInterface.addColumn('videoFile', 'storage', { type: Sequelize.INTEGER, allowNull: true })
+ }
+ {
+ await utils.sequelize.query(
+ `UPDATE "videoFile" SET "storage" = ${VideoStorage.FILE_SYSTEM}`
+ )
+ }
+ {
+ await utils.queryInterface.changeColumn('videoFile', 'storage', { type: Sequelize.INTEGER, allowNull: false })
+ }
+
+ {
+ await utils.queryInterface.addColumn('videoStreamingPlaylist', 'storage', { type: Sequelize.INTEGER, allowNull: true })
+ }
+ {
+ await utils.sequelize.query(
+ `UPDATE "videoStreamingPlaylist" SET "storage" = ${VideoStorage.FILE_SYSTEM}`
+ )
+ }
+ {
+ await utils.queryInterface.changeColumn('videoStreamingPlaylist', 'storage', { type: Sequelize.INTEGER, allowNull: false })
+ }
+}
+
+function down (options) {
+ throw new Error('Not implemented.')
+}
+
+export {
+ up,
+ down
+}
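// Illustrative note, not part of the patch: VideoStorage is a numeric enum in @shared/models,
// so assuming VideoStorage.FILE_SYSTEM maps to 0 the interpolated backfill above runs as:
//
//   UPDATE "videoFile" SET "storage" = 0
//
// i.e. every pre-existing file is marked as stored on the local file system before the
// column is switched to NOT NULL; new files get their storage value from the application.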
diff --git a/server/lib/activitypub/videos/shared/object-to-model-attributes.ts b/server/lib/activitypub/videos/shared/object-to-model-attributes.ts
index 1fa16295d..bd9ed45a9 100644
--- a/server/lib/activitypub/videos/shared/object-to-model-attributes.ts
+++ b/server/lib/activitypub/videos/shared/object-to-model-attributes.ts
@@ -6,7 +6,7 @@ import { isVideoFileInfoHashValid } from '@server/helpers/custom-validators/vide
import { logger } from '@server/helpers/logger'
import { getExtFromMimetype } from '@server/helpers/video'
import { ACTIVITY_PUB, MIMETYPES, P2P_MEDIA_LOADER_PEER_VERSION, PREVIEWS_SIZE, THUMBNAILS_SIZE } from '@server/initializers/constants'
-import { generateTorrentFileName } from '@server/lib/video-paths'
+import { generateTorrentFileName } from '@server/lib/paths'
import { VideoCaptionModel } from '@server/models/video/video-caption'
import { VideoFileModel } from '@server/models/video/video-file'
import { VideoStreamingPlaylistModel } from '@server/models/video/video-streaming-playlist'
diff --git a/server/lib/hls.ts b/server/lib/hls.ts
index 0e77ab9fa..0828a2d0f 100644
--- a/server/lib/hls.ts
+++ b/server/lib/hls.ts
@@ -1,4 +1,4 @@
-import { close, ensureDir, move, open, outputJSON, pathExists, read, readFile, remove, stat, writeFile } from 'fs-extra'
+import { close, ensureDir, move, open, outputJSON, read, readFile, remove, stat, writeFile } from 'fs-extra'
import { flatten, uniq } from 'lodash'
import { basename, dirname, join } from 'path'
import { MStreamingPlaylistFilesVideo, MVideoWithFile } from '@server/types/models'
@@ -8,11 +8,12 @@ import { logger } from '../helpers/logger'
import { doRequest, doRequestAndSaveToFile } from '../helpers/requests'
import { generateRandomString } from '../helpers/utils'
import { CONFIG } from '../initializers/config'
-import { HLS_STREAMING_PLAYLIST_DIRECTORY, P2P_MEDIA_LOADER_PEER_VERSION } from '../initializers/constants'
+import { P2P_MEDIA_LOADER_PEER_VERSION } from '../initializers/constants'
import { sequelizeTypescript } from '../initializers/database'
import { VideoFileModel } from '../models/video/video-file'
import { VideoStreamingPlaylistModel } from '../models/video/video-streaming-playlist'
-import { getHlsResolutionPlaylistFilename, getVideoFilePath } from './video-paths'
+import { getHlsResolutionPlaylistFilename } from './paths'
+import { VideoPathManager } from './video-path-manager'
async function updateStreamingPlaylistsInfohashesIfNeeded () {
const playlistsToUpdate = await VideoStreamingPlaylistModel.listByIncorrectPeerVersion()
@@ -31,75 +32,66 @@ async function updateStreamingPlaylistsInfohashesIfNeeded () {
}
async function updateMasterHLSPlaylist (video: MVideoWithFile, playlist: MStreamingPlaylistFilesVideo) {
- const directory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid)
-
const masterPlaylists: string[] = [ '#EXTM3U', '#EXT-X-VERSION:3' ]
- const masterPlaylistPath = join(directory, playlist.playlistFilename)
-
for (const file of playlist.VideoFiles) {
const playlistFilename = getHlsResolutionPlaylistFilename(file.filename)
- // If we did not generated a playlist for this resolution, skip
- const filePlaylistPath = join(directory, playlistFilename)
- if (await pathExists(filePlaylistPath) === false) continue
+ await VideoPathManager.Instance.makeAvailableVideoFile(playlist, file, async videoFilePath => {
+ const size = await getVideoStreamSize(videoFilePath)
- const videoFilePath = getVideoFilePath(playlist, file)
+ const bandwidth = 'BANDWIDTH=' + video.getBandwidthBits(file)
+ const resolution = `RESOLUTION=${size.width}x${size.height}`
- const size = await getVideoStreamSize(videoFilePath)
+ let line = `#EXT-X-STREAM-INF:${bandwidth},${resolution}`
+ if (file.fps) line += ',FRAME-RATE=' + file.fps
- const bandwidth = 'BANDWIDTH=' + video.getBandwidthBits(file)
- const resolution = `RESOLUTION=${size.width}x${size.height}`
+ const codecs = await Promise.all([
+ getVideoStreamCodec(videoFilePath),
+ getAudioStreamCodec(videoFilePath)
+ ])
- let line = `#EXT-X-STREAM-INF:${bandwidth},${resolution}`
- if (file.fps) line += ',FRAME-RATE=' + file.fps
+ line += `,CODECS="${codecs.filter(c => !!c).join(',')}"`
- const codecs = await Promise.all([
- getVideoStreamCodec(videoFilePath),
- getAudioStreamCodec(videoFilePath)
- ])
-
- line += `,CODECS="${codecs.filter(c => !!c).join(',')}"`
-
- masterPlaylists.push(line)
- masterPlaylists.push(playlistFilename)
+ masterPlaylists.push(line)
+ masterPlaylists.push(playlistFilename)
+ })
}
- await writeFile(masterPlaylistPath, masterPlaylists.join('\n') + '\n')
+ await VideoPathManager.Instance.makeAvailablePlaylistFile(playlist, playlist.playlistFilename, masterPlaylistPath => {
+ return writeFile(masterPlaylistPath, masterPlaylists.join('\n') + '\n')
+ })
}
async function updateSha256VODSegments (video: MVideoWithFile, playlist: MStreamingPlaylistFilesVideo) {
const json: { [filename: string]: { [range: string]: string } } = {}
- const playlistDirectory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid)
-
// For all the resolutions available for this video
for (const file of playlist.VideoFiles) {
const rangeHashes: { [range: string]: string } = {}
- const videoPath = getVideoFilePath(playlist, file)
- const resolutionPlaylistPath = join(playlistDirectory, getHlsResolutionPlaylistFilename(file.filename))
+ await VideoPathManager.Instance.makeAvailableVideoFile(playlist, file, videoPath => {
- // Maybe the playlist is not generated for this resolution yet
- if (!await pathExists(resolutionPlaylistPath)) continue
+ return VideoPathManager.Instance.makeAvailableResolutionPlaylistFile(playlist, file, async resolutionPlaylistPath => {
+ const playlistContent = await readFile(resolutionPlaylistPath)
+ const ranges = getRangesFromPlaylist(playlistContent.toString())
- const playlistContent = await readFile(resolutionPlaylistPath)
- const ranges = getRangesFromPlaylist(playlistContent.toString())
+ const fd = await open(videoPath, 'r')
+ for (const range of ranges) {
+ const buf = Buffer.alloc(range.length)
+ await read(fd, buf, 0, range.length, range.offset)
- const fd = await open(videoPath, 'r')
- for (const range of ranges) {
- const buf = Buffer.alloc(range.length)
- await read(fd, buf, 0, range.length, range.offset)
+ rangeHashes[`${range.offset}-${range.offset + range.length - 1}`] = sha256(buf)
+ }
+ await close(fd)
- rangeHashes[`${range.offset}-${range.offset + range.length - 1}`] = sha256(buf)
- }
- await close(fd)
-
- const videoFilename = file.filename
- json[videoFilename] = rangeHashes
+ const videoFilename = file.filename
+ json[videoFilename] = rangeHashes
+ })
+ })
}
- const outputPath = join(playlistDirectory, playlist.segmentsSha256Filename)
+ const outputPath = VideoPathManager.Instance.getFSHLSOutputPath(video, playlist.segmentsSha256Filename)
await outputJSON(outputPath, json)
}
diff --git a/server/lib/job-queue/handlers/move-to-object-storage.ts b/server/lib/job-queue/handlers/move-to-object-storage.ts
new file mode 100644
index 000000000..a0c58d211
--- /dev/null
+++ b/server/lib/job-queue/handlers/move-to-object-storage.ts
@@ -0,0 +1,114 @@
+import * as Bull from 'bull'
+import { remove } from 'fs-extra'
+import { join } from 'path'
+import { logger } from '@server/helpers/logger'
+import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent'
+import { CONFIG } from '@server/initializers/config'
+import { storeHLSFile, storeWebTorrentFile } from '@server/lib/object-storage'
+import { getHLSDirectory, getHlsResolutionPlaylistFilename } from '@server/lib/paths'
+import { moveToNextState } from '@server/lib/video-state'
+import { VideoModel } from '@server/models/video/video'
+import { VideoJobInfoModel } from '@server/models/video/video-job-info'
+import { MStreamingPlaylistVideo, MVideo, MVideoFile, MVideoWithAllFiles } from '@server/types/models'
+import { MoveObjectStoragePayload, VideoStorage } from '../../../../shared'
+
+export async function processMoveToObjectStorage (job: Bull.Job) {
+ const payload = job.data as MoveObjectStoragePayload
+ logger.info('Moving video %s in job %d.', payload.videoUUID, job.id)
+
+ const video = await VideoModel.loadWithFiles(payload.videoUUID)
+ // No video, maybe deleted?
+ if (!video) {
+ logger.info('Can\'t process job %d, video does not exist.', job.id)
+ return undefined
+ }
+
+ if (video.VideoFiles) {
+ await moveWebTorrentFiles(video)
+ }
+
+ if (video.VideoStreamingPlaylists) {
+ await moveHLSFiles(video)
+ }
+
+ const pendingMove = await VideoJobInfoModel.decrease(video.uuid, 'pendingMove')
+ if (pendingMove === 0) {
+ logger.info('Running cleanup after moving files to object storage (video %s in job %d)', video.uuid, job.id)
+ await doAfterLastJob(video, payload.isNewVideo)
+ }
+
+ return payload.videoUUID
+}
+
+// ---------------------------------------------------------------------------
+
+async function moveWebTorrentFiles (video: MVideoWithAllFiles) {
+ for (const file of video.VideoFiles) {
+ if (file.storage !== VideoStorage.FILE_SYSTEM) continue
+
+ const fileUrl = await storeWebTorrentFile(file.filename)
+
+ const oldPath = join(CONFIG.STORAGE.VIDEOS_DIR, file.filename)
+ await onFileMoved({ videoOrPlaylist: video, file, fileUrl, oldPath })
+ }
+}
+
+async function moveHLSFiles (video: MVideoWithAllFiles) {
+ for (const playlist of video.VideoStreamingPlaylists) {
+
+ for (const file of playlist.VideoFiles) {
+ if (file.storage !== VideoStorage.FILE_SYSTEM) continue
+
+ // Resolution playlist
+ const playlistFilename = getHlsResolutionPlaylistFilename(file.filename)
+ await storeHLSFile(playlist, video, playlistFilename)
+
+ // Resolution fragmented file
+ const fileUrl = await storeHLSFile(playlist, video, file.filename)
+
+ const oldPath = join(getHLSDirectory(video), file.filename)
+
+ await onFileMoved({ videoOrPlaylist: Object.assign(playlist, { Video: video }), file, fileUrl, oldPath })
+ }
+ }
+}
+
+async function doAfterLastJob (video: MVideoWithAllFiles, isNewVideo: boolean) {
+ for (const playlist of video.VideoStreamingPlaylists) {
+ if (playlist.storage === VideoStorage.OBJECT_STORAGE) continue
+
+ // Master playlist
+ playlist.playlistUrl = await storeHLSFile(playlist, video, playlist.playlistFilename)
+ // Sha256 segments file
+ playlist.segmentsSha256Url = await storeHLSFile(playlist, video, playlist.segmentsSha256Filename)
+
+ playlist.storage = VideoStorage.OBJECT_STORAGE
+
+ await playlist.save()
+ }
+
+ // Remove empty hls video directory
+ if (video.VideoStreamingPlaylists) {
+ await remove(getHLSDirectory(video))
+ }
+
+ await moveToNextState(video, isNewVideo)
+}
+
+async function onFileMoved (options: {
+ videoOrPlaylist: MVideo | MStreamingPlaylistVideo
+ file: MVideoFile
+ fileUrl: string
+ oldPath: string
+}) {
+ const { videoOrPlaylist, file, fileUrl, oldPath } = options
+
+ file.fileUrl = fileUrl
+ file.storage = VideoStorage.OBJECT_STORAGE
+
+ await createTorrentAndSetInfoHash(videoOrPlaylist, file)
+ await file.save()
+
+ logger.debug('Removing %s because it\'s now on object storage', oldPath)
+ await remove(oldPath)
+}
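// Illustrative sketch, not part of the patch: addMoveToObjectStorageJob, called from the
// upload and import code paths, lives in @server/lib/video and is not shown in this excerpt.
// To match the pendingMove bookkeeping used by the handler above, it plausibly increments
// the counter before queueing the job; VideoJobInfoModel.increase is an assumed counterpart
// of the decrease() call in processMoveToObjectStorage.
import { JobQueue } from '@server/lib/job-queue'
import { VideoJobInfoModel } from '@server/models/video/video-job-info'
import { MVideoUUID } from '@server/types/models'
import { MoveObjectStoragePayload } from '@shared/models'

async function addMoveToObjectStorageJob (video: MVideoUUID, isNewVideo = true) {
  await VideoJobInfoModel.increase(video.uuid, 'pendingMove') // assumed helper

  const payload: MoveObjectStoragePayload = { videoUUID: video.uuid, isNewVideo }

  return JobQueue.Instance.createJobWithPromise({ type: 'move-to-object-storage', payload })
}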
diff --git a/server/lib/job-queue/handlers/video-file-import.ts b/server/lib/job-queue/handlers/video-file-import.ts
index 2f4abf730..e8ee1f759 100644
--- a/server/lib/job-queue/handlers/video-file-import.ts
+++ b/server/lib/job-queue/handlers/video-file-import.ts
@@ -2,15 +2,19 @@ import * as Bull from 'bull'
import { copy, stat } from 'fs-extra'
import { getLowercaseExtension } from '@server/helpers/core-utils'
import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent'
-import { generateWebTorrentVideoFilename, getVideoFilePath } from '@server/lib/video-paths'
+import { CONFIG } from '@server/initializers/config'
+import { federateVideoIfNeeded } from '@server/lib/activitypub/videos'
+import { generateWebTorrentVideoFilename } from '@server/lib/paths'
+import { addMoveToObjectStorageJob } from '@server/lib/video'
+import { VideoPathManager } from '@server/lib/video-path-manager'
import { UserModel } from '@server/models/user/user'
import { MVideoFullLight } from '@server/types/models'
-import { VideoFileImportPayload } from '@shared/models'
+import { VideoFileImportPayload, VideoStorage } from '@shared/models'
import { getVideoFileFPS, getVideoFileResolution } from '../../../helpers/ffprobe-utils'
import { logger } from '../../../helpers/logger'
import { VideoModel } from '../../../models/video/video'
import { VideoFileModel } from '../../../models/video/video-file'
-import { onNewWebTorrentFileResolution } from './video-transcoding'
+import { createHlsJobIfEnabled } from './video-transcoding'
async function processVideoFileImport (job: Bull.Job) {
const payload = job.data as VideoFileImportPayload
@@ -29,15 +33,19 @@ async function processVideoFileImport (job: Bull.Job) {
const user = await UserModel.loadByChannelActorId(video.VideoChannel.actorId)
- const newResolutionPayload = {
- type: 'new-resolution-to-webtorrent' as 'new-resolution-to-webtorrent',
+ await createHlsJobIfEnabled(user, {
videoUUID: video.uuid,
resolution: data.resolution,
isPortraitMode: data.isPortraitMode,
- copyCodecs: false,
- isNewVideo: false
+ copyCodecs: true,
+ isMaxQuality: false
+ })
+
+ if (CONFIG.OBJECT_STORAGE.ENABLED) {
+ await addMoveToObjectStorageJob(video)
+ } else {
+ await federateVideoIfNeeded(video, false)
}
- await onNewWebTorrentFileResolution(video, user, newResolutionPayload)
return video
}
@@ -72,12 +80,13 @@ async function updateVideoFile (video: MVideoFullLight, inputFilePath: string) {
resolution,
extname: fileExt,
filename: generateWebTorrentVideoFilename(resolution, fileExt),
+ storage: VideoStorage.FILE_SYSTEM,
size,
fps,
videoId: video.id
})
- const outputPath = getVideoFilePath(video, newVideoFile)
+ const outputPath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, newVideoFile)
await copy(inputFilePath, outputPath)
video.VideoFiles.push(newVideoFile)
diff --git a/server/lib/job-queue/handlers/video-import.ts b/server/lib/job-queue/handlers/video-import.ts
index fec553f2b..a5fa204f5 100644
--- a/server/lib/job-queue/handlers/video-import.ts
+++ b/server/lib/job-queue/handlers/video-import.ts
@@ -4,11 +4,13 @@ import { getLowercaseExtension } from '@server/helpers/core-utils'
import { retryTransactionWrapper } from '@server/helpers/database-utils'
import { YoutubeDL } from '@server/helpers/youtube-dl'
import { isPostImportVideoAccepted } from '@server/lib/moderation'
+import { generateWebTorrentVideoFilename } from '@server/lib/paths'
import { Hooks } from '@server/lib/plugins/hooks'
import { ServerConfigManager } from '@server/lib/server-config-manager'
import { isAbleToUploadVideo } from '@server/lib/user'
-import { addOptimizeOrMergeAudioJob } from '@server/lib/video'
-import { generateWebTorrentVideoFilename, getVideoFilePath } from '@server/lib/video-paths'
+import { addMoveToObjectStorageJob, addOptimizeOrMergeAudioJob } from '@server/lib/video'
+import { VideoPathManager } from '@server/lib/video-path-manager'
+import { buildNextVideoState } from '@server/lib/video-state'
import { ThumbnailModel } from '@server/models/video/thumbnail'
import { MVideoImportDefault, MVideoImportDefaultFiles, MVideoImportVideo } from '@server/types/models/video/video-import'
import {
@@ -25,7 +27,6 @@ import { getDurationFromVideoFile, getVideoFileFPS, getVideoFileResolution } fro
import { logger } from '../../../helpers/logger'
import { getSecureTorrentName } from '../../../helpers/utils'
import { createTorrentAndSetInfoHash, downloadWebTorrentVideo } from '../../../helpers/webtorrent'
-import { CONFIG } from '../../../initializers/config'
import { VIDEO_IMPORT_TIMEOUT } from '../../../initializers/constants'
import { sequelizeTypescript } from '../../../initializers/database'
import { VideoModel } from '../../../models/video/video'
@@ -100,7 +101,6 @@ type ProcessFileOptions = {
}
async function processFile (downloader: () => Promise<string>, videoImport: MVideoImportDefault, options: ProcessFileOptions) {
let tempVideoPath: string
- let videoDestFile: string
let videoFile: VideoFileModel
try {
@@ -159,7 +159,7 @@ async function processFile (downloader: () => Promise, videoImport: MVid
const videoImportWithFiles: MVideoImportDefaultFiles = Object.assign(videoImport, { Video: videoWithFiles })
// Move file
- videoDestFile = getVideoFilePath(videoImportWithFiles.Video, videoFile)
+ const videoDestFile = VideoPathManager.Instance.getFSVideoFileOutputPath(videoImportWithFiles.Video, videoFile)
await move(tempVideoPath, videoDestFile)
tempVideoPath = null // This path is not used anymore
@@ -204,7 +204,7 @@ async function processFile (downloader: () => Promise, videoImport: MVid
// Update video DB object
video.duration = duration
- video.state = CONFIG.TRANSCODING.ENABLED ? VideoState.TO_TRANSCODE : VideoState.PUBLISHED
+ video.state = buildNextVideoState(video.state)
await video.save({ transaction: t })
if (thumbnailModel) await video.addAndSaveThumbnail(thumbnailModel, t)
@@ -245,6 +245,10 @@ async function processFile (downloader: () => Promise, videoImport: MVid
Notifier.Instance.notifyOnNewVideoIfNeeded(video)
}
+ if (video.state === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) {
+ return addMoveToObjectStorageJob(videoImportUpdated.Video)
+ }
+
// Create transcoding jobs?
if (video.state === VideoState.TO_TRANSCODE) {
await addOptimizeOrMergeAudioJob(videoImportUpdated.Video, videoFile, videoImport.User)
diff --git a/server/lib/job-queue/handlers/video-live-ending.ts b/server/lib/job-queue/handlers/video-live-ending.ts
index aa5bd573a..9ccf724c2 100644
--- a/server/lib/job-queue/handlers/video-live-ending.ts
+++ b/server/lib/job-queue/handlers/video-live-ending.ts
@@ -4,10 +4,11 @@ import { join } from 'path'
import { ffprobePromise, getAudioStream, getDurationFromVideoFile, getVideoFileResolution } from '@server/helpers/ffprobe-utils'
import { VIDEO_LIVE } from '@server/initializers/constants'
import { buildConcatenatedName, cleanupLive, LiveSegmentShaStore } from '@server/lib/live'
+import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename, getLiveDirectory } from '@server/lib/paths'
import { generateVideoMiniature } from '@server/lib/thumbnail'
import { generateHlsPlaylistResolutionFromTS } from '@server/lib/transcoding/video-transcoding'
-import { publishAndFederateIfNeeded } from '@server/lib/video'
-import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename, getHLSDirectory } from '@server/lib/video-paths'
+import { VideoPathManager } from '@server/lib/video-path-manager'
+import { moveToNextState } from '@server/lib/video-state'
import { VideoModel } from '@server/models/video/video'
import { VideoFileModel } from '@server/models/video/video-file'
import { VideoLiveModel } from '@server/models/video/video-live'
@@ -55,16 +56,15 @@ export {
// ---------------------------------------------------------------------------
async function saveLive (video: MVideo, live: MVideoLive, streamingPlaylist: MStreamingPlaylist) {
- const hlsDirectory = getHLSDirectory(video, false)
- const replayDirectory = join(hlsDirectory, VIDEO_LIVE.REPLAY_DIRECTORY)
+ const replayDirectory = VideoPathManager.Instance.getFSHLSOutputPath(video, VIDEO_LIVE.REPLAY_DIRECTORY)
- const rootFiles = await readdir(hlsDirectory)
+ const rootFiles = await readdir(getLiveDirectory(video))
const playlistFiles = rootFiles.filter(file => {
return file.endsWith('.m3u8') && file !== streamingPlaylist.playlistFilename
})
- await cleanupLiveFiles(hlsDirectory)
+ await cleanupTMPLiveFiles(getLiveDirectory(video))
await live.destroy()
@@ -98,7 +98,7 @@ async function saveLive (video: MVideo, live: MVideoLive, streamingPlaylist: MSt
const { resolution, isPortraitMode } = await getVideoFileResolution(concatenatedTsFilePath, probe)
- const outputPath = await generateHlsPlaylistResolutionFromTS({
+ const { resolutionPlaylistPath: outputPath } = await generateHlsPlaylistResolutionFromTS({
video: videoWithFiles,
concatenatedTsFilePath,
resolution,
@@ -133,10 +133,10 @@ async function saveLive (video: MVideo, live: MVideoLive, streamingPlaylist: MSt
})
}
- await publishAndFederateIfNeeded(videoWithFiles, true)
+ await moveToNextState(videoWithFiles, false)
}
-async function cleanupLiveFiles (hlsDirectory: string) {
+async function cleanupTMPLiveFiles (hlsDirectory: string) {
if (!await pathExists(hlsDirectory)) return
const files = await readdir(hlsDirectory)
diff --git a/server/lib/job-queue/handlers/video-transcoding.ts b/server/lib/job-queue/handlers/video-transcoding.ts
index 876d1460c..b3149dde8 100644
--- a/server/lib/job-queue/handlers/video-transcoding.ts
+++ b/server/lib/job-queue/handlers/video-transcoding.ts
@@ -1,9 +1,11 @@
import * as Bull from 'bull'
import { TranscodeOptionsType } from '@server/helpers/ffmpeg-utils'
-import { getTranscodingJobPriority, publishAndFederateIfNeeded } from '@server/lib/video'
-import { getVideoFilePath } from '@server/lib/video-paths'
+import { addTranscodingJob, getTranscodingJobPriority } from '@server/lib/video'
+import { VideoPathManager } from '@server/lib/video-path-manager'
+import { moveToNextState } from '@server/lib/video-state'
import { UserModel } from '@server/models/user/user'
-import { MUser, MUserId, MVideoFullLight, MVideoUUID, MVideoWithFile } from '@server/types/models'
+import { VideoJobInfoModel } from '@server/models/video/video-job-info'
+import { MUser, MUserId, MVideo, MVideoFullLight, MVideoWithFile } from '@server/types/models'
import {
HLSTranscodingPayload,
MergeAudioTranscodingPayload,
@@ -16,17 +18,14 @@ import { computeResolutionsToTranscode } from '../../../helpers/ffprobe-utils'
import { logger } from '../../../helpers/logger'
import { CONFIG } from '../../../initializers/config'
import { VideoModel } from '../../../models/video/video'
-import { federateVideoIfNeeded } from '../../activitypub/videos'
-import { Notifier } from '../../notifier'
import {
generateHlsPlaylistResolution,
mergeAudioVideofile,
optimizeOriginalVideofile,
transcodeNewWebTorrentResolution
} from '../../transcoding/video-transcoding'
-import { JobQueue } from '../job-queue'
-type HandlerFunction = (job: Bull.Job, payload: VideoTranscodingPayload, video: MVideoFullLight, user: MUser) => Promise
+type HandlerFunction = (job: Bull.Job, payload: VideoTranscodingPayload, video: MVideoFullLight, user: MUser) => Promise
const handlers: { [ id in VideoTranscodingPayload['type'] ]: HandlerFunction } = {
'new-resolution-to-hls': handleHLSJob,
@@ -69,15 +68,16 @@ async function handleHLSJob (job: Bull.Job, payload: HLSTranscodingPayload, vide
: video.getMaxQualityFile()
const videoOrStreamingPlaylist = videoFileInput.getVideoOrStreamingPlaylist()
- const videoInputPath = getVideoFilePath(videoOrStreamingPlaylist, videoFileInput)
- await generateHlsPlaylistResolution({
- video,
- videoInputPath,
- resolution: payload.resolution,
- copyCodecs: payload.copyCodecs,
- isPortraitMode: payload.isPortraitMode || false,
- job
+ await VideoPathManager.Instance.makeAvailableVideoFile(videoOrStreamingPlaylist, videoFileInput, videoInputPath => {
+ return generateHlsPlaylistResolution({
+ video,
+ videoInputPath,
+ resolution: payload.resolution,
+ copyCodecs: payload.copyCodecs,
+ isPortraitMode: payload.isPortraitMode || false,
+ job
+ })
})
await retryTransactionWrapper(onHlsPlaylistGeneration, video, user, payload)
@@ -101,7 +101,7 @@ async function handleWebTorrentMergeAudioJob (job: Bull.Job, payload: MergeAudio
}
async function handleWebTorrentOptimizeJob (job: Bull.Job, payload: OptimizeTranscodingPayload, video: MVideoFullLight, user: MUserId) {
- const transcodeType = await optimizeOriginalVideofile(video, video.getMaxQualityFile(), job)
+ const { transcodeType } = await optimizeOriginalVideofile(video, video.getMaxQualityFile(), job)
await retryTransactionWrapper(onVideoFileOptimizer, video, payload, transcodeType, user)
}
@@ -121,10 +121,18 @@ async function onHlsPlaylistGeneration (video: MVideoFullLight, user: MUser, pay
video.VideoFiles = []
// Create HLS new resolution jobs
- await createLowerResolutionsJobs(video, user, payload.resolution, payload.isPortraitMode, 'hls')
+ await createLowerResolutionsJobs({
+ video,
+ user,
+ videoFileResolution: payload.resolution,
+ isPortraitMode: payload.isPortraitMode,
+ isNewVideo: payload.isNewVideo ?? true,
+ type: 'hls'
+ })
}
- return publishAndFederateIfNeeded(video)
+ await VideoJobInfoModel.decrease(video.uuid, 'pendingTranscode')
+ await moveToNextState(video, payload.isNewVideo)
}
async function onVideoFileOptimizer (
@@ -143,58 +151,54 @@ async function onVideoFileOptimizer (
// Video does not exist anymore
if (!videoDatabase) return undefined
- let videoPublished = false
-
// Generate HLS version of the original file
- const originalFileHLSPayload = Object.assign({}, payload, {
+ const originalFileHLSPayload = {
+ ...payload,
+
isPortraitMode,
resolution: videoDatabase.getMaxQualityFile().resolution,
// If we quick transcoded original file, force transcoding for HLS to avoid some weird playback issues
copyCodecs: transcodeType !== 'quick-transcode',
isMaxQuality: true
- })
- const hasHls = await createHlsJobIfEnabled(user, originalFileHLSPayload)
-
- const hasNewResolutions = await createLowerResolutionsJobs(videoDatabase, user, resolution, isPortraitMode, 'webtorrent')
-
- if (!hasHls && !hasNewResolutions) {
- // No transcoding to do, it's now published
- videoPublished = await videoDatabase.publishIfNeededAndSave(undefined)
}
+ const hasHls = await createHlsJobIfEnabled(user, originalFileHLSPayload)
+ const hasNewResolutions = await createLowerResolutionsJobs({
+ video: videoDatabase,
+ user,
+ videoFileResolution: resolution,
+ isPortraitMode,
+ type: 'webtorrent',
+ isNewVideo: payload.isNewVideo ?? true
+ })
- await federateVideoIfNeeded(videoDatabase, payload.isNewVideo)
+ await VideoJobInfoModel.decrease(videoDatabase.uuid, 'pendingTranscode')
- if (payload.isNewVideo) Notifier.Instance.notifyOnNewVideoIfNeeded(videoDatabase)
- if (videoPublished) Notifier.Instance.notifyOnVideoPublishedAfterTranscoding(videoDatabase)
+ // Move to next state if there are no other resolutions to generate
+ if (!hasHls && !hasNewResolutions) {
+ await moveToNextState(videoDatabase, payload.isNewVideo)
+ }
}
async function onNewWebTorrentFileResolution (
- video: MVideoUUID,
+ video: MVideo,
user: MUserId,
payload: NewResolutionTranscodingPayload | MergeAudioTranscodingPayload
) {
- await publishAndFederateIfNeeded(video)
+ await createHlsJobIfEnabled(user, { ...payload, copyCodecs: true, isMaxQuality: false })
+ await VideoJobInfoModel.decrease(video.uuid, 'pendingTranscode')
- await createHlsJobIfEnabled(user, Object.assign({}, payload, { copyCodecs: true, isMaxQuality: false }))
+ await moveToNextState(video, payload.isNewVideo)
}
-// ---------------------------------------------------------------------------
-
-export {
- processVideoTranscoding,
- onNewWebTorrentFileResolution
-}
-
-// ---------------------------------------------------------------------------
-
async function createHlsJobIfEnabled (user: MUserId, payload: {
videoUUID: string
resolution: number
isPortraitMode?: boolean
copyCodecs: boolean
isMaxQuality: boolean
+ isNewVideo?: boolean
}) {
- if (!payload || CONFIG.TRANSCODING.HLS.ENABLED !== true) return false
+ if (!payload || CONFIG.TRANSCODING.ENABLED !== true || CONFIG.TRANSCODING.HLS.ENABLED !== true) return false
const jobOptions = {
priority: await getTranscodingJobPriority(user)
@@ -206,21 +210,35 @@ async function createHlsJobIfEnabled (user: MUserId, payload: {
resolution: payload.resolution,
isPortraitMode: payload.isPortraitMode,
copyCodecs: payload.copyCodecs,
- isMaxQuality: payload.isMaxQuality
+ isMaxQuality: payload.isMaxQuality,
+ isNewVideo: payload.isNewVideo
}
- JobQueue.Instance.createJob({ type: 'video-transcoding', payload: hlsTranscodingPayload }, jobOptions)
+ await addTranscodingJob(hlsTranscodingPayload, jobOptions)
return true
}
-async function createLowerResolutionsJobs (
- video: MVideoFullLight,
- user: MUserId,
- videoFileResolution: number,
- isPortraitMode: boolean,
+// ---------------------------------------------------------------------------
+
+export {
+ processVideoTranscoding,
+ createHlsJobIfEnabled,
+ onNewWebTorrentFileResolution
+}
+
+// ---------------------------------------------------------------------------
+
+async function createLowerResolutionsJobs (options: {
+ video: MVideoFullLight
+ user: MUserId
+ videoFileResolution: number
+ isPortraitMode: boolean
+ isNewVideo: boolean
type: 'hls' | 'webtorrent'
-) {
+}) {
+ const { video, user, videoFileResolution, isPortraitMode, isNewVideo, type } = options
+
// Create transcoding jobs if there are enabled resolutions
const resolutionsEnabled = computeResolutionsToTranscode(videoFileResolution, 'vod')
const resolutionCreated: number[] = []
@@ -234,7 +252,8 @@ async function createLowerResolutionsJobs (
type: 'new-resolution-to-webtorrent',
videoUUID: video.uuid,
resolution,
- isPortraitMode
+ isPortraitMode,
+ isNewVideo
}
}
@@ -245,7 +264,8 @@ async function createLowerResolutionsJobs (
resolution,
isPortraitMode,
copyCodecs: false,
- isMaxQuality: false
+ isMaxQuality: false,
+ isNewVideo
}
}
@@ -257,7 +277,7 @@ async function createLowerResolutionsJobs (
priority: await getTranscodingJobPriority(user)
}
- JobQueue.Instance.createJob({ type: 'video-transcoding', payload: dataInput }, jobOptions)
+ await addTranscodingJob(dataInput, jobOptions)
}
if (resolutionCreated.length === 0) {
diff --git a/server/lib/job-queue/job-queue.ts b/server/lib/job-queue/job-queue.ts
index 42e8347b1..7a3a1bf82 100644
--- a/server/lib/job-queue/job-queue.ts
+++ b/server/lib/job-queue/job-queue.ts
@@ -11,6 +11,7 @@ import {
EmailPayload,
JobState,
JobType,
+ MoveObjectStoragePayload,
RefreshPayload,
VideoFileImportPayload,
VideoImportPayload,
@@ -34,6 +35,7 @@ import { processVideoImport } from './handlers/video-import'
import { processVideoLiveEnding } from './handlers/video-live-ending'
import { processVideoTranscoding } from './handlers/video-transcoding'
import { processVideosViews } from './handlers/video-views'
+import { processMoveToObjectStorage } from './handlers/move-to-object-storage'
type CreateJobArgument =
{ type: 'activitypub-http-broadcast', payload: ActivitypubHttpBroadcastPayload } |
@@ -49,9 +51,10 @@ type CreateJobArgument =
{ type: 'videos-views', payload: {} } |
{ type: 'video-live-ending', payload: VideoLiveEndingPayload } |
{ type: 'actor-keys', payload: ActorKeysPayload } |
- { type: 'video-redundancy', payload: VideoRedundancyPayload }
+ { type: 'video-redundancy', payload: VideoRedundancyPayload } |
+ { type: 'move-to-object-storage', payload: MoveObjectStoragePayload }
-type CreateJobOptions = {
+export type CreateJobOptions = {
delay?: number
priority?: number
}
@@ -70,7 +73,8 @@ const handlers: { [id in JobType]: (job: Bull.Job) => Promise<any> } = {
'activitypub-refresher': refreshAPObject,
'video-live-ending': processVideoLiveEnding,
'actor-keys': processActorKeys,
- 'video-redundancy': processVideoRedundancy
+ 'video-redundancy': processVideoRedundancy,
+ 'move-to-object-storage': processMoveToObjectStorage
}
const jobTypes: JobType[] = [
@@ -87,7 +91,8 @@ const jobTypes: JobType[] = [
'activitypub-refresher',
'video-redundancy',
'actor-keys',
- 'video-live-ending'
+ 'video-live-ending',
+ 'move-to-object-storage'
]
class JobQueue {
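
For reference, a minimal usage sketch (not part of the patch) of how the new job type is enqueued; addMoveToObjectStorageJob() in server/lib/video.ts further down wraps exactly this call and also bumps the pendingMove counter:

import { JobQueue } from '@server/lib/job-queue'

// Payload shape follows MoveObjectStoragePayload: the video UUID plus the isNewVideo flag
function enqueueMoveToObjectStorage (videoUUID: string, isNewVideo = true) {
  return JobQueue.Instance.createJobWithPromise({
    type: 'move-to-object-storage',
    payload: { videoUUID, isNewVideo }
  })
}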
diff --git a/server/lib/live/live-manager.ts b/server/lib/live/live-manager.ts
index 2a429fb33..d7dc841d9 100644
--- a/server/lib/live/live-manager.ts
+++ b/server/lib/live/live-manager.ts
@@ -20,7 +20,7 @@ import { VideoState, VideoStreamingPlaylistType } from '@shared/models'
import { federateVideoIfNeeded } from '../activitypub/videos'
import { JobQueue } from '../job-queue'
import { PeerTubeSocket } from '../peertube-socket'
-import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename } from '../video-paths'
+import { generateHLSMasterPlaylistFilename, generateHlsSha256SegmentsFilename } from '../paths'
import { LiveQuotaStore } from './live-quota-store'
import { LiveSegmentShaStore } from './live-segment-sha-store'
import { cleanupLive } from './live-utils'
diff --git a/server/lib/live/live-utils.ts b/server/lib/live/live-utils.ts
index e4526c7a5..3bf723b98 100644
--- a/server/lib/live/live-utils.ts
+++ b/server/lib/live/live-utils.ts
@@ -1,7 +1,7 @@
import { remove } from 'fs-extra'
import { basename } from 'path'
import { MStreamingPlaylist, MVideo } from '@server/types/models'
-import { getHLSDirectory } from '../video-paths'
+import { getLiveDirectory } from '../paths'
function buildConcatenatedName (segmentOrPlaylistPath: string) {
const num = basename(segmentOrPlaylistPath).match(/^(\d+)(-|\.)/)
@@ -10,7 +10,7 @@ function buildConcatenatedName (segmentOrPlaylistPath: string) {
}
async function cleanupLive (video: MVideo, streamingPlaylist: MStreamingPlaylist) {
- const hlsDirectory = getHLSDirectory(video)
+ const hlsDirectory = getLiveDirectory(video)
await remove(hlsDirectory)
diff --git a/server/lib/live/shared/muxing-session.ts b/server/lib/live/shared/muxing-session.ts
index a80abc843..9b5b6c4fc 100644
--- a/server/lib/live/shared/muxing-session.ts
+++ b/server/lib/live/shared/muxing-session.ts
@@ -11,9 +11,9 @@ import { CONFIG } from '@server/initializers/config'
import { MEMOIZE_TTL, VIDEO_LIVE } from '@server/initializers/constants'
import { VideoFileModel } from '@server/models/video/video-file'
import { MStreamingPlaylistVideo, MUserId, MVideoLiveVideo } from '@server/types/models'
+import { getLiveDirectory } from '../../paths'
import { VideoTranscodingProfilesManager } from '../../transcoding/video-transcoding-profiles'
import { isAbleToUploadVideo } from '../../user'
-import { getHLSDirectory } from '../../video-paths'
import { LiveQuotaStore } from '../live-quota-store'
import { LiveSegmentShaStore } from '../live-segment-sha-store'
import { buildConcatenatedName } from '../live-utils'
@@ -282,7 +282,7 @@ class MuxingSession extends EventEmitter {
}
private async prepareDirectories () {
- const outPath = getHLSDirectory(this.videoLive.Video)
+ const outPath = getLiveDirectory(this.videoLive.Video)
await ensureDir(outPath)
const replayDirectory = join(outPath, VIDEO_LIVE.REPLAY_DIRECTORY)
diff --git a/server/lib/object-storage/index.ts b/server/lib/object-storage/index.ts
new file mode 100644
index 000000000..8b413a40e
--- /dev/null
+++ b/server/lib/object-storage/index.ts
@@ -0,0 +1,3 @@
+export * from './keys'
+export * from './urls'
+export * from './videos'
diff --git a/server/lib/object-storage/keys.ts b/server/lib/object-storage/keys.ts
new file mode 100644
index 000000000..519474775
--- /dev/null
+++ b/server/lib/object-storage/keys.ts
@@ -0,0 +1,20 @@
+import { join } from 'path'
+import { MStreamingPlaylist, MVideoUUID } from '@server/types/models'
+
+function generateHLSObjectStorageKey (playlist: MStreamingPlaylist, video: MVideoUUID, filename: string) {
+ return join(generateHLSObjectBaseStorageKey(playlist, video), filename)
+}
+
+function generateHLSObjectBaseStorageKey (playlist: MStreamingPlaylist, video: MVideoUUID) {
+ return playlist.getStringType() + '_' + video.uuid
+}
+
+function generateWebTorrentObjectStorageKey (filename: string) {
+ return filename
+}
+
+export {
+ generateHLSObjectStorageKey,
+ generateHLSObjectBaseStorageKey,
+ generateWebTorrentObjectStorageKey
+}
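
Illustration of the key layout these helpers produce (filenames are examples; the optional bucket prefix from the configuration is only applied later by buildKey()):

// generateHLSObjectStorageKey(playlist, video, '720-fragmented.mp4')
//   -> '<playlist.getStringType()>_<video.uuid>/720-fragmented.mp4'
// generateWebTorrentObjectStorageKey('abc-720.mp4')
//   -> 'abc-720.mp4' (flat key at the bucket root)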
diff --git a/server/lib/object-storage/shared/client.ts b/server/lib/object-storage/shared/client.ts
new file mode 100644
index 000000000..c9a614593
--- /dev/null
+++ b/server/lib/object-storage/shared/client.ts
@@ -0,0 +1,56 @@
+import { S3Client } from '@aws-sdk/client-s3'
+import { logger } from '@server/helpers/logger'
+import { CONFIG } from '@server/initializers/config'
+import { lTags } from './logger'
+
+let endpointParsed: URL
+function getEndpointParsed () {
+ if (endpointParsed) return endpointParsed
+
+ endpointParsed = new URL(getEndpoint())
+
+ return endpointParsed
+}
+
+let s3Client: S3Client
+function getClient () {
+ if (s3Client) return s3Client
+
+ const OBJECT_STORAGE = CONFIG.OBJECT_STORAGE
+
+ s3Client = new S3Client({
+ endpoint: getEndpoint(),
+ region: OBJECT_STORAGE.REGION,
+ credentials: OBJECT_STORAGE.CREDENTIALS.ACCESS_KEY_ID
+ ? {
+ accessKeyId: OBJECT_STORAGE.CREDENTIALS.ACCESS_KEY_ID,
+ secretAccessKey: OBJECT_STORAGE.CREDENTIALS.SECRET_ACCESS_KEY
+ }
+ : undefined
+ })
+
+ logger.info('Initialized S3 client %s with region %s.', getEndpoint(), OBJECT_STORAGE.REGION, lTags())
+
+ return s3Client
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ getEndpointParsed,
+ getClient
+}
+
+// ---------------------------------------------------------------------------
+
+let endpoint: string
+function getEndpoint () {
+ if (endpoint) return endpoint
+
+ const endpointConfig = CONFIG.OBJECT_STORAGE.ENDPOINT
+ endpoint = endpointConfig.startsWith('http://') || endpointConfig.startsWith('https://')
+ ? CONFIG.OBJECT_STORAGE.ENDPOINT
+ : 'https://' + CONFIG.OBJECT_STORAGE.ENDPOINT
+
+ return endpoint
+}
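
Rough usage sketch (the helper and bucket name are hypothetical): getEndpoint() prepends https:// when the configured endpoint has no scheme, and getClient() memoizes a single S3Client for the whole process, so callers only send commands:

import { HeadBucketCommand } from '@aws-sdk/client-s3'
import { getClient } from '@server/lib/object-storage/shared'

// Throws if the configured endpoint/credentials cannot see the bucket
async function assertBucketReachable (bucketName: string) {
  await getClient().send(new HeadBucketCommand({ Bucket: bucketName }))
}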
diff --git a/server/lib/object-storage/shared/index.ts b/server/lib/object-storage/shared/index.ts
new file mode 100644
index 000000000..11e10aa9f
--- /dev/null
+++ b/server/lib/object-storage/shared/index.ts
@@ -0,0 +1,3 @@
+export * from './client'
+export * from './logger'
+export * from './object-storage-helpers'
diff --git a/server/lib/object-storage/shared/logger.ts b/server/lib/object-storage/shared/logger.ts
new file mode 100644
index 000000000..8ab7cbd71
--- /dev/null
+++ b/server/lib/object-storage/shared/logger.ts
@@ -0,0 +1,7 @@
+import { loggerTagsFactory } from '@server/helpers/logger'
+
+const lTags = loggerTagsFactory('object-storage')
+
+export {
+ lTags
+}
diff --git a/server/lib/object-storage/shared/object-storage-helpers.ts b/server/lib/object-storage/shared/object-storage-helpers.ts
new file mode 100644
index 000000000..e23216907
--- /dev/null
+++ b/server/lib/object-storage/shared/object-storage-helpers.ts
@@ -0,0 +1,229 @@
+import { close, createReadStream, createWriteStream, ensureDir, open, ReadStream, stat } from 'fs-extra'
+import { min } from 'lodash'
+import { dirname } from 'path'
+import { Readable } from 'stream'
+import {
+ CompletedPart,
+ CompleteMultipartUploadCommand,
+ CreateMultipartUploadCommand,
+ DeleteObjectCommand,
+ GetObjectCommand,
+ ListObjectsV2Command,
+ PutObjectCommand,
+ UploadPartCommand
+} from '@aws-sdk/client-s3'
+import { pipelinePromise } from '@server/helpers/core-utils'
+import { isArray } from '@server/helpers/custom-validators/misc'
+import { logger } from '@server/helpers/logger'
+import { CONFIG } from '@server/initializers/config'
+import { getPrivateUrl } from '../urls'
+import { getClient } from './client'
+import { lTags } from './logger'
+
+type BucketInfo = {
+ BUCKET_NAME: string
+ PREFIX?: string
+}
+
+async function storeObject (options: {
+ inputPath: string
+ objectStorageKey: string
+ bucketInfo: BucketInfo
+}): Promise<string> {
+ const { inputPath, objectStorageKey, bucketInfo } = options
+
+ logger.debug('Uploading file %s to %s%s in bucket %s', inputPath, bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags())
+
+ const stats = await stat(inputPath)
+
+ // If the file is bigger than the maximum allowed upload size, do a multipart upload
+ if (stats.size > CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART) {
+ return multiPartUpload({ inputPath, objectStorageKey, bucketInfo })
+ }
+
+ const fileStream = createReadStream(inputPath)
+ return objectStoragePut({ objectStorageKey, content: fileStream, bucketInfo })
+}
+
+async function removeObject (filename: string, bucketInfo: BucketInfo) {
+ const command = new DeleteObjectCommand({
+ Bucket: bucketInfo.BUCKET_NAME,
+ Key: buildKey(filename, bucketInfo)
+ })
+
+ return getClient().send(command)
+}
+
+async function removePrefix (prefix: string, bucketInfo: BucketInfo) {
+ const s3Client = getClient()
+
+ const commandPrefix = bucketInfo.PREFIX + prefix
+ const listCommand = new ListObjectsV2Command({
+ Bucket: bucketInfo.BUCKET_NAME,
+ Prefix: commandPrefix
+ })
+
+ const listedObjects = await s3Client.send(listCommand)
+
+ // FIXME: use bulk delete when s3ninja supports this operation
+ // const deleteParams = {
+ // Bucket: bucketInfo.BUCKET_NAME,
+ // Delete: { Objects: [] }
+ // }
+
+ if (isArray(listedObjects.Contents) !== true) {
+ const message = `Cannot remove ${commandPrefix} prefix in bucket ${bucketInfo.BUCKET_NAME}: no files listed.`
+
+ logger.error(message, { response: listedObjects, ...lTags() })
+ throw new Error(message)
+ }
+
+ for (const object of listedObjects.Contents) {
+ const command = new DeleteObjectCommand({
+ Bucket: bucketInfo.BUCKET_NAME,
+ Key: object.Key
+ })
+
+ await s3Client.send(command)
+
+ // FIXME: use bulk delete when s3ninja supports this operation
+ // deleteParams.Delete.Objects.push({ Key: object.Key })
+ }
+
+ // FIXME: use bulk delete when s3ninja supports this operation
+ // const deleteCommand = new DeleteObjectsCommand(deleteParams)
+ // await s3Client.send(deleteCommand)
+
+ // Repeat if not all objects could be listed at once (limit of 1000?)
+ if (listedObjects.IsTruncated) await removePrefix(prefix, bucketInfo)
+}
+
+async function makeAvailable (options: {
+ key: string
+ destination: string
+ bucketInfo: BucketInfo
+}) {
+ const { key, destination, bucketInfo } = options
+
+ await ensureDir(dirname(options.destination))
+
+ const command = new GetObjectCommand({
+ Bucket: bucketInfo.BUCKET_NAME,
+ Key: buildKey(key, bucketInfo)
+ })
+ const response = await getClient().send(command)
+
+ const file = createWriteStream(destination)
+ await pipelinePromise(response.Body as Readable, file)
+
+ file.close()
+}
+
+function buildKey (key: string, bucketInfo: BucketInfo) {
+ return bucketInfo.PREFIX + key
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ BucketInfo,
+ buildKey,
+ storeObject,
+ removeObject,
+ removePrefix,
+ makeAvailable
+}
+
+// ---------------------------------------------------------------------------
+
+async function objectStoragePut (options: {
+ objectStorageKey: string
+ content: ReadStream
+ bucketInfo: BucketInfo
+}) {
+ const { objectStorageKey, content, bucketInfo } = options
+
+ const command = new PutObjectCommand({
+ Bucket: bucketInfo.BUCKET_NAME,
+ Key: buildKey(objectStorageKey, bucketInfo),
+ Body: content
+ })
+
+ await getClient().send(command)
+
+ return getPrivateUrl(bucketInfo, objectStorageKey)
+}
+
+async function multiPartUpload (options: {
+ inputPath: string
+ objectStorageKey: string
+ bucketInfo: BucketInfo
+}) {
+ const { objectStorageKey, inputPath, bucketInfo } = options
+
+ const key = buildKey(objectStorageKey, bucketInfo)
+ const s3Client = getClient()
+
+ const statResult = await stat(inputPath)
+
+ const createMultipartCommand = new CreateMultipartUploadCommand({
+ Bucket: bucketInfo.BUCKET_NAME,
+ Key: key
+ })
+ const createResponse = await s3Client.send(createMultipartCommand)
+
+ const fd = await open(inputPath, 'r')
+ let partNumber = 1
+ const parts: CompletedPart[] = []
+ const partSize = CONFIG.OBJECT_STORAGE.MAX_UPLOAD_PART
+
+ for (let start = 0; start < statResult.size; start += partSize) {
+ logger.debug(
+ 'Uploading part %d of file to %s%s in bucket %s',
+ partNumber, bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, lTags()
+ )
+
+ // FIXME: Remove when https://github.com/aws/aws-sdk-js-v3/pull/2637 is released
+ // The s3 sdk needs to know the length of the http body beforehand, but doesn't support
+ // streams with start and end set, so it just tries to stat the file in stream.path.
+ // This fails for us because we only want to send part of the file. The stream type
+ // is modified so we can set the byteLength here, which s3 detects because array buffers
+ // have this field set
+ const stream: ReadStream & { byteLength: number } =
+ createReadStream(
+ inputPath,
+ { fd, autoClose: false, start, end: (start + partSize) - 1 }
+ ) as ReadStream & { byteLength: number }
+
+ // If fewer bytes than the part size are left over, use the remaining byte count for byteLength
+ stream.byteLength = min([ statResult.size - start, partSize ])
+
+ const uploadPartCommand = new UploadPartCommand({
+ Bucket: bucketInfo.BUCKET_NAME,
+ Key: key,
+ UploadId: createResponse.UploadId,
+ PartNumber: partNumber,
+ Body: stream
+ })
+ const uploadResponse = await s3Client.send(uploadPartCommand)
+
+ parts.push({ ETag: uploadResponse.ETag, PartNumber: partNumber })
+ partNumber += 1
+ }
+ await close(fd)
+
+ const completeUploadCommand = new CompleteMultipartUploadCommand({
+ Bucket: bucketInfo.BUCKET_NAME,
+ Key: key,
+ UploadId: createResponse.UploadId,
+ MultipartUpload: { Parts: parts }
+ })
+ await s3Client.send(completeUploadCommand)
+
+ logger.debug(
+ 'Completed %s%s in bucket %s in %d parts',
+ bucketInfo.PREFIX, objectStorageKey, bucketInfo.BUCKET_NAME, partNumber - 1, lTags()
+ )
+
+ return getPrivateUrl(bucketInfo, objectStorageKey)
+}
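
A hypothetical round trip with the helpers above (bucket name, prefix and paths are made up); storeObject() switches to the multipart code path on its own once the file exceeds object_storage.max_upload_part:

import { makeAvailable, storeObject } from '@server/lib/object-storage/shared'

async function roundTripExample () {
  const bucketInfo = { BUCKET_NAME: 'videos', PREFIX: 'videos:' }

  // Single PutObject or multipart upload, resolves to the private object URL
  const url = await storeObject({
    inputPath: '/tmp/abc-720.mp4',
    objectStorageKey: 'abc-720.mp4',
    bucketInfo
  })

  // Download the object back to a local path
  await makeAvailable({
    key: 'abc-720.mp4',
    destination: '/tmp/abc-720.download.mp4',
    bucketInfo
  })

  return url
}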
diff --git a/server/lib/object-storage/urls.ts b/server/lib/object-storage/urls.ts
new file mode 100644
index 000000000..2a889190b
--- /dev/null
+++ b/server/lib/object-storage/urls.ts
@@ -0,0 +1,40 @@
+import { CONFIG } from '@server/initializers/config'
+import { BucketInfo, buildKey, getEndpointParsed } from './shared'
+
+function getPrivateUrl (config: BucketInfo, keyWithoutPrefix: string) {
+ return getBaseUrl(config) + buildKey(keyWithoutPrefix, config)
+}
+
+function getWebTorrentPublicFileUrl (fileUrl: string) {
+ const baseUrl = CONFIG.OBJECT_STORAGE.VIDEOS.BASE_URL
+ if (!baseUrl) return fileUrl
+
+ return replaceByBaseUrl(fileUrl, baseUrl)
+}
+
+function getHLSPublicFileUrl (fileUrl: string) {
+ const baseUrl = CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS.BASE_URL
+ if (!baseUrl) return fileUrl
+
+ return replaceByBaseUrl(fileUrl, baseUrl)
+}
+
+export {
+ getPrivateUrl,
+ getWebTorrentPublicFileUrl,
+ replaceByBaseUrl,
+ getHLSPublicFileUrl
+}
+
+// ---------------------------------------------------------------------------
+
+function getBaseUrl (bucketInfo: BucketInfo, baseUrl?: string) {
+ if (baseUrl) return baseUrl
+
+ return `${getEndpointParsed().protocol}//${bucketInfo.BUCKET_NAME}.${getEndpointParsed().host}/`
+}
+
+const regex = new RegExp('https?://[^/]+')
+function replaceByBaseUrl (fileUrl: string, baseUrl: string) {
+ return fileUrl.replace(regex, baseUrl)
+}
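
Behaviour sketch for the URL rewriting (hostnames are examples): only the scheme and host are swapped, the key part of the URL is preserved, which is what makes the base_url CDN override work:

import { strict as assert } from 'assert'
import { replaceByBaseUrl } from '@server/lib/object-storage'

assert.equal(
  replaceByBaseUrl('https://videos.s3.example.com/videos:abc-720.mp4', 'https://mirror.example.com'),
  'https://mirror.example.com/videos:abc-720.mp4'
)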
diff --git a/server/lib/object-storage/videos.ts b/server/lib/object-storage/videos.ts
new file mode 100644
index 000000000..15b8f58d5
--- /dev/null
+++ b/server/lib/object-storage/videos.ts
@@ -0,0 +1,72 @@
+import { join } from 'path'
+import { logger } from '@server/helpers/logger'
+import { CONFIG } from '@server/initializers/config'
+import { MStreamingPlaylist, MVideoFile, MVideoUUID } from '@server/types/models'
+import { getHLSDirectory } from '../paths'
+import { generateHLSObjectBaseStorageKey, generateHLSObjectStorageKey, generateWebTorrentObjectStorageKey } from './keys'
+import { lTags, makeAvailable, removeObject, removePrefix, storeObject } from './shared'
+
+function storeHLSFile (playlist: MStreamingPlaylist, video: MVideoUUID, filename: string) {
+ const baseHlsDirectory = getHLSDirectory(video)
+
+ return storeObject({
+ inputPath: join(baseHlsDirectory, filename),
+ objectStorageKey: generateHLSObjectStorageKey(playlist, video, filename),
+ bucketInfo: CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS
+ })
+}
+
+function storeWebTorrentFile (filename: string) {
+ return storeObject({
+ inputPath: join(CONFIG.STORAGE.VIDEOS_DIR, filename),
+ objectStorageKey: generateWebTorrentObjectStorageKey(filename),
+ bucketInfo: CONFIG.OBJECT_STORAGE.VIDEOS
+ })
+}
+
+function removeHLSObjectStorage (playlist: MStreamingPlaylist, video: MVideoUUID) {
+ return removePrefix(generateHLSObjectBaseStorageKey(playlist, video), CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS)
+}
+
+function removeWebTorrentObjectStorage (videoFile: MVideoFile) {
+ return removeObject(generateWebTorrentObjectStorageKey(videoFile.filename), CONFIG.OBJECT_STORAGE.VIDEOS)
+}
+
+async function makeHLSFileAvailable (playlist: MStreamingPlaylist, video: MVideoUUID, filename: string, destination: string) {
+ const key = generateHLSObjectStorageKey(playlist, video, filename)
+
+ logger.info('Fetching HLS file %s from object storage to %s.', key, destination, lTags())
+
+ await makeAvailable({
+ key,
+ destination,
+ bucketInfo: CONFIG.OBJECT_STORAGE.STREAMING_PLAYLISTS
+ })
+
+ return destination
+}
+
+async function makeWebTorrentFileAvailable (filename: string, destination: string) {
+ const key = generateWebTorrentObjectStorageKey(filename)
+
+ logger.info('Fetching WebTorrent file %s from object storage to %s.', key, destination, lTags())
+
+ await makeAvailable({
+ key,
+ destination,
+ bucketInfo: CONFIG.OBJECT_STORAGE.VIDEOS
+ })
+
+ return destination
+}
+
+export {
+ storeWebTorrentFile,
+ storeHLSFile,
+
+ removeHLSObjectStorage,
+ removeWebTorrentObjectStorage,
+
+ makeWebTorrentFileAvailable,
+ makeHLSFileAvailable
+}
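
The move-to-object-storage job handler itself is not part of this excerpt; a simplified sketch of how it is expected to use these helpers for a single WebTorrent file (error handling and local file cleanup omitted):

import { storeWebTorrentFile } from '@server/lib/object-storage'
import { MVideoFile } from '@server/types/models'
import { VideoStorage } from '@shared/models'

async function moveWebTorrentFileSketch (videoFile: MVideoFile) {
  // Upload from CONFIG.STORAGE.VIDEOS_DIR and get back the private object URL
  const fileUrl = await storeWebTorrentFile(videoFile.filename)

  // Record the new location so getFileUrl() resolves to the object storage URL
  videoFile.fileUrl = fileUrl
  videoFile.storage = VideoStorage.OBJECT_STORAGE
  await videoFile.save()
}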
diff --git a/server/lib/video-paths.ts b/server/lib/paths.ts
similarity index 51%
rename from server/lib/video-paths.ts
rename to server/lib/paths.ts
index 1e4382108..434e637c6 100644
--- a/server/lib/video-paths.ts
+++ b/server/lib/paths.ts
@@ -1,9 +1,8 @@
import { join } from 'path'
-import { extractVideo } from '@server/helpers/video'
-import { CONFIG } from '@server/initializers/config'
-import { HLS_REDUNDANCY_DIRECTORY, HLS_STREAMING_PLAYLIST_DIRECTORY, STATIC_PATHS, WEBSERVER } from '@server/initializers/constants'
-import { isStreamingPlaylist, MStreamingPlaylist, MStreamingPlaylistVideo, MVideo, MVideoFile, MVideoUUID } from '@server/types/models'
import { buildUUID } from '@server/helpers/uuid'
+import { CONFIG } from '@server/initializers/config'
+import { HLS_REDUNDANCY_DIRECTORY, HLS_STREAMING_PLAYLIST_DIRECTORY } from '@server/initializers/constants'
+import { isStreamingPlaylist, MStreamingPlaylistVideo, MVideo, MVideoFile, MVideoUUID } from '@server/types/models'
import { removeFragmentedMP4Ext } from '@shared/core-utils'
// ################## Video file name ##################
@@ -16,39 +15,18 @@ function generateHLSVideoFilename (resolution: number) {
return `${buildUUID()}-${resolution}-fragmented.mp4`
}
-function getVideoFilePath (videoOrPlaylist: MVideo | MStreamingPlaylistVideo, videoFile: MVideoFile, isRedundancy = false) {
- if (videoFile.isHLS()) {
- const video = extractVideo(videoOrPlaylist)
-
- return join(getHLSDirectory(video), videoFile.filename)
- }
-
- const baseDir = isRedundancy
- ? CONFIG.STORAGE.REDUNDANCY_DIR
- : CONFIG.STORAGE.VIDEOS_DIR
-
- return join(baseDir, videoFile.filename)
-}
-
-// ################## Redundancy ##################
-
-function generateHLSRedundancyUrl (video: MVideo, playlist: MStreamingPlaylist) {
- // Base URL used by our HLS player
- return WEBSERVER.URL + STATIC_PATHS.REDUNDANCY + playlist.getStringType() + '/' + video.uuid
-}
-
-function generateWebTorrentRedundancyUrl (file: MVideoFile) {
- return WEBSERVER.URL + STATIC_PATHS.REDUNDANCY + file.filename
-}
-
// ################## Streaming playlist ##################
-function getHLSDirectory (video: MVideoUUID, isRedundancy = false) {
- const baseDir = isRedundancy
- ? HLS_REDUNDANCY_DIRECTORY
- : HLS_STREAMING_PLAYLIST_DIRECTORY
+function getLiveDirectory (video: MVideoUUID) {
+ return getHLSDirectory(video)
+}
- return join(baseDir, video.uuid)
+function getHLSDirectory (video: MVideoUUID) {
+ return join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid)
+}
+
+function getHLSRedundancyDirectory (video: MVideoUUID) {
+ return join(HLS_REDUNDANCY_DIRECTORY, video.uuid)
}
function getHlsResolutionPlaylistFilename (videoFilename: string) {
@@ -81,36 +59,24 @@ function generateTorrentFileName (videoOrPlaylist: MVideo | MStreamingPlaylistVi
return uuid + '-' + resolution + extension
}
-function getTorrentFilePath (videoFile: MVideoFile) {
+function getFSTorrentFilePath (videoFile: MVideoFile) {
return join(CONFIG.STORAGE.TORRENTS_DIR, videoFile.torrentFilename)
}
-// ################## Meta data ##################
-
-function getLocalVideoFileMetadataUrl (video: MVideoUUID, videoFile: MVideoFile) {
- const path = '/api/v1/videos/'
-
- return WEBSERVER.URL + path + video.uuid + '/metadata/' + videoFile.id
-}
-
// ---------------------------------------------------------------------------
export {
generateHLSVideoFilename,
generateWebTorrentVideoFilename,
- getVideoFilePath,
-
generateTorrentFileName,
- getTorrentFilePath,
+ getFSTorrentFilePath,
getHLSDirectory,
+ getLiveDirectory,
+ getHLSRedundancyDirectory,
+
generateHLSMasterPlaylistFilename,
generateHlsSha256SegmentsFilename,
- getHlsResolutionPlaylistFilename,
-
- getLocalVideoFileMetadataUrl,
-
- generateWebTorrentRedundancyUrl,
- generateHLSRedundancyUrl
+ getHlsResolutionPlaylistFilename
}
diff --git a/server/lib/schedulers/videos-redundancy-scheduler.ts b/server/lib/schedulers/videos-redundancy-scheduler.ts
index 137ae53a0..ebfd015b5 100644
--- a/server/lib/schedulers/videos-redundancy-scheduler.ts
+++ b/server/lib/schedulers/videos-redundancy-scheduler.ts
@@ -24,7 +24,7 @@ import { getLocalVideoCacheFileActivityPubUrl, getLocalVideoCacheStreamingPlayli
import { getOrCreateAPVideo } from '../activitypub/videos'
import { downloadPlaylistSegments } from '../hls'
import { removeVideoRedundancy } from '../redundancy'
-import { generateHLSRedundancyUrl, generateWebTorrentRedundancyUrl } from '../video-paths'
+import { generateHLSRedundancyUrl, generateWebTorrentRedundancyUrl } from '../video-urls'
import { AbstractScheduler } from './abstract-scheduler'
type CandidateToDuplicate = {
diff --git a/server/lib/thumbnail.ts b/server/lib/thumbnail.ts
index c08523988..d2384f53c 100644
--- a/server/lib/thumbnail.ts
+++ b/server/lib/thumbnail.ts
@@ -1,5 +1,4 @@
import { join } from 'path'
-
import { ThumbnailType } from '../../shared/models/videos/thumbnail.type'
import { generateImageFromVideoFile } from '../helpers/ffmpeg-utils'
import { generateImageFilename, processImage } from '../helpers/image-utils'
@@ -10,7 +9,7 @@ import { ThumbnailModel } from '../models/video/thumbnail'
import { MVideoFile, MVideoThumbnail, MVideoUUID } from '../types/models'
import { MThumbnail } from '../types/models/video/thumbnail'
import { MVideoPlaylistThumbnail } from '../types/models/video/video-playlist'
-import { getVideoFilePath } from './video-paths'
+import { VideoPathManager } from './video-path-manager'
type ImageSize = { height?: number, width?: number }
@@ -116,21 +115,22 @@ function generateVideoMiniature (options: {
}) {
const { video, videoFile, type } = options
- const input = getVideoFilePath(video, videoFile)
+ return VideoPathManager.Instance.makeAvailableVideoFile(video, videoFile, input => {
+ const { filename, basePath, height, width, existingThumbnail, outputPath } = buildMetadataFromVideo(video, type)
- const { filename, basePath, height, width, existingThumbnail, outputPath } = buildMetadataFromVideo(video, type)
- const thumbnailCreator = videoFile.isAudio()
- ? () => processImage(ASSETS_PATH.DEFAULT_AUDIO_BACKGROUND, outputPath, { width, height }, true)
- : () => generateImageFromVideoFile(input, basePath, filename, { height, width })
+ const thumbnailCreator = videoFile.isAudio()
+ ? () => processImage(ASSETS_PATH.DEFAULT_AUDIO_BACKGROUND, outputPath, { width, height }, true)
+ : () => generateImageFromVideoFile(input, basePath, filename, { height, width })
- return updateThumbnailFromFunction({
- thumbnailCreator,
- filename,
- height,
- width,
- type,
- automaticallyGenerated: true,
- existingThumbnail
+ return updateThumbnailFromFunction({
+ thumbnailCreator,
+ filename,
+ height,
+ width,
+ type,
+ automaticallyGenerated: true,
+ existingThumbnail
+ })
})
}
diff --git a/server/lib/transcoding/video-transcoding.ts b/server/lib/transcoding/video-transcoding.ts
index d2a556360..ee228c011 100644
--- a/server/lib/transcoding/video-transcoding.ts
+++ b/server/lib/transcoding/video-transcoding.ts
@@ -4,13 +4,13 @@ import { basename, extname as extnameUtil, join } from 'path'
import { toEven } from '@server/helpers/core-utils'
import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent'
import { MStreamingPlaylistFilesVideo, MVideoFile, MVideoFullLight } from '@server/types/models'
-import { VideoResolution } from '../../../shared/models/videos'
+import { VideoResolution, VideoStorage } from '../../../shared/models/videos'
import { VideoStreamingPlaylistType } from '../../../shared/models/videos/video-streaming-playlist.type'
import { transcode, TranscodeOptions, TranscodeOptionsType } from '../../helpers/ffmpeg-utils'
import { canDoQuickTranscode, getDurationFromVideoFile, getMetadataFromFile, getVideoFileFPS } from '../../helpers/ffprobe-utils'
import { logger } from '../../helpers/logger'
import { CONFIG } from '../../initializers/config'
-import { HLS_STREAMING_PLAYLIST_DIRECTORY, P2P_MEDIA_LOADER_PEER_VERSION } from '../../initializers/constants'
+import { P2P_MEDIA_LOADER_PEER_VERSION } from '../../initializers/constants'
import { VideoFileModel } from '../../models/video/video-file'
import { VideoStreamingPlaylistModel } from '../../models/video/video-streaming-playlist'
import { updateMasterHLSPlaylist, updateSha256VODSegments } from '../hls'
@@ -19,9 +19,9 @@ import {
generateHlsSha256SegmentsFilename,
generateHLSVideoFilename,
generateWebTorrentVideoFilename,
- getHlsResolutionPlaylistFilename,
- getVideoFilePath
-} from '../video-paths'
+ getHlsResolutionPlaylistFilename
+} from '../paths'
+import { VideoPathManager } from '../video-path-manager'
import { VideoTranscodingProfilesManager } from './video-transcoding-profiles'
/**
@@ -32,159 +32,162 @@ import { VideoTranscodingProfilesManager } from './video-transcoding-profiles'
*/
// Optimize the original video file and replace it. The resolution is not changed.
-async function optimizeOriginalVideofile (video: MVideoFullLight, inputVideoFile: MVideoFile, job?: Job) {
+function optimizeOriginalVideofile (video: MVideoFullLight, inputVideoFile: MVideoFile, job?: Job) {
const transcodeDirectory = CONFIG.STORAGE.TMP_DIR
const newExtname = '.mp4'
- const videoInputPath = getVideoFilePath(video, inputVideoFile)
- const videoTranscodedPath = join(transcodeDirectory, video.id + '-transcoded' + newExtname)
+ return VideoPathManager.Instance.makeAvailableVideoFile(video, inputVideoFile, async videoInputPath => {
+ const videoTranscodedPath = join(transcodeDirectory, video.id + '-transcoded' + newExtname)
- const transcodeType: TranscodeOptionsType = await canDoQuickTranscode(videoInputPath)
- ? 'quick-transcode'
- : 'video'
+ const transcodeType: TranscodeOptionsType = await canDoQuickTranscode(videoInputPath)
+ ? 'quick-transcode'
+ : 'video'
- const resolution = toEven(inputVideoFile.resolution)
+ const resolution = toEven(inputVideoFile.resolution)
- const transcodeOptions: TranscodeOptions = {
- type: transcodeType,
+ const transcodeOptions: TranscodeOptions = {
+ type: transcodeType,
- inputPath: videoInputPath,
- outputPath: videoTranscodedPath,
+ inputPath: videoInputPath,
+ outputPath: videoTranscodedPath,
- availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(),
- profile: CONFIG.TRANSCODING.PROFILE,
+ availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(),
+ profile: CONFIG.TRANSCODING.PROFILE,
- resolution,
+ resolution,
- job
- }
+ job
+ }
- // Could be very long!
- await transcode(transcodeOptions)
+ // Could be very long!
+ await transcode(transcodeOptions)
- try {
- await remove(videoInputPath)
+ try {
+ await remove(videoInputPath)
- // Important to do this before getVideoFilename() to take in account the new filename
- inputVideoFile.extname = newExtname
- inputVideoFile.filename = generateWebTorrentVideoFilename(resolution, newExtname)
+ // Important to do this before getVideoFilename() to take into account the new filename
+ inputVideoFile.extname = newExtname
+ inputVideoFile.filename = generateWebTorrentVideoFilename(resolution, newExtname)
+ inputVideoFile.storage = VideoStorage.FILE_SYSTEM
- const videoOutputPath = getVideoFilePath(video, inputVideoFile)
+ const videoOutputPath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, inputVideoFile)
- await onWebTorrentVideoFileTranscoding(video, inputVideoFile, videoTranscodedPath, videoOutputPath)
+ const { videoFile } = await onWebTorrentVideoFileTranscoding(video, inputVideoFile, videoTranscodedPath, videoOutputPath)
- return transcodeType
- } catch (err) {
- // Auto destruction...
- video.destroy().catch(err => logger.error('Cannot destruct video after transcoding failure.', { err }))
+ return { transcodeType, videoFile }
+ } catch (err) {
+ // Auto destruction...
+ video.destroy().catch(err => logger.error('Cannot destruct video after transcoding failure.', { err }))
- throw err
- }
+ throw err
+ }
+ })
}
-// Transcode the original video file to a lower resolution.
-async function transcodeNewWebTorrentResolution (video: MVideoFullLight, resolution: VideoResolution, isPortrait: boolean, job: Job) {
+// Transcode the original video file to a lower resolution
+// We are sure it's x264 in mp4 because optimizeOriginalVideofile was already executed
+function transcodeNewWebTorrentResolution (video: MVideoFullLight, resolution: VideoResolution, isPortrait: boolean, job: Job) {
const transcodeDirectory = CONFIG.STORAGE.TMP_DIR
const extname = '.mp4'
- // We are sure it's x264 in mp4 because optimizeOriginalVideofile was already executed
- const videoInputPath = getVideoFilePath(video, video.getMaxQualityFile())
+ return VideoPathManager.Instance.makeAvailableVideoFile(video, video.getMaxQualityFile(), async videoInputPath => {
+ const newVideoFile = new VideoFileModel({
+ resolution,
+ extname,
+ filename: generateWebTorrentVideoFilename(resolution, extname),
+ size: 0,
+ videoId: video.id
+ })
- const newVideoFile = new VideoFileModel({
- resolution,
- extname,
- filename: generateWebTorrentVideoFilename(resolution, extname),
- size: 0,
- videoId: video.id
+ const videoOutputPath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, newVideoFile)
+ const videoTranscodedPath = join(transcodeDirectory, newVideoFile.filename)
+
+ const transcodeOptions = resolution === VideoResolution.H_NOVIDEO
+ ? {
+ type: 'only-audio' as 'only-audio',
+
+ inputPath: videoInputPath,
+ outputPath: videoTranscodedPath,
+
+ availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(),
+ profile: CONFIG.TRANSCODING.PROFILE,
+
+ resolution,
+
+ job
+ }
+ : {
+ type: 'video' as 'video',
+ inputPath: videoInputPath,
+ outputPath: videoTranscodedPath,
+
+ availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(),
+ profile: CONFIG.TRANSCODING.PROFILE,
+
+ resolution,
+ isPortraitMode: isPortrait,
+
+ job
+ }
+
+ await transcode(transcodeOptions)
+
+ return onWebTorrentVideoFileTranscoding(video, newVideoFile, videoTranscodedPath, videoOutputPath)
})
-
- const videoOutputPath = getVideoFilePath(video, newVideoFile)
- const videoTranscodedPath = join(transcodeDirectory, newVideoFile.filename)
-
- const transcodeOptions = resolution === VideoResolution.H_NOVIDEO
- ? {
- type: 'only-audio' as 'only-audio',
-
- inputPath: videoInputPath,
- outputPath: videoTranscodedPath,
-
- availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(),
- profile: CONFIG.TRANSCODING.PROFILE,
-
- resolution,
-
- job
- }
- : {
- type: 'video' as 'video',
- inputPath: videoInputPath,
- outputPath: videoTranscodedPath,
-
- availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(),
- profile: CONFIG.TRANSCODING.PROFILE,
-
- resolution,
- isPortraitMode: isPortrait,
-
- job
- }
-
- await transcode(transcodeOptions)
-
- return onWebTorrentVideoFileTranscoding(video, newVideoFile, videoTranscodedPath, videoOutputPath)
}
// Merge an image with an audio file to create a video
-async function mergeAudioVideofile (video: MVideoFullLight, resolution: VideoResolution, job: Job) {
+function mergeAudioVideofile (video: MVideoFullLight, resolution: VideoResolution, job: Job) {
const transcodeDirectory = CONFIG.STORAGE.TMP_DIR
const newExtname = '.mp4'
const inputVideoFile = video.getMinQualityFile()
- const audioInputPath = getVideoFilePath(video, inputVideoFile)
- const videoTranscodedPath = join(transcodeDirectory, video.id + '-transcoded' + newExtname)
+ return VideoPathManager.Instance.makeAvailableVideoFile(video, inputVideoFile, async audioInputPath => {
+ const videoTranscodedPath = join(transcodeDirectory, video.id + '-transcoded' + newExtname)
- // If the user updates the video preview during transcoding
- const previewPath = video.getPreview().getPath()
- const tmpPreviewPath = join(CONFIG.STORAGE.TMP_DIR, basename(previewPath))
- await copyFile(previewPath, tmpPreviewPath)
+ // If the user updates the video preview during transcoding
+ const previewPath = video.getPreview().getPath()
+ const tmpPreviewPath = join(CONFIG.STORAGE.TMP_DIR, basename(previewPath))
+ await copyFile(previewPath, tmpPreviewPath)
- const transcodeOptions = {
- type: 'merge-audio' as 'merge-audio',
+ const transcodeOptions = {
+ type: 'merge-audio' as 'merge-audio',
- inputPath: tmpPreviewPath,
- outputPath: videoTranscodedPath,
+ inputPath: tmpPreviewPath,
+ outputPath: videoTranscodedPath,
- availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(),
- profile: CONFIG.TRANSCODING.PROFILE,
+ availableEncoders: VideoTranscodingProfilesManager.Instance.getAvailableEncoders(),
+ profile: CONFIG.TRANSCODING.PROFILE,
- audioPath: audioInputPath,
- resolution,
+ audioPath: audioInputPath,
+ resolution,
- job
- }
+ job
+ }
- try {
- await transcode(transcodeOptions)
+ try {
+ await transcode(transcodeOptions)
- await remove(audioInputPath)
- await remove(tmpPreviewPath)
- } catch (err) {
- await remove(tmpPreviewPath)
- throw err
- }
+ await remove(audioInputPath)
+ await remove(tmpPreviewPath)
+ } catch (err) {
+ await remove(tmpPreviewPath)
+ throw err
+ }
- // Important to do this before getVideoFilename() to take in account the new file extension
- inputVideoFile.extname = newExtname
- inputVideoFile.filename = generateWebTorrentVideoFilename(inputVideoFile.resolution, newExtname)
+ // Important to do this before getVideoFilename() to take into account the new file extension
+ inputVideoFile.extname = newExtname
+ inputVideoFile.filename = generateWebTorrentVideoFilename(inputVideoFile.resolution, newExtname)
- const videoOutputPath = getVideoFilePath(video, inputVideoFile)
- // ffmpeg generated a new video file, so update the video duration
- // See https://trac.ffmpeg.org/ticket/5456
- video.duration = await getDurationFromVideoFile(videoTranscodedPath)
- await video.save()
+ const videoOutputPath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, inputVideoFile)
+ // ffmpeg generated a new video file, so update the video duration
+ // See https://trac.ffmpeg.org/ticket/5456
+ video.duration = await getDurationFromVideoFile(videoTranscodedPath)
+ await video.save()
- return onWebTorrentVideoFileTranscoding(video, inputVideoFile, videoTranscodedPath, videoOutputPath)
+ return onWebTorrentVideoFileTranscoding(video, inputVideoFile, videoTranscodedPath, videoOutputPath)
+ })
}
// Concat TS segments from a live video to a fragmented mp4 HLS playlist
@@ -258,7 +261,7 @@ async function onWebTorrentVideoFileTranscoding (
await VideoFileModel.customUpsert(videoFile, 'video', undefined)
video.VideoFiles = await video.$get('VideoFiles')
- return video
+ return { video, videoFile }
}
async function generateHlsPlaylistCommon (options: {
@@ -335,14 +338,13 @@ async function generateHlsPlaylistCommon (options: {
videoStreamingPlaylistId: playlist.id
})
- const videoFilePath = getVideoFilePath(playlist, newVideoFile)
+ const videoFilePath = VideoPathManager.Instance.getFSVideoFileOutputPath(playlist, newVideoFile)
// Move files from tmp transcoded directory to the appropriate place
- const baseHlsDirectory = join(HLS_STREAMING_PLAYLIST_DIRECTORY, video.uuid)
- await ensureDir(baseHlsDirectory)
+ await ensureDir(VideoPathManager.Instance.getFSHLSOutputPath(video))
// Move playlist file
- const resolutionPlaylistPath = join(baseHlsDirectory, resolutionPlaylistFilename)
+ const resolutionPlaylistPath = VideoPathManager.Instance.getFSHLSOutputPath(video, resolutionPlaylistFilename)
await move(resolutionPlaylistFileTranscodePath, resolutionPlaylistPath, { overwrite: true })
// Move video file
await move(join(videoTranscodedBasePath, videoFilename), videoFilePath, { overwrite: true })
@@ -355,7 +357,7 @@ async function generateHlsPlaylistCommon (options: {
await createTorrentAndSetInfoHash(playlist, newVideoFile)
- await VideoFileModel.customUpsert(newVideoFile, 'streaming-playlist', undefined)
+ const savedVideoFile = await VideoFileModel.customUpsert(newVideoFile, 'streaming-playlist', undefined)
const playlistWithFiles = playlist as MStreamingPlaylistFilesVideo
playlistWithFiles.VideoFiles = await playlist.$get('VideoFiles')
@@ -368,5 +370,5 @@ async function generateHlsPlaylistCommon (options: {
await updateMasterHLSPlaylist(video, playlistWithFiles)
await updateSha256VODSegments(video, playlistWithFiles)
- return resolutionPlaylistPath
+ return { resolutionPlaylistPath, videoFile: savedVideoFile }
}
diff --git a/server/lib/video-path-manager.ts b/server/lib/video-path-manager.ts
new file mode 100644
index 000000000..4c5d0c89d
--- /dev/null
+++ b/server/lib/video-path-manager.ts
@@ -0,0 +1,139 @@
+import { remove } from 'fs-extra'
+import { extname, join } from 'path'
+import { buildUUID } from '@server/helpers/uuid'
+import { extractVideo } from '@server/helpers/video'
+import { CONFIG } from '@server/initializers/config'
+import { MStreamingPlaylistVideo, MVideo, MVideoFile, MVideoUUID } from '@server/types/models'
+import { VideoStorage } from '@shared/models'
+import { makeHLSFileAvailable, makeWebTorrentFileAvailable } from './object-storage'
+import { getHLSDirectory, getHLSRedundancyDirectory, getHlsResolutionPlaylistFilename } from './paths'
+
+type MakeAvailableCB <T> = (path: string) => Promise<T> | T
+
+class VideoPathManager {
+
+ private static instance: VideoPathManager
+
+ private constructor () {}
+
+ getFSHLSOutputPath (video: MVideoUUID, filename?: string) {
+ const base = getHLSDirectory(video)
+ if (!filename) return base
+
+ return join(base, filename)
+ }
+
+ getFSRedundancyVideoFilePath (videoOrPlaylist: MVideo | MStreamingPlaylistVideo, videoFile: MVideoFile) {
+ if (videoFile.isHLS()) {
+ const video = extractVideo(videoOrPlaylist)
+
+ return join(getHLSRedundancyDirectory(video), videoFile.filename)
+ }
+
+ return join(CONFIG.STORAGE.REDUNDANCY_DIR, videoFile.filename)
+ }
+
+ getFSVideoFileOutputPath (videoOrPlaylist: MVideo | MStreamingPlaylistVideo, videoFile: MVideoFile) {
+ if (videoFile.isHLS()) {
+ const video = extractVideo(videoOrPlaylist)
+
+ return join(getHLSDirectory(video), videoFile.filename)
+ }
+
+ return join(CONFIG.STORAGE.VIDEOS_DIR, videoFile.filename)
+ }
+
+ async makeAvailableVideoFile <T> (videoOrPlaylist: MVideo | MStreamingPlaylistVideo, videoFile: MVideoFile, cb: MakeAvailableCB<T>) {
+ if (videoFile.storage === VideoStorage.FILE_SYSTEM) {
+ return this.makeAvailableFactory(
+ () => this.getFSVideoFileOutputPath(videoOrPlaylist, videoFile),
+ false,
+ cb
+ )
+ }
+
+ const destination = this.buildTMPDestination(videoFile.filename)
+
+ if (videoFile.isHLS()) {
+ const video = extractVideo(videoOrPlaylist)
+
+ return this.makeAvailableFactory(
+ () => makeHLSFileAvailable(videoOrPlaylist as MStreamingPlaylistVideo, video, videoFile.filename, destination),
+ true,
+ cb
+ )
+ }
+
+ return this.makeAvailableFactory(
+ () => makeWebTorrentFileAvailable(videoFile.filename, destination),
+ true,
+ cb
+ )
+ }
+
+ async makeAvailableResolutionPlaylistFile <T> (playlist: MStreamingPlaylistVideo, videoFile: MVideoFile, cb: MakeAvailableCB<T>) {
+ const filename = getHlsResolutionPlaylistFilename(videoFile.filename)
+
+ if (videoFile.storage === VideoStorage.FILE_SYSTEM) {
+ return this.makeAvailableFactory(
+ () => join(getHLSDirectory(playlist.Video), filename),
+ false,
+ cb
+ )
+ }
+
+ return this.makeAvailableFactory(
+ () => makeHLSFileAvailable(playlist, playlist.Video, filename, this.buildTMPDestination(filename)),
+ true,
+ cb
+ )
+ }
+
+ async makeAvailablePlaylistFile <T> (playlist: MStreamingPlaylistVideo, filename: string, cb: MakeAvailableCB<T>) {
+ if (playlist.storage === VideoStorage.FILE_SYSTEM) {
+ return this.makeAvailableFactory(
+ () => join(getHLSDirectory(playlist.Video), filename),
+ false,
+ cb
+ )
+ }
+
+ return this.makeAvailableFactory(
+ () => makeHLSFileAvailable(playlist, playlist.Video, filename, this.buildTMPDestination(filename)),
+ true,
+ cb
+ )
+ }
+
+ private async makeAvailableFactory <T> (method: () => Promise<string> | string, clean: boolean, cb: MakeAvailableCB<T>) {
+ let result: T
+
+ const destination = await method()
+
+ try {
+ result = await cb(destination)
+ } catch (err) {
+ if (destination && clean) await remove(destination)
+ throw err
+ }
+
+ if (clean) await remove(destination)
+
+ return result
+ }
+
+ private buildTMPDestination (filename: string) {
+ return join(CONFIG.STORAGE.TMP_DIR, buildUUID() + extname(filename))
+
+ }
+
+ static get Instance () {
+ return this.instance || (this.instance = new this())
+ }
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ VideoPathManager
+}
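
Sketch of the intended call pattern (the FPS probe is just an example): the callback always receives a readable local path; when the file lives in object storage this is a temporary download under storage.tmp that is removed once the callback settles:

import { getVideoFileFPS } from '@server/helpers/ffprobe-utils'
import { VideoPathManager } from '@server/lib/video-path-manager'
import { MVideo, MVideoFile } from '@server/types/models'

function probeFPS (video: MVideo, videoFile: MVideoFile) {
  return VideoPathManager.Instance.makeAvailableVideoFile(video, videoFile, path => {
    return getVideoFileFPS(path)
  })
}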
diff --git a/server/lib/video-state.ts b/server/lib/video-state.ts
new file mode 100644
index 000000000..0613d94bf
--- /dev/null
+++ b/server/lib/video-state.ts
@@ -0,0 +1,99 @@
+import { Transaction } from 'sequelize'
+import { logger } from '@server/helpers/logger'
+import { CONFIG } from '@server/initializers/config'
+import { sequelizeTypescript } from '@server/initializers/database'
+import { VideoModel } from '@server/models/video/video'
+import { VideoJobInfoModel } from '@server/models/video/video-job-info'
+import { MVideoFullLight, MVideoUUID } from '@server/types/models'
+import { VideoState } from '@shared/models'
+import { federateVideoIfNeeded } from './activitypub/videos'
+import { Notifier } from './notifier'
+import { addMoveToObjectStorageJob } from './video'
+
+function buildNextVideoState (currentState?: VideoState) {
+ if (currentState === VideoState.PUBLISHED) {
+ throw new Error('Video is already in its final state')
+ }
+
+ if (
+ currentState !== VideoState.TO_TRANSCODE &&
+ currentState !== VideoState.TO_MOVE_TO_EXTERNAL_STORAGE &&
+ CONFIG.TRANSCODING.ENABLED
+ ) {
+ return VideoState.TO_TRANSCODE
+ }
+
+ if (
+ currentState !== VideoState.TO_MOVE_TO_EXTERNAL_STORAGE &&
+ CONFIG.OBJECT_STORAGE.ENABLED
+ ) {
+ return VideoState.TO_MOVE_TO_EXTERNAL_STORAGE
+ }
+
+ return VideoState.PUBLISHED
+}
+
+function moveToNextState (video: MVideoUUID, isNewVideo = true) {
+ return sequelizeTypescript.transaction(async t => {
+ // Maybe the video changed in the database, refresh it
+ const videoDatabase = await VideoModel.loadAndPopulateAccountAndServerAndTags(video.uuid, t)
+ // Video does not exist anymore
+ if (!videoDatabase) return undefined
+
+ // Already in its final state
+ if (videoDatabase.state === VideoState.PUBLISHED) {
+ return federateVideoIfNeeded(videoDatabase, false, t)
+ }
+
+ const newState = buildNextVideoState(videoDatabase.state)
+
+ if (newState === VideoState.PUBLISHED) {
+ return moveToPublishedState(videoDatabase, isNewVideo, t)
+ }
+
+ if (newState === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) {
+ return moveToExternalStorageState(videoDatabase, isNewVideo, t)
+ }
+ })
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ buildNextVideoState,
+ moveToNextState
+}
+
+// ---------------------------------------------------------------------------
+
+async function moveToPublishedState (video: MVideoFullLight, isNewVideo: boolean, transaction: Transaction) {
+ logger.info('Publishing video %s.', video.uuid, { tags: [ video.uuid ] })
+
+ const previousState = video.state
+ await video.setNewState(VideoState.PUBLISHED, transaction)
+
+ // If the video was not published, we consider it a new one for other instances
+ // Live videos are always federated, so it's not a new video
+ await federateVideoIfNeeded(video, isNewVideo, transaction)
+
+ Notifier.Instance.notifyOnNewVideoIfNeeded(video)
+
+ if (previousState === VideoState.TO_TRANSCODE) {
+ Notifier.Instance.notifyOnVideoPublishedAfterTranscoding(video)
+ }
+}
+
+async function moveToExternalStorageState (video: MVideoFullLight, isNewVideo: boolean, transaction: Transaction) {
+ const videoJobInfo = await VideoJobInfoModel.load(video.id, transaction)
+ const pendingTranscode = videoJobInfo?.pendingTranscode || 0
+
+ // Wait for all transcoding jobs to finish before moving the video to external storage
+ if (pendingTranscode !== 0) return
+
+ await video.setNewState(VideoState.TO_MOVE_TO_EXTERNAL_STORAGE, transaction)
+
+ logger.info('Creating external storage move job for video %s.', video.uuid, { tags: [ video.uuid ] })
+
+ addMoveToObjectStorageJob(video, isNewVideo)
+ .catch(err => logger.error('Cannot add move to object storage job', { err }))
+}
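
Resulting state machine, assuming both transcoding and object storage are enabled in the configuration (with object storage disabled the middle step is skipped, and buildNextVideoState() throws if the video is already PUBLISHED):

import { VideoState } from '@shared/models'
import { buildNextVideoState } from '@server/lib/video-state'

// New upload -> transcode -> move to external storage -> published
console.log(buildNextVideoState() === VideoState.TO_TRANSCODE)                                       // true
console.log(buildNextVideoState(VideoState.TO_TRANSCODE) === VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) // true
console.log(buildNextVideoState(VideoState.TO_MOVE_TO_EXTERNAL_STORAGE) === VideoState.PUBLISHED)    // true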
diff --git a/server/lib/video-urls.ts b/server/lib/video-urls.ts
new file mode 100644
index 000000000..64c2c9bf9
--- /dev/null
+++ b/server/lib/video-urls.ts
@@ -0,0 +1,31 @@
+
+import { STATIC_PATHS, WEBSERVER } from '@server/initializers/constants'
+import { MStreamingPlaylist, MVideo, MVideoFile, MVideoUUID } from '@server/types/models'
+
+// ################## Redundancy ##################
+
+function generateHLSRedundancyUrl (video: MVideo, playlist: MStreamingPlaylist) {
+ // Base URL used by our HLS player
+ return WEBSERVER.URL + STATIC_PATHS.REDUNDANCY + playlist.getStringType() + '/' + video.uuid
+}
+
+function generateWebTorrentRedundancyUrl (file: MVideoFile) {
+ return WEBSERVER.URL + STATIC_PATHS.REDUNDANCY + file.filename
+}
+
+// ################## Meta data ##################
+
+function getLocalVideoFileMetadataUrl (video: MVideoUUID, videoFile: MVideoFile) {
+ const path = '/api/v1/videos/'
+
+ return WEBSERVER.URL + path + video.uuid + '/metadata/' + videoFile.id
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ getLocalVideoFileMetadataUrl,
+
+ generateWebTorrentRedundancyUrl,
+ generateHLSRedundancyUrl
+}
diff --git a/server/lib/video.ts b/server/lib/video.ts
index 61fee4949..0a2b93cc0 100644
--- a/server/lib/video.ts
+++ b/server/lib/video.ts
@@ -1,15 +1,13 @@
import { UploadFiles } from 'express'
import { Transaction } from 'sequelize/types'
import { DEFAULT_AUDIO_RESOLUTION, JOB_PRIORITY } from '@server/initializers/constants'
-import { sequelizeTypescript } from '@server/initializers/database'
import { TagModel } from '@server/models/video/tag'
import { VideoModel } from '@server/models/video/video'
+import { VideoJobInfoModel } from '@server/models/video/video-job-info'
import { FilteredModelAttributes } from '@server/types'
import { MThumbnail, MUserId, MVideoFile, MVideoTag, MVideoThumbnail, MVideoUUID } from '@server/types/models'
import { ThumbnailType, VideoCreate, VideoPrivacy, VideoTranscodingPayload } from '@shared/models'
-import { federateVideoIfNeeded } from './activitypub/videos'
-import { JobQueue } from './job-queue/job-queue'
-import { Notifier } from './notifier'
+import { CreateJobOptions, JobQueue } from './job-queue/job-queue'
import { updateVideoMiniatureFromExisting } from './thumbnail'
function buildLocalVideoFromReq (videoInfo: VideoCreate, channelId: number): FilteredModelAttributes {
@@ -82,29 +80,6 @@ async function setVideoTags (options: {
video.Tags = tagInstances
}
-async function publishAndFederateIfNeeded (video: MVideoUUID, wasLive = false) {
- const result = await sequelizeTypescript.transaction(async t => {
- // Maybe the video changed in database, refresh it
- const videoDatabase = await VideoModel.loadAndPopulateAccountAndServerAndTags(video.uuid, t)
- // Video does not exist anymore
- if (!videoDatabase) return undefined
-
- // We transcoded the video file in another format, now we can publish it
- const videoPublished = await videoDatabase.publishIfNeededAndSave(t)
-
- // If the video was not published, we consider it is a new one for other instances
- // Live videos are always federated, so it's not a new video
- await federateVideoIfNeeded(videoDatabase, !wasLive && videoPublished, t)
-
- return { videoDatabase, videoPublished }
- })
-
- if (result?.videoPublished) {
- Notifier.Instance.notifyOnNewVideoIfNeeded(result.videoDatabase)
- Notifier.Instance.notifyOnVideoPublishedAfterTranscoding(result.videoDatabase)
- }
-}
-
async function addOptimizeOrMergeAudioJob (video: MVideoUUID, videoFile: MVideoFile, user: MUserId) {
let dataInput: VideoTranscodingPayload
@@ -127,7 +102,20 @@ async function addOptimizeOrMergeAudioJob (video: MVideoUUID, videoFile: MVideoF
priority: await getTranscodingJobPriority(user)
}
- return JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload: dataInput }, jobOptions)
+ return addTranscodingJob(dataInput, jobOptions)
+}
+
+async function addTranscodingJob (payload: VideoTranscodingPayload, options: CreateJobOptions) {
+ await VideoJobInfoModel.increaseOrCreate(payload.videoUUID, 'pendingTranscode')
+
+ return JobQueue.Instance.createJobWithPromise({ type: 'video-transcoding', payload: payload }, options)
+}
+
+async function addMoveToObjectStorageJob (video: MVideoUUID, isNewVideo = true) {
+ await VideoJobInfoModel.increaseOrCreate(video.uuid, 'pendingMove')
+
+ const dataInput = { videoUUID: video.uuid, isNewVideo }
+ return JobQueue.Instance.createJobWithPromise({ type: 'move-to-object-storage', payload: dataInput })
}
async function getTranscodingJobPriority (user: MUserId) {
@@ -143,9 +131,10 @@ async function getTranscodingJobPriority (user: MUserId) {
export {
buildLocalVideoFromReq,
- publishAndFederateIfNeeded,
buildVideoThumbnailsFromReq,
setVideoTags,
addOptimizeOrMergeAudioJob,
+ addTranscodingJob,
+ addMoveToObjectStorageJob,
getTranscodingJobPriority
}
diff --git a/server/models/video/formatter/video-format-utils.ts b/server/models/video/formatter/video-format-utils.ts
index 8a54de3b0..b3c4f390d 100644
--- a/server/models/video/formatter/video-format-utils.ts
+++ b/server/models/video/formatter/video-format-utils.ts
@@ -1,6 +1,6 @@
import { uuidToShort } from '@server/helpers/uuid'
import { generateMagnetUri } from '@server/helpers/webtorrent'
-import { getLocalVideoFileMetadataUrl } from '@server/lib/video-paths'
+import { getLocalVideoFileMetadataUrl } from '@server/lib/video-urls'
import { VideoFile } from '@shared/models/videos/video-file.model'
import { ActivityTagObject, ActivityUrlObject, VideoObject } from '../../../../shared/models/activitypub/objects'
import { Video, VideoDetails } from '../../../../shared/models/videos'
diff --git a/server/models/video/sql/shared/video-tables.ts b/server/models/video/sql/shared/video-tables.ts
index 742d19099..75823864d 100644
--- a/server/models/video/sql/shared/video-tables.ts
+++ b/server/models/video/sql/shared/video-tables.ts
@@ -87,7 +87,8 @@ export class VideoTables {
'fps',
'metadataUrl',
'videoStreamingPlaylistId',
- 'videoId'
+ 'videoId',
+ 'storage'
]
}
@@ -102,7 +103,8 @@ export class VideoTables {
'segmentsSha256Url',
'videoId',
'createdAt',
- 'updatedAt'
+ 'updatedAt',
+ 'storage'
])
}
@@ -258,7 +260,8 @@ export class VideoTables {
'originallyPublishedAt',
'channelId',
'createdAt',
- 'updatedAt'
+ 'updatedAt',
+ 'moveJobsRunning'
]
}
}
diff --git a/server/models/video/video-file.ts b/server/models/video/video-file.ts
index 09fc5288b..627c95763 100644
--- a/server/models/video/video-file.ts
+++ b/server/models/video/video-file.ts
@@ -23,9 +23,11 @@ import validator from 'validator'
import { buildRemoteVideoBaseUrl } from '@server/helpers/activitypub'
import { logger } from '@server/helpers/logger'
import { extractVideo } from '@server/helpers/video'
-import { getTorrentFilePath } from '@server/lib/video-paths'
+import { getHLSPublicFileUrl, getWebTorrentPublicFileUrl } from '@server/lib/object-storage'
+import { getFSTorrentFilePath } from '@server/lib/paths'
import { MStreamingPlaylistVideo, MVideo, MVideoWithHost } from '@server/types/models'
import { AttributesOnly } from '@shared/core-utils'
+import { VideoStorage } from '@shared/models'
import {
isVideoFileExtnameValid,
isVideoFileInfoHashValid,
@@ -214,6 +216,11 @@ export class VideoFileModel extends Model<Partial<AttributesOnly<VideoFileModel>>> {
@Column
videoId: number
+ @AllowNull(false)
+ @Default(VideoStorage.FILE_SYSTEM)
+ @Column
+ storage: VideoStorage
+
@BelongsTo(() => VideoModel, {
foreignKey: {
allowNull: true
@@ -273,7 +280,7 @@ export class VideoFileModel extends Model<Partial<AttributesOnly<VideoFileModel>>> {
static async doesOwnedWebTorrentVideoFileExist (filename: string) {
const query = 'SELECT 1 FROM "videoFile" INNER JOIN "video" ON "video"."id" = "videoFile"."videoId" AND "video"."remote" IS FALSE ' +
- 'WHERE "filename" = $filename LIMIT 1'
+ `WHERE "filename" = $filename AND "storage" = ${VideoStorage.FILE_SYSTEM} LIMIT 1`
return doesExist(query, { filename })
}
@@ -450,9 +457,20 @@ export class VideoFileModel extends Model<Partial<AttributesOnly<VideoFileModel>>> {
return !!this.videoStreamingPlaylistId
}
- getFileUrl (video: MVideo) {
- if (!this.Video) this.Video = video as VideoModel
+ getObjectStorageUrl () {
+ if (this.isHLS()) {
+ return getHLSPublicFileUrl(this.fileUrl)
+ }
+ return getWebTorrentPublicFileUrl(this.fileUrl)
+ }
+
+ getFileUrl (video: MVideo) {
+ if (this.storage === VideoStorage.OBJECT_STORAGE) {
+ return this.getObjectStorageUrl()
+ }
+
+ if (!this.Video) this.Video = video as VideoModel
if (video.isOwned()) return WEBSERVER.URL + this.getFileStaticPath(video)
return this.fileUrl
@@ -503,7 +521,7 @@ export class VideoFileModel extends Model<Partial<AttributesOnly<VideoFileModel>>> {
removeTorrent () {
if (!this.torrentFilename) return null
- const torrentPath = getTorrentFilePath(this)
+ const torrentPath = getFSTorrentFilePath(this)
return remove(torrentPath)
.catch(err => logger.warn('Cannot delete torrent %s.', torrentPath, { err }))
}
diff --git a/server/models/video/video-job-info.ts b/server/models/video/video-job-info.ts
new file mode 100644
index 000000000..7c1fe6734
--- /dev/null
+++ b/server/models/video/video-job-info.ts
@@ -0,0 +1,100 @@
+import { Op, QueryTypes, Transaction } from 'sequelize'
+import { AllowNull, BelongsTo, Column, CreatedAt, Default, ForeignKey, IsInt, Model, Table, Unique, UpdatedAt } from 'sequelize-typescript'
+import { AttributesOnly } from '@shared/core-utils'
+import { VideoModel } from './video'
+
+@Table({
+ tableName: 'videoJobInfo',
+ indexes: [
+ {
+ fields: [ 'videoId' ],
+ where: {
+ videoId: {
+ [Op.ne]: null
+ }
+ }
+ }
+ ]
+})
+
+export class VideoJobInfoModel extends Model<Partial<AttributesOnly<VideoJobInfoModel>>> {
+ @CreatedAt
+ createdAt: Date
+
+ @UpdatedAt
+ updatedAt: Date
+
+ @AllowNull(false)
+ @Default(0)
+ @IsInt
+ @Column
+ pendingMove: number
+
+ @AllowNull(false)
+ @Default(0)
+ @IsInt
+ @Column
+ pendingTranscode: number
+
+ @ForeignKey(() => VideoModel)
+ @Unique
+ @Column
+ videoId: number
+
+ @BelongsTo(() => VideoModel, {
+ foreignKey: {
+ allowNull: false
+ },
+ onDelete: 'cascade'
+ })
+ Video: VideoModel
+
+ static load (videoId: number, transaction: Transaction) {
+ const where = {
+ videoId
+ }
+
+ return VideoJobInfoModel.findOne({ where, transaction })
+ }
+
+ static async increaseOrCreate (videoUUID: string, column: 'pendingMove' | 'pendingTranscode'): Promise<number> {
+ const options = { type: QueryTypes.SELECT as QueryTypes.SELECT, bind: { videoUUID } }
+
+ const [ { pendingMove } ] = await VideoJobInfoModel.sequelize.query<{ pendingMove: number }>(`
+ INSERT INTO "videoJobInfo" ("videoId", "${column}", "createdAt", "updatedAt")
+ SELECT
+ "video"."id" AS "videoId", 1, NOW(), NOW()
+ FROM
+ "video"
+ WHERE
+ "video"."uuid" = $videoUUID
+ ON CONFLICT ("videoId") DO UPDATE
+ SET
+ "${column}" = "videoJobInfo"."${column}" + 1,
+ "updatedAt" = NOW()
+ RETURNING
+ "${column}"
+ `, options)
+
+ return pendingMove
+ }
+
+ static async decrease (videoUUID: string, column: 'pendingMove' | 'pendingTranscode'): Promise<number> {
+ const options = { type: QueryTypes.SELECT as QueryTypes.SELECT, bind: { videoUUID } }
+
+ const [ { pendingMove } ] = await VideoJobInfoModel.sequelize.query<{ pendingMove: number }>(`
+ UPDATE
+ "videoJobInfo"
+ SET
+ "${column}" = "videoJobInfo"."${column}" - 1,
+ "updatedAt" = NOW()
+ FROM "video"
+ WHERE
+ "video"."id" = "videoJobInfo"."videoId" AND "video"."uuid" = $videoUUID
+ RETURNING
+ "${column}";
+ `, options)
+
+ return pendingMove
+ }
+}
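
The new `videoJobInfo` table is a pair of per-video counters maintained with atomic SQL upsert/decrement, so several transcode or move jobs can run without racing each other. A hedged sketch of the intended lifecycle; only `increaseOrCreate`, `decrease` and `setNewState` come from this patch, while the wrapper functions, the `sequelizeTypescript` transaction helper and the import paths are assumptions:

```ts
import { VideoJobInfoModel } from '@server/models/video/video-job-info'
import { VideoModel } from '@server/models/video/video'
import { sequelizeTypescript } from '@server/initializers/database'
import { VideoState } from '@shared/models'

// Bump the counter when a move job is queued (the row is created on first use)...
async function onMoveJobQueued (video: VideoModel) {
  await VideoJobInfoModel.increaseOrCreate(video.uuid, 'pendingMove')
}

// ...and decrement it when a job completes; only publish once nothing is pending anymore.
async function onMoveJobFinished (video: VideoModel) {
  const pendingMove = await VideoJobInfoModel.decrease(video.uuid, 'pendingMove')
  if (pendingMove !== 0) return

  await sequelizeTypescript.transaction(t => video.setNewState(VideoState.PUBLISHED, t))
}
```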
diff --git a/server/models/video/video-streaming-playlist.ts b/server/models/video/video-streaming-playlist.ts
index d591a3134..3e9fd97c7 100644
--- a/server/models/video/video-streaming-playlist.ts
+++ b/server/models/video/video-streaming-playlist.ts
@@ -1,10 +1,25 @@
import * as memoizee from 'memoizee'
import { join } from 'path'
import { Op } from 'sequelize'
-import { AllowNull, BelongsTo, Column, CreatedAt, DataType, ForeignKey, HasMany, Is, Model, Table, UpdatedAt } from 'sequelize-typescript'
+import {
+ AllowNull,
+ BelongsTo,
+ Column,
+ CreatedAt,
+ DataType,
+ Default,
+ ForeignKey,
+ HasMany,
+ Is,
+ Model,
+ Table,
+ UpdatedAt
+} from 'sequelize-typescript'
+import { getHLSPublicFileUrl } from '@server/lib/object-storage'
import { VideoFileModel } from '@server/models/video/video-file'
import { MStreamingPlaylist, MVideo } from '@server/types/models'
import { AttributesOnly } from '@shared/core-utils'
+import { VideoStorage } from '@shared/models'
import { VideoStreamingPlaylistType } from '../../../shared/models/videos/video-streaming-playlist.type'
import { sha1 } from '../../helpers/core-utils'
import { isActivityPubUrlValid } from '../../helpers/custom-validators/activitypub/misc'
@@ -81,6 +96,11 @@ export class VideoStreamingPlaylistModel extends Model<Partial<AttributesOnly<VideoStreamingPlaylistModel>>> {
+ @AllowNull(false)
+ @Default(VideoStorage.FILE_SYSTEM)
+ @Column
+ storage: VideoStorage
+
 @BelongsTo(() => VideoModel, {
foreignKey: {
allowNull: false
@@ -185,12 +205,20 @@ export class VideoStreamingPlaylistModel extends Model<Partial<AttributesOnly<VideoStreamingPlaylistModel>>> {
diff --git a/server/models/video/video.ts b/server/models/video/video.ts
--- a/server/models/video/video.ts
+++ b/server/models/video/video.ts
@@ ... @@ export class VideoModel extends Model<Partial<AttributesOnly<VideoModel>>> {
})
VideoCaptions: VideoCaptionModel[]
+ @HasOne(() => VideoJobInfoModel, {
+ foreignKey: {
+ name: 'videoId',
+ allowNull: false
+ },
+ onDelete: 'cascade'
+ })
+ VideoJobInfo: VideoJobInfoModel
+
@BeforeDestroy
static async sendDelete (instance: MVideoAccountLight, options) {
if (!instance.isOwned()) return undefined
@@ -1641,9 +1653,10 @@ export class VideoModel extends Model<Partial<AttributesOnly<VideoModel>>> {
getMaxQualityResolution () {
const file = this.getMaxQualityFile()
const videoOrPlaylist = file.getVideoOrStreamingPlaylist()
- const originalFilePath = getVideoFilePath(videoOrPlaylist, file)
- return getVideoFileResolution(originalFilePath)
+ return VideoPathManager.Instance.makeAvailableVideoFile(videoOrPlaylist, file, originalFilePath => {
+ return getVideoFileResolution(originalFilePath)
+ })
}
getDescriptionAPIPath () {
@@ -1673,16 +1686,24 @@ export class VideoModel extends Model<Partial<AttributesOnly<VideoModel>>> {
}
removeFileAndTorrent (videoFile: MVideoFile, isRedundancy = false) {
- const filePath = getVideoFilePath(this, videoFile, isRedundancy)
+ const filePath = isRedundancy
+ ? VideoPathManager.Instance.getFSRedundancyVideoFilePath(this, videoFile)
+ : VideoPathManager.Instance.getFSVideoFileOutputPath(this, videoFile)
const promises: Promise[] = [ remove(filePath) ]
if (!isRedundancy) promises.push(videoFile.removeTorrent())
+ if (videoFile.storage === VideoStorage.OBJECT_STORAGE) {
+ promises.push(removeWebTorrentObjectStorage(videoFile))
+ }
+
return Promise.all(promises)
}
async removeStreamingPlaylistFiles (streamingPlaylist: MStreamingPlaylist, isRedundancy = false) {
- const directoryPath = getHLSDirectory(this, isRedundancy)
+ const directoryPath = isRedundancy
+ ? getHLSRedundancyDirectory(this)
+ : getHLSDirectory(this)
await remove(directoryPath)
@@ -1698,6 +1719,10 @@ export class VideoModel extends Model<Partial<AttributesOnly<VideoModel>>> {
await Promise.all(
streamingPlaylistWithFiles.VideoFiles.map(file => file.removeTorrent())
)
+
+ if (streamingPlaylist.storage === VideoStorage.OBJECT_STORAGE) {
+ await removeHLSObjectStorage(streamingPlaylist, this)
+ }
}
}
@@ -1741,16 +1766,16 @@ export class VideoModel extends Model<Partial<AttributesOnly<VideoModel>>> {
this.privacy === VideoPrivacy.INTERNAL
}
- async publishIfNeededAndSave (t: Transaction) {
- if (this.state !== VideoState.PUBLISHED) {
- this.state = VideoState.PUBLISHED
- this.publishedAt = new Date()
- await this.save({ transaction: t })
+ async setNewState (newState: VideoState, transaction: Transaction) {
+ if (this.state === newState) throw new Error('Cannot use same state ' + newState)
- return true
+ this.state = newState
+
+ if (this.state === VideoState.PUBLISHED) {
+ this.publishedAt = new Date()
}
- return false
+ await this.save({ transaction })
}
getBandwidthBits (videoFile: MVideoFile) {
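
Since `publishIfNeededAndSave()` is gone, call sites have to express the transition explicitly and guard against a no-op, because `setNewState()` now throws on same-state transitions. A minimal migration sketch (the guard wrapper and import paths are illustrative, not part of this patch):

```ts
import { Transaction } from 'sequelize'
import { VideoModel } from '@server/models/video/video'
import { VideoState } from '@shared/models'

// Equivalent of the removed publishIfNeededAndSave(t): returns whether the state changed.
async function publishIfNeeded (video: VideoModel, t: Transaction) {
  if (video.state === VideoState.PUBLISHED) return false

  await video.setNewState(VideoState.PUBLISHED, t)
  return true
}
```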
diff --git a/server/tests/api/index.ts b/server/tests/api/index.ts
index b62e2f5f7..19301c0b9 100644
--- a/server/tests/api/index.ts
+++ b/server/tests/api/index.ts
@@ -2,6 +2,7 @@
import './activitypub'
import './check-params'
import './moderation'
+import './object-storage'
import './notifications'
import './redundancy'
import './search'
diff --git a/server/tests/api/live/live-save-replay.ts b/server/tests/api/live/live-save-replay.ts
index 8f1fb78a5..6c4ea90ca 100644
--- a/server/tests/api/live/live-save-replay.ts
+++ b/server/tests/api/live/live-save-replay.ts
@@ -15,7 +15,9 @@ import {
stopFfmpeg,
testFfmpegStreamError,
wait,
- waitJobs
+ waitJobs,
+ waitUntilLivePublishedOnAllServers,
+ waitUntilLiveSavedOnAllServers
} from '@shared/extra-utils'
import { HttpStatusCode, LiveVideoCreate, VideoPrivacy, VideoState } from '@shared/models'
@@ -66,18 +68,6 @@ describe('Save replay setting', function () {
}
}
- async function waitUntilLivePublishedOnAllServers (videoId: string) {
- for (const server of servers) {
- await server.live.waitUntilPublished({ videoId })
- }
- }
-
- async function waitUntilLiveSavedOnAllServers (videoId: string) {
- for (const server of servers) {
- await server.live.waitUntilSaved({ videoId })
- }
- }
-
before(async function () {
this.timeout(120000)
@@ -127,7 +117,7 @@ describe('Save replay setting', function () {
ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID })
- await waitUntilLivePublishedOnAllServers(liveVideoUUID)
+ await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID)
await waitJobs(servers)
@@ -160,7 +150,7 @@ describe('Save replay setting', function () {
ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID })
- await waitUntilLivePublishedOnAllServers(liveVideoUUID)
+ await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID)
await waitJobs(servers)
await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200)
@@ -189,7 +179,7 @@ describe('Save replay setting', function () {
ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID })
- await waitUntilLivePublishedOnAllServers(liveVideoUUID)
+ await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID)
await waitJobs(servers)
await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200)
@@ -224,7 +214,7 @@ describe('Save replay setting', function () {
this.timeout(20000)
ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID })
- await waitUntilLivePublishedOnAllServers(liveVideoUUID)
+ await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID)
await waitJobs(servers)
@@ -237,7 +227,7 @@ describe('Save replay setting', function () {
await stopFfmpeg(ffmpegCommand)
- await waitUntilLiveSavedOnAllServers(liveVideoUUID)
+ await waitUntilLiveSavedOnAllServers(servers, liveVideoUUID)
await waitJobs(servers)
// Live has been transcoded
@@ -268,7 +258,7 @@ describe('Save replay setting', function () {
liveVideoUUID = await createLiveWrapper(true)
ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID })
- await waitUntilLivePublishedOnAllServers(liveVideoUUID)
+ await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID)
await waitJobs(servers)
await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200)
@@ -296,7 +286,7 @@ describe('Save replay setting', function () {
liveVideoUUID = await createLiveWrapper(true)
ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveVideoUUID })
- await waitUntilLivePublishedOnAllServers(liveVideoUUID)
+ await waitUntilLivePublishedOnAllServers(servers, liveVideoUUID)
await waitJobs(servers)
await checkVideosExist(liveVideoUUID, true, HttpStatusCode.OK_200)
diff --git a/server/tests/api/object-storage/index.ts b/server/tests/api/object-storage/index.ts
new file mode 100644
index 000000000..f319d6ef5
--- /dev/null
+++ b/server/tests/api/object-storage/index.ts
@@ -0,0 +1,3 @@
+export * from './live'
+export * from './video-imports'
+export * from './videos'
diff --git a/server/tests/api/object-storage/live.ts b/server/tests/api/object-storage/live.ts
new file mode 100644
index 000000000..d3e6777f2
--- /dev/null
+++ b/server/tests/api/object-storage/live.ts
@@ -0,0 +1,136 @@
+/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
+
+import 'mocha'
+import * as chai from 'chai'
+import { FfmpegCommand } from 'fluent-ffmpeg'
+import {
+ areObjectStorageTestsDisabled,
+ createMultipleServers,
+ doubleFollow,
+ expectStartWith,
+ killallServers,
+ makeRawRequest,
+ ObjectStorageCommand,
+ PeerTubeServer,
+ setAccessTokensToServers,
+ setDefaultVideoChannel,
+ stopFfmpeg,
+ waitJobs,
+ waitUntilLivePublishedOnAllServers,
+ waitUntilLiveSavedOnAllServers
+} from '@shared/extra-utils'
+import { HttpStatusCode, LiveVideoCreate, VideoFile, VideoPrivacy } from '@shared/models'
+
+const expect = chai.expect
+
+async function createLive (server: PeerTubeServer) {
+ const attributes: LiveVideoCreate = {
+ channelId: server.store.channel.id,
+ privacy: VideoPrivacy.PUBLIC,
+ name: 'my super live',
+ saveReplay: true
+ }
+
+ const { uuid } = await server.live.create({ fields: attributes })
+
+ return uuid
+}
+
+async function checkFiles (files: VideoFile[]) {
+ for (const file of files) {
+ expectStartWith(file.fileUrl, ObjectStorageCommand.getPlaylistBaseUrl())
+
+ await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200)
+ }
+}
+
+describe('Object storage for lives', function () {
+ if (areObjectStorageTestsDisabled()) return
+
+ let ffmpegCommand: FfmpegCommand
+ let servers: PeerTubeServer[]
+ let videoUUID: string
+
+ before(async function () {
+ this.timeout(120000)
+
+ await ObjectStorageCommand.prepareDefaultBuckets()
+
+ servers = await createMultipleServers(2, ObjectStorageCommand.getDefaultConfig())
+
+ await setAccessTokensToServers(servers)
+ await setDefaultVideoChannel(servers)
+ await doubleFollow(servers[0], servers[1])
+
+ await servers[0].config.enableTranscoding()
+ })
+
+ describe('Without live transcoding', async function () {
+
+ before(async function () {
+ await servers[0].config.enableLive({ transcoding: false })
+
+ videoUUID = await createLive(servers[0])
+ })
+
+ it('Should create a live and save the replay on object storage', async function () {
+ this.timeout(220000)
+
+ ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUID })
+ await waitUntilLivePublishedOnAllServers(servers, videoUUID)
+
+ await stopFfmpeg(ffmpegCommand)
+
+ await waitUntilLiveSavedOnAllServers(servers, videoUUID)
+ await waitJobs(servers)
+
+ for (const server of servers) {
+ const video = await server.videos.get({ id: videoUUID })
+
+ expect(video.files).to.have.lengthOf(0)
+ expect(video.streamingPlaylists).to.have.lengthOf(1)
+
+ const files = video.streamingPlaylists[0].files
+
+ await checkFiles(files)
+ }
+ })
+ })
+
+ describe('With live transcoding', async function () {
+
+ before(async function () {
+ await servers[0].config.enableLive({ transcoding: true })
+
+ videoUUID = await createLive(servers[0])
+ })
+
+ it('Should create a live with transcoding and save the replay on object storage', async function () {
+ this.timeout(240000)
+
+ ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUID })
+ await waitUntilLivePublishedOnAllServers(servers, videoUUID)
+
+ await stopFfmpeg(ffmpegCommand)
+
+ await waitUntilLiveSavedOnAllServers(servers, videoUUID)
+ await waitJobs(servers)
+
+ for (const server of servers) {
+ const video = await server.videos.get({ id: videoUUID })
+
+ expect(video.files).to.have.lengthOf(0)
+ expect(video.streamingPlaylists).to.have.lengthOf(1)
+
+ const files = video.streamingPlaylists[0].files
+ expect(files).to.have.lengthOf(4)
+
+ await checkFiles(files)
+ }
+ })
+ })
+
+ after(async function () {
+ await killallServers(servers)
+ })
+})
diff --git a/server/tests/api/object-storage/video-imports.ts b/server/tests/api/object-storage/video-imports.ts
new file mode 100644
index 000000000..efc01f550
--- /dev/null
+++ b/server/tests/api/object-storage/video-imports.ts
@@ -0,0 +1,112 @@
+/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
+
+import 'mocha'
+import * as chai from 'chai'
+import {
+ areObjectStorageTestsDisabled,
+ createSingleServer,
+ expectStartWith,
+ FIXTURE_URLS,
+ killallServers,
+ makeRawRequest,
+ ObjectStorageCommand,
+ PeerTubeServer,
+ setAccessTokensToServers,
+ setDefaultVideoChannel,
+ waitJobs
+} from '@shared/extra-utils'
+import { HttpStatusCode, VideoPrivacy } from '@shared/models'
+
+const expect = chai.expect
+
+async function importVideo (server: PeerTubeServer) {
+ const attributes = {
+ name: 'import 2',
+ privacy: VideoPrivacy.PUBLIC,
+ channelId: server.store.channel.id,
+ targetUrl: FIXTURE_URLS.goodVideo720
+ }
+
+ const { video: { uuid } } = await server.imports.importVideo({ attributes })
+
+ return uuid
+}
+
+describe('Object storage for video import', function () {
+ if (areObjectStorageTestsDisabled()) return
+
+ let server: PeerTubeServer
+
+ before(async function () {
+ this.timeout(120000)
+
+ await ObjectStorageCommand.prepareDefaultBuckets()
+
+ server = await createSingleServer(1, ObjectStorageCommand.getDefaultConfig())
+
+ await setAccessTokensToServers([ server ])
+ await setDefaultVideoChannel([ server ])
+
+ await server.config.enableImports()
+ })
+
+ describe('Without transcoding', async function () {
+
+ before(async function () {
+ await server.config.disableTranscoding()
+ })
+
+ it('Should import a video and have sent it to object storage', async function () {
+ this.timeout(120000)
+
+ const uuid = await importVideo(server)
+ await waitJobs(server)
+
+ const video = await server.videos.get({ id: uuid })
+
+ expect(video.files).to.have.lengthOf(1)
+ expect(video.streamingPlaylists).to.have.lengthOf(0)
+
+ const fileUrl = video.files[0].fileUrl
+ expectStartWith(fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl())
+
+ await makeRawRequest(fileUrl, HttpStatusCode.OK_200)
+ })
+ })
+
+ describe('With transcoding', async function () {
+
+ before(async function () {
+ await server.config.enableTranscoding()
+ })
+
+ it('Should import a video and have sent it to object storage', async function () {
+ this.timeout(120000)
+
+ const uuid = await importVideo(server)
+ await waitJobs(server)
+
+ const video = await server.videos.get({ id: uuid })
+
+ expect(video.files).to.have.lengthOf(4)
+ expect(video.streamingPlaylists).to.have.lengthOf(1)
+ expect(video.streamingPlaylists[0].files).to.have.lengthOf(4)
+
+ for (const file of video.files) {
+ expectStartWith(file.fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl())
+
+ await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200)
+ }
+
+ for (const file of video.streamingPlaylists[0].files) {
+ expectStartWith(file.fileUrl, ObjectStorageCommand.getPlaylistBaseUrl())
+
+ await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200)
+ }
+ })
+ })
+
+ after(async function () {
+ await killallServers([ server ])
+ })
+})
diff --git a/server/tests/api/object-storage/videos.ts b/server/tests/api/object-storage/videos.ts
new file mode 100644
index 000000000..3958bd3d7
--- /dev/null
+++ b/server/tests/api/object-storage/videos.ts
@@ -0,0 +1,391 @@
+/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
+
+import 'mocha'
+import * as chai from 'chai'
+import { merge } from 'lodash'
+import {
+ areObjectStorageTestsDisabled,
+ checkTmpIsEmpty,
+ cleanupTests,
+ createMultipleServers,
+ createSingleServer,
+ doubleFollow,
+ expectStartWith,
+ killallServers,
+ makeRawRequest,
+ MockObjectStorage,
+ ObjectStorageCommand,
+ PeerTubeServer,
+ setAccessTokensToServers,
+ waitJobs,
+ webtorrentAdd
+} from '@shared/extra-utils'
+import { HttpStatusCode, VideoDetails } from '@shared/models'
+
+const expect = chai.expect
+
+async function checkFiles (options: {
+ video: VideoDetails
+
+ baseMockUrl?: string
+
+ playlistBucket: string
+ playlistPrefix?: string
+
+ webtorrentBucket: string
+ webtorrentPrefix?: string
+}) {
+ const {
+ video,
+ playlistBucket,
+ webtorrentBucket,
+ baseMockUrl,
+ playlistPrefix,
+ webtorrentPrefix
+ } = options
+
+ let allFiles = video.files
+
+ for (const file of video.files) {
+ const baseUrl = baseMockUrl
+ ? `${baseMockUrl}/${webtorrentBucket}/`
+ : `http://${webtorrentBucket}.${ObjectStorageCommand.getEndpointHost()}/`
+
+ const prefix = webtorrentPrefix || ''
+ const start = baseUrl + prefix
+
+ expectStartWith(file.fileUrl, start)
+
+ const res = await makeRawRequest(file.fileDownloadUrl, HttpStatusCode.FOUND_302)
+ const location = res.headers['location']
+ expectStartWith(location, start)
+
+ await makeRawRequest(location, HttpStatusCode.OK_200)
+ }
+
+ const hls = video.streamingPlaylists[0]
+
+ if (hls) {
+ allFiles = allFiles.concat(hls.files)
+
+ const baseUrl = baseMockUrl
+ ? `${baseMockUrl}/${playlistBucket}/`
+ : `http://${playlistBucket}.${ObjectStorageCommand.getEndpointHost()}/`
+
+ const prefix = playlistPrefix || ''
+ const start = baseUrl + prefix
+
+ expectStartWith(hls.playlistUrl, start)
+ expectStartWith(hls.segmentsSha256Url, start)
+
+ await makeRawRequest(hls.playlistUrl, HttpStatusCode.OK_200)
+
+ const resSha = await makeRawRequest(hls.segmentsSha256Url, HttpStatusCode.OK_200)
+ expect(JSON.stringify(resSha.body)).to.not.throw
+
+ for (const file of hls.files) {
+ expectStartWith(file.fileUrl, start)
+
+ const res = await makeRawRequest(file.fileDownloadUrl, HttpStatusCode.FOUND_302)
+ const location = res.headers['location']
+ expectStartWith(location, start)
+
+ await makeRawRequest(location, HttpStatusCode.OK_200)
+ }
+ }
+
+ for (const file of allFiles) {
+ const torrent = await webtorrentAdd(file.magnetUri, true)
+
+ expect(torrent.files).to.be.an('array')
+ expect(torrent.files.length).to.equal(1)
+ expect(torrent.files[0].path).to.exist.and.to.not.equal('')
+
+ const res = await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200)
+ expect(res.body).to.have.length.above(100)
+ }
+
+ return allFiles.map(f => f.fileUrl)
+}
+
+function runTestSuite (options: {
+ playlistBucket: string
+ playlistPrefix?: string
+
+ webtorrentBucket: string
+ webtorrentPrefix?: string
+
+ useMockBaseUrl?: boolean
+
+ maxUploadPart?: string
+}) {
+ const mockObjectStorage = new MockObjectStorage()
+ let baseMockUrl: string
+
+ let servers: PeerTubeServer[]
+
+ let keptUrls: string[] = []
+
+ const uuidsToDelete: string[] = []
+ let deletedUrls: string[] = []
+
+ before(async function () {
+ this.timeout(120000)
+
+ const port = await mockObjectStorage.initialize()
+ baseMockUrl = options.useMockBaseUrl ? `http://localhost:${port}` : undefined
+
+ await ObjectStorageCommand.createBucket(options.playlistBucket)
+ await ObjectStorageCommand.createBucket(options.webtorrentBucket)
+
+ const config = {
+ object_storage: {
+ enabled: true,
+ endpoint: 'http://' + ObjectStorageCommand.getEndpointHost(),
+ region: ObjectStorageCommand.getRegion(),
+
+ credentials: ObjectStorageCommand.getCredentialsConfig(),
+
+ max_upload_part: options.maxUploadPart || '2MB',
+
+ streaming_playlists: {
+ bucket_name: options.playlistBucket,
+ prefix: options.playlistPrefix,
+ base_url: baseMockUrl
+ ? `${baseMockUrl}/${options.playlistBucket}`
+ : undefined
+ },
+
+ videos: {
+ bucket_name: options.webtorrentBucket,
+ prefix: options.webtorrentPrefix,
+ base_url: baseMockUrl
+ ? `${baseMockUrl}/${options.webtorrentBucket}`
+ : undefined
+ }
+ }
+ }
+
+ servers = await createMultipleServers(2, config)
+
+ await setAccessTokensToServers(servers)
+ await doubleFollow(servers[0], servers[1])
+
+ for (const server of servers) {
+ const { uuid } = await server.videos.quickUpload({ name: 'video to keep' })
+ await waitJobs(servers)
+
+ const files = await server.videos.listFiles({ id: uuid })
+ keptUrls = keptUrls.concat(files.map(f => f.fileUrl))
+ }
+ })
+
+ it('Should upload a video and move it to the object storage without transcoding', async function () {
+ this.timeout(20000)
+
+ const { uuid } = await servers[0].videos.quickUpload({ name: 'video 1' })
+ uuidsToDelete.push(uuid)
+
+ await waitJobs(servers)
+
+ for (const server of servers) {
+ const video = await server.videos.get({ id: uuid })
+ const files = await checkFiles({ ...options, video, baseMockUrl })
+
+ deletedUrls = deletedUrls.concat(files)
+ }
+ })
+
+ it('Should upload a video and move it to the object storage with transcoding', async function () {
+ this.timeout(40000)
+
+ const { uuid } = await servers[1].videos.quickUpload({ name: 'video 2' })
+ uuidsToDelete.push(uuid)
+
+ await waitJobs(servers)
+
+ for (const server of servers) {
+ const video = await server.videos.get({ id: uuid })
+ const files = await checkFiles({ ...options, video, baseMockUrl })
+
+ deletedUrls = deletedUrls.concat(files)
+ }
+ })
+
+ it('Should correctly delete the files', async function () {
+ await servers[0].videos.remove({ id: uuidsToDelete[0] })
+ await servers[1].videos.remove({ id: uuidsToDelete[1] })
+
+ await waitJobs(servers)
+
+ for (const url of deletedUrls) {
+ await makeRawRequest(url, HttpStatusCode.NOT_FOUND_404)
+ }
+ })
+
+ it('Should have kept other files', async function () {
+ for (const url of keptUrls) {
+ await makeRawRequest(url, HttpStatusCode.OK_200)
+ }
+ })
+
+ it('Should have an empty tmp directory', async function () {
+ for (const server of servers) {
+ await checkTmpIsEmpty(server)
+ }
+ })
+
+ after(async function () {
+ mockObjectStorage.terminate()
+
+ await cleanupTests(servers)
+ })
+}
+
+describe('Object storage for videos', function () {
+ if (areObjectStorageTestsDisabled()) return
+
+ describe('Test config', function () {
+ let server: PeerTubeServer
+
+ const baseConfig = {
+ object_storage: {
+ enabled: true,
+ endpoint: 'http://' + ObjectStorageCommand.getEndpointHost(),
+ region: ObjectStorageCommand.getRegion(),
+
+ credentials: ObjectStorageCommand.getCredentialsConfig(),
+
+ streaming_playlists: {
+ bucket_name: ObjectStorageCommand.DEFAULT_PLAYLIST_BUCKET
+ },
+
+ videos: {
+ bucket_name: ObjectStorageCommand.DEFAULT_WEBTORRENT_BUCKET
+ }
+ }
+ }
+
+ const badCredentials = {
+ access_key_id: 'AKIAIOSFODNN7EXAMPLE',
+ secret_access_key: 'aJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'
+ }
+
+ it('Should fail with same bucket names without prefix', function (done) {
+ const config = merge({}, baseConfig, {
+ object_storage: {
+ streaming_playlists: {
+ bucket_name: 'aaa'
+ },
+
+ videos: {
+ bucket_name: 'aaa'
+ }
+ }
+ })
+
+ createSingleServer(1, config)
+ .then(() => done(new Error('Did not throw')))
+ .catch(() => done())
+ })
+
+ it('Should fail with bad credentials', async function () {
+ this.timeout(60000)
+
+ await ObjectStorageCommand.prepareDefaultBuckets()
+
+ const config = merge({}, baseConfig, {
+ object_storage: {
+ credentials: badCredentials
+ }
+ })
+
+ server = await createSingleServer(1, config)
+ await setAccessTokensToServers([ server ])
+
+ const { uuid } = await server.videos.quickUpload({ name: 'video' })
+
+ await waitJobs([ server ], true)
+ const video = await server.videos.get({ id: uuid })
+
+ expectStartWith(video.files[0].fileUrl, server.url)
+
+ await killallServers([ server ])
+ })
+
+ it('Should succeed with credentials from env', async function () {
+ this.timeout(60000)
+
+ await ObjectStorageCommand.prepareDefaultBuckets()
+
+ const config = merge({}, baseConfig, {
+ object_storage: {
+ credentials: {
+ access_key_id: '',
+ secret_access_key: ''
+ }
+ }
+ })
+
+ const goodCredentials = ObjectStorageCommand.getCredentialsConfig()
+
+ server = await createSingleServer(1, config, {
+ env: {
+ AWS_ACCESS_KEY_ID: goodCredentials.access_key_id,
+ AWS_SECRET_ACCESS_KEY: goodCredentials.secret_access_key
+ }
+ })
+
+ await setAccessTokensToServers([ server ])
+
+ const { uuid } = await server.videos.quickUpload({ name: 'video' })
+
+ await waitJobs([ server ], true)
+ const video = await server.videos.get({ id: uuid })
+
+ expectStartWith(video.files[0].fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl())
+ })
+
+ after(async function () {
+ await killallServers([ server ])
+ })
+ })
+
+ describe('Test simple object storage', function () {
+ runTestSuite({
+ playlistBucket: 'streaming-playlists',
+ webtorrentBucket: 'videos'
+ })
+ })
+
+ describe('Test object storage with prefix', function () {
+ runTestSuite({
+ playlistBucket: 'mybucket',
+ webtorrentBucket: 'mybucket',
+
+ playlistPrefix: 'streaming-playlists_',
+ webtorrentPrefix: 'webtorrent_'
+ })
+ })
+
+ describe('Test object storage with prefix and base URL', function () {
+ runTestSuite({
+ playlistBucket: 'mybucket',
+ webtorrentBucket: 'mybucket',
+
+ playlistPrefix: 'streaming-playlists_',
+ webtorrentPrefix: 'webtorrent_',
+
+ useMockBaseUrl: true
+ })
+ })
+
+ describe('Test object storage with small upload part', function () {
+ runTestSuite({
+ playlistBucket: 'streaming-playlists',
+ webtorrentBucket: 'videos',
+
+ maxUploadPart: '5KB'
+ })
+ })
+})
diff --git a/server/tests/api/redundancy/redundancy.ts b/server/tests/api/redundancy/redundancy.ts
index e1a12f5f8..3400b1d9a 100644
--- a/server/tests/api/redundancy/redundancy.ts
+++ b/server/tests/api/redundancy/redundancy.ts
@@ -207,14 +207,14 @@ async function check1PlaylistRedundancies (videoUUID?: string) {
expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)
}
- const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls'
- const baseUrlSegment = servers[0].url + '/static/redundancy/hls'
+ const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls/' + videoUUID
+ const baseUrlSegment = servers[0].url + '/static/redundancy/hls/' + videoUUID
const video = await servers[0].videos.get({ id: videoUUID })
const hlsPlaylist = video.streamingPlaylists[0]
for (const resolution of [ 240, 360, 480, 720 ]) {
- await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, videoUUID, resolution, hlsPlaylist })
+ await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist })
}
const { hlsFilenames } = await ensureSameFilenames(videoUUID)
diff --git a/server/tests/api/videos/video-hls.ts b/server/tests/api/videos/video-hls.ts
index 961f0e617..2c829f532 100644
--- a/server/tests/api/videos/video-hls.ts
+++ b/server/tests/api/videos/video-hls.ts
@@ -5,6 +5,7 @@ import * as chai from 'chai'
import { basename, join } from 'path'
import { removeFragmentedMP4Ext, uuidRegex } from '@shared/core-utils'
import {
+ areObjectStorageTestsDisabled,
checkDirectoryIsEmpty,
checkResolutionsInMasterPlaylist,
checkSegmentHash,
@@ -12,7 +13,9 @@ import {
cleanupTests,
createMultipleServers,
doubleFollow,
+ expectStartWith,
makeRawRequest,
+ ObjectStorageCommand,
PeerTubeServer,
setAccessTokensToServers,
waitJobs,
@@ -23,8 +26,19 @@ import { DEFAULT_AUDIO_RESOLUTION } from '../../../initializers/constants'
const expect = chai.expect
-async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, hlsOnly: boolean, resolutions = [ 240, 360, 480, 720 ]) {
- for (const server of servers) {
+async function checkHlsPlaylist (options: {
+ servers: PeerTubeServer[]
+ videoUUID: string
+ hlsOnly: boolean
+
+ resolutions?: number[]
+ objectStorageBaseUrl: string
+}) {
+ const { videoUUID, hlsOnly, objectStorageBaseUrl } = options
+
+ const resolutions = options.resolutions ?? [ 240, 360, 480, 720 ]
+
+ for (const server of options.servers) {
const videoDetails = await server.videos.get({ id: videoUUID })
const baseUrl = `http://${videoDetails.account.host}`
@@ -48,9 +62,15 @@ async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, h
expect(file.torrentUrl).to.match(
new RegExp(`http://${server.host}/lazy-static/torrents/${uuidRegex}-${file.resolution.id}-hls.torrent`)
)
- expect(file.fileUrl).to.match(
- new RegExp(`${baseUrl}/static/streaming-playlists/hls/${videoDetails.uuid}/${uuidRegex}-${file.resolution.id}-fragmented.mp4`)
- )
+
+ if (objectStorageBaseUrl) {
+ expectStartWith(file.fileUrl, objectStorageBaseUrl)
+ } else {
+ expect(file.fileUrl).to.match(
+ new RegExp(`${baseUrl}/static/streaming-playlists/hls/${videoDetails.uuid}/${uuidRegex}-${file.resolution.id}-fragmented.mp4`)
+ )
+ }
+
expect(file.resolution.label).to.equal(resolution + 'p')
await makeRawRequest(file.torrentUrl, HttpStatusCode.OK_200)
@@ -80,9 +100,11 @@ async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, h
const file = hlsFiles.find(f => f.resolution.id === resolution)
const playlistName = removeFragmentedMP4Ext(basename(file.fileUrl)) + '.m3u8'
- const subPlaylist = await server.streamingPlaylists.get({
- url: `${baseUrl}/static/streaming-playlists/hls/${videoUUID}/${playlistName}`
- })
+ const url = objectStorageBaseUrl
+ ? `${objectStorageBaseUrl}hls_${videoUUID}/${playlistName}`
+ : `${baseUrl}/static/streaming-playlists/hls/${videoUUID}/${playlistName}`
+
+ const subPlaylist = await server.streamingPlaylists.get({ url })
expect(subPlaylist).to.match(new RegExp(`${uuidRegex}-${resolution}-fragmented.mp4`))
expect(subPlaylist).to.contain(basename(file.fileUrl))
@@ -90,14 +112,15 @@ async function checkHlsPlaylist (servers: PeerTubeServer[], videoUUID: string, h
}
{
- const baseUrlAndPath = baseUrl + '/static/streaming-playlists/hls'
+ const baseUrlAndPath = objectStorageBaseUrl
+ ? objectStorageBaseUrl + 'hls_' + videoUUID
+ : baseUrl + '/static/streaming-playlists/hls/' + videoUUID
for (const resolution of resolutions) {
await checkSegmentHash({
server,
baseUrlPlaylist: baseUrlAndPath,
baseUrlSegment: baseUrlAndPath,
- videoUUID,
resolution,
hlsPlaylist
})
@@ -111,7 +134,7 @@ describe('Test HLS videos', function () {
let videoUUID = ''
let videoAudioUUID = ''
- function runTestSuite (hlsOnly: boolean) {
+ function runTestSuite (hlsOnly: boolean, objectStorageBaseUrl?: string) {
it('Should upload a video and transcode it to HLS', async function () {
this.timeout(120000)
@@ -121,7 +144,7 @@ describe('Test HLS videos', function () {
await waitJobs(servers)
- await checkHlsPlaylist(servers, videoUUID, hlsOnly)
+ await checkHlsPlaylist({ servers, videoUUID, hlsOnly, objectStorageBaseUrl })
})
it('Should upload an audio file and transcode it to HLS', async function () {
@@ -132,7 +155,13 @@ describe('Test HLS videos', function () {
await waitJobs(servers)
- await checkHlsPlaylist(servers, videoAudioUUID, hlsOnly, [ DEFAULT_AUDIO_RESOLUTION, 360, 240 ])
+ await checkHlsPlaylist({
+ servers,
+ videoUUID: videoAudioUUID,
+ hlsOnly,
+ resolutions: [ DEFAULT_AUDIO_RESOLUTION, 360, 240 ],
+ objectStorageBaseUrl
+ })
})
it('Should update the video', async function () {
@@ -142,7 +171,7 @@ describe('Test HLS videos', function () {
await waitJobs(servers)
- await checkHlsPlaylist(servers, videoUUID, hlsOnly)
+ await checkHlsPlaylist({ servers, videoUUID, hlsOnly, objectStorageBaseUrl })
})
it('Should delete videos', async function () {
@@ -229,6 +258,22 @@ describe('Test HLS videos', function () {
runTestSuite(true)
})
+ describe('With object storage enabled', function () {
+ if (areObjectStorageTestsDisabled()) return
+
+ before(async function () {
+ this.timeout(120000)
+
+ const configOverride = ObjectStorageCommand.getDefaultConfig()
+ await ObjectStorageCommand.prepareDefaultBuckets()
+
+ await servers[0].kill()
+ await servers[0].run(configOverride)
+ })
+
+ runTestSuite(true, ObjectStorageCommand.getPlaylistBaseUrl())
+ })
+
after(async function () {
await cleanupTests(servers)
})
diff --git a/server/tests/cli/create-import-video-file-job.ts b/server/tests/cli/create-import-video-file-job.ts
index bddcff5e7..9f1b57a2e 100644
--- a/server/tests/cli/create-import-video-file-job.ts
+++ b/server/tests/cli/create-import-video-file-job.ts
@@ -2,8 +2,19 @@
import 'mocha'
import * as chai from 'chai'
-import { cleanupTests, createMultipleServers, doubleFollow, PeerTubeServer, setAccessTokensToServers, waitJobs } from '@shared/extra-utils'
-import { VideoFile } from '@shared/models'
+import {
+ areObjectStorageTestsDisabled,
+ cleanupTests,
+ createMultipleServers,
+ doubleFollow,
+ expectStartWith,
+ makeRawRequest,
+ ObjectStorageCommand,
+ PeerTubeServer,
+ setAccessTokensToServers,
+ waitJobs
+} from '@shared/extra-utils'
+import { HttpStatusCode, VideoDetails, VideoFile } from '@shared/models'
const expect = chai.expect
@@ -17,22 +28,35 @@ function assertVideoProperties (video: VideoFile, resolution: number, extname: s
if (size) expect(video.size).to.equal(size)
}
-describe('Test create import video jobs', function () {
- this.timeout(60000)
+async function checkFiles (video: VideoDetails, objectStorage: boolean) {
+ for (const file of video.files) {
+ if (objectStorage) expectStartWith(file.fileUrl, ObjectStorageCommand.getWebTorrentBaseUrl())
- let servers: PeerTubeServer[] = []
+ await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200)
+ }
+}
+
+function runTests (objectStorage: boolean) {
let video1UUID: string
let video2UUID: string
+ let servers: PeerTubeServer[] = []
+
before(async function () {
this.timeout(90000)
+ const config = objectStorage
+ ? ObjectStorageCommand.getDefaultConfig()
+ : {}
+
// Run server 2 to have transcoding enabled
- servers = await createMultipleServers(2)
+ servers = await createMultipleServers(2, config)
await setAccessTokensToServers(servers)
await doubleFollow(servers[0], servers[1])
+ if (objectStorage) await ObjectStorageCommand.prepareDefaultBuckets()
+
// Upload two videos for our needs
{
const { uuid } = await servers[0].videos.upload({ attributes: { name: 'video1' } })
@@ -44,7 +68,6 @@ describe('Test create import video jobs', function () {
video2UUID = uuid
}
- // Transcoding
await waitJobs(servers)
})
@@ -65,6 +88,8 @@ describe('Test create import video jobs', function () {
const [ originalVideo, transcodedVideo ] = videoDetails.files
assertVideoProperties(originalVideo, 720, 'webm', 218910)
assertVideoProperties(transcodedVideo, 480, 'webm', 69217)
+
+ await checkFiles(videoDetails, objectStorage)
}
})
@@ -87,6 +112,8 @@ describe('Test create import video jobs', function () {
assertVideoProperties(transcodedVideo420, 480, 'mp4')
assertVideoProperties(transcodedVideo320, 360, 'mp4')
assertVideoProperties(transcodedVideo240, 240, 'mp4')
+
+ await checkFiles(videoDetails, objectStorage)
}
})
@@ -107,10 +134,25 @@ describe('Test create import video jobs', function () {
const [ video720, video480 ] = videoDetails.files
assertVideoProperties(video720, 720, 'webm', 942961)
assertVideoProperties(video480, 480, 'webm', 69217)
+
+ await checkFiles(videoDetails, objectStorage)
}
})
after(async function () {
await cleanupTests(servers)
})
+}
+
+describe('Test create import video jobs', function () {
+
+ describe('On filesystem', function () {
+ runTests(false)
+ })
+
+ describe('On object storage', function () {
+ if (areObjectStorageTestsDisabled()) return
+
+ runTests(true)
+ })
})
diff --git a/server/tests/cli/create-transcoding-job.ts b/server/tests/cli/create-transcoding-job.ts
index df787ccdc..3313a492f 100644
--- a/server/tests/cli/create-transcoding-job.ts
+++ b/server/tests/cli/create-transcoding-job.ts
@@ -2,10 +2,15 @@
import 'mocha'
import * as chai from 'chai'
+import { HttpStatusCode, VideoFile } from '@shared/models'
import {
+ areObjectStorageTestsDisabled,
cleanupTests,
createMultipleServers,
doubleFollow,
+ expectStartWith,
+ makeRawRequest,
+ ObjectStorageCommand,
PeerTubeServer,
setAccessTokensToServers,
waitJobs
@@ -13,39 +18,39 @@ import {
const expect = chai.expect
-describe('Test create transcoding jobs', function () {
+async function checkFilesInObjectStorage (files: VideoFile[], type: 'webtorrent' | 'playlist') {
+ for (const file of files) {
+ const shouldStartWith = type === 'webtorrent'
+ ? ObjectStorageCommand.getWebTorrentBaseUrl()
+ : ObjectStorageCommand.getPlaylistBaseUrl()
+
+ expectStartWith(file.fileUrl, shouldStartWith)
+
+ await makeRawRequest(file.fileUrl, HttpStatusCode.OK_200)
+ }
+}
+
+function runTests (objectStorage: boolean) {
let servers: PeerTubeServer[] = []
const videosUUID: string[] = []
- const config = {
- transcoding: {
- enabled: false,
- resolutions: {
- '240p': true,
- '360p': true,
- '480p': true,
- '720p': true,
- '1080p': true,
- '1440p': true,
- '2160p': true
- },
- hls: {
- enabled: false
- }
- }
- }
-
before(async function () {
this.timeout(60000)
+ const config = objectStorage
+ ? ObjectStorageCommand.getDefaultConfig()
+ : {}
+
// Run server 2 to have transcoding enabled
- servers = await createMultipleServers(2)
+ servers = await createMultipleServers(2, config)
await setAccessTokensToServers(servers)
- await servers[0].config.updateCustomSubConfig({ newConfig: config })
+ await servers[0].config.disableTranscoding()
await doubleFollow(servers[0], servers[1])
+ if (objectStorage) await ObjectStorageCommand.prepareDefaultBuckets()
+
for (let i = 1; i <= 5; i++) {
const { uuid } = await servers[0].videos.upload({ attributes: { name: 'video' + i } })
videosUUID.push(uuid)
@@ -81,27 +86,29 @@ describe('Test create transcoding jobs', function () {
let infoHashes: { [id: number]: string }
for (const video of data) {
- const videoDetail = await server.videos.get({ id: video.uuid })
+ const videoDetails = await server.videos.get({ id: video.uuid })
if (video.uuid === videosUUID[1]) {
- expect(videoDetail.files).to.have.lengthOf(4)
- expect(videoDetail.streamingPlaylists).to.have.lengthOf(0)
+ expect(videoDetails.files).to.have.lengthOf(4)
+ expect(videoDetails.streamingPlaylists).to.have.lengthOf(0)
+
+ if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent')
if (!infoHashes) {
infoHashes = {}
- for (const file of videoDetail.files) {
+ for (const file of videoDetails.files) {
infoHashes[file.resolution.id.toString()] = file.magnetUri
}
} else {
for (const resolution of Object.keys(infoHashes)) {
- const file = videoDetail.files.find(f => f.resolution.id.toString() === resolution)
+ const file = videoDetails.files.find(f => f.resolution.id.toString() === resolution)
expect(file.magnetUri).to.equal(infoHashes[resolution])
}
}
} else {
- expect(videoDetail.files).to.have.lengthOf(1)
- expect(videoDetail.streamingPlaylists).to.have.lengthOf(0)
+ expect(videoDetails.files).to.have.lengthOf(1)
+ expect(videoDetails.streamingPlaylists).to.have.lengthOf(0)
}
}
}
@@ -125,6 +132,8 @@ describe('Test create transcoding jobs', function () {
expect(videoDetails.files[1].resolution.id).to.equal(480)
expect(videoDetails.streamingPlaylists).to.have.lengthOf(0)
+
+ if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent')
}
})
@@ -139,11 +148,15 @@ describe('Test create transcoding jobs', function () {
const videoDetails = await server.videos.get({ id: videosUUID[2] })
expect(videoDetails.files).to.have.lengthOf(1)
+ if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent')
+
expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
const files = videoDetails.streamingPlaylists[0].files
expect(files).to.have.lengthOf(1)
expect(files[0].resolution.id).to.equal(480)
+
+ if (objectStorage) await checkFilesInObjectStorage(files, 'playlist')
}
})
@@ -160,6 +173,8 @@ describe('Test create transcoding jobs', function () {
const files = videoDetails.streamingPlaylists[0].files
expect(files).to.have.lengthOf(1)
expect(files[0].resolution.id).to.equal(480)
+
+ if (objectStorage) await checkFilesInObjectStorage(files, 'playlist')
}
})
@@ -178,15 +193,15 @@ describe('Test create transcoding jobs', function () {
const files = videoDetails.streamingPlaylists[0].files
expect(files).to.have.lengthOf(4)
+
+ if (objectStorage) await checkFilesInObjectStorage(files, 'playlist')
}
})
it('Should optimize the video file and generate HLS videos if enabled in config', async function () {
this.timeout(120000)
- config.transcoding.hls.enabled = true
- await servers[0].config.updateCustomSubConfig({ newConfig: config })
-
+ await servers[0].config.enableTranscoding()
await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[4]}`)
await waitJobs(servers)
@@ -197,10 +212,28 @@ describe('Test create transcoding jobs', function () {
expect(videoDetails.files).to.have.lengthOf(4)
expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(4)
+
+ if (objectStorage) {
+ await checkFilesInObjectStorage(videoDetails.files, 'webtorrent')
+ await checkFilesInObjectStorage(videoDetails.streamingPlaylists[0].files, 'playlist')
+ }
}
})
after(async function () {
await cleanupTests(servers)
})
+}
+
+describe('Test create transcoding jobs', function () {
+
+ describe('On filesystem', function () {
+ runTests(false)
+ })
+
+ describe('On object storage', function () {
+ if (areObjectStorageTestsDisabled()) return
+
+ runTests(true)
+ })
})
diff --git a/server/tests/helpers/request.ts b/server/tests/helpers/request.ts
index 7f7873df3..c9a2eb831 100644
--- a/server/tests/helpers/request.ts
+++ b/server/tests/helpers/request.ts
@@ -13,7 +13,7 @@ describe('Request helpers', function () {
it('Should throw an error when the bytes limit is exceeded for request', async function () {
try {
- await doRequest(FIXTURE_URLS.video4K, { bodyKBLimit: 3 })
+ await doRequest(FIXTURE_URLS.file4K, { bodyKBLimit: 3 })
} catch {
return
}
@@ -23,7 +23,7 @@ describe('Request helpers', function () {
it('Should throw an error when the bytes limit is exceeded for request and save file', async function () {
try {
- await doRequestAndSaveToFile(FIXTURE_URLS.video4K, destPath1, { bodyKBLimit: 3 })
+ await doRequestAndSaveToFile(FIXTURE_URLS.file4K, destPath1, { bodyKBLimit: 3 })
} catch {
await wait(500)
@@ -35,8 +35,8 @@ describe('Request helpers', function () {
})
it('Should succeed if the file is below the limit', async function () {
- await doRequest(FIXTURE_URLS.video4K, { bodyKBLimit: 5 })
- await doRequestAndSaveToFile(FIXTURE_URLS.video4K, destPath2, { bodyKBLimit: 5 })
+ await doRequest(FIXTURE_URLS.file4K, { bodyKBLimit: 5 })
+ await doRequestAndSaveToFile(FIXTURE_URLS.file4K, destPath2, { bodyKBLimit: 5 })
expect(await pathExists(destPath2)).to.be.true
})
diff --git a/shared/extra-utils/miscs/checks.ts b/shared/extra-utils/miscs/checks.ts
index 7fc92f804..aa2c8e8fa 100644
--- a/shared/extra-utils/miscs/checks.ts
+++ b/shared/extra-utils/miscs/checks.ts
@@ -16,6 +16,10 @@ function dateIsValid (dateString: string, interval = 300000) {
return Math.abs(now.getTime() - dateToCheck.getTime()) <= interval
}
+function expectStartWith (str: string, start: string) {
+ expect(str.startsWith(start), `${str} does not start with ${start}`).to.be.true
+}
+
async function testImage (url: string, imageName: string, imagePath: string, extension = '.jpg') {
const res = await makeGetRequest({
url,
@@ -42,5 +46,6 @@ async function testFileExistsOrNot (server: PeerTubeServer, directory: string, f
export {
dateIsValid,
testImage,
- testFileExistsOrNot
+ testFileExistsOrNot,
+ expectStartWith
}
diff --git a/shared/extra-utils/miscs/tests.ts b/shared/extra-utils/miscs/tests.ts
index 3dfb2487e..dd86041fe 100644
--- a/shared/extra-utils/miscs/tests.ts
+++ b/shared/extra-utils/miscs/tests.ts
@@ -28,7 +28,9 @@ const FIXTURE_URLS = {
badVideo: 'https://download.cpy.re/peertube/bad_video.mp4',
goodVideo: 'https://download.cpy.re/peertube/good_video.mp4',
- video4K: 'https://download.cpy.re/peertube/4k_file.txt'
+ goodVideo720: 'https://download.cpy.re/peertube/good_video_720.mp4',
+
+ file4K: 'https://download.cpy.re/peertube/4k_file.txt'
}
function parallelTests () {
@@ -42,7 +44,15 @@ function isGithubCI () {
function areHttpImportTestsDisabled () {
const disabled = process.env.DISABLE_HTTP_IMPORT_TESTS === 'true'
- if (disabled) console.log('Import tests are disabled')
+ if (disabled) console.log('DISABLE_HTTP_IMPORT_TESTS env set to "true" so import tests are disabled')
+
+ return disabled
+}
+
+function areObjectStorageTestsDisabled () {
+ const disabled = process.env.ENABLE_OBJECT_STORAGE_TESTS !== 'true'
+
+ if (disabled) console.log('ENABLE_OBJECT_STORAGE_TESTS env is not set to "true" so object storage tests are disabled')
return disabled
}
@@ -89,6 +99,7 @@ export {
buildAbsoluteFixturePath,
getFileSize,
buildRequestStub,
+ areObjectStorageTestsDisabled,
wait,
root
}
diff --git a/shared/extra-utils/mock-servers/index.ts b/shared/extra-utils/mock-servers/index.ts
index 0ec07f685..93c00c788 100644
--- a/shared/extra-utils/mock-servers/index.ts
+++ b/shared/extra-utils/mock-servers/index.ts
@@ -2,3 +2,4 @@ export * from './mock-email'
export * from './mock-instances-index'
export * from './mock-joinpeertube-versions'
export * from './mock-plugin-blocklist'
+export * from './mock-object-storage'
diff --git a/shared/extra-utils/mock-servers/mock-object-storage.ts b/shared/extra-utils/mock-servers/mock-object-storage.ts
new file mode 100644
index 000000000..19ea7c87c
--- /dev/null
+++ b/shared/extra-utils/mock-servers/mock-object-storage.ts
@@ -0,0 +1,42 @@
+import * as express from 'express'
+import got, { RequestError } from 'got'
+import { Server } from 'http'
+import { pipeline } from 'stream'
+import { randomInt } from '@shared/core-utils'
+import { ObjectStorageCommand } from '../server'
+
+export class MockObjectStorage {
+ private server: Server
+
+ initialize () {
+ return new Promise<number>(res => {
+ const app = express()
+
+ app.get('/:bucketName/:path(*)', (req: express.Request, res: express.Response, next: express.NextFunction) => {
+ const url = `http://${req.params.bucketName}.${ObjectStorageCommand.getEndpointHost()}/${req.params.path}`
+
+ if (process.env.DEBUG) {
+ console.log('Receiving request on mocked server %s.', req.url)
+ console.log('Proxifying request to %s', url)
+ }
+
+ return pipeline(
+ got.stream(url, { throwHttpErrors: false }),
+ res,
+ (err: RequestError) => {
+ if (!err) return
+
+ console.error('Pipeline failed.', err)
+ }
+ )
+ })
+
+ const port = 42301 + randomInt(1, 100)
+ this.server = app.listen(port, () => res(port))
+ })
+ }
+
+ terminate () {
+ if (this.server) this.server.close()
+ }
+}
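
The mock is just a pass-through proxy in front of the real S3 endpoint; tests point `base_url` at it to verify that PeerTube rewrites object URLs. A condensed sketch of the wiring used by `runTestSuite()` in `server/tests/api/object-storage/videos.ts` above (bucket names and the wrapper function are illustrative):

```ts
import { createMultipleServers, MockObjectStorage, ObjectStorageCommand } from '@shared/extra-utils'

// Start the proxy, then hand its URL to PeerTube as the object storage base_url.
async function startServersBehindMock (playlistBucket: string, webtorrentBucket: string) {
  const mock = new MockObjectStorage()
  const port = await mock.initialize()
  const baseMockUrl = `http://localhost:${port}`

  const config = {
    object_storage: {
      enabled: true,
      endpoint: 'http://' + ObjectStorageCommand.getEndpointHost(),
      region: ObjectStorageCommand.getRegion(),
      credentials: ObjectStorageCommand.getCredentialsConfig(),

      streaming_playlists: { bucket_name: playlistBucket, base_url: `${baseMockUrl}/${playlistBucket}` },
      videos: { bucket_name: webtorrentBucket, base_url: `${baseMockUrl}/${webtorrentBucket}` }
    }
  }

  const servers = await createMultipleServers(2, config)
  return { mock, servers } // call mock.terminate() and cleanupTests(servers) in after()
}
```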
diff --git a/shared/extra-utils/requests/requests.ts b/shared/extra-utils/requests/requests.ts
index 70f790222..e3ecd1af2 100644
--- a/shared/extra-utils/requests/requests.ts
+++ b/shared/extra-utils/requests/requests.ts
@@ -121,6 +121,20 @@ function unwrapText (test: request.Test): Promise<string> {
return test.then(res => res.text)
}
+function unwrapBodyOrDecodeToJSON <T> (test: request.Test): Promise<T> {
+ return test.then(res => {
+ if (res.body instanceof Buffer) {
+ return JSON.parse(new TextDecoder().decode(res.body))
+ }
+
+ return res.body
+ })
+}
+
+function unwrapTextOrDecode (test: request.Test): Promise<string> {
+ return test.then(res => res.text || new TextDecoder().decode(res.body))
+}
+
// ---------------------------------------------------------------------------
export {
@@ -134,6 +148,8 @@ export {
makeRawRequest,
makeActivityPubGetRequest,
unwrapBody,
+ unwrapTextOrDecode,
+ unwrapBodyOrDecodeToJSON,
unwrapText
}
diff --git a/shared/extra-utils/server/config-command.ts b/shared/extra-utils/server/config-command.ts
index 11148aa46..51d04fa63 100644
--- a/shared/extra-utils/server/config-command.ts
+++ b/shared/extra-utils/server/config-command.ts
@@ -18,6 +18,70 @@ export class ConfigCommand extends AbstractCommand {
}
}
+ enableImports () {
+ return this.updateExistingSubConfig({
+ newConfig: {
+ import: {
+ videos: {
+ http: {
+ enabled: true
+ },
+
+ torrent: {
+ enabled: true
+ }
+ }
+ }
+ }
+ })
+ }
+
+ enableLive (options: {
+ allowReplay?: boolean
+ transcoding?: boolean
+ } = {}) {
+ return this.updateExistingSubConfig({
+ newConfig: {
+ live: {
+ enabled: true,
+ allowReplay: options.allowReplay ?? true,
+ transcoding: {
+ enabled: options.transcoding ?? true,
+ resolutions: ConfigCommand.getCustomConfigResolutions(true)
+ }
+ }
+ }
+ })
+ }
+
+ disableTranscoding () {
+ return this.updateExistingSubConfig({
+ newConfig: {
+ transcoding: {
+ enabled: false
+ }
+ }
+ })
+ }
+
+ enableTranscoding (webtorrent = true, hls = true) {
+ return this.updateExistingSubConfig({
+ newConfig: {
+ transcoding: {
+ enabled: true,
+ resolutions: ConfigCommand.getCustomConfigResolutions(true),
+
+ webtorrent: {
+ enabled: webtorrent
+ },
+ hls: {
+ enabled: hls
+ }
+ }
+ }
+ })
+ }
+
getConfig (options: OverrideCommandOptions = {}) {
const path = '/api/v1/config'
@@ -81,6 +145,14 @@ export class ConfigCommand extends AbstractCommand {
})
}
+ async updateExistingSubConfig (options: OverrideCommandOptions & {
+ newConfig: DeepPartial<CustomConfig>
+ }) {
+ const existing = await this.getCustomConfig(options)
+
+ return this.updateCustomConfig({ ...options, newCustomConfig: merge({}, existing, options.newConfig) })
+ }
+
updateCustomSubConfig (options: OverrideCommandOptions & {
newConfig: DeepPartial<CustomConfig>
}) {
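
These helpers replace the hand-rolled `updateCustomSubConfig()` payloads used by the tests above, and `updateExistingSubConfig()` merges into the current custom config instead of overwriting it. Typical calls, based only on the signatures added here (each call is independent; they are grouped in one function purely for illustration):

```ts
import { PeerTubeServer } from '@shared/extra-utils'

async function exampleConfigCalls (server: PeerTubeServer) {
  await server.config.enableImports()                    // HTTP + torrent imports
  await server.config.enableLive({ transcoding: false }) // live enabled, replay allowed
  await server.config.enableTranscoding()                // webtorrent + HLS, all resolutions
  await server.config.disableTranscoding()

  // Merge a partial config on top of whatever custom config is already set
  await server.config.updateExistingSubConfig({
    newConfig: { live: { allowReplay: false } }
  })
}
```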
diff --git a/shared/extra-utils/server/index.ts b/shared/extra-utils/server/index.ts
index 9055dfc57..92ff7a0f9 100644
--- a/shared/extra-utils/server/index.ts
+++ b/shared/extra-utils/server/index.ts
@@ -6,6 +6,7 @@ export * from './follows-command'
export * from './follows'
export * from './jobs'
export * from './jobs-command'
+export * from './object-storage-command'
export * from './plugins-command'
export * from './plugins'
export * from './redundancy-command'
diff --git a/shared/extra-utils/server/jobs-command.ts b/shared/extra-utils/server/jobs-command.ts
index c4eb12dc2..91771c176 100644
--- a/shared/extra-utils/server/jobs-command.ts
+++ b/shared/extra-utils/server/jobs-command.ts
@@ -5,6 +5,16 @@ import { AbstractCommand, OverrideCommandOptions } from '../shared'
export class JobsCommand extends AbstractCommand {
+ async getLatest (options: OverrideCommandOptions & {
+ jobType: JobType
+ }) {
+ const { data } = await this.getJobsList({ ...options, start: 0, count: 1, sort: '-createdAt' })
+
+ if (data.length === 0) return undefined
+
+ return data[0]
+ }
+
getJobsList (options: OverrideCommandOptions & {
state?: JobState
jobType?: JobType
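
`getLatest()` is a small convenience for tests that want to inspect the most recently created job of one type, for instance the new move job. A one-call sketch (this assumes the `JobType` union gains `'move-to-object-storage'` elsewhere in this PR):

```ts
import { PeerTubeServer } from '@shared/extra-utils'

// Returns the newest job of that type, or undefined if none has been created yet
function getLatestMoveJob (server: PeerTubeServer) {
  return server.jobs.getLatest({ jobType: 'move-to-object-storage' })
}
```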
diff --git a/shared/extra-utils/server/jobs.ts b/shared/extra-utils/server/jobs.ts
index 64a0353eb..27104bfdf 100644
--- a/shared/extra-utils/server/jobs.ts
+++ b/shared/extra-utils/server/jobs.ts
@@ -3,7 +3,7 @@ import { JobState } from '../../models'
import { wait } from '../miscs'
import { PeerTubeServer } from './server'
-async function waitJobs (serversArg: PeerTubeServer[] | PeerTubeServer) {
+async function waitJobs (serversArg: PeerTubeServer[] | PeerTubeServer, skipDelayed = false) {
const pendingJobWait = process.env.NODE_PENDING_JOB_WAIT
? parseInt(process.env.NODE_PENDING_JOB_WAIT, 10)
: 250
@@ -13,7 +13,9 @@ async function waitJobs (serversArg: PeerTubeServer[] | PeerTubeServer) {
if (Array.isArray(serversArg) === false) servers = [ serversArg as PeerTubeServer ]
else servers = serversArg as PeerTubeServer[]
- const states: JobState[] = [ 'waiting', 'active', 'delayed' ]
+ const states: JobState[] = [ 'waiting', 'active' ]
+ if (!skipDelayed) states.push('delayed')
+
const repeatableJobs = [ 'videos-views', 'activitypub-cleaner' ]
let pendingRequests: boolean
diff --git a/shared/extra-utils/server/object-storage-command.ts b/shared/extra-utils/server/object-storage-command.ts
new file mode 100644
index 000000000..b4de8f4cb
--- /dev/null
+++ b/shared/extra-utils/server/object-storage-command.ts
@@ -0,0 +1,77 @@
+
+import { HttpStatusCode } from '@shared/models'
+import { makePostBodyRequest } from '../requests'
+import { AbstractCommand } from '../shared'
+
+export class ObjectStorageCommand extends AbstractCommand {
+ static readonly DEFAULT_PLAYLIST_BUCKET = 'streaming-playlists'
+ static readonly DEFAULT_WEBTORRENT_BUCKET = 'videos'
+
+ static getDefaultConfig () {
+ return {
+ object_storage: {
+ enabled: true,
+ endpoint: 'http://' + this.getEndpointHost(),
+ region: this.getRegion(),
+
+ credentials: this.getCredentialsConfig(),
+
+ streaming_playlists: {
+ bucket_name: this.DEFAULT_PLAYLIST_BUCKET
+ },
+
+ videos: {
+ bucket_name: this.DEFAULT_WEBTORRENT_BUCKET
+ }
+ }
+ }
+ }
+
+ static getCredentialsConfig () {
+ return {
+ access_key_id: 'AKIAIOSFODNN7EXAMPLE',
+ secret_access_key: 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'
+ }
+ }
+
+ static getEndpointHost () {
+ return 'localhost:9444'
+ }
+
+ static getRegion () {
+ return 'us-east-1'
+ }
+
+ static getWebTorrentBaseUrl () {
+ return `http://${this.DEFAULT_WEBTORRENT_BUCKET}.${this.getEndpointHost()}/`
+ }
+
+ static getPlaylistBaseUrl () {
+ return `http://${this.DEFAULT_PLAYLIST_BUCKET}.${this.getEndpointHost()}/`
+ }
+
+ static async prepareDefaultBuckets () {
+ await this.createBucket(this.DEFAULT_PLAYLIST_BUCKET)
+ await this.createBucket(this.DEFAULT_WEBTORRENT_BUCKET)
+ }
+
+ static async createBucket (name: string) {
+ await makePostBodyRequest({
+ url: this.getEndpointHost(),
+ path: '/ui/' + name + '?delete',
+ expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307
+ })
+
+ await makePostBodyRequest({
+ url: this.getEndpointHost(),
+ path: '/ui/' + name + '?create',
+ expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307
+ })
+
+ await makePostBodyRequest({
+ url: this.getEndpointHost(),
+ path: '/ui/' + name + '?make-public',
+ expectedStatus: HttpStatusCode.TEMPORARY_REDIRECT_307
+ })
+ }
+}
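
`ObjectStorageCommand` is the hub of the new test setup: its static helpers build a ready-to-use `object_storage` override pointing at the S3-compatible test endpoint on `localhost:9444` and (re)create public buckets through its UI routes. Typical usage, condensed from the object-storage suites above (the wrapper function is illustrative):

```ts
import { createSingleServer, ObjectStorageCommand, setAccessTokensToServers } from '@shared/extra-utils'

// Reset the default 'videos' and 'streaming-playlists' buckets, then start a server
// whose config override enables object storage against the test endpoint.
async function createObjectStorageServer () {
  await ObjectStorageCommand.prepareDefaultBuckets()

  const server = await createSingleServer(1, ObjectStorageCommand.getDefaultConfig())
  await setAccessTokensToServers([ server ])

  return server
}
```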
diff --git a/shared/extra-utils/server/server.ts b/shared/extra-utils/server/server.ts
index 3c335b8e4..bc5e1cd5f 100644
--- a/shared/extra-utils/server/server.ts
+++ b/shared/extra-utils/server/server.ts
@@ -38,11 +38,13 @@ import { PluginsCommand } from './plugins-command'
import { RedundancyCommand } from './redundancy-command'
import { ServersCommand } from './servers-command'
import { StatsCommand } from './stats-command'
+import { ObjectStorageCommand } from './object-storage-command'
export type RunServerOptions = {
hideLogs?: boolean
nodeArgs?: string[]
peertubeArgs?: string[]
+ env?: { [ id: string ]: string }
}
export class PeerTubeServer {
@@ -121,6 +123,7 @@ export class PeerTubeServer {
servers?: ServersCommand
login?: LoginCommand
users?: UsersCommand
+ objectStorage?: ObjectStorageCommand
videos?: VideosCommand
constructor (options: { serverNumber: number } | { url: string }) {
@@ -202,6 +205,10 @@ export class PeerTubeServer {
env['NODE_APP_INSTANCE'] = this.internalServerNumber.toString()
env['NODE_CONFIG'] = JSON.stringify(configOverride)
+ if (options.env) {
+ Object.assign(env, options.env)
+ }
+
const forkOptions = {
silent: true,
env,
@@ -209,10 +216,17 @@ export class PeerTubeServer {
execArgv: options.nodeArgs || []
}
- return new Promise(res => {
+ return new Promise((res, rej) => {
const self = this
this.app = fork(join(root(), 'dist', 'server.js'), options.peertubeArgs || [], forkOptions)
+
+ const onExit = function () {
+ return rej(new Error('Process exited'))
+ }
+
+ this.app.on('exit', onExit)
+
this.app.stdout.on('data', function onStdout (data) {
let dontContinue = false
@@ -241,6 +255,7 @@ export class PeerTubeServer {
console.log(data.toString())
} else {
self.app.stdout.removeListener('data', onStdout)
+ self.app.removeListener('exit', onExit)
}
process.on('exit', () => {
@@ -365,5 +380,6 @@ export class PeerTubeServer {
this.login = new LoginCommand(this)
this.users = new UsersCommand(this)
this.videos = new VideosCommand(this)
+ this.objectStorage = new ObjectStorageCommand(this)
}
}
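
With the new exit listener, a server process that dies during startup now rejects the start promise instead of hanging the test run. A hedged sketch of how a caller could surface that, using the createSingleServer wrapper from servers.ts:

// inside an async mocha test
try {
  await createSingleServer(1, ObjectStorageCommand.getDefaultConfig())
} catch (err) {
  // rejected with 'Process exited' when the forked server crashes before it finishes booting
  console.error('PeerTube server failed to start', err)
}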
diff --git a/shared/extra-utils/server/servers.ts b/shared/extra-utils/server/servers.ts
index f0622feb0..21ab9405b 100644
--- a/shared/extra-utils/server/servers.ts
+++ b/shared/extra-utils/server/servers.ts
@@ -10,11 +10,11 @@ async function createSingleServer (serverNumber: number, configOverride?: Object
return server
}
-function createMultipleServers (totalServers: number, configOverride?: Object) {
+function createMultipleServers (totalServers: number, configOverride?: Object, options: RunServerOptions = {}) {
  const serverPromises: Promise<PeerTubeServer>[] = []
for (let i = 1; i <= totalServers; i++) {
- serverPromises.push(createSingleServer(i, configOverride))
+ serverPromises.push(createSingleServer(i, configOverride, options))
}
return Promise.all(serverPromises)
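
An indicative call site for the extended signature: the extra options object is forwarded to every server, so for instance the S3 credentials can be injected through the forked process environment instead of the config override (the key pair below is the same placeholder returned by getCredentialsConfig above):

// inside an async mocha test
const servers = await createMultipleServers(2, ObjectStorageCommand.getDefaultConfig(), {
  env: {
    AWS_ACCESS_KEY_ID: 'AKIAIOSFODNN7EXAMPLE',
    AWS_SECRET_ACCESS_KEY: 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'
  }
})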
diff --git a/shared/extra-utils/videos/live-command.ts b/shared/extra-utils/videos/live-command.ts
index 81ae458e0..74f5d3089 100644
--- a/shared/extra-utils/videos/live-command.ts
+++ b/shared/extra-utils/videos/live-command.ts
@@ -126,7 +126,7 @@ export class LiveCommand extends AbstractCommand {
video = await this.server.videos.getWithToken({ token: options.token, id: options.videoId })
await wait(500)
- } while (video.isLive === true && video.state.id !== VideoState.PUBLISHED)
+ } while (video.isLive === true || video.state.id !== VideoState.PUBLISHED)
}
async countPlaylists (options: OverrideCommandOptions & {
diff --git a/shared/extra-utils/videos/live.ts b/shared/extra-utils/videos/live.ts
index 9a6df07a8..29f99ed6d 100644
--- a/shared/extra-utils/videos/live.ts
+++ b/shared/extra-utils/videos/live.ts
@@ -89,6 +89,12 @@ async function waitUntilLivePublishedOnAllServers (servers: PeerTubeServer[], vi
}
}
+async function waitUntilLiveSavedOnAllServers (servers: PeerTubeServer[], videoId: string) {
+ for (const server of servers) {
+ await server.live.waitUntilSaved({ videoId })
+ }
+}
+
async function checkLiveCleanupAfterSave (server: PeerTubeServer, videoUUID: string, resolutions: number[] = []) {
const basePath = server.servers.buildDirectory('streaming-playlists')
const hlsPath = join(basePath, 'hls', videoUUID)
@@ -126,5 +132,6 @@ export {
testFfmpegStreamError,
stopFfmpeg,
waitUntilLivePublishedOnAllServers,
+ waitUntilLiveSavedOnAllServers,
checkLiveCleanupAfterSave
}
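
Indicative use of the new helper in a live replay test; ffmpegCommand and liveVideoUUID are placeholders, and waitJobs is assumed to be the usual job-draining helper from extra-utils:

// inside an async mocha test
await stopFfmpeg(ffmpegCommand)

// wait until every server reports the replay as saved before inspecting its files
await waitUntilLiveSavedOnAllServers(servers, liveVideoUUID)
await waitJobs(servers)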
diff --git a/shared/extra-utils/videos/streaming-playlists-command.ts b/shared/extra-utils/videos/streaming-playlists-command.ts
index 9662685da..5d40d35cb 100644
--- a/shared/extra-utils/videos/streaming-playlists-command.ts
+++ b/shared/extra-utils/videos/streaming-playlists-command.ts
@@ -1,5 +1,5 @@
import { HttpStatusCode } from '@shared/models'
-import { unwrapBody, unwrapText } from '../requests'
+import { unwrapBody, unwrapTextOrDecode, unwrapBodyOrDecodeToJSON } from '../requests'
import { AbstractCommand, OverrideCommandOptions } from '../shared'
export class StreamingPlaylistsCommand extends AbstractCommand {
@@ -7,7 +7,7 @@ export class StreamingPlaylistsCommand extends AbstractCommand {
get (options: OverrideCommandOptions & {
url: string
}) {
- return unwrapText(this.getRawRequest({
+ return unwrapTextOrDecode(this.getRawRequest({
...options,
url: options.url,
@@ -33,7 +33,7 @@ export class StreamingPlaylistsCommand extends AbstractCommand {
getSegmentSha256 (options: OverrideCommandOptions & {
url: string
}) {
- return unwrapBody<{ [ id: string ]: string }>(this.getRawRequest({
+ return unwrapBodyOrDecodeToJSON<{ [ id: string ]: string }>(this.getRawRequest({
...options,
url: options.url,
diff --git a/shared/extra-utils/videos/streaming-playlists.ts b/shared/extra-utils/videos/streaming-playlists.ts
index a224b8f5f..6671e3fa6 100644
--- a/shared/extra-utils/videos/streaming-playlists.ts
+++ b/shared/extra-utils/videos/streaming-playlists.ts
@@ -9,17 +9,16 @@ async function checkSegmentHash (options: {
server: PeerTubeServer
baseUrlPlaylist: string
baseUrlSegment: string
- videoUUID: string
resolution: number
hlsPlaylist: VideoStreamingPlaylist
}) {
- const { server, baseUrlPlaylist, baseUrlSegment, videoUUID, resolution, hlsPlaylist } = options
+ const { server, baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist } = options
const command = server.streamingPlaylists
const file = hlsPlaylist.files.find(f => f.resolution.id === resolution)
const videoName = basename(file.fileUrl)
- const playlist = await command.get({ url: `${baseUrlPlaylist}/${videoUUID}/${removeFragmentedMP4Ext(videoName)}.m3u8` })
+ const playlist = await command.get({ url: `${baseUrlPlaylist}/${removeFragmentedMP4Ext(videoName)}.m3u8` })
const matches = /#EXT-X-BYTERANGE:(\d+)@(\d+)/.exec(playlist)
@@ -28,7 +27,7 @@ async function checkSegmentHash (options: {
const range = `${offset}-${offset + length - 1}`
const segmentBody = await command.getSegment({
- url: `${baseUrlSegment}/${videoUUID}/${videoName}`,
+ url: `${baseUrlSegment}/${videoName}`,
expectedStatus: HttpStatusCode.PARTIAL_CONTENT_206,
range: `bytes=${range}`
})
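
Because checkSegmentHash no longer appends the video UUID itself, callers must now pass base URLs that already include the video's playlist path. An indicative call with placeholder values:

// inside an async mocha test
await checkSegmentHash({
  server,
  baseUrlPlaylist: `${baseUrl}/static/streaming-playlists/hls/${video.uuid}`,
  baseUrlSegment: `${baseUrl}/static/streaming-playlists/hls/${video.uuid}`,
  resolution: 720,
  hlsPlaylist
})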
diff --git a/shared/extra-utils/videos/videos-command.ts b/shared/extra-utils/videos/videos-command.ts
index 33725bfdc..d35339c8d 100644
--- a/shared/extra-utils/videos/videos-command.ts
+++ b/shared/extra-utils/videos/videos-command.ts
@@ -188,6 +188,17 @@ export class VideosCommand extends AbstractCommand {
return id
}
+ async listFiles (options: OverrideCommandOptions & {
+ id: number | string
+ }) {
+ const video = await this.get(options)
+
+ const files = video.files || []
+ const hlsFiles = video.streamingPlaylists[0]?.files || []
+
+ return files.concat(hlsFiles)
+ }
+
// ---------------------------------------------------------------------------
listMyVideos (options: OverrideCommandOptions & {
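
A sketch of how listFiles could back an object storage assertion: every returned file, WebTorrent or HLS, should carry a fileUrl rooted in one of the test buckets. The chai expectations and the videoUUID variable are indicative:

// inside an async mocha test
const files = await server.videos.listFiles({ id: videoUUID })
expect(files).to.have.length.above(0)

for (const file of files) {
  // WebTorrent files live under the 'videos' bucket, HLS files under 'streaming-playlists'
  const onObjectStorage =
    file.fileUrl.startsWith(ObjectStorageCommand.getWebTorrentBaseUrl()) ||
    file.fileUrl.startsWith(ObjectStorageCommand.getPlaylistBaseUrl())

  expect(onObjectStorage).to.be.true
}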
diff --git a/shared/models/server/job.model.ts b/shared/models/server/job.model.ts
index 4ab249e0b..ff96283a4 100644
--- a/shared/models/server/job.model.ts
+++ b/shared/models/server/job.model.ts
@@ -19,6 +19,7 @@ export type JobType =
| 'video-redundancy'
| 'video-live-ending'
| 'actor-keys'
+ | 'move-to-object-storage'
export interface Job {
id: number
@@ -136,3 +137,8 @@ export interface VideoLiveEndingPayload {
export interface ActorKeysPayload {
actorId: number
}
+
+export interface MoveObjectStoragePayload {
+ videoUUID: string
+ isNewVideo: boolean
+}
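
For reference, a producer only has to build this payload; the queue wiring for the new 'move-to-object-storage' job type lives server-side and is not part of this excerpt:

const payload: MoveObjectStoragePayload = {
  videoUUID: video.uuid, // placeholder video instance
  isNewVideo: true       // presumably: treat the video as newly published once the move completes
}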
diff --git a/shared/models/videos/index.ts b/shared/models/videos/index.ts
index faa9b9868..733c433a0 100644
--- a/shared/models/videos/index.ts
+++ b/shared/models/videos/index.ts
@@ -26,6 +26,7 @@ export * from './video-resolution.enum'
export * from './video-schedule-update.model'
export * from './video-sort-field.type'
export * from './video-state.enum'
+export * from './video-storage.enum'
export * from './video-streaming-playlist.model'
export * from './video-streaming-playlist.type'
diff --git a/shared/models/videos/video-state.enum.ts b/shared/models/videos/video-state.enum.ts
index 49d997f24..c6af481e7 100644
--- a/shared/models/videos/video-state.enum.ts
+++ b/shared/models/videos/video-state.enum.ts
@@ -3,5 +3,6 @@ export const enum VideoState {
TO_TRANSCODE = 2,
TO_IMPORT = 3,
WAITING_FOR_LIVE = 4,
- LIVE_ENDED = 5
+ LIVE_ENDED = 5,
+ TO_MOVE_TO_EXTERNAL_STORAGE = 6
}
diff --git a/shared/models/videos/video-storage.enum.ts b/shared/models/videos/video-storage.enum.ts
new file mode 100644
index 000000000..7c6690db2
--- /dev/null
+++ b/shared/models/videos/video-storage.enum.ts
@@ -0,0 +1,4 @@
+export const enum VideoStorage {
+ FILE_SYSTEM,
+ OBJECT_STORAGE,
+}
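
A hypothetical consumer sketch, assuming the server-side file models gain a storage attribute of this type (such a column is not visible in this excerpt):

function isOnObjectStorage (file: { storage: VideoStorage }) {
  return file.storage === VideoStorage.OBJECT_STORAGE
}

// e.g. used to decide between serving from the local storage directories and
// redirecting to the configured object_storage base_url/bucket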
diff --git a/support/docker/production/config/custom-environment-variables.yaml b/support/docker/production/config/custom-environment-variables.yaml
index ce0f89d7b..1b474582a 100644
--- a/support/docker/production/config/custom-environment-variables.yaml
+++ b/support/docker/production/config/custom-environment-variables.yaml
@@ -45,6 +45,29 @@ smtp:
__format: "json"
from_address: "PEERTUBE_SMTP_FROM"
+object_storage:
+ enabled:
+ __name: "PEERTUBE_OBJECT_STORAGE_ENABLED"
+ __format: "json"
+
+ endpoint: "PEERTUBE_OBJECT_STORAGE_ENDPOINT"
+
+ region: "PEERTUBE_OBJECT_STORAGE_REGION"
+
+ max_upload_part:
+ __name: "PEERTUBE_OBJECT_STORAGE_MAX_UPLOAD_PART"
+ __format: "json"
+
+ streaming_playlists:
+ bucket_name: "PEERTUBE_OBJECT_STORAGE_STREAMING_PLAYLISTS_BUCKET_NAME"
+ prefix: "PEERTUBE_OBJECT_STORAGE_STREAMING_PLAYLISTS_PREFIX"
+ base_url: "PEERTUBE_OBJECT_STORAGE_STREAMING_PLAYLISTS_BASE_URL"
+
+ videos:
+ bucket_name: "PEERTUBE_OBJECT_STORAGE_VIDEOS_BUCKET_NAME"
+ prefix: "PEERTUBE_OBJECT_STORAGE_VIDEOS_PREFIX"
+ base_url: "PEERTUBE_OBJECT_STORAGE_VIDEOS_BASE_URL"
+
log:
level: "PEERTUBE_LOG_LEVEL"
log_ping_requests:
diff --git a/yarn.lock b/yarn.lock
index 5590d255d..0f4fe3938 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -49,6 +49,770 @@
resolved "https://registry.yarnpkg.com/@assemblyscript/loader/-/loader-0.10.1.tgz#70e45678f06c72fa2e350e8553ec4a4d72b92e06"
integrity sha512-H71nDOOL8Y7kWRLqf6Sums+01Q5msqBW2KhDUTemh1tvY04eSkSXrK0uj/4mmY0Xr16/3zyZmsrxN7CKuRbNRg==
+"@aws-crypto/crc32@^1.0.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-1.1.0.tgz#aff048e207798fad0b0e7765b12d474c273779b6"
+ integrity sha512-ifvfaaJVvT+JUTi3zSkX4wtuGGVJrAcjN7ftg+JiE/frNBP3zNwo4xipzWBsMLZfNuzMZuaesEYyqkZcs5tzCQ==
+ dependencies:
+ tslib "^1.11.1"
+
+"@aws-crypto/ie11-detection@^1.0.0":
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/@aws-crypto/ie11-detection/-/ie11-detection-1.0.0.tgz#d3a6af29ba7f15458f79c41d1cd8cac3925e726a"
+ integrity sha512-kCKVhCF1oDxFYgQrxXmIrS5oaWulkvRcPz+QBDMsUr2crbF4VGgGT6+uQhSwJFdUAQ2A//Vq+uT83eJrkzFgXA==
+ dependencies:
+ tslib "^1.11.1"
+
+"@aws-crypto/sha256-browser@^1.0.0":
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-1.1.1.tgz#85dddf13e8f8d74c0d6592d993e4bf401da9f420"
+ integrity sha512-nS4vdan97It6HcweV58WXtjPbPSc0JXd3sAwlw3Ou5Mc3WllSycAS32Tv2LRn8butNQoU9AE3jEQAOgiMdNC1Q==
+ dependencies:
+ "@aws-crypto/ie11-detection" "^1.0.0"
+ "@aws-crypto/sha256-js" "^1.1.0"
+ "@aws-crypto/supports-web-crypto" "^1.0.0"
+ "@aws-sdk/types" "^3.1.0"
+ "@aws-sdk/util-locate-window" "^3.0.0"
+ "@aws-sdk/util-utf8-browser" "^3.0.0"
+ tslib "^1.11.1"
+
+"@aws-crypto/sha256-js@^1.0.0", "@aws-crypto/sha256-js@^1.1.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-1.1.0.tgz#a58386ad18186e392e0f1d98d18831261d27b071"
+ integrity sha512-VIhuqbPgXDVr8sZe2yhgQcDRRmzf4CI8fmC1A3bHiRfE6wlz1d8KpeemqbuoEHotz/Dch9yOxlshyQDNjNFeHA==
+ dependencies:
+ "@aws-sdk/types" "^3.1.0"
+ "@aws-sdk/util-utf8-browser" "^3.0.0"
+ tslib "^1.11.1"
+
+"@aws-crypto/supports-web-crypto@^1.0.0":
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-1.0.0.tgz#c40901bc17ac1e875e248df16a2b47ad8bfd9a93"
+ integrity sha512-IHLfv+WmVH89EW4n6a5eE8/hUlz6qkWGMn/v4r5ZgzcXdTC5nolii2z3k46y01hWRiC2PPhOdeSLzMUCUMco7g==
+ dependencies:
+ tslib "^1.11.1"
+
+"@aws-sdk/abort-controller@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/abort-controller/-/abort-controller-3.25.0.tgz#a9ea250140de378d8beb6d2f427067fa30423e9e"
+ integrity sha512-uEVKqKkPVz6atbCxCNJY5O7V+ieSK8crUswXo8/WePyEbGEgxJ4t9x/WG4lV8kBjelmvQHDR4GqfJmb5Sh9xSg==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/chunked-blob-reader-native@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/chunked-blob-reader-native/-/chunked-blob-reader-native-3.23.0.tgz#72d711e3cc904bb380e99cdd60c59deacd1596ac"
+ integrity sha512-Ya5f8Ntv0EyZw+AHkpV6n6qqHzpCDNlkX50uj/dwFCMmPiHFWsWMvd0Qu04Y7miycJINEatRrJ5V8r/uVvZIDg==
+ dependencies:
+ "@aws-sdk/util-base64-browser" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/chunked-blob-reader@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/chunked-blob-reader/-/chunked-blob-reader-3.23.0.tgz#83eb6a437172b671e699850378bcb558e15374ec"
+ integrity sha512-gmJhCuXrKOOumppviE4K30NvsIQIqqxbGDNptrJrMYBO0qXCbK8/BypZ/hS/oT3loDzlSIxG2z5GDL/va9lbFw==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/client-s3@^3.23.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/client-s3/-/client-s3-3.25.0.tgz#6b8146c18e76606378c5f736554cb41ad4ce229e"
+ integrity sha512-yVDPo6x7DCt9t833SjqWI+AQBx81/m54gLF/ePQZBeHL5mPMEyMXTF0o22yUP5t8f92U2VAyRCP2NvKtB9WgBg==
+ dependencies:
+ "@aws-crypto/sha256-browser" "^1.0.0"
+ "@aws-crypto/sha256-js" "^1.0.0"
+ "@aws-sdk/client-sts" "3.25.0"
+ "@aws-sdk/config-resolver" "3.25.0"
+ "@aws-sdk/credential-provider-node" "3.25.0"
+ "@aws-sdk/eventstream-serde-browser" "3.25.0"
+ "@aws-sdk/eventstream-serde-config-resolver" "3.25.0"
+ "@aws-sdk/eventstream-serde-node" "3.25.0"
+ "@aws-sdk/fetch-http-handler" "3.25.0"
+ "@aws-sdk/hash-blob-browser" "3.25.0"
+ "@aws-sdk/hash-node" "3.25.0"
+ "@aws-sdk/hash-stream-node" "3.25.0"
+ "@aws-sdk/invalid-dependency" "3.25.0"
+ "@aws-sdk/md5-js" "3.25.0"
+ "@aws-sdk/middleware-apply-body-checksum" "3.25.0"
+ "@aws-sdk/middleware-bucket-endpoint" "3.25.0"
+ "@aws-sdk/middleware-content-length" "3.25.0"
+ "@aws-sdk/middleware-expect-continue" "3.25.0"
+ "@aws-sdk/middleware-host-header" "3.25.0"
+ "@aws-sdk/middleware-location-constraint" "3.25.0"
+ "@aws-sdk/middleware-logger" "3.25.0"
+ "@aws-sdk/middleware-retry" "3.25.0"
+ "@aws-sdk/middleware-sdk-s3" "3.25.0"
+ "@aws-sdk/middleware-serde" "3.25.0"
+ "@aws-sdk/middleware-signing" "3.25.0"
+ "@aws-sdk/middleware-ssec" "3.25.0"
+ "@aws-sdk/middleware-stack" "3.25.0"
+ "@aws-sdk/middleware-user-agent" "3.25.0"
+ "@aws-sdk/node-config-provider" "3.25.0"
+ "@aws-sdk/node-http-handler" "3.25.0"
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/smithy-client" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/url-parser" "3.25.0"
+ "@aws-sdk/util-base64-browser" "3.23.0"
+ "@aws-sdk/util-base64-node" "3.23.0"
+ "@aws-sdk/util-body-length-browser" "3.23.0"
+ "@aws-sdk/util-body-length-node" "3.23.0"
+ "@aws-sdk/util-user-agent-browser" "3.25.0"
+ "@aws-sdk/util-user-agent-node" "3.25.0"
+ "@aws-sdk/util-utf8-browser" "3.23.0"
+ "@aws-sdk/util-utf8-node" "3.23.0"
+ "@aws-sdk/util-waiter" "3.25.0"
+ "@aws-sdk/xml-builder" "3.23.0"
+ entities "2.2.0"
+ fast-xml-parser "3.19.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/client-sso@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.25.0.tgz#9756178afb08e399b5aef5d12dfece3825bc2e26"
+ integrity sha512-b8v4tb7rncnqE5ktBlQEckFdNT+Pk2mBg4e1Uc9C1Z3XmZM+wOWtlbu+KRvgMgDWSx2FzLIjAKe3mLaM4o1Xhg==
+ dependencies:
+ "@aws-crypto/sha256-browser" "^1.0.0"
+ "@aws-crypto/sha256-js" "^1.0.0"
+ "@aws-sdk/config-resolver" "3.25.0"
+ "@aws-sdk/fetch-http-handler" "3.25.0"
+ "@aws-sdk/hash-node" "3.25.0"
+ "@aws-sdk/invalid-dependency" "3.25.0"
+ "@aws-sdk/middleware-content-length" "3.25.0"
+ "@aws-sdk/middleware-host-header" "3.25.0"
+ "@aws-sdk/middleware-logger" "3.25.0"
+ "@aws-sdk/middleware-retry" "3.25.0"
+ "@aws-sdk/middleware-serde" "3.25.0"
+ "@aws-sdk/middleware-stack" "3.25.0"
+ "@aws-sdk/middleware-user-agent" "3.25.0"
+ "@aws-sdk/node-config-provider" "3.25.0"
+ "@aws-sdk/node-http-handler" "3.25.0"
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/smithy-client" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/url-parser" "3.25.0"
+ "@aws-sdk/util-base64-browser" "3.23.0"
+ "@aws-sdk/util-base64-node" "3.23.0"
+ "@aws-sdk/util-body-length-browser" "3.23.0"
+ "@aws-sdk/util-body-length-node" "3.23.0"
+ "@aws-sdk/util-user-agent-browser" "3.25.0"
+ "@aws-sdk/util-user-agent-node" "3.25.0"
+ "@aws-sdk/util-utf8-browser" "3.23.0"
+ "@aws-sdk/util-utf8-node" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/client-sts@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/client-sts/-/client-sts-3.25.0.tgz#e189c46d560daaa56b872330a5e7d125d00d5a1f"
+ integrity sha512-VQoG4GX+Pf5U/WtUgVgXLF2xC1jK6o4YmOxz09GhPfKT0y26x8hh42jY3zRCys7ldA3VKkfTLCeqMm3UKqXJZg==
+ dependencies:
+ "@aws-crypto/sha256-browser" "^1.0.0"
+ "@aws-crypto/sha256-js" "^1.0.0"
+ "@aws-sdk/config-resolver" "3.25.0"
+ "@aws-sdk/credential-provider-node" "3.25.0"
+ "@aws-sdk/fetch-http-handler" "3.25.0"
+ "@aws-sdk/hash-node" "3.25.0"
+ "@aws-sdk/invalid-dependency" "3.25.0"
+ "@aws-sdk/middleware-content-length" "3.25.0"
+ "@aws-sdk/middleware-host-header" "3.25.0"
+ "@aws-sdk/middleware-logger" "3.25.0"
+ "@aws-sdk/middleware-retry" "3.25.0"
+ "@aws-sdk/middleware-sdk-sts" "3.25.0"
+ "@aws-sdk/middleware-serde" "3.25.0"
+ "@aws-sdk/middleware-signing" "3.25.0"
+ "@aws-sdk/middleware-stack" "3.25.0"
+ "@aws-sdk/middleware-user-agent" "3.25.0"
+ "@aws-sdk/node-config-provider" "3.25.0"
+ "@aws-sdk/node-http-handler" "3.25.0"
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/smithy-client" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/url-parser" "3.25.0"
+ "@aws-sdk/util-base64-browser" "3.23.0"
+ "@aws-sdk/util-base64-node" "3.23.0"
+ "@aws-sdk/util-body-length-browser" "3.23.0"
+ "@aws-sdk/util-body-length-node" "3.23.0"
+ "@aws-sdk/util-user-agent-browser" "3.25.0"
+ "@aws-sdk/util-user-agent-node" "3.25.0"
+ "@aws-sdk/util-utf8-browser" "3.23.0"
+ "@aws-sdk/util-utf8-node" "3.23.0"
+ entities "2.2.0"
+ fast-xml-parser "3.19.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/config-resolver@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/config-resolver/-/config-resolver-3.25.0.tgz#d7caba201a00aeb9d60aeddb8901b7e58f7f5a2b"
+ integrity sha512-t5CE90jYkxQyGGxG22atf8040lHuL17wptGp1kN8nSxaG6PudKhxQuHPAGYt6FHgrqqeyFccp/P3jiDSjqUaVw==
+ dependencies:
+ "@aws-sdk/signature-v4" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/credential-provider-env@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.25.0.tgz#9899ff627f40f09223126d6d2f1153b3ade2e804"
+ integrity sha512-I65/PNGQG+ktt1QSHCWwQ8v7QRK1eRdLkQl3zB5rwBuANbQ3Yu+vA+lAwU+IbpGCOEpHJO3lDN330It5B4Rtvg==
+ dependencies:
+ "@aws-sdk/property-provider" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/credential-provider-imds@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.25.0.tgz#c40b76bb6a4561fb4c5fd94ce437aac938aaa23f"
+ integrity sha512-BhPM89tjeXsa0KXxz2UTLeAY798Qg1cddFXPZXaJyHQ6eWsrDSoKbSOaeP+rznp037NNLnLX6PB8MOtfu3MAzw==
+ dependencies:
+ "@aws-sdk/property-provider" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/credential-provider-ini@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.25.0.tgz#32652f30247f84dd49e4c96ecec91577f972f2e3"
+ integrity sha512-p6yvqcZMN+eNZbJXnrFQgLpA06pVA2XagGJdkdDb3q9J4HYoWQduocWUfr3dy0HJdjDZ01BVT/ldBanUyhznQQ==
+ dependencies:
+ "@aws-sdk/credential-provider-env" "3.25.0"
+ "@aws-sdk/credential-provider-imds" "3.25.0"
+ "@aws-sdk/credential-provider-sso" "3.25.0"
+ "@aws-sdk/credential-provider-web-identity" "3.25.0"
+ "@aws-sdk/property-provider" "3.25.0"
+ "@aws-sdk/shared-ini-file-loader" "3.23.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-credentials" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/credential-provider-node@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.25.0.tgz#f8f4c9b8ae51a89f44c11fbbf999e1363424f39e"
+ integrity sha512-GZedy79oSpnDr2I54su3EE1fwpTRFBw/Sn4RBE4VWCM8AWq7ZNk7IKAmbnBrmt+gpFpr9k2PifUIJ7fAcbNvJQ==
+ dependencies:
+ "@aws-sdk/credential-provider-env" "3.25.0"
+ "@aws-sdk/credential-provider-imds" "3.25.0"
+ "@aws-sdk/credential-provider-ini" "3.25.0"
+ "@aws-sdk/credential-provider-process" "3.25.0"
+ "@aws-sdk/credential-provider-sso" "3.25.0"
+ "@aws-sdk/credential-provider-web-identity" "3.25.0"
+ "@aws-sdk/property-provider" "3.25.0"
+ "@aws-sdk/shared-ini-file-loader" "3.23.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-credentials" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/credential-provider-process@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.25.0.tgz#472938d6582152252fb69247531125ed24017d4e"
+ integrity sha512-qMldWWDvvy6Q+HMcTAVWUJP7MLjLXqf0P08Vb5oGYOlyh4TCJDorccRVVsQvutjQggpBaIMTQdzjdamqtZ1y+w==
+ dependencies:
+ "@aws-sdk/property-provider" "3.25.0"
+ "@aws-sdk/shared-ini-file-loader" "3.23.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-credentials" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/credential-provider-sso@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.25.0.tgz#e2065ee6aec63a647acc816732ffcd270eb3c669"
+ integrity sha512-cGP1Zcw2fZHn4CYGgq4soody4x5TrsWk0Pf9F8yCjRMSSZqs3rj0+PrXy4xqkiLCvTSrse6p4e4wMMpaFAm7Tg==
+ dependencies:
+ "@aws-sdk/client-sso" "3.25.0"
+ "@aws-sdk/property-provider" "3.25.0"
+ "@aws-sdk/shared-ini-file-loader" "3.23.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-credentials" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/credential-provider-web-identity@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.25.0.tgz#9c330322eea3a5f1f0166556c1f18ecc0992b0bf"
+ integrity sha512-6NvOaynsXGuNYbrGzT5h+kkGMaKtAI6zKgPqS/20NKlO5PJc9Eo56Hdbq0gBohXSBzRJE5Jx/1OOrTdvRlwniw==
+ dependencies:
+ "@aws-sdk/property-provider" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/eventstream-marshaller@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-marshaller/-/eventstream-marshaller-3.25.0.tgz#8db1f633a638f50d8e37441f01d739238d374549"
+ integrity sha512-gUZIIxupgCIGyspiIV6bEplSRWnhAR9MkyrCJbHhbs4GjWIYlFqp7W0+Y7HY1tIeeXCUf0O8KE3paUMszKPXtg==
+ dependencies:
+ "@aws-crypto/crc32" "^1.0.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-hex-encoding" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/eventstream-serde-browser@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-browser/-/eventstream-serde-browser-3.25.0.tgz#55481e23acb454d876948fd3b6e609a79977fa7d"
+ integrity sha512-QJF08OIZiufoBPPoVcRwBPvZIpKMSZpISZfpCHcY1GaTpMIzz35N7Nkd10JGpfzpUO9oFcgcmm2q3XHo1XJyyw==
+ dependencies:
+ "@aws-sdk/eventstream-marshaller" "3.25.0"
+ "@aws-sdk/eventstream-serde-universal" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/eventstream-serde-config-resolver@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-3.25.0.tgz#5b8f4ef24fb1bf6c9f0353fb219a68206bad5eb4"
+ integrity sha512-Fb4VS3waKNzc6pK6tQBmWM+JmCNQJYNG/QBfb8y4AoJOZ+I7yX0Qgo90drh8IiUcIKDeprUFjSi/cGIa/KHIsg==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/eventstream-serde-node@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-node/-/eventstream-serde-node-3.25.0.tgz#7ae7fcb8db1e554638f8f1c0fea514cfb07e2524"
+ integrity sha512-gPs+6w0zXf+p0PuOxxmpAlCvP/7E7+8oAar8Ys27exnLXNgqJJK1k5hMBSrfR9GLVti3EhJ1M9x5Seg1SN0/SA==
+ dependencies:
+ "@aws-sdk/eventstream-marshaller" "3.25.0"
+ "@aws-sdk/eventstream-serde-universal" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/eventstream-serde-universal@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-universal/-/eventstream-serde-universal-3.25.0.tgz#bf84056fcad894c14f7239272248ea5b3ff39d47"
+ integrity sha512-NgsQk5dXg7NlRDEKGRUdiAx7WESQGD1jEhXitklL3/PHRZ7Y9BJugEFlBvKpU7tiHZBcomTbl/gE2o6i2op/jA==
+ dependencies:
+ "@aws-sdk/eventstream-marshaller" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/fetch-http-handler@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.25.0.tgz#0ba013ced267b8ead120be1fcba5bdbbc379b82f"
+ integrity sha512-792kkbfSRBdiFb7Q2cDJts9MKxzAwuQSwUIwRKAOMazU8HkKbKnXXAFSsK3T7VasOFOh7O7YEGN0q9UgEw1q+g==
+ dependencies:
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/querystring-builder" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-base64-browser" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/hash-blob-browser@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/hash-blob-browser/-/hash-blob-browser-3.25.0.tgz#2708daf0f2b53c6670a94276c1048a9a34706108"
+ integrity sha512-dsvV/nkW8v9wIotd3xJn3TQ8AxVLl56H82WkGkHcfw61csRxj3eSUNv0apUBopCcQPK8OK4l2nHAg08r0+LWXg==
+ dependencies:
+ "@aws-sdk/chunked-blob-reader" "3.23.0"
+ "@aws-sdk/chunked-blob-reader-native" "3.23.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/hash-node@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/hash-node/-/hash-node-3.25.0.tgz#b149ddf170f4038c7cc3afe8f12e21b0f63e0771"
+ integrity sha512-qRn6iqG9VLt8D29SBABcbauDLn92ssMjtpyVApiOhDYyFm2VA2avomOHD6y2PRBMwM5FMQAygZbpA2HIN2F96w==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-buffer-from" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/hash-stream-node@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/hash-stream-node/-/hash-stream-node-3.25.0.tgz#6fa38cc349a9037367f20ce2601ff0510035dfa2"
+ integrity sha512-pzScUO9pPEEHQ5YQk1sl1bPlU2tt0OCblxUwboZJ9mRgNnWwkMWxe7Mec5IfyMWVUcbIznUHn7qRYEvJQ9JXmw==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/invalid-dependency@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/invalid-dependency/-/invalid-dependency-3.25.0.tgz#a75dfb7e86a0e1eb6083b61397dc49a1db041434"
+ integrity sha512-ZBXjBAF2JSiO/wGBa1oaXsd1q5YG3diS8TfIUMXeQoe9O66R5LGoGOQeAbB/JjlwFot6DZfAcfocvl6CtWwqkw==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/is-array-buffer@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/is-array-buffer/-/is-array-buffer-3.23.0.tgz#3a5d601b0102ea3a4d832bde647509c8405b2ec9"
+ integrity sha512-XN20/scFthok0lCbjtinW77CoIBoar8cbOzmu+HkYTnBBpJrF6Ai5g9sgglO8r+X+OLn4PrDrTP+BxdpNuIh9g==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/md5-js@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/md5-js/-/md5-js-3.25.0.tgz#32cefc43a8c0ee1d85586b95eba0be4912cde534"
+ integrity sha512-97MtL1VF3JCkyJJnwi8LcXpqItnH1VtgoqtVqmaASYp5GXnlsnA1WDnB0754ufPHlssS1aBj/gkLzMZ0Htw/Rg==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-utf8-browser" "3.23.0"
+ "@aws-sdk/util-utf8-node" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-apply-body-checksum@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-apply-body-checksum/-/middleware-apply-body-checksum-3.25.0.tgz#4263ea8c8e1808e5a4a278fb704ebe7aa891f698"
+ integrity sha512-162qFG7eap4vDKuKrpXWQYE4tbIETNrpTQX6jrPgqostOy1O0Nc5Bn1COIoOMgeMVnkOAZV7qV1J/XAYGz32Yw==
+ dependencies:
+ "@aws-sdk/is-array-buffer" "3.23.0"
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-bucket-endpoint@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.25.0.tgz#d698230ae552533a1b8ded2c3e6885b4a8374795"
+ integrity sha512-r/6ECFiw/TNjzhAuZzUx3M/1mAtezHTp3e8twB4dDbRRQqABrEZ/dynXi1VxrT2kKW0ZgZNXqEer/NfPOtWB8g==
+ dependencies:
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-arn-parser" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-content-length@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-content-length/-/middleware-content-length-3.25.0.tgz#71031d326e52f788396e0ed8216410840059ac53"
+ integrity sha512-uOXus0MmZi/mucRIr5yfwM1vDhYG66CujNfnhyEaq5f4kcDA1Q5qPWSn9dkQPV9JWTZK3WTuYiOPSgtmlAYTAg==
+ dependencies:
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-expect-continue@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.25.0.tgz#bb41ea9d716c6ce04c4d8fb2cc2dd5fd37f6ccd9"
+ integrity sha512-o3euv8NIO0zlHML81krtfs4TrF5gZwoxBYtY+6tRHXlgutsHe1yfg1wrhWnJNbJg1QhPwXxbMNfYX7MM83D8Ng==
+ dependencies:
+ "@aws-sdk/middleware-header-default" "3.25.0"
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-header-default@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-header-default/-/middleware-header-default-3.25.0.tgz#17fec9b1941e81059a1374eba58b52230da35a2b"
+ integrity sha512-xkFfZcctPL0VTxmEKITf6/MSDv/8rY+8uA9OMt/YZqfbg0RfeqR2+R1xlDNDxeHeK/v+g5gTNIYTQLM8L2unNA==
+ dependencies:
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-host-header@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.25.0.tgz#f08dd8c45362cf5cb152c478027092e3d1f4aa58"
+ integrity sha512-xKD/CfsUS3ul2VaQ3IgIUXgA7jU2/Guo/DUhYKrLZTOxm0nuvsIFw0RqSCtRBCLptE5Qi+unkc1LcFDbfqrRbg==
+ dependencies:
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-location-constraint@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.25.0.tgz#7ba5798aa46cd08c90823f649fcdae0ce5227095"
+ integrity sha512-diwmJ+MRQrq3H9VH+8CNAT4dImf2j3CLewlMrUEY+HsJN9xl2mtU6GQaluQg60iw6FjurLUKKGTTZCul4PGkIQ==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-logger@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.25.0.tgz#03294611be7a2f4aba06e9d80e04318c0991d769"
+ integrity sha512-M1F7BlAsDKoEM8hBaU2pHlLSM40rzzgtZ6jFNhfmTwGcjxe1N7JXCH5QPa7aI8wnJq2RoIRHVfVsUH4GwvOZnA==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-retry@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-retry/-/middleware-retry-3.25.0.tgz#e9f1b011494142aa27ece3ef881e8a3d4866797c"
+ integrity sha512-SzdWPo4ESUR6AXvIf4eC8s5sko2G9Hou6cUIr+BWI4h7whA32j/aWUmvcMHxWT/eaSuPeruXrnvKyLvuM0RjJg==
+ dependencies:
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/service-error-classification" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+ uuid "^8.3.2"
+
+"@aws-sdk/middleware-sdk-s3@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.25.0.tgz#64278bbc97c3a2c26411f155642cc35e8de38887"
+ integrity sha512-Y1P6JnpAdj7p5Q43aSLSuYBCc3hKpZ/mrqFSGN8VFXl7Tzo7tYfjpd9SVRxNGJK7O7tDAUsPNmuGqBrdA2tj8w==
+ dependencies:
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-arn-parser" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-sdk-sts@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.25.0.tgz#15d4836958f70187cbb6819a0c0742b751fb44ed"
+ integrity sha512-1SoZZTVejo+32eH0WqXaFvt/NIkVEYWquh3OJpkghMi2oOnMfeIRI0uSoqshL6949f4iSfUvvtuzDpyA7XNCQA==
+ dependencies:
+ "@aws-sdk/middleware-signing" "3.25.0"
+ "@aws-sdk/property-provider" "3.25.0"
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/signature-v4" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-serde@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-serde/-/middleware-serde-3.25.0.tgz#e1284ed4af64b4444cfeb7b5275f489418fa2f58"
+ integrity sha512-065Kugo8yXzBkcVAxctxFCHKlHcINnaQRsJ8ifvgc+UOEgvTG9+LfGWDwfdgarW9CkF7RkCoZOyaqFsO+HJWsg==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-signing@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-signing/-/middleware-signing-3.25.0.tgz#de19f5b27c34161081553a87285f1b5690e2cb9a"
+ integrity sha512-FkhxGMV3UY5HIAwUcarfxdq/CF/tYukdg+bkbTNluMpkcJczqn6shpEIQAGa5FFQP3Lya+STL1NuNXfOP7bG9w==
+ dependencies:
+ "@aws-sdk/property-provider" "3.25.0"
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/signature-v4" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-ssec@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-ssec/-/middleware-ssec-3.25.0.tgz#f8cf5bb6fe48d842b1df77f35ccb0f77f1a07b71"
+ integrity sha512-bnrHb8oddW+vDexbNzZtpfshshKru+skcmq3dyXlL8LB/NlJsMiQJE8xoGbq5odTLiflIgaDBt527m5q58i+fg==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-stack@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-stack/-/middleware-stack-3.25.0.tgz#8fc022c90b030c80308bf2930c4a7040052234b4"
+ integrity sha512-s2VgdsasOVKHY3/SIGsw9AeZMMsdcIbBGWim9n5IO3j8C8y54EdRLVCEja8ePvMDZKIzuummwatYPHaUrnqPtQ==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/middleware-user-agent@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.25.0.tgz#2033da6cdcfbf4641b991e3ee3c60ba9809898e7"
+ integrity sha512-HXd/Qknq8Cp7fzJYU7jDDpN7ReJ3arUrnt+dAPNaDDrhmrBbCZp+24UXN6X6DAj0JICRoRuF/l7KxjwdF5FShw==
+ dependencies:
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/node-config-provider@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/node-config-provider/-/node-config-provider-3.25.0.tgz#6ec3e9031b7ff0c51d6e0b33aeff3547ea5619b3"
+ integrity sha512-95FiUDuh1YGo0Giti0Xz9l2TV0Wzw75M1xx0TduFcm1dpLKl+znxTgYh+4G+MOSMHNGy+6K91yxurv4PGYgCWw==
+ dependencies:
+ "@aws-sdk/property-provider" "3.25.0"
+ "@aws-sdk/shared-ini-file-loader" "3.23.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/node-http-handler@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/node-http-handler/-/node-http-handler-3.25.0.tgz#b636ea2c39b4a47cf9bffd4cdb6a41c603b99bff"
+ integrity sha512-zVeAM/bXewZiuMtcUZI/xGDID6knkzOv73ueVkzUbP0Ki8bfao7diR3hMbIt5Fy/r8cAVjJce9v6zFqo4sr1WA==
+ dependencies:
+ "@aws-sdk/abort-controller" "3.25.0"
+ "@aws-sdk/protocol-http" "3.25.0"
+ "@aws-sdk/querystring-builder" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/property-provider@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/property-provider/-/property-provider-3.25.0.tgz#2fd7246917b9b6ff448a599163a479bc417a1421"
+ integrity sha512-jUnPDguLWsyGLPfdxGdeaXe3j/CjS3kxBmctvI+soZg57rA2hntP9rm7SUZ2+5rj4mmJaI3bzchiaY3kE3JmpA==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/protocol-http@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/protocol-http/-/protocol-http-3.25.0.tgz#4b638cb90672fc2d6cb6d15bebc8bb1fb297da2e"
+ integrity sha512-4Jebt5G8uIFa+HZO7KOgOtA66E/CXysQekiV5dfAsU8ca+rX5PB6qhpWZ2unX/l6He+oDQ0zMoW70JkNiP4/4w==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/querystring-builder@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/querystring-builder/-/querystring-builder-3.25.0.tgz#9e6f5eaa5d6805fbf45ae4a47ccbaf823584a4a2"
+ integrity sha512-o/R3/viOxjWckI+kepkxJSL7fIdg1hHYOW/rOpo9HbXS0CJrHVnB8vlBb+Xwl1IFyY2gg+5YZTjiufcgpgRBkw==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-uri-escape" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/querystring-parser@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/querystring-parser/-/querystring-parser-3.25.0.tgz#7fe0a3ddf95a4e5475f53be056fce435fb24b774"
+ integrity sha512-FCNyaOLFLVS5j43MhVA7/VJUDX0t/9RyNTNulHgzFjj6ffsgqcY0uwUq1RO3QCL4asl56zOrLVJgK+Z7wMbvFg==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/service-error-classification@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/service-error-classification/-/service-error-classification-3.25.0.tgz#1f24fe74f0a89f00d4f6f2ad1d7bb6b0e2f871e7"
+ integrity sha512-66FfIab87LnnHtOLrGrVOht9Pw6lE8appyOpBdtoeoU5DP7ARSWuDdsYmKdGdRCWvn/RaVFbSYua9k0M1WsGqg==
+
+"@aws-sdk/shared-ini-file-loader@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.23.0.tgz#574901a31e65e425632a9cae6a64f6382a2b76e8"
+ integrity sha512-YUp46l6E3dLKHp1cKMkZI4slTjsVc/Lm7nPCTVc3oQvZ1MvC99N/jMCmZ7X5YYofuAUSdc9eJ8sYiF2BnUww9g==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/signature-v4@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4/-/signature-v4-3.25.0.tgz#c7fb8184a09593ef6dc62029ca45e252b51247b2"
+ integrity sha512-6KDRRz9XVrj9RxrBLC6dzfnb2TDl3CjIzcNpLdRuKFgzEEdwV+5D+EZuAQU3MuHG5pWTIwG72k/dmCbJ2MDPUQ==
+ dependencies:
+ "@aws-sdk/is-array-buffer" "3.23.0"
+ "@aws-sdk/types" "3.25.0"
+ "@aws-sdk/util-hex-encoding" "3.23.0"
+ "@aws-sdk/util-uri-escape" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/smithy-client@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/smithy-client/-/smithy-client-3.25.0.tgz#bfdf77f1fa82b26bb7893f16056e8e60e49a140a"
+ integrity sha512-+/iMCNziL5/muaY/gl3xkRsSZyeoVCUSjSbbZjDIXbqDbB9SOz4o3UAIgWHoCgYNfsF25GQR6rThLi61FrSyoQ==
+ dependencies:
+ "@aws-sdk/middleware-stack" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/types@3.25.0", "@aws-sdk/types@^3.1.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.25.0.tgz#981210272dae2d259130f6dca8429522d9a564bb"
+ integrity sha512-vS0+cTKwj6CujlR07HmeEBxzWPWSrdmZMYnxn/QC9KW9dFu0lsyCGSCqWsFluI6GI0flsnYYWNkP5y4bfD9tqg==
+
+"@aws-sdk/url-parser@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/url-parser/-/url-parser-3.25.0.tgz#668c7d9d4bc21854c10bfb8bdf762a9206776fae"
+ integrity sha512-qZ3Vq0NjHsE7Qq6R5NVRswIAsiyYjCDnAV+/Vt4jU/K0V3mGumiasiJyRyblW4Da8R6kfcJk0mHSMFRJfoHh8Q==
+ dependencies:
+ "@aws-sdk/querystring-parser" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/util-arn-parser@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-arn-parser/-/util-arn-parser-3.23.0.tgz#7372460ba98a6826f97d9622759764bcf09add79"
+ integrity sha512-J3+/wnC21kbb3UAHo7x31aCZxzIa7GBijt6Q7nad/j2aF38EZtE3SI0aZpD8250Vi+9zsZ4672QDUeSZ5BR5kg==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/util-base64-browser@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-base64-browser/-/util-base64-browser-3.23.0.tgz#61594ac9529756361c81ece287548ab5b8c5a768"
+ integrity sha512-xlI/qw+uhLJWa3k0mRtRHQ42v5QzsMFEUXScredQMfJ/34qzXyocsG6OHPOTV1I8WSANrxnHR5m1Ae3iU6JuVw==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/util-base64-node@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-base64-node/-/util-base64-node-3.23.0.tgz#d0da9ed6b8aaa7513ba4b36a20b4794c72c074ce"
+ integrity sha512-Kf8JIAUtjrPcD5CJzrig2B5CtegWswUNpW4zBarww/UJhHlp8WzKlCxxA+yNS1ghT0ZMjrRvxPabKDGpkyUfmQ==
+ dependencies:
+ "@aws-sdk/util-buffer-from" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/util-body-length-browser@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.23.0.tgz#1a5c5e7ea5e15d93bd178021c54d2ea41faeb1cd"
+ integrity sha512-Bi6u/5omQbOBSB5BxqVvaPgVplLRjhhSuqK3XAukbeBPh7lcibIBdy7YvbhQyl4i8Hb2QjFnqqfzA0lNBe5eiw==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/util-body-length-node@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-body-length-node/-/util-body-length-node-3.23.0.tgz#2a7890b4fa6de78a042db9537a67f90ccb2a3034"
+ integrity sha512-8kSczloA78mikPaJ742SU9Wpwfcz3HOruoXiP/pOy69UZEsMe4P7zTZI1bo8BAp7j6IFUPCXth9E3UAtkbz+CQ==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/util-buffer-from@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-buffer-from/-/util-buffer-from-3.23.0.tgz#3bc02f50c6e8a5c2b9db61faeb3bebc9de701c3b"
+ integrity sha512-axXy1FvEOM1uECgMPmyHF1S3Hd7JI+BerhhcAlGig0bbqUsZVQUNL9yhOsWreA+nf1v08Ucj8P2SHPCT9Hvpgg==
+ dependencies:
+ "@aws-sdk/is-array-buffer" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/util-credentials@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-credentials/-/util-credentials-3.23.0.tgz#6b3138c3853c72adc93c3f57e8fb28f58ffdc364"
+ integrity sha512-6TDGZnFa0kZr+vSsWXXMfWt347jbMGKtzGnBxbrmiQgZMijz9s/wLYxsjglZ+CyqI/QrSMOTtqy6mEgJxdnGWQ==
+ dependencies:
+ "@aws-sdk/shared-ini-file-loader" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/util-hex-encoding@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.23.0.tgz#a8de34faf9e51dd4be379be0e9d3bdc093ae6bf4"
+ integrity sha512-RFDCwNrJMmmPSMVRadxRNePqTXGwtL9s4844x44D0bbGg1TdC42rrg0PRKYkxFL7wd1FbibVQOzciZAvzF+Z+w==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/util-locate-window@^3.0.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.23.0.tgz#e9bf2a023dce2ea1d13ec2e8c7c92abb333a1442"
+ integrity sha512-mM8kWW7SWIxCshkNllpYqCQi5SzwJ+sv5nURhtquOB5/H3qGqZm0V5lUE3qpE1AYmqKwk6qbGUy1woFn1T5nrw==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/util-uri-escape@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-uri-escape/-/util-uri-escape-3.23.0.tgz#52539674966eb456d65408d9028ed114e94dfd49"
+ integrity sha512-SvQx2E/FDlI5vLT67wwn/k1j2R/G58tYj4Te6GNgEwPGL43X2+7c0+d/WTgndMaRvxSBHZMUTxBYh1HOeU7loA==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/util-user-agent-browser@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.25.0.tgz#a0f480f1a5b10350370643445b09413102187935"
+ integrity sha512-qGqiWfs49NRmQVXPsBXgMRVkjDZocicU0V2wak98e0t7TOI+KmP8hnwsTkE6c4KwhsFOOUhAzjn5zk3kOwi6tQ==
+ dependencies:
+ "@aws-sdk/types" "3.25.0"
+ bowser "^2.11.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/util-user-agent-node@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.25.0.tgz#db22cb64893c4635adf17086c5cb4a5070c4ac16"
+ integrity sha512-4AWyCNP3n/qxv36OS+WH3l4ooRvwyfdbYWFXNXeGcxMcLANDG0upJQRT1g7H8+/afMaJ6v/BQM/H6tdocJSKjQ==
+ dependencies:
+ "@aws-sdk/node-config-provider" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/util-utf8-browser@3.23.0", "@aws-sdk/util-utf8-browser@^3.0.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.23.0.tgz#dff7e891c67936de677b7d7a6c796e5c2e1b1510"
+ integrity sha512-fSB95AKnvCnAbCd7o0xLbErfAgD9wnLCaEu23AgfGAiaG3nFF8Z2+wtjebU/9Z4RI9d/x83Ho/yguRnJdkMsPA==
+ dependencies:
+ tslib "^2.3.0"
+
+"@aws-sdk/util-utf8-node@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-node/-/util-utf8-node-3.23.0.tgz#9f9fe76745c79c8a148f15d78e9a5c03d2bf0441"
+ integrity sha512-yao8+8okyfCxRvxZe3GBdO7lJnQEBf3P6rDgleOQD/0DZmMjOQGXCvDd42oagE2TegXhkUnJfVOZU2GqdoR0hg==
+ dependencies:
+ "@aws-sdk/util-buffer-from" "3.23.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/util-waiter@3.25.0":
+ version "3.25.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/util-waiter/-/util-waiter-3.25.0.tgz#cd2252c99f335e461134f55c3b7eb89ef6893dca"
+ integrity sha512-rhJ7Q2fcPD8y4H0qNEpaspkSUya0OaNcVrca9wCZKs7jWnropPzrQ+e2MH7fWJ/8jgcBV890+Txr4fWkD4J01g==
+ dependencies:
+ "@aws-sdk/abort-controller" "3.25.0"
+ "@aws-sdk/types" "3.25.0"
+ tslib "^2.3.0"
+
+"@aws-sdk/xml-builder@3.23.0":
+ version "3.23.0"
+ resolved "https://registry.yarnpkg.com/@aws-sdk/xml-builder/-/xml-builder-3.23.0.tgz#e318f539b68fa9c0a36da49e85a96cdca13a8113"
+ integrity sha512-5LEGdhQIJtGTwg4dIYyNtpz5QvPcQoxsqJygmj+VB8KLd+mWorH1IOpiL74z0infeK9N+ZFUUPKIzPJa9xLPqw==
+ dependencies:
+ tslib "^2.3.0"
+
"@babel/code-frame@7.12.11":
version "7.12.11"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f"
@@ -1652,6 +2416,11 @@ boolean@3.0.4:
resolved "https://registry.yarnpkg.com/boolean/-/boolean-3.0.4.tgz#aa1df8749af41d7211b66b4eee584722ff428c27"
integrity sha512-5pyOr+w2LNN72F2mAq6J0ckHUfJYSgRKma7e/wlcMMhgOLV9OI0ERhERYXxUqo+dPyVxcbXKy9n+wg13+LpNnA==
+bowser@^2.11.0:
+ version "2.11.0"
+ resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f"
+ integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==
+
boxen@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/boxen/-/boxen-4.2.0.tgz#e411b62357d6d6d36587c8ac3d5d974daa070e64"
@@ -2909,7 +3678,7 @@ enquirer@^2.3.5:
dependencies:
ansi-colors "^4.1.1"
-entities@^2.0.0:
+entities@2.2.0, entities@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55"
integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==
@@ -3403,7 +4172,7 @@ fast-safe-stringify@^2.0.4, fast-safe-stringify@^2.0.7:
resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.0.8.tgz#dc2af48c46cf712b683e849b2bbd446b32de936f"
integrity sha512-lXatBjf3WPjmWD6DpIZxkeSsCOwqI0maYMpgDlx8g4U2qi4lbjA9oH/HD2a87G+KfsUmo5WbJFmqBZlPxtptag==
-fast-xml-parser@^3.19.0:
+fast-xml-parser@3.19.0, fast-xml-parser@^3.19.0:
version "3.19.0"
resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-3.19.0.tgz#cb637ec3f3999f51406dd8ff0e6fc4d83e520d01"
integrity sha512-4pXwmBplsCPv8FOY1WRakF970TjNGnGnfbOnLqjlYvMiF1SR3yOHyxMR/YCXpPTOspNF5gwudqktIP4VsWkvBg==
@@ -7947,7 +8716,7 @@ tsconfig-paths@^3.9.0:
minimist "^1.2.0"
strip-bom "^3.0.0"
-tslib@^1.8.1, tslib@^1.9.0:
+tslib@^1.11.1, tslib@^1.8.1, tslib@^1.9.0:
version "1.14.1"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
@@ -7957,6 +8726,11 @@ tslib@^2.0.0, tslib@^2.2.0:
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.0.tgz#803b8cdab3e12ba581a4ca41c8839bbb0dacb09e"
integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==
+tslib@^2.3.0:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01"
+ integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==
+
tsutils@^3.21.0:
version "3.21.0"
resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623"