Add runner server tests

parent 2fe978744e
commit d102de1b38

@@ -1,6 +1,7 @@
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import { expect } from 'chai'
import { SQLCommand } from '@server/tests/shared'
import { wait } from '@shared/core-utils'
import {
cleanupTests,
@@ -13,6 +14,8 @@ import {

describe('Test AP cleaner', function () {
let servers: PeerTubeServer[] = []
const sqlCommands: SQLCommand[] = []

let videoUUID1: string
let videoUUID2: string
let videoUUID3: string
@@ -56,6 +59,8 @@ describe('Test AP cleaner', function () {
await server.videos.rate({ id: uuid, rating: 'like' })
await server.comments.createThread({ videoId: uuid, text: 'comment' })
}

sqlCommands.push(new SQLCommand(server))
}

await waitJobs(servers)
@@ -75,9 +80,9 @@ describe('Test AP cleaner', function () {
it('Should destroy server 3 internal likes and correctly clean them', async function () {
this.timeout(20000)

await servers[2].sql.deleteAll('accountVideoRate')
await sqlCommands[2].deleteAll('accountVideoRate')
for (const uuid of videoUUIDs) {
await servers[2].sql.setVideoField(uuid, 'likes', '0')
await sqlCommands[2].setVideoField(uuid, 'likes', '0')
}

await wait(5000)
@@ -121,10 +126,10 @@ describe('Test AP cleaner', function () {
it('Should destroy server 3 internal dislikes and correctly clean them', async function () {
this.timeout(20000)

await servers[2].sql.deleteAll('accountVideoRate')
await sqlCommands[2].deleteAll('accountVideoRate')

for (const uuid of videoUUIDs) {
await servers[2].sql.setVideoField(uuid, 'dislikes', '0')
await sqlCommands[2].setVideoField(uuid, 'dislikes', '0')
}

await wait(5000)
@@ -148,15 +153,15 @@ describe('Test AP cleaner', function () {
it('Should destroy server 3 internal shares and correctly clean them', async function () {
this.timeout(20000)

const preCount = await servers[0].sql.getVideoShareCount()
const preCount = await sqlCommands[0].getVideoShareCount()
expect(preCount).to.equal(6)

await servers[2].sql.deleteAll('videoShare')
await sqlCommands[2].deleteAll('videoShare')
await wait(5000)
await waitJobs(servers)

// Still 6 because we don't have remote shares on local videos
const postCount = await servers[0].sql.getVideoShareCount()
const postCount = await sqlCommands[0].getVideoShareCount()
expect(postCount).to.equal(6)
})

@@ -168,7 +173,7 @@ describe('Test AP cleaner', function () {
expect(total).to.equal(3)
}

await servers[2].sql.deleteAll('videoComment')
await sqlCommands[2].deleteAll('videoComment')

await wait(5000)
await waitJobs(servers)
@@ -185,7 +190,7 @@ describe('Test AP cleaner', function () {
async function check (like: string, ofServerUrl: string, urlSuffix: string, remote: 'true' | 'false') {
const query = `SELECT "videoId", "accountVideoRate".url FROM "accountVideoRate" ` +
`INNER JOIN video ON "accountVideoRate"."videoId" = video.id AND remote IS ${remote} WHERE "accountVideoRate"."url" LIKE '${like}'`
const res = await servers[0].sql.selectQuery<{ url: string }>(query)
const res = await sqlCommands[0].selectQuery<{ url: string }>(query)

for (const rate of res) {
const matcher = new RegExp(`^${ofServerUrl}/accounts/root/dislikes/\\d+${urlSuffix}$`)
@@ -214,7 +219,7 @@ describe('Test AP cleaner', function () {

{
const query = `UPDATE "accountVideoRate" SET url = url || 'stan'`
await servers[1].sql.updateQuery(query)
await sqlCommands[1].updateQuery(query)

await wait(5000)
await waitJobs(servers)
@@ -231,7 +236,7 @@ describe('Test AP cleaner', function () {
const query = `SELECT "videoId", "videoComment".url, uuid as "videoUUID" FROM "videoComment" ` +
`INNER JOIN video ON "videoComment"."videoId" = video.id AND remote IS ${remote} WHERE "videoComment"."url" LIKE '${like}'`

const res = await servers[0].sql.selectQuery<{ url: string, videoUUID: string }>(query)
const res = await sqlCommands[0].selectQuery<{ url: string, videoUUID: string }>(query)

for (const comment of res) {
const matcher = new RegExp(`${ofServerUrl}/videos/watch/${comment.videoUUID}/comments/\\d+${urlSuffix}`)
@@ -257,7 +262,7 @@ describe('Test AP cleaner', function () {

{
const query = `UPDATE "videoComment" SET url = url || 'kyle'`
await servers[1].sql.updateQuery(query)
await sqlCommands[1].updateQuery(query)

await wait(5000)
await waitJobs(servers)
@@ -328,6 +333,10 @@ describe('Test AP cleaner', function () {
})

after(async function () {
for (const sql of sqlCommands) {
await sql.cleanup()
}

await cleanupTests(servers)
})
})

@@ -1,6 +1,7 @@
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import { expect } from 'chai'
import { SQLCommand } from '@server/tests/shared'
import {
cleanupTests,
createMultipleServers,
@@ -12,6 +13,7 @@ import {

describe('Test ActivityPub fetcher', function () {
let servers: PeerTubeServer[]
let sqlCommandServer1: SQLCommand

// ---------------------------------------------------------------

@@ -34,15 +36,17 @@ describe('Test ActivityPub fetcher', function () {
const { uuid } = await servers[0].videos.upload({ attributes: { name: 'bad video root' } })
await servers[0].videos.upload({ token: userAccessToken, attributes: { name: 'video user' } })

sqlCommandServer1 = new SQLCommand(servers[0])

{
const to = servers[0].url + '/accounts/user1'
const value = servers[1].url + '/accounts/user1'
await servers[0].sql.setActorField(to, 'url', value)
await sqlCommandServer1.setActorField(to, 'url', value)
}

{
const value = servers[2].url + '/videos/watch/' + uuid
await servers[0].sql.setVideoField(uuid, 'url', value)
await sqlCommandServer1.setVideoField(uuid, 'url', value)
}
})

@@ -72,6 +76,7 @@ describe('Test ActivityPub fetcher', function () {
after(async function () {
this.timeout(20000)

await sqlCommandServer1.cleanup()
await cleanupTests(servers)
})
})

@@ -1,5 +1,6 @@
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import { SQLCommand } from '@server/tests/shared'
import { wait } from '@shared/core-utils'
import { HttpStatusCode, VideoPlaylistPrivacy } from '@shared/models'
import {
@@ -15,6 +16,7 @@ import {

describe('Test AP refresher', function () {
let servers: PeerTubeServer[] = []
let sqlCommandServer2: SQLCommand
let videoUUID1: string
let videoUUID2: string
let videoUUID3: string
@@ -61,6 +63,8 @@ describe('Test AP refresher', function () {
}

await doubleFollow(servers[0], servers[1])

sqlCommandServer2 = new SQLCommand(servers[1])
})

describe('Videos refresher', function () {
@@ -71,7 +75,7 @@ describe('Test AP refresher', function () {
await wait(10000)

// Change UUID so the remote server returns a 404
await servers[1].sql.setVideoField(videoUUID1, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b174f')
await sqlCommandServer2.setVideoField(videoUUID1, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b174f')

await servers[0].videos.get({ id: videoUUID1 })
await servers[0].videos.get({ id: videoUUID2 })
@@ -87,7 +91,7 @@ describe('Test AP refresher', function () {

await killallServers([ servers[1] ])

await servers[1].sql.setVideoField(videoUUID3, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b174e')
await sqlCommandServer2.setVideoField(videoUUID3, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b174e')

// Video will need a refresh
await wait(10000)
@@ -113,7 +117,7 @@ describe('Test AP refresher', function () {

// Change actor name so the remote server returns a 404
const to = servers[1].url + '/accounts/user2'
await servers[1].sql.setActorField(to, 'preferredUsername', 'toto')
await sqlCommandServer2.setActorField(to, 'preferredUsername', 'toto')

await command.get({ accountName: 'user1@' + servers[1].host })
await command.get({ accountName: 'user2@' + servers[1].host })
@@ -133,7 +137,7 @@ describe('Test AP refresher', function () {
await wait(10000)

// Change UUID so the remote server returns a 404
await servers[1].sql.setPlaylistField(playlistUUID2, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b178e')
await sqlCommandServer2.setPlaylistField(playlistUUID2, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b178e')

await servers[0].playlists.get({ playlistId: playlistUUID1 })
await servers[0].playlists.get({ playlistId: playlistUUID2 })
@@ -148,6 +152,8 @@ describe('Test AP refresher', function () {
after(async function () {
this.timeout(10000)

await sqlCommandServer2.cleanup()

await cleanupTests(servers)
})
})

@@ -5,26 +5,26 @@ import { buildDigest } from '@server/helpers/peertube-crypto'
import { ACTIVITY_PUB, HTTP_SIGNATURE } from '@server/initializers/constants'
import { activityPubContextify } from '@server/lib/activitypub/context'
import { buildGlobalHeaders, signAndContextify } from '@server/lib/activitypub/send'
import { makePOSTAPRequest } from '@server/tests/shared'
import { makePOSTAPRequest, SQLCommand } from '@server/tests/shared'
import { buildAbsoluteFixturePath, wait } from '@shared/core-utils'
import { HttpStatusCode } from '@shared/models'
import { cleanupTests, createMultipleServers, killallServers, PeerTubeServer } from '@shared/server-commands'

function setKeysOfServer (onServer: PeerTubeServer, ofServer: PeerTubeServer, publicKey: string, privateKey: string) {
const url = ofServer.url + '/accounts/peertube'
function setKeysOfServer (onServer: SQLCommand, ofServerUrl: string, publicKey: string, privateKey: string) {
const url = ofServerUrl + '/accounts/peertube'

return Promise.all([
onServer.sql.setActorField(url, 'publicKey', publicKey),
onServer.sql.setActorField(url, 'privateKey', privateKey)
onServer.setActorField(url, 'publicKey', publicKey),
onServer.setActorField(url, 'privateKey', privateKey)
])
}

function setUpdatedAtOfServer (onServer: PeerTubeServer, ofServer: PeerTubeServer, updatedAt: string) {
const url = ofServer.url + '/accounts/peertube'
function setUpdatedAtOfServer (onServer: SQLCommand, ofServerUrl: string, updatedAt: string) {
const url = ofServerUrl + '/accounts/peertube'

return Promise.all([
onServer.sql.setActorField(url, 'createdAt', updatedAt),
onServer.sql.setActorField(url, 'updatedAt', updatedAt)
onServer.setActorField(url, 'createdAt', updatedAt),
onServer.setActorField(url, 'updatedAt', updatedAt)
])
}

@@ -71,6 +71,8 @@ async function makeFollowRequest (to: { url: string }, by: { url: string, privat

describe('Test ActivityPub security', function () {
let servers: PeerTubeServer[]
let sqlCommands: SQLCommand[]

let url: string

const keys = require(buildAbsoluteFixturePath('./ap-json/peertube/keys.json'))
@@ -90,10 +92,12 @@ describe('Test ActivityPub security', function () {

servers = await createMultipleServers(3)

sqlCommands = servers.map(s => new SQLCommand(s))

url = servers[0].url + '/inbox'

await setKeysOfServer(servers[0], servers[1], keys.publicKey, null)
await setKeysOfServer(servers[1], servers[1], keys.publicKey, keys.privateKey)
await setKeysOfServer(sqlCommands[0], servers[1].url, keys.publicKey, null)
await setKeysOfServer(sqlCommands[1], servers[1].url, keys.publicKey, keys.privateKey)

const to = { url: servers[0].url + '/accounts/peertube' }
const by = { url: servers[1].url + '/accounts/peertube', privateKey: keys.privateKey }
@@ -130,8 +134,8 @@ describe('Test ActivityPub security', function () {
})

it('Should fail with bad keys', async function () {
await setKeysOfServer(servers[0], servers[1], invalidKeys.publicKey, invalidKeys.privateKey)
await setKeysOfServer(servers[1], servers[1], invalidKeys.publicKey, invalidKeys.privateKey)
await setKeysOfServer(sqlCommands[0], servers[1].url, invalidKeys.publicKey, invalidKeys.privateKey)
await setKeysOfServer(sqlCommands[1], servers[1].url, invalidKeys.publicKey, invalidKeys.privateKey)

const body = await activityPubContextify(getAnnounceWithoutContext(servers[1]), 'Announce')
const headers = buildGlobalHeaders(body)
@@ -145,8 +149,8 @@ describe('Test ActivityPub security', function () {
})

it('Should reject requests without appropriate signed headers', async function () {
await setKeysOfServer(servers[0], servers[1], keys.publicKey, keys.privateKey)
await setKeysOfServer(servers[1], servers[1], keys.publicKey, keys.privateKey)
await setKeysOfServer(sqlCommands[0], servers[1].url, keys.publicKey, keys.privateKey)
await setKeysOfServer(sqlCommands[1], servers[1].url, keys.publicKey, keys.privateKey)

const body = await activityPubContextify(getAnnounceWithoutContext(servers[1]), 'Announce')
const headers = buildGlobalHeaders(body)
@@ -194,8 +198,8 @@ describe('Test ActivityPub security', function () {

// Update keys of server 2 to invalid keys
// Server 1 should refresh the actor and fail
await setKeysOfServer(servers[1], servers[1], invalidKeys.publicKey, invalidKeys.privateKey)
await setUpdatedAtOfServer(servers[0], servers[1], '2015-07-17 22:00:00+00')
await setKeysOfServer(sqlCommands[1], servers[1].url, invalidKeys.publicKey, invalidKeys.privateKey)
await setUpdatedAtOfServer(sqlCommands[0], servers[1].url, '2015-07-17 22:00:00+00')

// Invalid peertube actor cache
await killallServers([ servers[1] ])
@@ -218,9 +222,9 @@ describe('Test ActivityPub security', function () {
before(async function () {
this.timeout(10000)

await setKeysOfServer(servers[0], servers[1], keys.publicKey, keys.privateKey)
await setKeysOfServer(servers[1], servers[1], keys.publicKey, keys.privateKey)
await setKeysOfServer(servers[2], servers[2], keys.publicKey, keys.privateKey)
await setKeysOfServer(sqlCommands[0], servers[1].url, keys.publicKey, keys.privateKey)
await setKeysOfServer(sqlCommands[1], servers[1].url, keys.publicKey, keys.privateKey)
await setKeysOfServer(sqlCommands[2], servers[2].url, keys.publicKey, keys.privateKey)

const to = { url: servers[0].url + '/accounts/peertube' }
const by = { url: servers[2].url + '/accounts/peertube', privateKey: keys.privateKey }
@@ -230,8 +234,8 @@ describe('Test ActivityPub security', function () {
it('Should fail with bad keys', async function () {
this.timeout(10000)

await setKeysOfServer(servers[0], servers[2], invalidKeys.publicKey, invalidKeys.privateKey)
await setKeysOfServer(servers[2], servers[2], invalidKeys.publicKey, invalidKeys.privateKey)
await setKeysOfServer(sqlCommands[0], servers[2].url, invalidKeys.publicKey, invalidKeys.privateKey)
await setKeysOfServer(sqlCommands[2], servers[2].url, invalidKeys.publicKey, invalidKeys.privateKey)

const body = getAnnounceWithoutContext(servers[1])
body.actor = servers[2].url + '/accounts/peertube'
@@ -252,8 +256,8 @@ describe('Test ActivityPub security', function () {
it('Should fail with an altered body', async function () {
this.timeout(10000)

await setKeysOfServer(servers[0], servers[2], keys.publicKey, keys.privateKey)
await setKeysOfServer(servers[0], servers[2], keys.publicKey, keys.privateKey)
await setKeysOfServer(sqlCommands[0], servers[2].url, keys.publicKey, keys.privateKey)
await setKeysOfServer(sqlCommands[0], servers[2].url, keys.publicKey, keys.privateKey)

const body = getAnnounceWithoutContext(servers[1])
body.actor = servers[2].url + '/accounts/peertube'
@@ -296,7 +300,7 @@ describe('Test ActivityPub security', function () {

// Update keys of server 3 to invalid keys
// Server 1 should refresh the actor and fail
await setKeysOfServer(servers[2], servers[2], invalidKeys.publicKey, invalidKeys.privateKey)
await setKeysOfServer(sqlCommands[2], servers[2].url, invalidKeys.publicKey, invalidKeys.privateKey)

const body = getAnnounceWithoutContext(servers[1])
body.actor = servers[2].url + '/accounts/peertube'
@@ -316,7 +320,9 @@ describe('Test ActivityPub security', function () {
})

after(async function () {
this.timeout(10000)
for (const sql of sqlCommands) {
await sql.cleanup()
}

await cleanupTests(servers)
})

@@ -103,6 +103,9 @@ describe('Test config API validators', function () {
},
transcoding: {
enabled: true,
remoteRunners: {
enabled: true
},
allowAdditionalExtensions: true,
allowAudioFiles: true,
concurrency: 1,
@@ -140,6 +143,9 @@ describe('Test config API validators', function () {

transcoding: {
enabled: true,
remoteRunners: {
enabled: true
},
threads: 4,
profile: 'live_profile',
resolutions: {

@@ -16,6 +16,7 @@ import './my-user'
import './plugins'
import './redundancy'
import './registrations'
import './runners'
import './search'
import './services'
import './transcoding'

@@ -0,0 +1,702 @@
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
import { checkBadCountPagination, checkBadSortPagination, checkBadStartPagination } from '@server/tests/shared'
import { HttpStatusCode, RunnerJob, RunnerJobState, RunnerJobSuccessPayload, RunnerJobUpdatePayload, VideoPrivacy } from '@shared/models'
import {
cleanupTests,
createSingleServer,
makePostBodyRequest,
PeerTubeServer,
sendRTMPStream,
setAccessTokensToServers,
setDefaultVideoChannel,
stopFfmpeg,
waitJobs
} from '@shared/server-commands'

const badUUID = '910ec12a-d9e6-458b-a274-0abb655f9464'

describe('Test managing runners', function () {
let server: PeerTubeServer

let userToken: string

let registrationTokenId: number
let registrationToken: string

let runnerToken: string
let runnerToken2: string

let completedJobToken: string
let completedJobUUID: string

let cancelledJobUUID: string

before(async function () {
this.timeout(120000)

const config = {
rates_limit: {
api: {
max: 5000
}
}
}

server = await createSingleServer(1, config)
await setAccessTokensToServers([ server ])
await setDefaultVideoChannel([ server ])

userToken = await server.users.generateUserAndToken('user1')

const { data } = await server.runnerRegistrationTokens.list()
registrationToken = data[0].registrationToken
registrationTokenId = data[0].id

await server.config.enableTranscoding(true, true)
await server.config.enableRemoteTranscoding()
runnerToken = await server.runners.autoRegisterRunner()
runnerToken2 = await server.runners.autoRegisterRunner()

{
await server.videos.quickUpload({ name: 'video 1' })
await server.videos.quickUpload({ name: 'video 2' })

await waitJobs([ server ])

{
const job = await server.runnerJobs.autoProcessWebVideoJob(runnerToken)
completedJobToken = job.jobToken
completedJobUUID = job.uuid
}

{
const { job } = await server.runnerJobs.autoAccept({ runnerToken })
cancelledJobUUID = job.uuid
await server.runnerJobs.cancelByAdmin({ jobUUID: cancelledJobUUID })
}
}
})

describe('Managing runner registration tokens', function () {

describe('Common', function () {

it('Should fail to generate, list or delete runner registration token without oauth token', async function () {
const expectedStatus = HttpStatusCode.UNAUTHORIZED_401

await server.runnerRegistrationTokens.generate({ token: null, expectedStatus })
await server.runnerRegistrationTokens.list({ token: null, expectedStatus })
await server.runnerRegistrationTokens.delete({ token: null, id: registrationTokenId, expectedStatus })
})

it('Should fail to generate, list or delete runner registration token without admin rights', async function () {
const expectedStatus = HttpStatusCode.FORBIDDEN_403

await server.runnerRegistrationTokens.generate({ token: userToken, expectedStatus })
await server.runnerRegistrationTokens.list({ token: userToken, expectedStatus })
await server.runnerRegistrationTokens.delete({ token: userToken, id: registrationTokenId, expectedStatus })
})
})

describe('Delete', function () {

it('Should fail to delete with a bad id', async function () {
await server.runnerRegistrationTokens.delete({ id: 404, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})
})

describe('List', function () {
const path = '/api/v1/runners/registration-tokens'

it('Should fail to list with a bad start pagination', async function () {
await checkBadStartPagination(server.url, path, server.accessToken)
})

it('Should fail to list with a bad count pagination', async function () {
await checkBadCountPagination(server.url, path, server.accessToken)
})

it('Should fail to list with an incorrect sort', async function () {
await checkBadSortPagination(server.url, path, server.accessToken)
})

it('Should succeed to list with the correct params', async function () {
await server.runnerRegistrationTokens.list({ start: 0, count: 5, sort: '-createdAt' })
})
})
})

describe('Managing runners', function () {
let toDeleteId: number

describe('Register', function () {
const name = 'runner name'

it('Should fail with a bad registration token', async function () {
const expectedStatus = HttpStatusCode.BAD_REQUEST_400

await server.runners.register({ name, registrationToken: 'a'.repeat(4000), expectedStatus })
await server.runners.register({ name, registrationToken: null, expectedStatus })
})

it('Should fail with an unknown registration token', async function () {
await server.runners.register({ name, registrationToken: 'aaa', expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})

it('Should fail with a bad name', async function () {
const expectedStatus = HttpStatusCode.BAD_REQUEST_400

await server.runners.register({ name: '', registrationToken, expectedStatus })
await server.runners.register({ name: 'a'.repeat(200), registrationToken, expectedStatus })
})

it('Should fail with an invalid description', async function () {
const expectedStatus = HttpStatusCode.BAD_REQUEST_400

await server.runners.register({ name, description: '', registrationToken, expectedStatus })
await server.runners.register({ name, description: 'a'.repeat(5000), registrationToken, expectedStatus })
})

it('Should succeed with the correct params', async function () {
const { id } = await server.runners.register({ name, description: 'super description', registrationToken })

toDeleteId = id
})
})

describe('Delete', function () {

it('Should fail without oauth token', async function () {
await server.runners.delete({ token: null, id: toDeleteId, expectedStatus: HttpStatusCode.UNAUTHORIZED_401 })
})

it('Should fail without admin rights', async function () {
await server.runners.delete({ token: userToken, id: toDeleteId, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
})

it('Should fail with a bad id', async function () {
await server.runners.delete({ id: 'hi' as any, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an unknown id', async function () {
await server.runners.delete({ id: 404, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})

it('Should succeed with the correct params', async function () {
await server.runners.delete({ id: toDeleteId })
})
})

describe('List', function () {
const path = '/api/v1/runners'

it('Should fail without oauth token', async function () {
await server.runners.list({ token: null, expectedStatus: HttpStatusCode.UNAUTHORIZED_401 })
})

it('Should fail without admin rights', async function () {
await server.runners.list({ token: userToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
})

it('Should fail to list with a bad start pagination', async function () {
await checkBadStartPagination(server.url, path, server.accessToken)
})

it('Should fail to list with a bad count pagination', async function () {
await checkBadCountPagination(server.url, path, server.accessToken)
})

it('Should fail to list with an incorrect sort', async function () {
await checkBadSortPagination(server.url, path, server.accessToken)
})

it('Should succeed to list with the correct params', async function () {
await server.runners.list({ start: 0, count: 5, sort: '-createdAt' })
})
})

})

describe('Runner jobs by admin', function () {

describe('Cancel', function () {
let jobUUID: string

before(async function () {
this.timeout(60000)

await server.videos.quickUpload({ name: 'video' })
await waitJobs([ server ])

const { availableJobs } = await server.runnerJobs.request({ runnerToken })
jobUUID = availableJobs[0].uuid
})

it('Should fail without oauth token', async function () {
await server.runnerJobs.cancelByAdmin({ token: null, jobUUID, expectedStatus: HttpStatusCode.UNAUTHORIZED_401 })
})

it('Should fail without admin rights', async function () {
await server.runnerJobs.cancelByAdmin({ token: userToken, jobUUID, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
})

it('Should fail with a bad job uuid', async function () {
await server.runnerJobs.cancelByAdmin({ jobUUID: 'hello', expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an unknown job uuid', async function () {
const jobUUID = badUUID
await server.runnerJobs.cancelByAdmin({ jobUUID, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})

it('Should succeed with the correct params', async function () {
await server.runnerJobs.cancelByAdmin({ jobUUID })
})
})

describe('List', function () {
const path = '/api/v1/runners/jobs'

it('Should fail without oauth token', async function () {
await server.runnerJobs.list({ token: null, expectedStatus: HttpStatusCode.UNAUTHORIZED_401 })
})

it('Should fail without admin rights', async function () {
await server.runnerJobs.list({ token: userToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
})

it('Should fail to list with a bad start pagination', async function () {
await checkBadStartPagination(server.url, path, server.accessToken)
})

it('Should fail to list with a bad count pagination', async function () {
await checkBadCountPagination(server.url, path, server.accessToken)
})

it('Should fail to list with an incorrect sort', async function () {
await checkBadSortPagination(server.url, path, server.accessToken)
})

it('Should succeed to list with the correct params', async function () {
await server.runnerJobs.list({ start: 0, count: 5, sort: '-createdAt' })
})
})

})

describe('Runner jobs by runners', function () {
let jobUUID: string
let jobToken: string
let videoUUID: string

let jobUUID2: string
let jobToken2: string

let videoUUID2: string

let pendingUUID: string

let liveAcceptedJob: RunnerJob & { jobToken: string }

async function fetchFiles (options: {
jobUUID: string
videoUUID: string
runnerToken: string
jobToken: string
expectedStatus: HttpStatusCode
}) {
const { jobUUID, expectedStatus, videoUUID, runnerToken, jobToken } = options

const basePath = '/api/v1/runners/jobs/' + jobUUID + '/files/videos/' + videoUUID
const paths = [ `${basePath}/max-quality`, `${basePath}/previews/max-quality` ]

for (const path of paths) {
await makePostBodyRequest({ url: server.url, path, fields: { runnerToken, jobToken }, expectedStatus })
}
}

before(async function () {
this.timeout(120000)

{
await server.runnerJobs.cancelAllJobs({ state: RunnerJobState.PENDING })
}

{
const { uuid } = await server.videos.quickUpload({ name: 'video' })
videoUUID = uuid

await waitJobs([ server ])

const { job } = await server.runnerJobs.autoAccept({ runnerToken })
jobUUID = job.uuid
jobToken = job.jobToken
}

{
const { uuid } = await server.videos.quickUpload({ name: 'video' })
videoUUID2 = uuid

await waitJobs([ server ])

const { job } = await server.runnerJobs.autoAccept({ runnerToken: runnerToken2 })
jobUUID2 = job.uuid
jobToken2 = job.jobToken
}

{
await server.videos.quickUpload({ name: 'video' })
await waitJobs([ server ])

const { availableJobs } = await server.runnerJobs.request({ runnerToken })
pendingUUID = availableJobs[0].uuid
}

{
await server.config.enableLive({
allowReplay: false,
resolutions: 'max',
transcoding: true
})

const { live } = await server.live.quickCreate({ permanentLive: true, saveReplay: false, privacy: VideoPrivacy.PUBLIC })

const ffmpegCommand = sendRTMPStream({ rtmpBaseUrl: live.rtmpUrl, streamKey: live.streamKey })
await waitJobs([ server ])

await server.runnerJobs.requestLiveJob(runnerToken)

const { job } = await server.runnerJobs.autoAccept({ runnerToken, type: 'live-rtmp-hls-transcoding' })
liveAcceptedJob = job

await stopFfmpeg(ffmpegCommand)
}
})

describe('Common runner tokens validations', function () {

async function testEndpoints (options: {
jobUUID: string
runnerToken: string
jobToken: string
expectedStatus: HttpStatusCode
}) {
await fetchFiles({ ...options, videoUUID })

await server.runnerJobs.abort({ ...options, reason: 'reason' })
await server.runnerJobs.update({ ...options })
await server.runnerJobs.error({ ...options, message: 'message' })
await server.runnerJobs.success({ ...options, payload: { videoFile: 'video_short.mp4' } })
}

it('Should fail with an invalid job uuid', async function () {
await testEndpoints({ jobUUID: 'a', runnerToken, jobToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an unknown job uuid', async function () {
const jobUUID = badUUID
await testEndpoints({ jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})

it('Should fail with an invalid runner token', async function () {
await testEndpoints({ jobUUID, runnerToken: '', jobToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an unknown runner token', async function () {
const runnerToken = badUUID
await testEndpoints({ jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})

it('Should fail with an invalid job token job uuid', async function () {
await testEndpoints({ jobUUID, runnerToken, jobToken: '', expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an unknown job token job uuid', async function () {
const jobToken = badUUID
await testEndpoints({ jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})

it('Should fail with a runner token not associated to this job', async function () {
await testEndpoints({ jobUUID, runnerToken: runnerToken2, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})

it('Should fail with a job uuid not associated to the job token', async function () {
await testEndpoints({ jobUUID: jobUUID2, runnerToken, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
await testEndpoints({ jobUUID, runnerToken, jobToken: jobToken2, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})
})

describe('Unregister', function () {

it('Should fail without a runner token', async function () {
await server.runners.unregister({ runnerToken: null, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with a bad a runner token', async function () {
await server.runners.unregister({ runnerToken: '', expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an unknown runner token', async function () {
await server.runners.unregister({ runnerToken: badUUID, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})
})

describe('Request', function () {

it('Should fail without a runner token', async function () {
await server.runnerJobs.request({ runnerToken: null, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with a bad a runner token', async function () {
await server.runnerJobs.request({ runnerToken: '', expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an unknown runner token', async function () {
await server.runnerJobs.request({ runnerToken: badUUID, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})
})

describe('Accept', function () {

it('Should fail with a bad a job uuid', async function () {
await server.runnerJobs.accept({ jobUUID: '', runnerToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an unknown job uuid', async function () {
await server.runnerJobs.accept({ jobUUID: badUUID, runnerToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})

it('Should fail with a job not in pending state', async function () {
await server.runnerJobs.accept({ jobUUID: completedJobUUID, runnerToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
await server.runnerJobs.accept({ jobUUID: cancelledJobUUID, runnerToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail without a runner token', async function () {
await server.runnerJobs.accept({ jobUUID: pendingUUID, runnerToken: null, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with a bad a runner token', async function () {
await server.runnerJobs.accept({ jobUUID: pendingUUID, runnerToken: '', expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an unknown runner token', async function () {
await server.runnerJobs.accept({ jobUUID: pendingUUID, runnerToken: badUUID, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})
})

describe('Abort', function () {

it('Should fail without a reason', async function () {
await server.runnerJobs.abort({ jobUUID, jobToken, runnerToken, reason: null, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with a bad reason', async function () {
const reason = 'reason'.repeat(5000)
await server.runnerJobs.abort({ jobUUID, jobToken, runnerToken, reason, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with a job not in processing state', async function () {
await server.runnerJobs.abort({
jobUUID: completedJobUUID,
jobToken: completedJobToken,
runnerToken,
reason: 'reason',
expectedStatus: HttpStatusCode.BAD_REQUEST_400
})
})
})

describe('Update', function () {

describe('Common', function () {

it('Should fail with an invalid progress', async function () {
await server.runnerJobs.update({ jobUUID, jobToken, runnerToken, progress: 101, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with a job not in processing state', async function () {
await server.runnerJobs.update({
jobUUID: completedJobUUID,
jobToken: completedJobToken,
runnerToken,
expectedStatus: HttpStatusCode.BAD_REQUEST_400
})
})
})

describe('Live RTMP to HLS', function () {
const base: RunnerJobUpdatePayload = {
masterPlaylistFile: 'live/master.m3u8',
resolutionPlaylistFilename: '0.m3u8',
resolutionPlaylistFile: 'live/1.m3u8',
type: 'add-chunk',
videoChunkFile: 'live/1-000069.ts',
videoChunkFilename: '1-000068.ts'
}

function testUpdate (payload: RunnerJobUpdatePayload) {
return server.runnerJobs.update({
jobUUID: liveAcceptedJob.uuid,
jobToken: liveAcceptedJob.jobToken,
payload,
runnerToken,
expectedStatus: HttpStatusCode.BAD_REQUEST_400
})
}

it('Should fail with an invalid resolutionPlaylistFilename', async function () {
await testUpdate({ ...base, resolutionPlaylistFilename: undefined })
await testUpdate({ ...base, resolutionPlaylistFilename: 'coucou/hello' })
await testUpdate({ ...base, resolutionPlaylistFilename: 'hello' })
})

it('Should fail with an invalid videoChunkFilename', async function () {
await testUpdate({ ...base, resolutionPlaylistFilename: undefined })
await testUpdate({ ...base, resolutionPlaylistFilename: 'coucou/hello' })
await testUpdate({ ...base, resolutionPlaylistFilename: 'hello' })
})

it('Should fail with an invalid type', async function () {
await testUpdate({ ...base, type: undefined })
await testUpdate({ ...base, type: 'toto' as any })
})

it('Should succeed with the correct params', async function () {
await server.runnerJobs.update({
jobUUID: liveAcceptedJob.uuid,
jobToken: liveAcceptedJob.jobToken,
payload: base,
runnerToken
})

await server.runnerJobs.update({
jobUUID: liveAcceptedJob.uuid,
jobToken: liveAcceptedJob.jobToken,
payload: { ...base, masterPlaylistFile: undefined },
runnerToken
})
})
})
})

describe('Error', function () {

it('Should fail with a missing error message', async function () {
await server.runnerJobs.error({ jobUUID, jobToken, runnerToken, message: null, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an invalid error messgae', async function () {
const message = 'a'.repeat(6000)
await server.runnerJobs.error({ jobUUID, jobToken, runnerToken, message, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with a job not in processing state', async function () {
await server.runnerJobs.error({
jobUUID: completedJobUUID,
jobToken: completedJobToken,
message: 'my message',
runnerToken,
expectedStatus: HttpStatusCode.BAD_REQUEST_400
})
})
})

describe('Success', function () {
let vodJobUUID: string
let vodJobToken: string

describe('Common', function () {

it('Should fail with a job not in processing state', async function () {
await server.runnerJobs.success({
jobUUID: completedJobUUID,
jobToken: completedJobToken,
payload: { videoFile: 'video_short.mp4' },
runnerToken,
expectedStatus: HttpStatusCode.BAD_REQUEST_400
})
})
})

describe('VOD', function () {

it('Should fail with an invalid vod web video payload', async function () {
const { job } = await server.runnerJobs.autoAccept({ runnerToken, type: 'vod-web-video-transcoding' })

await server.runnerJobs.success({
jobUUID: job.uuid,
jobToken: job.jobToken,
payload: { hello: 'video_short.mp4' } as any,
runnerToken,
expectedStatus: HttpStatusCode.BAD_REQUEST_400
})

vodJobUUID = job.uuid
vodJobToken = job.jobToken
})

it('Should fail with an invalid vod hls payload', async function () {
// To create HLS jobs
const payload: RunnerJobSuccessPayload = { videoFile: 'video_short.mp4' }
await server.runnerJobs.success({ runnerToken, jobUUID: vodJobUUID, jobToken: vodJobToken, payload })

await waitJobs([ server ])

const { job } = await server.runnerJobs.autoAccept({ runnerToken, type: 'vod-hls-transcoding' })

await server.runnerJobs.success({
jobUUID: job.uuid,
jobToken: job.jobToken,
payload: { videoFile: 'video_short.mp4' } as any,
runnerToken,
expectedStatus: HttpStatusCode.BAD_REQUEST_400
})
})

it('Should fail with an invalid vod audio merge payload', async function () {
const attributes = { name: 'audio_with_preview', previewfile: 'preview.jpg', fixture: 'sample.ogg' }
await server.videos.upload({ attributes, mode: 'legacy' })

await waitJobs([ server ])

const { job } = await server.runnerJobs.autoAccept({ runnerToken, type: 'vod-audio-merge-transcoding' })

await server.runnerJobs.success({
jobUUID: job.uuid,
jobToken: job.jobToken,
payload: { hello: 'video_short.mp4' } as any,
runnerToken,
expectedStatus: HttpStatusCode.BAD_REQUEST_400
})
})
})
})

describe('Job files', function () {

describe('Video files', function () {

it('Should fail with an invalid video id', async function () {
await fetchFiles({ videoUUID: 'a', jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should fail with an unknown video id', async function () {
const videoUUID = '910ec12a-d9e6-458b-a274-0abb655f9464'
await fetchFiles({ videoUUID, jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
})

it('Should fail with a video id not associated to this job', async function () {
await fetchFiles({ videoUUID: videoUUID2, jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
})

it('Should succeed with the correct params', async function () {
await fetchFiles({ videoUUID, jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.OK_200 })
})
})
})
})

after(async function () {
await cleanupTests([ server ])
})
})

@@ -278,7 +278,7 @@ describe('Test video blacklist API validators', function () {
})

it('Should fail with an invalid type', async function () {
await servers[0].blacklist.list({ type: 0, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
await servers[0].blacklist.list({ type: 0 as any, expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
})

it('Should succeed with the correct parameters', async function () {

@@ -239,7 +239,7 @@ describe('Test video playlists API validator', function () {
})

it('Should fail with an incorrect privacy', async function () {
const params = getBase({ privacy: 45 })
const params = getBase({ privacy: 45 as any })

await command.create(params)
await command.update(getUpdate(params, playlist.shortUUID))

@@ -421,9 +421,9 @@ describe('Test videos API validator', function () {
const error = body as unknown as PeerTubeProblemDocument

if (mode === 'legacy') {
expect(error.docs).to.equal('https://docs.joinpeertube.org/api/rest-reference.html#operation/uploadLegacy')
expect(error.docs).to.equal('https://docs.joinpeertube.org/api-rest-reference.html#operation/uploadLegacy')
} else {
expect(error.docs).to.equal('https://docs.joinpeertube.org/api/rest-reference.html#operation/uploadResumableInit')
expect(error.docs).to.equal('https://docs.joinpeertube.org/api-rest-reference.html#operation/uploadResumableInit')
}

expect(error.type).to.equal('about:blank')
@@ -680,7 +680,7 @@ describe('Test videos API validator', function () {
const res = await makePutBodyRequest({ url: server.url, path: path + video.shortUUID, token: server.accessToken, fields })
const error = res.body as PeerTubeProblemDocument

expect(error.docs).to.equal('https://docs.joinpeertube.org/api/rest-reference.html#operation/putVideo')
expect(error.docs).to.equal('https://docs.joinpeertube.org/api-rest-reference.html#operation/putVideo')

expect(error.type).to.equal('about:blank')
expect(error.title).to.equal('Bad Request')
@@ -729,7 +729,7 @@ describe('Test videos API validator', function () {
const body = await server.videos.get({ id: 'hi', expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
const error = body as unknown as PeerTubeProblemDocument

expect(error.docs).to.equal('https://docs.joinpeertube.org/api/rest-reference.html#operation/getVideo')
expect(error.docs).to.equal('https://docs.joinpeertube.org/api-rest-reference.html#operation/getVideo')

expect(error.type).to.equal('about:blank')
expect(error.title).to.equal('Bad Request')
@@ -835,7 +835,7 @@ describe('Test videos API validator', function () {
const body = await server.videos.remove({ id: 'hello', expectedStatus: HttpStatusCode.BAD_REQUEST_400 })
const error = body as PeerTubeProblemDocument

expect(error.docs).to.equal('https://docs.joinpeertube.org/api/rest-reference.html#operation/delVideo')
expect(error.docs).to.equal('https://docs.joinpeertube.org/api-rest-reference.html#operation/delVideo')

expect(error.type).to.equal('about:blank')
expect(error.title).to.equal('Bad Request')

@@ -5,6 +5,7 @@ import './moderation'
import './object-storage'
import './notifications'
import './redundancy'
import './runners'
import './search'
import './server'
import './transcoding'

@@ -2,9 +2,9 @@

import { expect } from 'chai'
import { basename, join } from 'path'
import { ffprobePromise, getVideoStream } from '@server/helpers/ffmpeg'
import { testImage, testVideoResolutions } from '@server/tests/shared'
import { SQLCommand, testImage, testLiveVideoResolutions } from '@server/tests/shared'
import { getAllFiles, wait } from '@shared/core-utils'
import { ffprobePromise, getVideoStream } from '@shared/ffmpeg'
import {
HttpStatusCode,
LiveVideo,
@@ -365,6 +365,7 @@ describe('Test live', function () {

describe('Live transcoding', function () {
let liveVideoId: string
let sqlCommandServer1: SQLCommand

async function createLiveWrapper (saveReplay: boolean) {
const liveAttributes = {
@@ -407,6 +408,8 @@ describe('Test live', function () {

before(async function () {
await updateConf([])

sqlCommandServer1 = new SQLCommand(servers[0])
})

it('Should enable transcoding without additional resolutions', async function () {
@@ -418,8 +421,9 @@ describe('Test live', function () {
await waitUntilLivePublishedOnAllServers(servers, liveVideoId)
await waitJobs(servers)

await testVideoResolutions({
await testLiveVideoResolutions({
originServer: servers[0],
sqlCommand: sqlCommandServer1,
servers,
liveVideoId,
resolutions: [ 720 ],
@@ -453,8 +457,9 @@ describe('Test live', function () {
await waitUntilLivePublishedOnAllServers(servers, liveVideoId)
await waitJobs(servers)

await testVideoResolutions({
await testLiveVideoResolutions({
originServer: servers[0],
sqlCommand: sqlCommandServer1,
servers,
liveVideoId,
resolutions: resolutions.concat([ 720 ]),
@@ -505,8 +510,9 @@ describe('Test live', function () {
await waitUntilLivePublishedOnAllServers(servers, liveVideoId)
await waitJobs(servers)

await testVideoResolutions({
await testLiveVideoResolutions({
originServer: servers[0],
sqlCommand: sqlCommandServer1,
servers,
liveVideoId,
resolutions,
@@ -601,8 +607,9 @@ describe('Test live', function () {
await waitUntilLivePublishedOnAllServers(servers, liveVideoId)
await waitJobs(servers)

await testVideoResolutions({
await testLiveVideoResolutions({
originServer: servers[0],
sqlCommand: sqlCommandServer1,
servers,
liveVideoId,
resolutions,
@@ -637,8 +644,9 @@ describe('Test live', function () {
await waitUntilLivePublishedOnAllServers(servers, liveVideoId)
await waitJobs(servers)

await testVideoResolutions({
await testLiveVideoResolutions({
originServer: servers[0],
sqlCommand: sqlCommandServer1,
servers,
liveVideoId,
resolutions: [ 720 ],
@@ -661,6 +669,10 @@ describe('Test live', function () {

expect(hlsFiles[0].resolution.id).to.equal(720)
})

after(async function () {
await sqlCommandServer1.cleanup()
})
})

describe('After a server restart', function () {

@@ -7,7 +7,8 @@ import {
checkNewPluginVersion,
MockJoinPeerTubeVersions,
MockSmtpServer,
prepareNotificationsTest
prepareNotificationsTest,
SQLCommand
} from '@server/tests/shared'
import { wait } from '@shared/core-utils'
import { PluginType, UserNotification, UserNotificationType } from '@shared/models'
@@ -15,6 +16,7 @@ import { cleanupTests, PeerTubeServer } from '@shared/server-commands'

describe('Test admin notifications', function () {
let server: PeerTubeServer
let sqlCommand: SQLCommand
let userNotifications: UserNotification[] = []
let adminNotifications: UserNotification[] = []
let emails: object[] = []
@@ -58,6 +60,8 @@ describe('Test admin notifications', function () {

await server.plugins.install({ npmName: 'peertube-plugin-hello-world' })
await server.plugins.install({ npmName: 'peertube-theme-background-red' })

sqlCommand = new SQLCommand(server)
})

describe('Latest PeerTube version notification', function () {
@@ -116,8 +120,8 @@ describe('Test admin notifications', function () {
it('Should send a notification to admins on new plugin version', async function () {
this.timeout(30000)

await server.sql.setPluginVersion('hello-world', '0.0.1')
await server.sql.setPluginLatestVersion('hello-world', '0.0.1')
await sqlCommand.setPluginVersion('hello-world', '0.0.1')
await sqlCommand.setPluginLatestVersion('hello-world', '0.0.1')
await wait(6000)

await checkNewPluginVersion({ ...baseParams, pluginType: PluginType.PLUGIN, pluginName: 'hello-world', checkType: 'presence' })
@@ -138,8 +142,8 @@ describe('Test admin notifications', function () {
it('Should send a new notification after a new plugin release', async function () {
this.timeout(30000)

await server.sql.setPluginVersion('hello-world', '0.0.1')
await server.sql.setPluginLatestVersion('hello-world', '0.0.1')
await sqlCommand.setPluginVersion('hello-world', '0.0.1')
await sqlCommand.setPluginLatestVersion('hello-world', '0.0.1')
await wait(6000)

expect(adminNotifications.filter(n => n.type === UserNotificationType.NEW_PEERTUBE_VERSION)).to.have.lengthOf(2)
@@ -149,6 +153,7 @@ describe('Test admin notifications', function () {
after(async function () {
MockSmtpServer.Instance.kill()

await sqlCommand.cleanup()
await cleanupTests([ server ])
})
})

@@ -1,7 +1,7 @@
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import { expect } from 'chai'
import { expectStartWith, MockObjectStorageProxy, testVideoResolutions } from '@server/tests/shared'
import { expectStartWith, MockObjectStorageProxy, SQLCommand, testLiveVideoResolutions } from '@server/tests/shared'
import { areMockObjectStorageTestsDisabled } from '@shared/core-utils'
import { HttpStatusCode, LiveVideoCreate, VideoPrivacy } from '@shared/models'
import {
@@ -79,6 +79,7 @@ describe('Object storage for lives', function () {
if (areMockObjectStorageTestsDisabled()) return

let servers: PeerTubeServer[]
let sqlCommandServer1: SQLCommand

before(async function () {
this.timeout(120000)
@@ -92,6 +93,8 @@ describe('Object storage for lives', function () {
await doubleFollow(servers[0], servers[1])

await servers[0].config.enableTranscoding()

sqlCommandServer1 = new SQLCommand(servers[0])
})

describe('Without live transcoding', function () {
@@ -109,8 +112,9 @@ describe('Object storage for lives', function () {
const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUID })
await waitUntilLivePublishedOnAllServers(servers, videoUUID)

await testVideoResolutions({
await testLiveVideoResolutions({
originServer: servers[0],
sqlCommand: sqlCommandServer1,
servers,
liveVideoId: videoUUID,
resolutions: [ 720 ],
@@ -155,8 +159,9 @@ describe('Object storage for lives', function () {
const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUIDNonPermanent })
await waitUntilLivePublishedOnAllServers(servers, videoUUIDNonPermanent)

await testVideoResolutions({
await testLiveVideoResolutions({
originServer: servers[0],
sqlCommand: sqlCommandServer1,
servers,
liveVideoId: videoUUIDNonPermanent,
resolutions,
@@ -194,8 +199,9 @@ describe('Object storage for lives', function () {
const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUIDPermanent })
await waitUntilLivePublishedOnAllServers(servers, videoUUIDPermanent)

await testVideoResolutions({
await testLiveVideoResolutions({
originServer: servers[0],
sqlCommand: sqlCommandServer1,
servers,
liveVideoId: videoUUIDPermanent,
resolutions,
@@ -266,8 +272,9 @@ describe('Object storage for lives', function () {
const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUIDPermanent })
await waitUntilLivePublishedOnAllServers(servers, videoUUIDPermanent)

await testVideoResolutions({
await testLiveVideoResolutions({
originServer: servers[0],
sqlCommand: sqlCommandServer1,
servers,
liveVideoId: videoUUIDPermanent,
resolutions: [ 720 ],
@@ -281,6 +288,8 @@ describe('Object storage for lives', function () {
})

after(async function () {
await sqlCommandServer1.cleanup()

await killallServers(servers)
})
})

@ -2,7 +2,7 @@
|
|||
|
||||
import { expect } from 'chai'
|
||||
import { basename } from 'path'
|
||||
import { checkVideoFileTokenReinjection, expectStartWith } from '@server/tests/shared'
|
||||
import { checkVideoFileTokenReinjection, expectStartWith, SQLCommand } from '@server/tests/shared'
|
||||
import { areScalewayObjectStorageTestsDisabled, getAllFiles, getHLS } from '@shared/core-utils'
|
||||
import { HttpStatusCode, LiveVideo, VideoDetails, VideoPrivacy } from '@shared/models'
|
||||
import {
|
||||
|
@ -30,6 +30,7 @@ describe('Object storage for video static file privacy', function () {
|
|||
if (areScalewayObjectStorageTestsDisabled()) return
|
||||
|
||||
let server: PeerTubeServer
|
||||
let sqlCommand: SQLCommand
|
||||
let userToken: string
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
@ -44,7 +45,7 @@ describe('Object storage for video static file privacy', function () {
|
|||
}
|
||||
|
||||
for (const file of getAllFiles(video)) {
|
||||
const internalFileUrl = await server.sql.getInternalFileUrl(file.id)
|
||||
const internalFileUrl = await sqlCommand.getInternalFileUrl(file.id)
|
||||
expectStartWith(internalFileUrl, ObjectStorageCommand.getScalewayBaseUrl())
|
||||
await makeRawRequest({ url: internalFileUrl, token: server.accessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
|
||||
}
|
||||
|
@ -99,6 +100,8 @@ describe('Object storage for video static file privacy', function () {
|
|||
await server.config.enableMinimumTranscoding()
|
||||
|
||||
userToken = await server.users.generateUserAndToken('user1')
|
||||
|
||||
sqlCommand = new SQLCommand(server)
|
||||
})
|
||||
|
||||
describe('VOD', function () {
|
||||
|
@ -439,6 +442,7 @@ describe('Object storage for video static file privacy', function () {
|
|||
await server.servers.waitUntilLog('Removed files of video ' + v.url)
|
||||
}
|
||||
|
||||
await sqlCommand.cleanup()
|
||||
await cleanupTests([ server ])
|
||||
})
|
||||
})
|
||||
|
|
|
@ -6,12 +6,15 @@ import { stat } from 'fs-extra'
|
|||
import { merge } from 'lodash'
|
||||
import {
|
||||
checkTmpIsEmpty,
|
||||
checkWebTorrentWorks,
|
||||
expectLogDoesNotContain,
|
||||
expectStartWith,
|
||||
generateHighBitrateVideo,
|
||||
MockObjectStorageProxy
|
||||
MockObjectStorageProxy,
|
||||
SQLCommand
|
||||
} from '@server/tests/shared'
|
||||
import { areMockObjectStorageTestsDisabled } from '@shared/core-utils'
|
||||
import { sha1 } from '@shared/extra-utils'
|
||||
import { HttpStatusCode, VideoDetails } from '@shared/models'
|
||||
import {
|
||||
cleanupTests,
|
||||
|
@ -23,14 +26,13 @@ import {
|
|||
ObjectStorageCommand,
|
||||
PeerTubeServer,
|
||||
setAccessTokensToServers,
|
||||
waitJobs,
|
||||
webtorrentAdd
|
||||
waitJobs
|
||||
} from '@shared/server-commands'
|
||||
import { sha1 } from '@shared/extra-utils'
|
||||
|
||||
async function checkFiles (options: {
|
||||
server: PeerTubeServer
|
||||
originServer: PeerTubeServer
|
||||
originSQLCommand: SQLCommand
|
||||
|
||||
video: VideoDetails
|
||||
|
||||
|
@ -45,6 +47,7 @@ async function checkFiles (options: {
|
|||
const {
|
||||
server,
|
||||
originServer,
|
||||
originSQLCommand,
|
||||
video,
|
||||
playlistBucket,
|
||||
webtorrentBucket,
|
||||
|
@ -104,7 +107,7 @@ async function checkFiles (options: {
|
|||
|
||||
if (originServer.internalServerNumber === server.internalServerNumber) {
|
||||
const infohash = sha1(`${2 + hls.playlistUrl}+V${i}`)
|
||||
const dbInfohashes = await originServer.sql.getPlaylistInfohash(hls.id)
|
||||
const dbInfohashes = await originSQLCommand.getPlaylistInfohash(hls.id)
|
||||
|
||||
expect(dbInfohashes).to.include(infohash)
|
||||
}
|
||||
|
@ -114,11 +117,7 @@ async function checkFiles (options: {
|
|||
}
|
||||
|
||||
for (const file of allFiles) {
|
||||
const torrent = await webtorrentAdd(file.magnetUri, true)
|
||||
|
||||
expect(torrent.files).to.be.an('array')
|
||||
expect(torrent.files.length).to.equal(1)
|
||||
expect(torrent.files[0].path).to.exist.and.to.not.equal('')
|
||||
await checkWebTorrentWorks(file.magnetUri)
|
||||
|
||||
const res = await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
expect(res.body).to.have.length.above(100)
|
||||
|
@ -145,6 +144,7 @@ function runTestSuite (options: {
|
|||
let baseMockUrl: string
|
||||
|
||||
let servers: PeerTubeServer[]
|
||||
let sqlCommands: SQLCommand[]
|
||||
|
||||
let keptUrls: string[] = []
|
||||
|
||||
|
@ -202,6 +202,8 @@ function runTestSuite (options: {
|
|||
const files = await server.videos.listFiles({ id: uuid })
|
||||
keptUrls = keptUrls.concat(files.map(f => f.fileUrl))
|
||||
}
|
||||
|
||||
sqlCommands = servers.map(s => new SQLCommand(s))
|
||||
})
|
||||
|
||||
it('Should upload a video and move it to the object storage without transcoding', async function () {
|
||||
|
@ -214,7 +216,7 @@ function runTestSuite (options: {
|
|||
|
||||
for (const server of servers) {
|
||||
const video = await server.videos.get({ id: uuid })
|
||||
const files = await checkFiles({ ...options, server, originServer: servers[0], video, baseMockUrl })
|
||||
const files = await checkFiles({ ...options, server, originServer: servers[0], originSQLCommand: sqlCommands[0], video, baseMockUrl })
|
||||
|
||||
deletedUrls = deletedUrls.concat(files)
|
||||
}
|
||||
|
@ -230,7 +232,7 @@ function runTestSuite (options: {
|
|||
|
||||
for (const server of servers) {
|
||||
const video = await server.videos.get({ id: uuid })
|
||||
const files = await checkFiles({ ...options, server, originServer: servers[0], video, baseMockUrl })
|
||||
const files = await checkFiles({ ...options, server, originServer: servers[0], originSQLCommand: sqlCommands[0], video, baseMockUrl })
|
||||
|
||||
deletedUrls = deletedUrls.concat(files)
|
||||
}
|
||||
|
@ -274,6 +276,10 @@ function runTestSuite (options: {
|
|||
after(async function () {
|
||||
await mockObjectStorageProxy.terminate()
|
||||
|
||||
for (const sqlCommand of sqlCommands) {
|
||||
await sqlCommand.cleanup()
|
||||
}
|
||||
|
||||
await cleanupTests(servers)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
export * from './runner-common'
|
||||
export * from './runner-live-transcoding'
|
||||
export * from './runner-socket'
|
||||
export * from './runner-vod-transcoding'
|
|
@ -0,0 +1,662 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { wait } from '@shared/core-utils'
|
||||
import { HttpStatusCode, Runner, RunnerJob, RunnerJobAdmin, RunnerJobState, RunnerRegistrationToken } from '@shared/models'
|
||||
import {
|
||||
cleanupTests,
|
||||
createSingleServer,
|
||||
makePostBodyRequest,
|
||||
PeerTubeServer,
|
||||
setAccessTokensToServers,
|
||||
setDefaultVideoChannel,
|
||||
waitJobs
|
||||
} from '@shared/server-commands'
|
||||
|
||||
describe('Test runner common actions', function () {
|
||||
let server: PeerTubeServer
|
||||
let registrationToken: string
|
||||
let runnerToken: string
|
||||
let jobMaxPriority: string
|
||||
|
||||
before(async function () {
|
||||
this.timeout(120_000)
|
||||
|
||||
server = await createSingleServer(1, {
|
||||
remote_runners: {
|
||||
stalled_jobs: {
|
||||
vod: '5 seconds'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
await setAccessTokensToServers([ server ])
|
||||
await setDefaultVideoChannel([ server ])
|
||||
|
||||
await server.config.enableTranscoding(true, true)
|
||||
await server.config.enableRemoteTranscoding()
|
||||
})
|
||||
|
||||
describe('Managing runner registration tokens', function () {
|
||||
let base: RunnerRegistrationToken[]
|
||||
let registrationTokenToDelete: RunnerRegistrationToken
|
||||
|
||||
it('Should have a default registration token', async function () {
|
||||
const { total, data } = await server.runnerRegistrationTokens.list()
|
||||
|
||||
expect(total).to.equal(1)
|
||||
expect(data).to.have.lengthOf(1)
|
||||
|
||||
const token = data[0]
|
||||
expect(token.id).to.exist
|
||||
expect(token.createdAt).to.exist
|
||||
expect(token.updatedAt).to.exist
|
||||
expect(token.registeredRunnersCount).to.equal(0)
|
||||
expect(token.registrationToken).to.exist
|
||||
})
|
||||
|
||||
it('Should create other registration tokens', async function () {
|
||||
await server.runnerRegistrationTokens.generate()
|
||||
await server.runnerRegistrationTokens.generate()
|
||||
|
||||
const { total, data } = await server.runnerRegistrationTokens.list()
|
||||
expect(total).to.equal(3)
|
||||
expect(data).to.have.lengthOf(3)
|
||||
})
|
||||
|
||||
it('Should list registration tokens', async function () {
|
||||
{
|
||||
const { total, data } = await server.runnerRegistrationTokens.list({ sort: 'createdAt' })
|
||||
expect(total).to.equal(3)
|
||||
expect(data).to.have.lengthOf(3)
|
||||
expect(new Date(data[0].createdAt)).to.be.below(new Date(data[1].createdAt))
|
||||
expect(new Date(data[1].createdAt)).to.be.below(new Date(data[2].createdAt))
|
||||
|
||||
base = data
|
||||
|
||||
registrationTokenToDelete = data[0]
|
||||
registrationToken = data[1].registrationToken
|
||||
}
|
||||
|
||||
{
|
||||
const { total, data } = await server.runnerRegistrationTokens.list({ sort: '-createdAt', start: 2, count: 1 })
|
||||
expect(total).to.equal(3)
|
||||
expect(data).to.have.lengthOf(1)
|
||||
expect(data[0].registrationToken).to.equal(base[0].registrationToken)
|
||||
}
|
||||
})
|
||||
|
||||
it('Should have appropriate registeredRunnersCount for registration tokens', async function () {
|
||||
await server.runners.register({ name: 'to delete 1', registrationToken: registrationTokenToDelete.registrationToken })
|
||||
await server.runners.register({ name: 'to delete 2', registrationToken: registrationTokenToDelete.registrationToken })
|
||||
|
||||
const { data } = await server.runnerRegistrationTokens.list()
|
||||
|
||||
for (const d of data) {
|
||||
if (d.registrationToken === registrationTokenToDelete.registrationToken) {
|
||||
expect(d.registeredRunnersCount).to.equal(2)
|
||||
} else {
|
||||
expect(d.registeredRunnersCount).to.equal(0)
|
||||
}
|
||||
}
|
||||
|
||||
const { data: runners } = await server.runners.list()
|
||||
expect(runners).to.have.lengthOf(2)
|
||||
})
|
||||
|
||||
it('Should delete a registration token', async function () {
|
||||
await server.runnerRegistrationTokens.delete({ id: registrationTokenToDelete.id })
|
||||
|
||||
const { total, data } = await server.runnerRegistrationTokens.list({ sort: 'createdAt' })
|
||||
expect(total).to.equal(2)
|
||||
expect(data).to.have.lengthOf(2)
|
||||
|
||||
for (const d of data) {
|
||||
expect(d.registeredRunnersCount).to.equal(0)
|
||||
expect(d.registrationToken).to.not.equal(registrationTokenToDelete.registrationToken)
|
||||
}
|
||||
})
|
||||
|
||||
it('Should have removed runners of this registration token', async function () {
|
||||
const { data: runners } = await server.runners.list()
|
||||
expect(runners).to.have.lengthOf(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Managing runners', function () {
|
||||
let toDelete: Runner
|
||||
|
||||
it('Should not have runners available', async function () {
|
||||
const { total, data } = await server.runners.list()
|
||||
|
||||
expect(data).to.have.lengthOf(0)
|
||||
expect(total).to.equal(0)
|
||||
})
|
||||
|
||||
it('Should register runners', async function () {
|
||||
const now = new Date()
|
||||
|
||||
const result = await server.runners.register({
|
||||
name: 'runner 1',
|
||||
description: 'my super runner 1',
|
||||
registrationToken
|
||||
})
|
||||
expect(result.runnerToken).to.exist
|
||||
runnerToken = result.runnerToken
|
||||
|
||||
await server.runners.register({
|
||||
name: 'runner 2',
|
||||
registrationToken
|
||||
})
|
||||
|
||||
const { total, data } = await server.runners.list({ sort: 'createdAt' })
|
||||
expect(total).to.equal(2)
|
||||
expect(data).to.have.lengthOf(2)
|
||||
|
||||
for (const d of data) {
|
||||
expect(d.id).to.exist
|
||||
expect(d.createdAt).to.exist
|
||||
expect(d.updatedAt).to.exist
|
||||
expect(new Date(d.createdAt)).to.be.above(now)
|
||||
expect(new Date(d.updatedAt)).to.be.above(now)
|
||||
expect(new Date(d.lastContact)).to.be.above(now)
|
||||
expect(d.ip).to.exist
|
||||
}
|
||||
|
||||
expect(data[0].name).to.equal('runner 1')
|
||||
expect(data[0].description).to.equal('my super runner 1')
|
||||
|
||||
expect(data[1].name).to.equal('runner 2')
|
||||
expect(data[1].description).to.be.null
|
||||
|
||||
toDelete = data[1]
|
||||
})
|
||||
|
||||
it('Should list runners', async function () {
|
||||
const { total, data } = await server.runners.list({ sort: '-createdAt', start: 1, count: 1 })
|
||||
|
||||
expect(total).to.equal(2)
|
||||
expect(data).to.have.lengthOf(1)
|
||||
expect(data[0].name).to.equal('runner 1')
|
||||
})
|
||||
|
||||
it('Should delete a runner', async function () {
|
||||
await server.runners.delete({ id: toDelete.id })
|
||||
|
||||
const { total, data } = await server.runners.list()
|
||||
|
||||
expect(total).to.equal(1)
|
||||
expect(data).to.have.lengthOf(1)
|
||||
expect(data[0].name).to.equal('runner 1')
|
||||
})
|
||||
|
||||
it('Should unregister a runner', async function () {
|
||||
const registered = await server.runners.autoRegisterRunner()
|
||||
|
||||
{
|
||||
const { total, data } = await server.runners.list()
|
||||
expect(total).to.equal(2)
|
||||
expect(data).to.have.lengthOf(2)
|
||||
}
|
||||
|
||||
await server.runners.unregister({ runnerToken: registered })
|
||||
|
||||
{
|
||||
const { total, data } = await server.runners.list()
|
||||
expect(total).to.equal(1)
|
||||
expect(data).to.have.lengthOf(1)
|
||||
expect(data[0].name).to.equal('runner 1')
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('Managing runner jobs', function () {
|
||||
let jobUUID: string
|
||||
let jobToken: string
|
||||
let lastRunnerContact: Date
|
||||
let failedJob: RunnerJob
|
||||
|
||||
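// Assert the state of the tracked job (jobUUID) and make sure every other job stays in one of the expected states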
async function checkMainJobState (
|
||||
mainJobState: RunnerJobState,
|
||||
otherJobStates: RunnerJobState[] = [ RunnerJobState.PENDING, RunnerJobState.WAITING_FOR_PARENT_JOB ]
|
||||
) {
|
||||
const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' })
|
||||
|
||||
for (const job of data) {
|
||||
if (job.uuid === jobUUID) {
|
||||
expect(job.state.id).to.equal(mainJobState)
|
||||
} else {
|
||||
expect(otherJobStates).to.include(job.state.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getMainJob () {
|
||||
return server.runnerJobs.getJob({ uuid: jobUUID })
|
||||
}
|
||||
|
||||
describe('List jobs', function () {
|
||||
|
||||
it('Should not have jobs', async function () {
|
||||
const { total, data } = await server.runnerJobs.list()
|
||||
|
||||
expect(data).to.have.lengthOf(0)
|
||||
expect(total).to.equal(0)
|
||||
})
|
||||
|
||||
it('Should upload a video and have available jobs', async function () {
|
||||
await server.videos.quickUpload({ name: 'to transcode' })
|
||||
await waitJobs([ server ])
|
||||
|
||||
const { total, data } = await server.runnerJobs.list()
|
||||
|
||||
expect(data).to.have.lengthOf(10)
|
||||
expect(total).to.equal(10)
|
||||
|
||||
for (const job of data) {
|
||||
expect(job.startedAt).to.not.exist
|
||||
expect(job.finishedAt).to.not.exist
|
||||
expect(job.payload).to.exist
|
||||
expect(job.privatePayload).to.exist
|
||||
}
|
||||
|
||||
const hlsJobs = data.filter(d => d.type === 'vod-hls-transcoding')
|
||||
const webVideoJobs = data.filter(d => d.type === 'vod-web-video-transcoding')
|
||||
|
||||
expect(hlsJobs).to.have.lengthOf(5)
|
||||
expect(webVideoJobs).to.have.lengthOf(5)
|
||||
|
||||
const pendingJobs = data.filter(d => d.state.id === RunnerJobState.PENDING)
|
||||
const waitingJobs = data.filter(d => d.state.id === RunnerJobState.WAITING_FOR_PARENT_JOB)
|
||||
|
||||
expect(pendingJobs).to.have.lengthOf(1)
|
||||
expect(waitingJobs).to.have.lengthOf(9)
|
||||
})
|
||||
|
||||
it('Should upload another video and list/sort jobs', async function () {
|
||||
await server.videos.quickUpload({ name: 'to transcode 2' })
|
||||
await waitJobs([ server ])
|
||||
|
||||
{
|
||||
const { total, data } = await server.runnerJobs.list({ start: 0, count: 30 })
|
||||
|
||||
expect(data).to.have.lengthOf(20)
|
||||
expect(total).to.equal(20)
|
||||
|
||||
jobUUID = data[16].uuid
|
||||
}
|
||||
|
||||
{
|
||||
const { total, data } = await server.runnerJobs.list({ start: 3, count: 1, sort: 'createdAt' })
|
||||
expect(total).to.equal(20)
|
||||
|
||||
expect(data).to.have.lengthOf(1)
|
||||
expect(data[0].uuid).to.equal(jobUUID)
|
||||
}
|
||||
|
||||
{
|
||||
let previousPriority = Infinity
|
||||
const { total, data } = await server.runnerJobs.list({ start: 0, count: 100, sort: '-priority' })
|
||||
expect(total).to.equal(20)
|
||||
|
||||
for (const job of data) {
|
||||
expect(job.priority).to.be.at.most(previousPriority)
|
||||
previousPriority = job.priority
|
||||
|
||||
if (job.state.id === RunnerJobState.PENDING) {
|
||||
jobMaxPriority = job.uuid
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should search jobs', async function () {
|
||||
{
|
||||
const { total, data } = await server.runnerJobs.list({ search: jobUUID })
|
||||
|
||||
expect(data).to.have.lengthOf(1)
|
||||
expect(total).to.equal(1)
|
||||
|
||||
expect(data[0].uuid).to.equal(jobUUID)
|
||||
}
|
||||
|
||||
{
|
||||
const { total, data } = await server.runnerJobs.list({ search: 'toto' })
|
||||
|
||||
expect(data).to.have.lengthOf(0)
|
||||
expect(total).to.equal(0)
|
||||
}
|
||||
|
||||
{
|
||||
const { total, data } = await server.runnerJobs.list({ search: 'hls' })
|
||||
|
||||
expect(data).to.not.have.lengthOf(0)
|
||||
expect(total).to.not.equal(0)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('Accept/update/abort/process a job', function () {
|
||||
|
||||
it('Should request available jobs', async function () {
|
||||
lastRunnerContact = new Date()
|
||||
|
||||
const { availableJobs } = await server.runnerJobs.request({ runnerToken })
|
||||
|
||||
// Only optimize jobs are available
|
||||
expect(availableJobs).to.have.lengthOf(2)
|
||||
|
||||
for (const job of availableJobs) {
|
||||
expect(job.uuid).to.exist
|
||||
expect(job.payload.input).to.exist
|
||||
expect(job.payload.output).to.exist
|
||||
|
||||
expect((job as RunnerJobAdmin).privatePayload).to.not.exist
|
||||
}
|
||||
|
||||
const hlsJobs = availableJobs.filter(d => d.type === 'vod-hls-transcoding')
|
||||
const webVideoJobs = availableJobs.filter(d => d.type === 'vod-web-video-transcoding')
|
||||
|
||||
expect(hlsJobs).to.have.lengthOf(0)
|
||||
expect(webVideoJobs).to.have.lengthOf(2)
|
||||
|
||||
jobUUID = webVideoJobs[0].uuid
|
||||
})
|
||||
|
||||
it('Should have sorted available jobs by priority', async function () {
|
||||
const { availableJobs } = await server.runnerJobs.request({ runnerToken })
|
||||
|
||||
expect(availableJobs[0].uuid).to.equal(jobMaxPriority)
|
||||
})
|
||||
|
||||
it('Should have last runner contact updated', async function () {
|
||||
await wait(1000)
|
||||
|
||||
const { data } = await server.runners.list({ sort: 'createdAt' })
|
||||
expect(new Date(data[0].lastContact)).to.be.above(lastRunnerContact)
|
||||
})
|
||||
|
||||
it('Should accept a job', async function () {
|
||||
const startedAt = new Date()
|
||||
|
||||
const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
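// The accept response exposes the job token but hides the private payload; the admin listing does the opposite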
const checkProcessingJob = (job: RunnerJob & { jobToken?: string }, fromAccept: boolean) => {
|
||||
expect(job.uuid).to.equal(jobUUID)
|
||||
|
||||
expect(job.type).to.equal('vod-web-video-transcoding')
|
||||
expect(job.state.label).to.equal('Processing')
|
||||
expect(job.state.id).to.equal(RunnerJobState.PROCESSING)
|
||||
|
||||
expect(job.runner).to.exist
|
||||
expect(job.runner.name).to.equal('runner 1')
|
||||
expect(job.runner.description).to.equal('my super runner 1')
|
||||
|
||||
expect(job.progress).to.be.null
|
||||
|
||||
expect(job.startedAt).to.exist
|
||||
expect(new Date(job.startedAt)).to.be.above(startedAt)
|
||||
|
||||
expect(job.finishedAt).to.not.exist
|
||||
|
||||
expect(job.failures).to.equal(0)
|
||||
|
||||
expect(job.payload).to.exist
|
||||
|
||||
if (fromAccept) {
|
||||
expect(job.jobToken).to.exist
|
||||
expect((job as RunnerJobAdmin).privatePayload).to.not.exist
|
||||
} else {
|
||||
expect(job.jobToken).to.not.exist
|
||||
expect((job as RunnerJobAdmin).privatePayload).to.exist
|
||||
}
|
||||
}
|
||||
|
||||
checkProcessingJob(job, true)
|
||||
|
||||
const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' })
|
||||
|
||||
const processingJob = data.find(j => j.uuid === jobUUID)
|
||||
checkProcessingJob(processingJob, false)
|
||||
|
||||
await checkMainJobState(RunnerJobState.PROCESSING)
|
||||
})
|
||||
|
||||
it('Should update a job', async function () {
|
||||
await server.runnerJobs.update({ runnerToken, jobUUID, jobToken, progress: 53 })
|
||||
|
||||
const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' })
|
||||
|
||||
for (const job of data) {
|
||||
if (job.state.id === RunnerJobState.PROCESSING) {
|
||||
expect(job.progress).to.equal(53)
|
||||
} else {
|
||||
expect(job.progress).to.be.null
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should abort a job', async function () {
|
||||
await server.runnerJobs.abort({ runnerToken, jobUUID, jobToken, reason: 'for tests' })
|
||||
|
||||
await checkMainJobState(RunnerJobState.PENDING)
|
||||
|
||||
const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' })
|
||||
for (const job of data) {
|
||||
expect(job.progress).to.be.null
|
||||
}
|
||||
})
|
||||
|
||||
it('Should accept the same job again and post a success', async function () {
|
||||
const { availableJobs } = await server.runnerJobs.request({ runnerToken })
|
||||
expect(availableJobs.find(j => j.uuid === jobUUID)).to.exist
|
||||
|
||||
const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
await checkMainJobState(RunnerJobState.PROCESSING)
|
||||
|
||||
const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' })
|
||||
|
||||
for (const job of data) {
|
||||
expect(job.progress).to.be.null
|
||||
}
|
||||
|
||||
const payload = {
|
||||
videoFile: 'video_short.mp4'
|
||||
}
|
||||
|
||||
await server.runnerJobs.success({ runnerToken, jobUUID, jobToken, payload })
|
||||
})
|
||||
|
||||
it('Should not have available jobs anymore', async function () {
|
||||
await checkMainJobState(RunnerJobState.COMPLETED)
|
||||
|
||||
const job = await getMainJob()
|
||||
expect(job.finishedAt).to.exist
|
||||
|
||||
const { availableJobs } = await server.runnerJobs.request({ runnerToken })
|
||||
expect(availableJobs.find(j => j.uuid === jobUUID)).to.not.exist
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error job', function () {
|
||||
|
||||
it('Should accept another job and post an error', async function () {
|
||||
await server.runnerJobs.cancelAllJobs()
|
||||
await server.videos.quickUpload({ name: 'video' })
|
||||
await waitJobs([ server ])
|
||||
|
||||
const { availableJobs } = await server.runnerJobs.request({ runnerToken })
|
||||
jobUUID = availableJobs[0].uuid
|
||||
|
||||
const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
await server.runnerJobs.error({ runnerToken, jobUUID, jobToken, message: 'Error' })
|
||||
})
|
||||
|
||||
it('Should have job failures increased', async function () {
|
||||
const job = await getMainJob()
|
||||
expect(job.state.id).to.equal(RunnerJobState.PENDING)
|
||||
expect(job.failures).to.equal(1)
|
||||
expect(job.error).to.be.null
|
||||
expect(job.progress).to.be.null
|
||||
expect(job.finishedAt).to.not.exist
|
||||
})
|
||||
|
||||
it('Should error a job when its attempt count is too high', async function () {
|
||||
for (let i = 0; i < 4; i++) {
|
||||
const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
await server.runnerJobs.error({ runnerToken, jobUUID, jobToken, message: 'Error ' + i })
|
||||
}
|
||||
|
||||
const job = await getMainJob()
|
||||
expect(job.failures).to.equal(5)
|
||||
expect(job.state.id).to.equal(RunnerJobState.ERRORED)
|
||||
expect(job.state.label).to.equal('Errored')
|
||||
expect(job.error).to.equal('Error 3')
|
||||
expect(job.progress).to.be.null
|
||||
expect(job.finishedAt).to.exist
|
||||
|
||||
failedJob = job
|
||||
})
|
||||
|
||||
it('Should have failed children jobs too', async function () {
|
||||
const { data } = await server.runnerJobs.list({ count: 50, sort: '-updatedAt' })
|
||||
|
||||
const children = data.filter(j => j.parent?.uuid === failedJob.uuid)
|
||||
expect(children).to.have.lengthOf(9)
|
||||
|
||||
for (const child of children) {
|
||||
expect(child.parent.uuid).to.equal(failedJob.uuid)
|
||||
expect(child.parent.type).to.equal(failedJob.type)
|
||||
expect(child.parent.state.id).to.equal(failedJob.state.id)
|
||||
expect(child.parent.state.label).to.equal(failedJob.state.label)
|
||||
|
||||
expect(child.state.id).to.equal(RunnerJobState.PARENT_ERRORED)
|
||||
expect(child.state.label).to.equal('Parent job failed')
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('Cancel', function () {
|
||||
|
||||
it('Should cancel a pending job', async function () {
|
||||
await server.videos.quickUpload({ name: 'video' })
|
||||
await waitJobs([ server ])
|
||||
|
||||
{
|
||||
const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' })
|
||||
|
||||
const pendingJob = data.find(j => j.state.id === RunnerJobState.PENDING)
|
||||
jobUUID = pendingJob.uuid
|
||||
|
||||
await server.runnerJobs.cancelByAdmin({ jobUUID })
|
||||
}
|
||||
|
||||
{
|
||||
const job = await getMainJob()
|
||||
expect(job.state.id).to.equal(RunnerJobState.CANCELLED)
|
||||
expect(job.state.label).to.equal('Cancelled')
|
||||
}
|
||||
|
||||
{
|
||||
const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' })
|
||||
const children = data.filter(j => j.parent?.uuid === jobUUID)
|
||||
expect(children).to.have.lengthOf(9)
|
||||
|
||||
for (const child of children) {
|
||||
expect(child.state.id).to.equal(RunnerJobState.PARENT_CANCELLED)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should cancel an already accepted job and skip success/error', async function () {
|
||||
await server.videos.quickUpload({ name: 'video' })
|
||||
await waitJobs([ server ])
|
||||
|
||||
const { availableJobs } = await server.runnerJobs.request({ runnerToken })
|
||||
jobUUID = availableJobs[0].uuid
|
||||
|
||||
const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
await server.runnerJobs.cancelByAdmin({ jobUUID })
|
||||
|
||||
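// The job has already been cancelled by the admin, so the runner abort is rejected with a 404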
await server.runnerJobs.abort({ runnerToken, jobUUID, jobToken, reason: 'aborted', expectedStatus: HttpStatusCode.NOT_FOUND_404 })
|
||||
})
|
||||
})
|
||||
|
||||
describe('Stalled jobs', function () {
|
||||
|
||||
it('Should abort stalled jobs', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await server.videos.quickUpload({ name: 'video' })
|
||||
await server.videos.quickUpload({ name: 'video' })
|
||||
await waitJobs([ server ])
|
||||
|
||||
const { job: job1 } = await server.runnerJobs.autoAccept({ runnerToken })
|
||||
const { job: stalledJob } = await server.runnerJobs.autoAccept({ runnerToken })
|
||||
|
||||
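// Keep the first job alive with regular updates; the other one gets none, so it should be reset to pending after the 5 seconds VOD stalled timeout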
for (let i = 0; i < 6; i++) {
|
||||
await wait(2000)
|
||||
|
||||
await server.runnerJobs.update({ runnerToken, jobToken: job1.jobToken, jobUUID: job1.uuid })
|
||||
}
|
||||
|
||||
const refreshedJob1 = await server.runnerJobs.getJob({ uuid: job1.uuid })
|
||||
const refreshedStalledJob = await server.runnerJobs.getJob({ uuid: stalledJob.uuid })
|
||||
|
||||
expect(refreshedJob1.state.id).to.equal(RunnerJobState.PROCESSING)
|
||||
expect(refreshedStalledJob.state.id).to.equal(RunnerJobState.PENDING)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Rate limit', function () {
|
||||
|
||||
before(async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await server.kill()
|
||||
|
||||
await server.run({
|
||||
rates_limit: {
|
||||
api: {
|
||||
max: 10
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
it('Should rate limit an unknown runner', async function () {
|
||||
const path = '/api/v1/ping'
|
||||
const fields = { runnerToken: 'toto' }
|
||||
|
||||
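// Errors are swallowed on purpose: with a 10 requests limit, some of these calls are already rejected by the rate limiter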
for (let i = 0; i < 20; i++) {
|
||||
try {
|
||||
await makePostBodyRequest({ url: server.url, path, fields, expectedStatus: HttpStatusCode.OK_200 })
|
||||
} catch {}
|
||||
}
|
||||
|
||||
await makePostBodyRequest({ url: server.url, path, fields, expectedStatus: HttpStatusCode.TOO_MANY_REQUESTS_429 })
|
||||
})
|
||||
|
||||
it('Should not rate limit a registered runner', async function () {
|
||||
const path = '/api/v1/ping'
|
||||
|
||||
for (let i = 0; i < 20; i++) {
|
||||
await makePostBodyRequest({ url: server.url, path, fields: { runnerToken }, expectedStatus: HttpStatusCode.OK_200 })
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
await cleanupTests([ server ])
|
||||
})
|
||||
})
|
|
@ -0,0 +1,330 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { FfmpegCommand } from 'fluent-ffmpeg'
|
||||
import { readFile } from 'fs-extra'
|
||||
import { buildAbsoluteFixturePath, wait } from '@shared/core-utils'
|
||||
import {
|
||||
HttpStatusCode,
|
||||
LiveRTMPHLSTranscodingUpdatePayload,
|
||||
LiveVideo,
|
||||
LiveVideoError,
|
||||
RunnerJob,
|
||||
RunnerJobLiveRTMPHLSTranscodingPayload,
|
||||
Video,
|
||||
VideoPrivacy,
|
||||
VideoState
|
||||
} from '@shared/models'
|
||||
import {
|
||||
cleanupTests,
|
||||
createSingleServer,
|
||||
makeRawRequest,
|
||||
PeerTubeServer,
|
||||
sendRTMPStream,
|
||||
setAccessTokensToServers,
|
||||
setDefaultVideoChannel,
|
||||
stopFfmpeg,
|
||||
testFfmpegStreamError,
|
||||
waitJobs
|
||||
} from '@shared/server-commands'
|
||||
|
||||
describe('Test runner live transcoding', function () {
|
||||
let server: PeerTubeServer
|
||||
let runnerToken: string
|
||||
let baseUrl: string
|
||||
|
||||
before(async function () {
|
||||
this.timeout(120_000)
|
||||
|
||||
server = await createSingleServer(1)
|
||||
|
||||
await setAccessTokensToServers([ server ])
|
||||
await setDefaultVideoChannel([ server ])
|
||||
|
||||
await server.config.enableRemoteTranscoding()
|
||||
await server.config.enableTranscoding()
|
||||
runnerToken = await server.runners.autoRegisterRunner()
|
||||
|
||||
baseUrl = server.url + '/static/streaming-playlists/hls'
|
||||
})
|
||||
|
||||
describe('Without transcoding enabled', function () {
|
||||
|
||||
before(async function () {
|
||||
await server.config.enableLive({
|
||||
allowReplay: false,
|
||||
resolutions: 'min',
|
||||
transcoding: false
|
||||
})
|
||||
})
|
||||
|
||||
it('Should not have available jobs', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const { live, video } = await server.live.quickCreate({ permanentLive: true, saveReplay: false, privacy: VideoPrivacy.PUBLIC })
|
||||
|
||||
const ffmpegCommand = sendRTMPStream({ rtmpBaseUrl: live.rtmpUrl, streamKey: live.streamKey })
|
||||
await server.live.waitUntilPublished({ videoId: video.id })
|
||||
|
||||
await waitJobs([ server ])
|
||||
|
||||
const { availableJobs } = await server.runnerJobs.requestLive({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(0)
|
||||
|
||||
await stopFfmpeg(ffmpegCommand)
|
||||
})
|
||||
})
|
||||
|
||||
describe('With transcoding enabled on classic live', function () {
|
||||
let live: LiveVideo
|
||||
let video: Video
|
||||
let ffmpegCommand: FfmpegCommand
|
||||
let jobUUID: string
|
||||
let acceptedJob: RunnerJob & { jobToken: string }
|
||||
|
||||
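// The playlist/TS files served by the HLS static route should match the fixtures sent in the runner job updates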
async function testPlaylistFile (fixture: string, expected: string) {
|
||||
const text = await server.streamingPlaylists.get({ url: `${baseUrl}/${video.uuid}/${fixture}` })
|
||||
expect(await readFile(buildAbsoluteFixturePath(expected), 'utf-8')).to.equal(text)
|
||||
}
|
||||
|
||||
async function testTSFile (fixture: string, expected: string) {
|
||||
const { body } = await makeRawRequest({ url: `${baseUrl}/${video.uuid}/${fixture}`, expectedStatus: HttpStatusCode.OK_200 })
|
||||
expect(await readFile(buildAbsoluteFixturePath(expected))).to.deep.equal(body)
|
||||
}
|
||||
|
||||
before(async function () {
|
||||
await server.config.enableLive({
|
||||
allowReplay: true,
|
||||
resolutions: 'max',
|
||||
transcoding: true
|
||||
})
|
||||
})
|
||||
|
||||
it('Should publish a live and have available jobs', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const data = await server.live.quickCreate({ permanentLive: false, saveReplay: false, privacy: VideoPrivacy.PUBLIC })
|
||||
live = data.live
|
||||
video = data.video
|
||||
|
||||
ffmpegCommand = sendRTMPStream({ rtmpBaseUrl: live.rtmpUrl, streamKey: live.streamKey })
|
||||
await waitJobs([ server ])
|
||||
|
||||
const job = await server.runnerJobs.requestLiveJob(runnerToken)
|
||||
jobUUID = job.uuid
|
||||
|
||||
expect(job.type).to.equal('live-rtmp-hls-transcoding')
|
||||
expect(job.payload.input.rtmpUrl).to.exist
|
||||
|
||||
expect(job.payload.output.toTranscode).to.have.lengthOf(5)
|
||||
|
||||
for (const { resolution, fps } of job.payload.output.toTranscode) {
|
||||
expect([ 720, 480, 360, 240, 144 ]).to.contain(resolution)
|
||||
|
||||
expect(fps).to.be.above(25)
|
||||
expect(fps).to.be.below(70)
|
||||
}
|
||||
})
|
||||
|
||||
it('Should update the live with a new chunk', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const { job } = await server.runnerJobs.accept<RunnerJobLiveRTMPHLSTranscodingPayload>({ jobUUID, runnerToken })
|
||||
acceptedJob = job
|
||||
|
||||
{
|
||||
const payload: LiveRTMPHLSTranscodingUpdatePayload = {
|
||||
masterPlaylistFile: 'live/master.m3u8',
|
||||
resolutionPlaylistFile: 'live/0.m3u8',
|
||||
resolutionPlaylistFilename: '0.m3u8',
|
||||
type: 'add-chunk',
|
||||
videoChunkFile: 'live/0-000067.ts',
|
||||
videoChunkFilename: '0-000067.ts'
|
||||
}
|
||||
await server.runnerJobs.update({ jobUUID, runnerToken, jobToken: job.jobToken, payload, progress: 50 })
|
||||
|
||||
const updatedJob = await server.runnerJobs.getJob({ uuid: job.uuid })
|
||||
expect(updatedJob.progress).to.equal(50)
|
||||
}
|
||||
|
||||
{
|
||||
const payload: LiveRTMPHLSTranscodingUpdatePayload = {
|
||||
resolutionPlaylistFile: 'live/1.m3u8',
|
||||
resolutionPlaylistFilename: '1.m3u8',
|
||||
type: 'add-chunk',
|
||||
videoChunkFile: 'live/1-000068.ts',
|
||||
videoChunkFilename: '1-000068.ts'
|
||||
}
|
||||
await server.runnerJobs.update({ jobUUID, runnerToken, jobToken: job.jobToken, payload })
|
||||
}
|
||||
|
||||
await wait(1000)
|
||||
|
||||
await testPlaylistFile('master.m3u8', 'live/master.m3u8')
|
||||
await testPlaylistFile('0.m3u8', 'live/0.m3u8')
|
||||
await testPlaylistFile('1.m3u8', 'live/1.m3u8')
|
||||
|
||||
await testTSFile('0-000067.ts', 'live/0-000067.ts')
|
||||
await testTSFile('1-000068.ts', 'live/1-000068.ts')
|
||||
})
|
||||
|
||||
it('Should replace existing m3u8 on update', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const payload: LiveRTMPHLSTranscodingUpdatePayload = {
|
||||
masterPlaylistFile: 'live/1.m3u8',
|
||||
resolutionPlaylistFilename: '0.m3u8',
|
||||
resolutionPlaylistFile: 'live/1.m3u8',
|
||||
type: 'add-chunk',
|
||||
videoChunkFile: 'live/1-000069.ts',
|
||||
videoChunkFilename: '1-000068.ts'
|
||||
}
|
||||
await server.runnerJobs.update({ jobUUID, runnerToken, jobToken: acceptedJob.jobToken, payload })
|
||||
await wait(1000)
|
||||
|
||||
await testPlaylistFile('master.m3u8', 'live/1.m3u8')
|
||||
await testPlaylistFile('0.m3u8', 'live/1.m3u8')
|
||||
await testTSFile('1-000068.ts', 'live/1-000069.ts')
|
||||
})
|
||||
|
||||
it('Should update the live with removed chunks', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const payload: LiveRTMPHLSTranscodingUpdatePayload = {
|
||||
resolutionPlaylistFile: 'live/0.m3u8',
|
||||
resolutionPlaylistFilename: '0.m3u8',
|
||||
type: 'remove-chunk',
|
||||
videoChunkFilename: '1-000068.ts'
|
||||
}
|
||||
await server.runnerJobs.update({ jobUUID, runnerToken, jobToken: acceptedJob.jobToken, payload })
|
||||
|
||||
await wait(1000)
|
||||
|
||||
await server.streamingPlaylists.get({ url: `${baseUrl}/${video.uuid}/master.m3u8` })
|
||||
await server.streamingPlaylists.get({ url: `${baseUrl}/${video.uuid}/0.m3u8` })
|
||||
await server.streamingPlaylists.get({ url: `${baseUrl}/${video.uuid}/1.m3u8` })
|
||||
await makeRawRequest({ url: `${baseUrl}/${video.uuid}/0-000067.ts`, expectedStatus: HttpStatusCode.OK_200 })
|
||||
await makeRawRequest({ url: `${baseUrl}/${video.uuid}/1-000068.ts`, expectedStatus: HttpStatusCode.NOT_FOUND_404 })
|
||||
})
|
||||
|
||||
it('Should complete the live and save the replay', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
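// Send a couple of additional chunks before ending the live and posting the job success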
for (const segment of [ '0-000069.ts', '0-000070.ts' ]) {
|
||||
const payload: LiveRTMPHLSTranscodingUpdatePayload = {
|
||||
masterPlaylistFile: 'live/master.m3u8',
|
||||
resolutionPlaylistFilename: '0.m3u8',
|
||||
resolutionPlaylistFile: 'live/0.m3u8',
|
||||
type: 'add-chunk',
|
||||
videoChunkFile: 'live/' + segment,
|
||||
videoChunkFilename: segment
|
||||
}
|
||||
await server.runnerJobs.update({ jobUUID, runnerToken, jobToken: acceptedJob.jobToken, payload })
|
||||
|
||||
await wait(1000)
|
||||
}
|
||||
|
||||
await waitJobs([ server ])
|
||||
|
||||
{
|
||||
const { state } = await server.videos.get({ id: video.uuid })
|
||||
expect(state.id).to.equal(VideoState.PUBLISHED)
|
||||
}
|
||||
|
||||
await stopFfmpeg(ffmpegCommand)
|
||||
|
||||
await server.runnerJobs.success({ jobUUID, runnerToken, jobToken: acceptedJob.jobToken, payload: {} })
|
||||
|
||||
await wait(1500)
|
||||
await waitJobs([ server ])
|
||||
|
||||
{
|
||||
const { state } = await server.videos.get({ id: video.uuid })
|
||||
expect(state.id).to.equal(VideoState.LIVE_ENDED)
|
||||
|
||||
const session = await server.live.findLatestSession({ videoId: video.uuid })
|
||||
expect(session.error).to.be.null
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('With transcoding enabled on cancelled/aborted/errored live', function () {
|
||||
let live: LiveVideo
|
||||
let video: Video
|
||||
let ffmpegCommand: FfmpegCommand
|
||||
|
||||
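// Start streaming to the live and accept the resulting live-rtmp-hls-transcoding job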
async function prepare () {
|
||||
ffmpegCommand = sendRTMPStream({ rtmpBaseUrl: live.rtmpUrl, streamKey: live.streamKey })
|
||||
await server.runnerJobs.requestLiveJob(runnerToken)
|
||||
|
||||
const { job } = await server.runnerJobs.autoAccept({ runnerToken, type: 'live-rtmp-hls-transcoding' })
|
||||
|
||||
return job
|
||||
}
|
||||
|
||||
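// Wait for the latest live session to be saved, then check the error it recorded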
async function checkSessionError (error: LiveVideoError) {
|
||||
await wait(1500)
|
||||
await waitJobs([ server ])
|
||||
|
||||
const session = await server.live.findLatestSession({ videoId: video.uuid })
|
||||
expect(session.error).to.equal(error)
|
||||
}
|
||||
|
||||
before(async function () {
|
||||
await server.config.enableLive({
|
||||
allowReplay: true,
|
||||
resolutions: 'max',
|
||||
transcoding: true
|
||||
})
|
||||
|
||||
const data = await server.live.quickCreate({ permanentLive: true, saveReplay: false, privacy: VideoPrivacy.PUBLIC })
|
||||
live = data.live
|
||||
video = data.video
|
||||
})
|
||||
|
||||
it('Should abort a running live', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const job = await prepare()
|
||||
|
||||
await Promise.all([
|
||||
server.runnerJobs.abort({ jobUUID: job.uuid, runnerToken, jobToken: job.jobToken, reason: 'abort' }),
|
||||
testFfmpegStreamError(ffmpegCommand, true)
|
||||
])
|
||||
|
||||
// Abort is not supported
|
||||
await checkSessionError(LiveVideoError.RUNNER_JOB_ERROR)
|
||||
})
|
||||
|
||||
it('Should cancel a running live', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const job = await prepare()
|
||||
|
||||
await Promise.all([
|
||||
server.runnerJobs.cancelByAdmin({ jobUUID: job.uuid }),
|
||||
testFfmpegStreamError(ffmpegCommand, true)
|
||||
])
|
||||
|
||||
await checkSessionError(LiveVideoError.RUNNER_JOB_CANCEL)
|
||||
})
|
||||
|
||||
it('Should error a running live', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const job = await prepare()
|
||||
|
||||
await Promise.all([
|
||||
server.runnerJobs.error({ jobUUID: job.uuid, runnerToken, jobToken: job.jobToken, message: 'error' }),
|
||||
testFfmpegStreamError(ffmpegCommand, true)
|
||||
])
|
||||
|
||||
await checkSessionError(LiveVideoError.RUNNER_JOB_ERROR)
|
||||
})
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
await cleanupTests([ server ])
|
||||
})
|
||||
})
|
|
@ -0,0 +1,116 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { wait } from '@shared/core-utils'
|
||||
import {
|
||||
cleanupTests,
|
||||
createSingleServer,
|
||||
PeerTubeServer,
|
||||
setAccessTokensToServers,
|
||||
setDefaultVideoChannel,
|
||||
waitJobs
|
||||
} from '@shared/server-commands'
|
||||
|
||||
describe('Test runner socket', function () {
|
||||
let server: PeerTubeServer
|
||||
let runnerToken: string
|
||||
|
||||
before(async function () {
|
||||
this.timeout(120_000)
|
||||
|
||||
server = await createSingleServer(1)
|
||||
|
||||
await setAccessTokensToServers([ server ])
|
||||
await setDefaultVideoChannel([ server ])
|
||||
|
||||
await server.config.enableTranscoding(true, true)
|
||||
await server.config.enableRemoteTranscoding()
|
||||
runnerToken = await server.runners.autoRegisterRunner()
|
||||
})
|
||||
|
||||
it('Should throw an error without runner token', function (done) {
|
||||
const localSocket = server.socketIO.getRunnersSocket({ runnerToken: null })
|
||||
localSocket.on('connect_error', err => {
|
||||
expect(err.message).to.contain('No runner token provided')
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('Should throw an error with a bad runner token', function (done) {
|
||||
const localSocket = server.socketIO.getRunnersSocket({ runnerToken: 'ergag' })
|
||||
localSocket.on('connect_error', err => {
|
||||
expect(err.message).to.contain('Invalid runner token')
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('Should not send a ping if there are no available jobs', async function () {
|
||||
let pings = 0
|
||||
const localSocket = server.socketIO.getRunnersSocket({ runnerToken })
|
||||
localSocket.on('available-jobs', () => pings++)
|
||||
|
||||
expect(pings).to.equal(0)
|
||||
})
|
||||
|
||||
it('Should send a ping on available job', async function () {
|
||||
let pings = 0
|
||||
const localSocket = server.socketIO.getRunnersSocket({ runnerToken })
|
||||
localSocket.on('available-jobs', () => pings++)
|
||||
|
||||
await server.videos.quickUpload({ name: 'video1' })
|
||||
|
||||
// Wait for debounce
|
||||
await wait(1000)
|
||||
await waitJobs([ server ])
|
||||
expect(pings).to.equal(1)
|
||||
|
||||
await server.videos.quickUpload({ name: 'video2' })
|
||||
|
||||
// Wait for debounce
|
||||
await wait(1000)
|
||||
await waitJobs([ server ])
|
||||
expect(pings).to.equal(2)
|
||||
|
||||
await server.runnerJobs.cancelAllJobs()
|
||||
})
|
||||
|
||||
it('Should send a ping when a child is ready', async function () {
|
||||
let pings = 0
|
||||
const localSocket = server.socketIO.getRunnersSocket({ runnerToken })
|
||||
localSocket.on('available-jobs', () => pings++)
|
||||
|
||||
await server.videos.quickUpload({ name: 'video3' })
|
||||
// Wait for debounce
|
||||
await wait(1000)
|
||||
await waitJobs([ server ])
|
||||
|
||||
expect(pings).to.equal(1)
|
||||
|
||||
await server.runnerJobs.autoProcessWebVideoJob(runnerToken)
|
||||
// Wait for debounce
|
||||
await wait(1000)
|
||||
await waitJobs([ server ])
|
||||
|
||||
expect(pings).to.equal(2)
|
||||
})
|
||||
|
||||
it('Should not send a ping if the ended job does not have a child', async function () {
|
||||
let pings = 0
|
||||
const localSocket = server.socketIO.getRunnersSocket({ runnerToken })
|
||||
localSocket.on('available-jobs', () => pings++)
|
||||
|
||||
const { availableJobs } = await server.runnerJobs.request({ runnerToken })
|
||||
const job = availableJobs.find(j => j.type === 'vod-web-video-transcoding')
|
||||
await server.runnerJobs.autoProcessWebVideoJob(runnerToken, job.uuid)
|
||||
|
||||
// Wait for debounce
|
||||
await wait(1000)
|
||||
await waitJobs([ server ])
|
||||
|
||||
expect(pings).to.equal(0)
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
await cleanupTests([ server ])
|
||||
})
|
||||
})
|
|
@ -0,0 +1,541 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { readFile } from 'fs-extra'
|
||||
import { completeCheckHlsPlaylist } from '@server/tests/shared'
|
||||
import { buildAbsoluteFixturePath } from '@shared/core-utils'
|
||||
import {
|
||||
HttpStatusCode,
|
||||
RunnerJobSuccessPayload,
|
||||
RunnerJobVODAudioMergeTranscodingPayload,
|
||||
RunnerJobVODHLSTranscodingPayload,
|
||||
RunnerJobVODPayload,
|
||||
RunnerJobVODWebVideoTranscodingPayload,
|
||||
VideoState,
|
||||
VODAudioMergeTranscodingSuccess,
|
||||
VODHLSTranscodingSuccess,
|
||||
VODWebVideoTranscodingSuccess
|
||||
} from '@shared/models'
|
||||
import {
|
||||
cleanupTests,
|
||||
createMultipleServers,
|
||||
doubleFollow,
|
||||
makeGetRequest,
|
||||
makeRawRequest,
|
||||
PeerTubeServer,
|
||||
setAccessTokensToServers,
|
||||
setDefaultVideoChannel,
|
||||
waitJobs
|
||||
} from '@shared/server-commands'
|
||||
|
||||
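// Accept and complete every available VOD job using fixture files until no job remains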
async function processAllJobs (server: PeerTubeServer, runnerToken: string) {
|
||||
do {
|
||||
const { availableJobs } = await server.runnerJobs.requestVOD({ runnerToken })
|
||||
if (availableJobs.length === 0) break
|
||||
|
||||
const { job } = await server.runnerJobs.accept<RunnerJobVODPayload>({ runnerToken, jobUUID: availableJobs[0].uuid })
|
||||
|
||||
const payload: RunnerJobSuccessPayload = {
|
||||
videoFile: `video_short_${job.payload.output.resolution}p.mp4`,
|
||||
resolutionPlaylistFile: `video_short_${job.payload.output.resolution}p.m3u8`
|
||||
}
|
||||
await server.runnerJobs.success({ runnerToken, jobUUID: job.uuid, jobToken: job.jobToken, payload })
|
||||
} while (true)
|
||||
|
||||
await waitJobs([ server ])
|
||||
}
|
||||
|
||||
describe('Test runner VOD transcoding', function () {
|
||||
let servers: PeerTubeServer[] = []
|
||||
let runnerToken: string
|
||||
|
||||
before(async function () {
|
||||
this.timeout(120_000)
|
||||
|
||||
servers = await createMultipleServers(2)
|
||||
|
||||
await setAccessTokensToServers(servers)
|
||||
await setDefaultVideoChannel(servers)
|
||||
|
||||
await doubleFollow(servers[0], servers[1])
|
||||
|
||||
await servers[0].config.enableRemoteTranscoding()
|
||||
runnerToken = await servers[0].runners.autoRegisterRunner()
|
||||
})
|
||||
|
||||
describe('Without transcoding', function () {
|
||||
|
||||
before(async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].config.disableTranscoding()
|
||||
await servers[0].videos.quickUpload({ name: 'video' })
|
||||
|
||||
await waitJobs(servers)
|
||||
})
|
||||
|
||||
it('Should not have available jobs', async function () {
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('With classic transcoding enabled', function () {
|
||||
|
||||
before(async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].config.enableTranscoding(true, true)
|
||||
})
|
||||
|
||||
it('Should error a transcoding job', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].runnerJobs.cancelAllJobs()
|
||||
const { uuid } = await servers[0].videos.quickUpload({ name: 'video' })
|
||||
await waitJobs(servers)
|
||||
|
||||
const { availableJobs } = await servers[0].runnerJobs.request({ runnerToken })
|
||||
const jobUUID = availableJobs[0].uuid
|
||||
|
||||
const { job } = await servers[0].runnerJobs.accept({ runnerToken, jobUUID })
|
||||
const jobToken = job.jobToken
|
||||
|
||||
await servers[0].runnerJobs.error({ runnerToken, jobUUID, jobToken, message: 'Error' })
|
||||
|
||||
const video = await servers[0].videos.get({ id: uuid })
|
||||
expect(video.state.id).to.equal(VideoState.TRANSCODING_FAILED)
|
||||
})
|
||||
|
||||
it('Should cancel a transcoding job', async function () {
|
||||
await servers[0].runnerJobs.cancelAllJobs()
|
||||
const { uuid } = await servers[0].videos.quickUpload({ name: 'video' })
|
||||
await waitJobs(servers)
|
||||
|
||||
const { availableJobs } = await servers[0].runnerJobs.request({ runnerToken })
|
||||
const jobUUID = availableJobs[0].uuid
|
||||
|
||||
await servers[0].runnerJobs.cancelByAdmin({ jobUUID })
|
||||
|
||||
const video = await servers[0].videos.get({ id: uuid })
|
||||
expect(video.state.id).to.equal(VideoState.PUBLISHED)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Web video transcoding only', function () {
|
||||
let videoUUID: string
|
||||
let jobToken: string
|
||||
let jobUUID: string
|
||||
|
||||
before(async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].runnerJobs.cancelAllJobs()
|
||||
await servers[0].config.enableTranscoding(true, false)
|
||||
|
||||
const { uuid } = await servers[0].videos.quickUpload({ name: 'web video', fixture: 'video_short.webm' })
|
||||
videoUUID = uuid
|
||||
|
||||
await waitJobs(servers)
|
||||
})
|
||||
|
||||
it('Should have jobs available for remote runners', async function () {
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(1)
|
||||
|
||||
jobUUID = availableJobs[0].uuid
|
||||
})
|
||||
|
||||
it('Should have a valid first transcoding job', async function () {
|
||||
const { job } = await servers[0].runnerJobs.accept<RunnerJobVODWebVideoTranscodingPayload>({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
expect(job.type).to.equal('vod-web-video-transcoding')
|
||||
expect(job.payload.input.videoFileUrl).to.exist
|
||||
expect(job.payload.output.resolution).to.equal(720)
|
||||
expect(job.payload.output.fps).to.equal(25)
|
||||
|
||||
const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken })
|
||||
const inputFile = await readFile(buildAbsoluteFixturePath('video_short.webm'))
|
||||
|
||||
expect(body).to.deep.equal(inputFile)
|
||||
})
|
||||
|
||||
it('Should transcode the max video resolution and send it back to the server', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
const payload: VODWebVideoTranscodingSuccess = {
|
||||
videoFile: 'video_short.mp4'
|
||||
}
|
||||
await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload })
|
||||
|
||||
await waitJobs(servers)
|
||||
})
|
||||
|
||||
it('Should have the video updated', async function () {
|
||||
for (const server of servers) {
|
||||
const video = await server.videos.get({ id: videoUUID })
|
||||
expect(video.files).to.have.lengthOf(1)
|
||||
expect(video.streamingPlaylists).to.have.lengthOf(0)
|
||||
|
||||
const { body } = await makeRawRequest({ url: video.files[0].fileUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
expect(body).to.deep.equal(await readFile(buildAbsoluteFixturePath('video_short.mp4')))
|
||||
}
|
||||
})
|
||||
|
||||
it('Should have 4 lower resolutions to transcode', async function () {
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(4)
|
||||
|
||||
for (const resolution of [ 480, 360, 240, 144 ]) {
|
||||
const job = availableJobs.find(j => j.payload.output.resolution === resolution)
|
||||
expect(job).to.exist
|
||||
expect(job.type).to.equal('vod-web-video-transcoding')
|
||||
|
||||
if (resolution === 240) jobUUID = job.uuid
|
||||
}
|
||||
})
|
||||
|
||||
it('Should process one of these transcoding jobs', async function () {
|
||||
const { job } = await servers[0].runnerJobs.accept<RunnerJobVODWebVideoTranscodingPayload>({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken })
|
||||
const inputFile = await readFile(buildAbsoluteFixturePath('video_short.mp4'))
|
||||
|
||||
expect(body).to.deep.equal(inputFile)
|
||||
|
||||
const payload: VODWebVideoTranscodingSuccess = { videoFile: 'video_short_240p.mp4' }
|
||||
await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload })
|
||||
})
|
||||
|
||||
it('Should process all other jobs', async function () {
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(3)
|
||||
|
||||
for (const resolution of [ 480, 360, 144 ]) {
|
||||
const availableJob = availableJobs.find(j => j.payload.output.resolution === resolution)
|
||||
expect(availableJob).to.exist
|
||||
jobUUID = availableJob.uuid
|
||||
|
||||
const { job } = await servers[0].runnerJobs.accept<RunnerJobVODWebVideoTranscodingPayload>({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken })
|
||||
const inputFile = await readFile(buildAbsoluteFixturePath('video_short.mp4'))
|
||||
expect(body).to.deep.equal(inputFile)
|
||||
|
||||
const payload: VODWebVideoTranscodingSuccess = { videoFile: `video_short_${resolution}p.mp4` }
|
||||
await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload })
|
||||
}
|
||||
})
|
||||
|
||||
it('Should have the video updated', async function () {
|
||||
for (const server of servers) {
|
||||
const video = await server.videos.get({ id: videoUUID })
|
||||
expect(video.files).to.have.lengthOf(5)
|
||||
expect(video.streamingPlaylists).to.have.lengthOf(0)
|
||||
|
||||
const { body } = await makeRawRequest({ url: video.files[0].fileUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
expect(body).to.deep.equal(await readFile(buildAbsoluteFixturePath('video_short.mp4')))
|
||||
|
||||
for (const file of video.files) {
|
||||
await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
await makeRawRequest({ url: file.torrentUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should not have available jobs anymore', async function () {
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(0)
|
||||
})
|
||||
})
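Taken together, the tests above drive the runner protocol by hand: request the available VOD jobs, accept one to obtain a job token, download the input file with that token, then post a success payload. A condensed sketch of that loop, assuming the imports, `runnerToken` and fixture naming already used in this spec (the function name is only illustrative):

// Illustrative only: condensed request/accept/download/success loop mirroring the tests above
async function processAvailableWebVideoJobs (server: PeerTubeServer, runnerToken: string) {
  const { availableJobs } = await server.runnerJobs.requestVOD({ runnerToken })

  for (const { uuid: jobUUID } of availableJobs) {
    // Accepting a job returns the job token required by every follow-up call
    const { job } = await server.runnerJobs.accept<RunnerJobVODWebVideoTranscodingPayload>({ runnerToken, jobUUID })
    const jobToken = job.jobToken

    // The runner fetches the input video before transcoding it
    await server.runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken })

    // Report the fixture result for the requested resolution
    const payload: VODWebVideoTranscodingSuccess = {
      videoFile: `video_short_${job.payload.output.resolution}p.mp4`
    }
    await server.runnerJobs.success({ runnerToken, jobUUID, jobToken, payload })
  }
}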
|
||||
|
||||
describe('HLS transcoding only', function () {
|
||||
let videoUUID: string
|
||||
let jobToken: string
|
||||
let jobUUID: string
|
||||
|
||||
before(async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].config.enableTranscoding(false, true)
|
||||
|
||||
const { uuid } = await servers[0].videos.quickUpload({ name: 'hls video', fixture: 'video_short.webm' })
|
||||
videoUUID = uuid
|
||||
|
||||
await waitJobs(servers)
|
||||
})
|
||||
|
||||
it('Should run the optimize job', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].runnerJobs.autoProcessWebVideoJob(runnerToken)
|
||||
})
|
||||
|
||||
it('Should have 5 HLS resolutions to transcode', async function () {
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(5)
|
||||
|
||||
for (const resolution of [ 720, 480, 360, 240, 144 ]) {
|
||||
const job = availableJobs.find(j => j.payload.output.resolution === resolution)
|
||||
expect(job).to.exist
|
||||
expect(job.type).to.equal('vod-hls-transcoding')
|
||||
|
||||
if (resolution === 480) jobUUID = job.uuid
|
||||
}
|
||||
})
|
||||
|
||||
it('Should process one of these transcoding jobs', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
const { job } = await servers[0].runnerJobs.accept<RunnerJobVODHLSTranscodingPayload>({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken })
|
||||
const inputFile = await readFile(buildAbsoluteFixturePath('video_short.mp4'))
|
||||
|
||||
expect(body).to.deep.equal(inputFile)
|
||||
|
||||
const payload: VODHLSTranscodingSuccess = {
|
||||
videoFile: 'video_short_480p.mp4',
|
||||
resolutionPlaylistFile: 'video_short_480p.m3u8'
|
||||
}
|
||||
await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload })
|
||||
|
||||
await waitJobs(servers)
|
||||
})
|
||||
|
||||
it('Should have the video updated', async function () {
|
||||
for (const server of servers) {
|
||||
const video = await server.videos.get({ id: videoUUID })
|
||||
|
||||
expect(video.files).to.have.lengthOf(1)
|
||||
expect(video.streamingPlaylists).to.have.lengthOf(1)
|
||||
|
||||
const hls = video.streamingPlaylists[0]
|
||||
expect(hls.files).to.have.lengthOf(1)
|
||||
|
||||
await completeCheckHlsPlaylist({ videoUUID, hlsOnly: false, servers, resolutions: [ 480 ] })
|
||||
}
|
||||
})
|
||||
|
||||
it('Should process all other jobs', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(4)
|
||||
|
||||
let maxQualityFile = 'video_short.mp4'
|
||||
|
||||
for (const resolution of [ 720, 360, 240, 144 ]) {
|
||||
const availableJob = availableJobs.find(j => j.payload.output.resolution === resolution)
|
||||
expect(availableJob).to.exist
|
||||
jobUUID = availableJob.uuid
|
||||
|
||||
const { job } = await servers[0].runnerJobs.accept<RunnerJobVODHLSTranscodingPayload>({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken })
|
||||
const inputFile = await readFile(buildAbsoluteFixturePath(maxQualityFile))
|
||||
expect(body).to.deep.equal(inputFile)
|
||||
|
||||
const payload: VODHLSTranscodingSuccess = {
|
||||
videoFile: `video_short_${resolution}p.mp4`,
|
||||
resolutionPlaylistFile: `video_short_${resolution}p.m3u8`
|
||||
}
|
||||
await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload })
|
||||
|
||||
if (resolution === 720) {
|
||||
maxQualityFile = 'video_short_720p.mp4'
|
||||
}
|
||||
}
|
||||
|
||||
await waitJobs(servers)
|
||||
})
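The `maxQualityFile` switch above encodes a detail checked by the `getInputFile` assertions: once the 720p rendition has been reported, the server hands out that rendition, rather than the original upload, as input for the remaining resolutions. A minimal sketch of that selection, with a hypothetical helper name:

// Hypothetical helper mirroring the maxQualityFile switch in the test above
function expectedInputFixture (reportedResolutions: number[]) {
  return reportedResolutions.includes(720)
    ? 'video_short_720p.mp4'
    : 'video_short.mp4'
}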
|
||||
|
||||
it('Should have the video updated', async function () {
|
||||
for (const server of servers) {
|
||||
const video = await server.videos.get({ id: videoUUID })
|
||||
|
||||
expect(video.files).to.have.lengthOf(0)
|
||||
expect(video.streamingPlaylists).to.have.lengthOf(1)
|
||||
|
||||
const hls = video.streamingPlaylists[0]
|
||||
expect(hls.files).to.have.lengthOf(5)
|
||||
|
||||
await completeCheckHlsPlaylist({ videoUUID, hlsOnly: true, servers, resolutions: [ 720, 480, 360, 240, 144 ] })
|
||||
}
|
||||
})
|
||||
|
||||
it('Should not have available jobs anymore', async function () {
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Web video and HLS transcoding', function () {
|
||||
|
||||
before(async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].config.enableTranscoding(true, true)
|
||||
|
||||
await servers[0].videos.quickUpload({ name: 'web video and hls video', fixture: 'video_short.webm' })
|
||||
|
||||
await waitJobs(servers)
|
||||
})
|
||||
|
||||
it('Should process the first optimize job', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].runnerJobs.autoProcessWebVideoJob(runnerToken)
|
||||
})
|
||||
|
||||
it('Should have 9 jobs to process', async function () {
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
|
||||
expect(availableJobs).to.have.lengthOf(9)
|
||||
|
||||
const webVideoJobs = availableJobs.filter(j => j.type === 'vod-web-video-transcoding')
|
||||
const hlsJobs = availableJobs.filter(j => j.type === 'vod-hls-transcoding')
|
||||
|
||||
expect(webVideoJobs).to.have.lengthOf(4)
|
||||
expect(hlsJobs).to.have.lengthOf(5)
|
||||
})
|
||||
|
||||
it('Should process all available jobs', async function () {
|
||||
await processAllJobs(servers[0], runnerToken)
|
||||
})
|
||||
})
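The 9 pending jobs asserted above are simply the two ladders added together: the web video ladder minus the 720p file already produced by the optimize job, plus the full HLS ladder. In numbers, with the resolutions taken from the assertions in this spec:

// Resolutions taken from the assertions above
const webVideoResolutions = [ 480, 360, 240, 144 ]     // 720p already exists after the optimize job
const hlsResolutions = [ 720, 480, 360, 240, 144 ]

const expectedPendingJobs = webVideoResolutions.length + hlsResolutions.length // 4 + 5 = 9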
|
||||
|
||||
describe('Audio merge transcoding', function () {
|
||||
let videoUUID: string
|
||||
let jobToken: string
|
||||
let jobUUID: string
|
||||
|
||||
before(async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].config.enableTranscoding(true, true)
|
||||
|
||||
const attributes = { name: 'audio_with_preview', previewfile: 'preview.jpg', fixture: 'sample.ogg' }
|
||||
const { uuid } = await servers[0].videos.upload({ attributes, mode: 'legacy' })
|
||||
videoUUID = uuid
|
||||
|
||||
await waitJobs(servers)
|
||||
})
|
||||
|
||||
it('Should have an audio merge transcoding job', async function () {
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(1)
|
||||
|
||||
expect(availableJobs[0].type).to.equal('vod-audio-merge-transcoding')
|
||||
|
||||
jobUUID = availableJobs[0].uuid
|
||||
})
|
||||
|
||||
it('Should have a valid remote audio merge transcoding job', async function () {
|
||||
const { job } = await servers[0].runnerJobs.accept<RunnerJobVODAudioMergeTranscodingPayload>({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
expect(job.type).to.equal('vod-audio-merge-transcoding')
|
||||
expect(job.payload.input.audioFileUrl).to.exist
|
||||
expect(job.payload.input.previewFileUrl).to.exist
|
||||
expect(job.payload.output.resolution).to.equal(480)
|
||||
|
||||
{
|
||||
const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.audioFileUrl, jobToken, runnerToken })
|
||||
const inputFile = await readFile(buildAbsoluteFixturePath('sample.ogg'))
|
||||
expect(body).to.deep.equal(inputFile)
|
||||
}
|
||||
|
||||
{
|
||||
const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.previewFileUrl, jobToken, runnerToken })
|
||||
|
||||
const video = await servers[0].videos.get({ id: videoUUID })
|
||||
const { body: inputFile } = await makeGetRequest({
|
||||
url: servers[0].url,
|
||||
path: video.previewPath,
|
||||
expectedStatus: HttpStatusCode.OK_200
|
||||
})
|
||||
|
||||
expect(body).to.deep.equal(inputFile)
|
||||
}
|
||||
})
|
||||
|
||||
it('Should merge the audio', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
const payload: VODAudioMergeTranscodingSuccess = { videoFile: 'video_short_480p.mp4' }
|
||||
await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload })
|
||||
|
||||
await waitJobs(servers)
|
||||
})
|
||||
|
||||
it('Should have the video updated', async function () {
|
||||
for (const server of servers) {
|
||||
const video = await server.videos.get({ id: videoUUID })
|
||||
expect(video.files).to.have.lengthOf(1)
|
||||
expect(video.streamingPlaylists).to.have.lengthOf(0)
|
||||
|
||||
const { body } = await makeRawRequest({ url: video.files[0].fileUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
expect(body).to.deep.equal(await readFile(buildAbsoluteFixturePath('video_short_480p.mp4')))
|
||||
}
|
||||
})
|
||||
|
||||
it('Should have 7 lower resolutions to transcode', async function () {
|
||||
const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken })
|
||||
expect(availableJobs).to.have.lengthOf(7)
|
||||
|
||||
for (const resolution of [ 360, 240, 144 ]) {
|
||||
const jobs = availableJobs.filter(j => j.payload.output.resolution === resolution)
|
||||
expect(jobs).to.have.lengthOf(2)
|
||||
}
|
||||
|
||||
jobUUID = availableJobs.find(j => j.payload.output.resolution === 480).uuid
|
||||
})
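The 7 remaining jobs asserted above follow the same arithmetic: the merged 480p web video file already exists, so each lower resolution still needs both a web video and an HLS job, plus the 480p HLS job the test picks next:

// Breakdown consistent with the assertions above
const lowerResolutions = [ 360, 240, 144 ]            // two jobs each: web video + HLS
const expectedJobs = lowerResolutions.length * 2 + 1  // + the 480p HLS job = 7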
|
||||
|
||||
it('Should process one other job', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
const { job } = await servers[0].runnerJobs.accept<RunnerJobVODHLSTranscodingPayload>({ runnerToken, jobUUID })
|
||||
jobToken = job.jobToken
|
||||
|
||||
const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken })
|
||||
const inputFile = await readFile(buildAbsoluteFixturePath('video_short_480p.mp4'))
|
||||
expect(body).to.deep.equal(inputFile)
|
||||
|
||||
const payload: VODHLSTranscodingSuccess = {
|
||||
videoFile: 'video_short_480p.mp4',
|
||||
resolutionPlaylistFile: 'video_short_480p.m3u8'
|
||||
}
|
||||
await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload })
|
||||
|
||||
await waitJobs(servers)
|
||||
})
|
||||
|
||||
it('Should have the video updated', async function () {
|
||||
for (const server of servers) {
|
||||
const video = await server.videos.get({ id: videoUUID })
|
||||
|
||||
expect(video.files).to.have.lengthOf(1)
|
||||
expect(video.streamingPlaylists).to.have.lengthOf(1)
|
||||
|
||||
const hls = video.streamingPlaylists[0]
|
||||
expect(hls.files).to.have.lengthOf(1)
|
||||
|
||||
await completeCheckHlsPlaylist({ videoUUID, hlsOnly: false, servers, resolutions: [ 480 ] })
|
||||
}
|
||||
})
|
||||
|
||||
it('Should process all available jobs', async function () {
|
||||
await processAllJobs(servers[0], runnerToken)
|
||||
})
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
await cleanupTests(servers)
|
||||
})
|
||||
})
|
|
@ -63,6 +63,7 @@ function checkInitialConfig (server: PeerTubeServer, data: CustomConfig) {
|
|||
expect(data.videoChannels.maxPerUser).to.equal(20)
|
||||
|
||||
expect(data.transcoding.enabled).to.be.false
|
||||
expect(data.transcoding.remoteRunners.enabled).to.be.false
|
||||
expect(data.transcoding.allowAdditionalExtensions).to.be.false
|
||||
expect(data.transcoding.allowAudioFiles).to.be.false
|
||||
expect(data.transcoding.threads).to.equal(2)
|
||||
|
@ -87,6 +88,7 @@ function checkInitialConfig (server: PeerTubeServer, data: CustomConfig) {
|
|||
expect(data.live.maxInstanceLives).to.equal(20)
|
||||
expect(data.live.maxUserLives).to.equal(3)
|
||||
expect(data.live.transcoding.enabled).to.be.false
|
||||
expect(data.live.transcoding.remoteRunners.enabled).to.be.false
|
||||
expect(data.live.transcoding.threads).to.equal(2)
|
||||
expect(data.live.transcoding.profile).to.equal('default')
|
||||
expect(data.live.transcoding.resolutions['144p']).to.be.false
|
||||
|
@ -172,6 +174,7 @@ function checkUpdatedConfig (data: CustomConfig) {
|
|||
expect(data.videoChannels.maxPerUser).to.equal(24)
|
||||
|
||||
expect(data.transcoding.enabled).to.be.true
|
||||
expect(data.transcoding.remoteRunners.enabled).to.be.true
|
||||
expect(data.transcoding.threads).to.equal(1)
|
||||
expect(data.transcoding.concurrency).to.equal(3)
|
||||
expect(data.transcoding.allowAdditionalExtensions).to.be.true
|
||||
|
@ -195,6 +198,7 @@ function checkUpdatedConfig (data: CustomConfig) {
|
|||
expect(data.live.maxInstanceLives).to.equal(-1)
|
||||
expect(data.live.maxUserLives).to.equal(10)
|
||||
expect(data.live.transcoding.enabled).to.be.true
|
||||
expect(data.live.transcoding.remoteRunners.enabled).to.be.true
|
||||
expect(data.live.transcoding.threads).to.equal(4)
|
||||
expect(data.live.transcoding.profile).to.equal('live_profile')
|
||||
expect(data.live.transcoding.resolutions['144p']).to.be.true
|
||||
|
@ -313,6 +317,9 @@ const newCustomConfig: CustomConfig = {
|
|||
},
|
||||
transcoding: {
|
||||
enabled: true,
|
||||
remoteRunners: {
|
||||
enabled: true
|
||||
},
|
||||
allowAdditionalExtensions: true,
|
||||
allowAudioFiles: true,
|
||||
threads: 1,
|
||||
|
@ -348,6 +355,9 @@ const newCustomConfig: CustomConfig = {
|
|||
maxUserLives: 10,
|
||||
transcoding: {
|
||||
enabled: true,
|
||||
remoteRunners: {
|
||||
enabled: true
|
||||
},
|
||||
threads: 4,
|
||||
profile: 'live_profile',
|
||||
resolutions: {
|
||||
|
|
|
@ -146,7 +146,7 @@ describe('Test follow constraints', function () {
|
|||
const body = await servers[0].videos.get({ id: video2UUID, expectedStatus: HttpStatusCode.FORBIDDEN_403 })
|
||||
const error = body as unknown as PeerTubeProblemDocument
|
||||
|
||||
const doc = 'https://docs.joinpeertube.org/api/rest-reference.html#section/Errors/does_not_respect_follow_constraints'
|
||||
const doc = 'https://docs.joinpeertube.org/api-rest-reference.html#section/Errors/does_not_respect_follow_constraints'
|
||||
expect(error.type).to.equal(doc)
|
||||
expect(error.code).to.equal(ServerErrorCode.DOES_NOT_RESPECT_FOLLOW_CONSTRAINTS)
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
import { expect } from 'chai'
|
||||
import { completeVideoCheck, dateIsValid, expectAccountFollows, expectChannelsFollows, testCaptionFile } from '@server/tests/shared'
|
||||
import { VideoCreateResult, VideoPrivacy } from '@shared/models'
|
||||
import { Video, VideoPrivacy } from '@shared/models'
|
||||
import { cleanupTests, createMultipleServers, PeerTubeServer, setAccessTokensToServers, waitJobs } from '@shared/server-commands'
|
||||
|
||||
describe('Test follows', function () {
|
||||
|
@ -357,7 +357,7 @@ describe('Test follows', function () {
|
|||
})
|
||||
|
||||
describe('Should propagate data on a new server follow', function () {
|
||||
let video4: VideoCreateResult
|
||||
let video4: Video
|
||||
|
||||
before(async function () {
|
||||
this.timeout(50000)
|
||||
|
@ -372,19 +372,19 @@ describe('Test follows', function () {
|
|||
|
||||
await servers[2].videos.upload({ attributes: { name: 'server3-2' } })
|
||||
await servers[2].videos.upload({ attributes: { name: 'server3-3' } })
|
||||
video4 = await servers[2].videos.upload({ attributes: video4Attributes })
|
||||
const video4CreateResult = await servers[2].videos.upload({ attributes: video4Attributes })
|
||||
await servers[2].videos.upload({ attributes: { name: 'server3-5' } })
|
||||
await servers[2].videos.upload({ attributes: { name: 'server3-6' } })
|
||||
|
||||
{
|
||||
const userAccessToken = await servers[2].users.generateUserAndToken('captain')
|
||||
|
||||
await servers[2].videos.rate({ id: video4.id, rating: 'like' })
|
||||
await servers[2].videos.rate({ token: userAccessToken, id: video4.id, rating: 'dislike' })
|
||||
await servers[2].videos.rate({ id: video4CreateResult.id, rating: 'like' })
|
||||
await servers[2].videos.rate({ token: userAccessToken, id: video4CreateResult.id, rating: 'dislike' })
|
||||
}
|
||||
|
||||
{
|
||||
await servers[2].comments.createThread({ videoId: video4.id, text: 'my super first comment' })
|
||||
await servers[2].comments.createThread({ videoId: video4CreateResult.id, text: 'my super first comment' })
|
||||
|
||||
await servers[2].comments.addReplyToLastThread({ text: 'my super answer to thread 1' })
|
||||
await servers[2].comments.addReplyToLastReply({ text: 'my super answer to answer of thread 1' })
|
||||
|
@ -392,20 +392,20 @@ describe('Test follows', function () {
|
|||
}
|
||||
|
||||
{
|
||||
const { id: threadId } = await servers[2].comments.createThread({ videoId: video4.id, text: 'will be deleted' })
|
||||
const { id: threadId } = await servers[2].comments.createThread({ videoId: video4CreateResult.id, text: 'will be deleted' })
|
||||
await servers[2].comments.addReplyToLastThread({ text: 'answer to deleted' })
|
||||
|
||||
const { id: replyId } = await servers[2].comments.addReplyToLastThread({ text: 'will also be deleted' })
|
||||
|
||||
await servers[2].comments.addReplyToLastReply({ text: 'my second answer to deleted' })
|
||||
|
||||
await servers[2].comments.delete({ videoId: video4.id, commentId: threadId })
|
||||
await servers[2].comments.delete({ videoId: video4.id, commentId: replyId })
|
||||
await servers[2].comments.delete({ videoId: video4CreateResult.id, commentId: threadId })
|
||||
await servers[2].comments.delete({ videoId: video4CreateResult.id, commentId: replyId })
|
||||
}
|
||||
|
||||
await servers[2].captions.add({
|
||||
language: 'ar',
|
||||
videoId: video4.id,
|
||||
videoId: video4CreateResult.id,
|
||||
fixture: 'subtitle-good2.vtt'
|
||||
})
|
||||
|
||||
|
@ -479,7 +479,12 @@ describe('Test follows', function () {
|
|||
}
|
||||
]
|
||||
}
|
||||
await completeVideoCheck(servers[0], video4, checkAttributes)
|
||||
await completeVideoCheck({
|
||||
server: servers[0],
|
||||
originServer: servers[2],
|
||||
videoUUID: video4.uuid,
|
||||
attributes: checkAttributes
|
||||
})
|
||||
})
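Across this commit, `completeVideoCheck` moves from positional arguments to a single options object, as in the call above. The shape used by every updated call is sketched below; the property names come from the diffs, while the type alias itself is hypothetical:

// Hypothetical alias describing the options object passed to completeVideoCheck in this commit
interface CompleteVideoCheckOptions {
  server: PeerTubeServer        // server whose API is queried
  originServer: PeerTubeServer  // server the video originates from
  videoUUID: string
  attributes: Record<string, any> // the existing checkAttributes objects are passed through unchanged
}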
|
||||
|
||||
it('Should have propagated comments', async function () {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { completeVideoCheck } from '@server/tests/shared'
|
||||
import { completeVideoCheck, SQLCommand } from '@server/tests/shared'
|
||||
import { wait } from '@shared/core-utils'
|
||||
import { HttpStatusCode, JobState, VideoCreateResult, VideoPrivacy } from '@shared/models'
|
||||
import {
|
||||
|
@ -16,6 +16,8 @@ import {
|
|||
|
||||
describe('Test handle downs', function () {
|
||||
let servers: PeerTubeServer[] = []
|
||||
let sqlCommands: SQLCommand[]
|
||||
|
||||
let threadIdServer1: number
|
||||
let threadIdServer2: number
|
||||
let commentIdServer1: number
|
||||
|
@ -88,6 +90,8 @@ describe('Test handle downs', function () {
|
|||
|
||||
// Get the access tokens
|
||||
await setAccessTokensToServers(servers)
|
||||
|
||||
sqlCommands = servers.map(s => new SQLCommand(s))
|
||||
})
|
||||
|
||||
it('Should remove followers that are often down', async function () {
|
||||
|
@ -209,7 +213,7 @@ describe('Test handle downs', function () {
|
|||
|
||||
// Check unlisted video
|
||||
const video = await servers[2].videos.get({ id: unlistedVideo.uuid })
|
||||
await completeVideoCheck(servers[2], video, unlistedCheckAttributes)
|
||||
await completeVideoCheck({ server: servers[2], originServer: servers[0], videoUUID: video.uuid, attributes: unlistedCheckAttributes })
|
||||
})
|
||||
|
||||
it('Should send comments on a video to server 3, and automatically fetch the video', async function () {
|
||||
|
@ -292,7 +296,7 @@ describe('Test handle downs', function () {
|
|||
}
|
||||
|
||||
await waitJobs(servers)
|
||||
await servers[1].sql.setActorFollowScores(20)
|
||||
await sqlCommands[1].setActorFollowScores(20)
|
||||
|
||||
// Wait video expiration
|
||||
await wait(11000)
|
||||
|
@ -325,6 +329,10 @@ describe('Test handle downs', function () {
|
|||
})
|
||||
|
||||
after(async function () {
|
||||
for (const sqlCommand of sqlCommands) {
|
||||
await sqlCommand.cleanup()
|
||||
}
|
||||
|
||||
await cleanupTests(servers)
|
||||
})
|
||||
})
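The same migration recurs across the specs in this commit: instead of going through `server.sql`, each test file now builds one `SQLCommand` per server and releases it when the suite ends. The recurring lifecycle, condensed from the hunks in this commit:

// Recurring SQLCommand lifecycle condensed from the hunks in this commit
let servers: PeerTubeServer[] = []
let sqlCommands: SQLCommand[] = []

before(async function () {
  servers = await createMultipleServers(2)
  await setAccessTokensToServers(servers)

  sqlCommands = servers.map(s => new SQLCommand(s))
})

after(async function () {
  for (const sqlCommand of sqlCommands) {
    await sqlCommand.cleanup()
  }

  await cleanupTests(servers)
})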
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
import { expect } from 'chai'
|
||||
import { pathExists, remove } from 'fs-extra'
|
||||
import { join } from 'path'
|
||||
import { testHelloWorldRegisteredSettings } from '@server/tests/shared'
|
||||
import { SQLCommand, testHelloWorldRegisteredSettings } from '@server/tests/shared'
|
||||
import { wait } from '@shared/core-utils'
|
||||
import { HttpStatusCode, PluginType } from '@shared/models'
|
||||
import {
|
||||
|
@ -17,7 +17,8 @@ import {
|
|||
} from '@shared/server-commands'
|
||||
|
||||
describe('Test plugins', function () {
|
||||
let server: PeerTubeServer = null
|
||||
let server: PeerTubeServer
|
||||
let sqlCommand: SQLCommand
|
||||
let command: PluginsCommand
|
||||
|
||||
before(async function () {
|
||||
|
@ -32,6 +33,8 @@ describe('Test plugins', function () {
|
|||
await setAccessTokensToServers([ server ])
|
||||
|
||||
command = server.plugins
|
||||
|
||||
sqlCommand = new SQLCommand(server)
|
||||
})
|
||||
|
||||
it('Should list and search available plugins and themes', async function () {
|
||||
|
@ -236,7 +239,7 @@ describe('Test plugins', function () {
|
|||
|
||||
async function testUpdate (type: 'plugin' | 'theme', name: string) {
|
||||
// Fake update our plugin version
|
||||
await server.sql.setPluginVersion(name, '0.0.1')
|
||||
await sqlCommand.setPluginVersion(name, '0.0.1')
|
||||
|
||||
// Fake update package.json
|
||||
const packageJSON = await command.getPackageJSON(`peertube-${type}-${name}`)
|
||||
|
@ -366,7 +369,7 @@ describe('Test plugins', function () {
|
|||
})
|
||||
|
||||
const query = `UPDATE "application" SET "nodeABIVersion" = 1`
|
||||
await server.sql.updateQuery(query)
|
||||
await sqlCommand.updateQuery(query)
|
||||
|
||||
const baseNativeModule = server.servers.buildDirectory(join('plugins', 'node_modules', 'a-native-example'))
|
||||
|
||||
|
@ -401,6 +404,8 @@ describe('Test plugins', function () {
|
|||
})
|
||||
|
||||
after(async function () {
|
||||
await sqlCommand.cleanup()
|
||||
|
||||
await cleanupTests([ server ])
|
||||
})
|
||||
})
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { getAudioStream, getVideoStreamDimensionsInfo } from '@server/helpers/ffmpeg'
|
||||
import { getAudioStream, getVideoStreamDimensionsInfo } from '@shared/ffmpeg'
|
||||
import {
|
||||
cleanupTests,
|
||||
createMultipleServers,
|
||||
|
|
|
@ -1,18 +1,18 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { canDoQuickTranscode } from '@server/helpers/ffmpeg'
|
||||
import { generateHighBitrateVideo, generateVideoWithFramerate } from '@server/tests/shared'
|
||||
import { canDoQuickTranscode } from '@server/lib/transcoding/transcoding-quick-transcode'
|
||||
import { checkWebTorrentWorks, generateHighBitrateVideo, generateVideoWithFramerate } from '@server/tests/shared'
|
||||
import { buildAbsoluteFixturePath, getAllFiles, getMaxBitrate, getMinLimitBitrate, omit } from '@shared/core-utils'
|
||||
import {
|
||||
buildFileMetadata,
|
||||
ffprobePromise,
|
||||
getAudioStream,
|
||||
getVideoStreamBitrate,
|
||||
getVideoStreamDimensionsInfo,
|
||||
getVideoStreamFPS,
|
||||
hasAudioStream
|
||||
} from '@shared/extra-utils'
|
||||
import { HttpStatusCode, VideoState } from '@shared/models'
|
||||
} from '@shared/ffmpeg'
|
||||
import { HttpStatusCode, VideoFileMetadata, VideoState } from '@shared/models'
|
||||
import {
|
||||
cleanupTests,
|
||||
createMultipleServers,
|
||||
|
@ -20,8 +20,7 @@ import {
|
|||
makeGetRequest,
|
||||
PeerTubeServer,
|
||||
setAccessTokensToServers,
|
||||
waitJobs,
|
||||
webtorrentAdd
|
||||
waitJobs
|
||||
} from '@shared/server-commands'
|
||||
|
||||
function updateConfigForTranscoding (server: PeerTubeServer) {
|
||||
|
@ -90,10 +89,7 @@ describe('Test video transcoding', function () {
|
|||
const magnetUri = videoDetails.files[0].magnetUri
|
||||
expect(magnetUri).to.match(/\.webm/)
|
||||
|
||||
const torrent = await webtorrentAdd(magnetUri, true)
|
||||
expect(torrent.files).to.be.an('array')
|
||||
expect(torrent.files.length).to.equal(1)
|
||||
expect(torrent.files[0].path).match(/\.webm$/)
|
||||
await checkWebTorrentWorks(magnetUri, /\.webm$/)
|
||||
}
|
||||
})
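The `webtorrentAdd` boilerplate removed here is a good indication of what the new shared `checkWebTorrentWorks` helper covers. A hedged sketch, reconstructed purely from the assertions it replaces in these hunks:

// Hedged sketch of checkWebTorrentWorks, based on the assertions it replaces in this diff
async function checkWebTorrentWorksSketch (magnetUri: string, pathMatch?: RegExp) {
  const torrent = await webtorrentAdd(magnetUri, true)

  expect(torrent.files).to.be.an('array')
  expect(torrent.files.length).to.equal(1)

  if (pathMatch) expect(torrent.files[0].path).to.match(pathMatch)
  else expect(torrent.files[0].path).to.exist.and.to.not.equal('')
}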
|
||||
|
||||
|
@ -120,10 +116,7 @@ describe('Test video transcoding', function () {
|
|||
const magnetUri = videoDetails.files[0].magnetUri
|
||||
expect(magnetUri).to.match(/\.mp4/)
|
||||
|
||||
const torrent = await webtorrentAdd(magnetUri, true)
|
||||
expect(torrent.files).to.be.an('array')
|
||||
expect(torrent.files.length).to.equal(1)
|
||||
expect(torrent.files[0].path).match(/\.mp4$/)
|
||||
await checkWebTorrentWorks(magnetUri, /\.mp4$/)
|
||||
}
|
||||
})
|
||||
|
||||
|
@ -639,7 +632,9 @@ describe('Test video transcoding', function () {
|
|||
const video = await servers[1].videos.get({ id: videoUUID })
|
||||
const file = video.files.find(f => f.resolution.id === 240)
|
||||
const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)
|
||||
const metadata = await buildFileMetadata(path)
|
||||
|
||||
const probe = await ffprobePromise(path)
|
||||
const metadata = new VideoFileMetadata(probe)
|
||||
|
||||
// expected format properties
|
||||
for (const p of [
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { SQLCommand } from '@server/tests/shared'
|
||||
import { wait } from '@shared/core-utils'
|
||||
import { HttpStatusCode, OAuth2ErrorCode, PeerTubeProblemDocument } from '@shared/models'
|
||||
import { cleanupTests, createSingleServer, killallServers, PeerTubeServer, setAccessTokensToServers } from '@shared/server-commands'
|
||||
|
||||
describe('Test oauth', function () {
|
||||
let server: PeerTubeServer
|
||||
let sqlCommand: SQLCommand
|
||||
|
||||
before(async function () {
|
||||
this.timeout(30000)
|
||||
|
@ -20,6 +22,8 @@ describe('Test oauth', function () {
|
|||
})
|
||||
|
||||
await setAccessTokensToServers([ server ])
|
||||
|
||||
sqlCommand = new SQLCommand(server)
|
||||
})
|
||||
|
||||
describe('OAuth client', function () {
|
||||
|
@ -118,8 +122,8 @@ describe('Test oauth', function () {
|
|||
it('Should have an expired access token', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await server.sql.setTokenField(server.accessToken, 'accessTokenExpiresAt', new Date().toISOString())
|
||||
await server.sql.setTokenField(server.accessToken, 'refreshTokenExpiresAt', new Date().toISOString())
|
||||
await sqlCommand.setTokenField(server.accessToken, 'accessTokenExpiresAt', new Date().toISOString())
|
||||
await sqlCommand.setTokenField(server.accessToken, 'refreshTokenExpiresAt', new Date().toISOString())
|
||||
|
||||
await killallServers([ server ])
|
||||
await server.run()
|
||||
|
@ -135,7 +139,7 @@ describe('Test oauth', function () {
|
|||
this.timeout(50000)
|
||||
|
||||
const futureDate = new Date(new Date().getTime() + 1000 * 60).toISOString()
|
||||
await server.sql.setTokenField(server.accessToken, 'refreshTokenExpiresAt', futureDate)
|
||||
await sqlCommand.setTokenField(server.accessToken, 'refreshTokenExpiresAt', futureDate)
|
||||
|
||||
await killallServers([ server ])
|
||||
await server.run()
|
||||
|
@ -187,6 +191,7 @@ describe('Test oauth', function () {
|
|||
})
|
||||
|
||||
after(async function () {
|
||||
await sqlCommand.cleanup()
|
||||
await cleanupTests([ server ])
|
||||
})
|
||||
})
|
||||
|
|
|
@ -5,6 +5,7 @@ import request from 'supertest'
|
|||
import {
|
||||
checkTmpIsEmpty,
|
||||
checkVideoFilesWereRemoved,
|
||||
checkWebTorrentWorks,
|
||||
completeVideoCheck,
|
||||
dateIsValid,
|
||||
saveVideoInServers,
|
||||
|
@ -21,8 +22,7 @@ import {
|
|||
setAccessTokensToServers,
|
||||
setDefaultAccountAvatar,
|
||||
setDefaultChannelAvatar,
|
||||
waitJobs,
|
||||
webtorrentAdd
|
||||
waitJobs
|
||||
} from '@shared/server-commands'
|
||||
|
||||
describe('Test multiple servers', function () {
|
||||
|
@ -134,7 +134,7 @@ describe('Test multiple servers', function () {
|
|||
expect(data.length).to.equal(1)
|
||||
const video = data[0]
|
||||
|
||||
await completeVideoCheck(server, video, checkAttributes)
|
||||
await completeVideoCheck({ server, originServer: servers[0], videoUUID: video.uuid, attributes: checkAttributes })
|
||||
publishedAt = video.publishedAt as string
|
||||
|
||||
expect(video.channel.avatars).to.have.lengthOf(2)
|
||||
|
@ -238,7 +238,7 @@ describe('Test multiple servers', function () {
|
|||
expect(data.length).to.equal(2)
|
||||
const video = data[1]
|
||||
|
||||
await completeVideoCheck(server, video, checkAttributes)
|
||||
await completeVideoCheck({ server, originServer: servers[1], videoUUID: video.uuid, attributes: checkAttributes })
|
||||
}
|
||||
})
|
||||
|
||||
|
@ -328,7 +328,7 @@ describe('Test multiple servers', function () {
|
|||
}
|
||||
]
|
||||
}
|
||||
await completeVideoCheck(server, video1, checkAttributesVideo1)
|
||||
await completeVideoCheck({ server, originServer: servers[2], videoUUID: video1.uuid, attributes: checkAttributesVideo1 })
|
||||
|
||||
const checkAttributesVideo2 = {
|
||||
name: 'my super name for server 3-2',
|
||||
|
@ -362,7 +362,7 @@ describe('Test multiple servers', function () {
|
|||
}
|
||||
]
|
||||
}
|
||||
await completeVideoCheck(server, video2, checkAttributesVideo2)
|
||||
await completeVideoCheck({ server, originServer: servers[2], videoUUID: video2.uuid, attributes: checkAttributesVideo2 })
|
||||
}
|
||||
})
|
||||
})
|
||||
|
@ -408,10 +408,8 @@ describe('Test multiple servers', function () {
|
|||
toRemove.push(data[3])
|
||||
|
||||
const videoDetails = await servers[2].videos.get({ id: video.id })
|
||||
const torrent = await webtorrentAdd(videoDetails.files[0].magnetUri, true)
|
||||
expect(torrent.files).to.be.an('array')
|
||||
expect(torrent.files.length).to.equal(1)
|
||||
expect(torrent.files[0].path).to.exist.and.to.not.equal('')
|
||||
|
||||
await checkWebTorrentWorks(videoDetails.files[0].magnetUri)
|
||||
})
|
||||
|
||||
it('Should add the file 2 by asking server 1', async function () {
|
||||
|
@ -422,10 +420,7 @@ describe('Test multiple servers', function () {
|
|||
const video = data[1]
|
||||
const videoDetails = await servers[0].videos.get({ id: video.id })
|
||||
|
||||
const torrent = await webtorrentAdd(videoDetails.files[0].magnetUri, true)
|
||||
expect(torrent.files).to.be.an('array')
|
||||
expect(torrent.files.length).to.equal(1)
|
||||
expect(torrent.files[0].path).to.exist.and.to.not.equal('')
|
||||
await checkWebTorrentWorks(videoDetails.files[0].magnetUri)
|
||||
})
|
||||
|
||||
it('Should add the file 3 by asking server 2', async function () {
|
||||
|
@ -436,10 +431,7 @@ describe('Test multiple servers', function () {
|
|||
const video = data[2]
|
||||
const videoDetails = await servers[1].videos.get({ id: video.id })
|
||||
|
||||
const torrent = await webtorrentAdd(videoDetails.files[0].magnetUri, true)
|
||||
expect(torrent.files).to.be.an('array')
|
||||
expect(torrent.files.length).to.equal(1)
|
||||
expect(torrent.files[0].path).to.exist.and.to.not.equal('')
|
||||
await checkWebTorrentWorks(videoDetails.files[0].magnetUri)
|
||||
})
|
||||
|
||||
it('Should add the file 3-2 by asking server 1', async function () {
|
||||
|
@ -450,10 +442,7 @@ describe('Test multiple servers', function () {
|
|||
const video = data[3]
|
||||
const videoDetails = await servers[0].videos.get({ id: video.id })
|
||||
|
||||
const torrent = await webtorrentAdd(videoDetails.files[0].magnetUri)
|
||||
expect(torrent.files).to.be.an('array')
|
||||
expect(torrent.files.length).to.equal(1)
|
||||
expect(torrent.files[0].path).to.exist.and.to.not.equal('')
|
||||
await checkWebTorrentWorks(videoDetails.files[0].magnetUri)
|
||||
})
|
||||
|
||||
it('Should add the file 2 in 360p by asking server 1', async function () {
|
||||
|
@ -467,10 +456,7 @@ describe('Test multiple servers', function () {
|
|||
const file = videoDetails.files.find(f => f.resolution.id === 360)
|
||||
expect(file).not.to.be.undefined
|
||||
|
||||
const torrent = await webtorrentAdd(file.magnetUri)
|
||||
expect(torrent.files).to.be.an('array')
|
||||
expect(torrent.files.length).to.equal(1)
|
||||
expect(torrent.files[0].path).to.exist.and.to.not.equal('')
|
||||
await checkWebTorrentWorks(file.magnetUri)
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -685,7 +671,7 @@ describe('Test multiple servers', function () {
|
|||
thumbnailfile: 'thumbnail',
|
||||
previewfile: 'preview'
|
||||
}
|
||||
await completeVideoCheck(server, videoUpdated, checkAttributes)
|
||||
await completeVideoCheck({ server, originServer: servers[2], videoUUID: videoUpdated.uuid, attributes: checkAttributes })
|
||||
}
|
||||
})
|
||||
|
||||
|
@ -1087,7 +1073,7 @@ describe('Test multiple servers', function () {
|
|||
}
|
||||
]
|
||||
}
|
||||
await completeVideoCheck(server, video, checkAttributes)
|
||||
await completeVideoCheck({ server, originServer: servers[1], videoUUID: video.uuid, attributes: checkAttributes })
|
||||
}
|
||||
})
|
||||
})
|
||||
|
|
|
@ -261,7 +261,7 @@ describe('Test resumable upload', function () {
|
|||
pathUploadId: uploadId,
|
||||
token: server.accessToken,
|
||||
digestBuilder: () => 'sha=' + 'a'.repeat(40),
|
||||
expectedStatus: 460
|
||||
expectedStatus: 460 as any
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@ -164,14 +164,14 @@ describe('Test a single server', function () {
|
|||
expect(data.length).to.equal(1)
|
||||
|
||||
const video = data[0]
|
||||
await completeVideoCheck(server, video, getCheckAttributes())
|
||||
await completeVideoCheck({ server, originServer: server, videoUUID: video.uuid, attributes: getCheckAttributes() })
|
||||
})
|
||||
|
||||
it('Should get the video by UUID', async function () {
|
||||
this.timeout(5000)
|
||||
|
||||
const video = await server.videos.get({ id: videoUUID })
|
||||
await completeVideoCheck(server, video, getCheckAttributes())
|
||||
await completeVideoCheck({ server, originServer: server, videoUUID: video.uuid, attributes: getCheckAttributes() })
|
||||
})
|
||||
|
||||
it('Should have the views updated', async function () {
|
||||
|
@ -360,7 +360,7 @@ describe('Test a single server', function () {
|
|||
|
||||
const video = await server.videos.get({ id: videoId })
|
||||
|
||||
await completeVideoCheck(server, video, updateCheckAttributes())
|
||||
await completeVideoCheck({ server, originServer: server, videoUUID: video.uuid, attributes: updateCheckAttributes() })
|
||||
})
|
||||
|
||||
it('Should update only the tags of a video', async function () {
|
||||
|
@ -371,7 +371,12 @@ describe('Test a single server', function () {
|
|||
|
||||
const video = await server.videos.get({ id: videoId })
|
||||
|
||||
await completeVideoCheck(server, video, Object.assign(updateCheckAttributes(), attributes))
|
||||
await completeVideoCheck({
|
||||
server,
|
||||
originServer: server,
|
||||
videoUUID: video.uuid,
|
||||
attributes: Object.assign(updateCheckAttributes(), attributes)
|
||||
})
|
||||
})
|
||||
|
||||
it('Should update only the description of a video', async function () {
|
||||
|
@ -382,8 +387,12 @@ describe('Test a single server', function () {
|
|||
|
||||
const video = await server.videos.get({ id: videoId })
|
||||
|
||||
const expectedAttributes = Object.assign(updateCheckAttributes(), { tags: [ 'supertag', 'tag1', 'tag2' ] }, attributes)
|
||||
await completeVideoCheck(server, video, expectedAttributes)
|
||||
await completeVideoCheck({
|
||||
server,
|
||||
originServer: server,
|
||||
videoUUID: video.uuid,
|
||||
attributes: Object.assign(updateCheckAttributes(), { tags: [ 'supertag', 'tag1', 'tag2' ] }, attributes)
|
||||
})
|
||||
})
|
||||
|
||||
it('Should like a video', async function () {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { FIXTURE_URLS } from '@server/tests/shared'
|
||||
import { FIXTURE_URLS, SQLCommand } from '@server/tests/shared'
|
||||
import { areHttpImportTestsDisabled } from '@shared/core-utils'
|
||||
import { VideoChannelSyncState, VideoInclude, VideoPrivacy } from '@shared/models'
|
||||
import {
|
||||
|
@ -23,6 +23,7 @@ describe('Test channel synchronizations', function () {
|
|||
|
||||
describe('Sync using ' + mode, function () {
|
||||
let servers: PeerTubeServer[]
|
||||
let sqlCommands: SQLCommand[]
|
||||
|
||||
let startTestDate: Date
|
||||
|
||||
|
@ -36,7 +37,7 @@ describe('Test channel synchronizations', function () {
|
|||
}
|
||||
|
||||
async function changeDateForSync (channelSyncId: number, newDate: string) {
|
||||
await servers[0].sql.updateQuery(
|
||||
await sqlCommands[0].updateQuery(
|
||||
`UPDATE "videoChannelSync" ` +
|
||||
`SET "createdAt"='${newDate}', "lastSyncAt"='${newDate}' ` +
|
||||
`WHERE id=${channelSyncId}`
|
||||
|
@ -82,6 +83,8 @@ describe('Test channel synchronizations', function () {
|
|||
const { videoChannels } = await servers[0].users.getMyInfo({ token: userInfo.accessToken })
|
||||
userInfo.channelId = videoChannels[0].id
|
||||
}
|
||||
|
||||
sqlCommands = servers.map(s => new SQLCommand(s))
|
||||
})
|
||||
|
||||
it('Should fetch the latest channel videos of a remote channel', async function () {
|
||||
|
@ -302,6 +305,10 @@ describe('Test channel synchronizations', function () {
|
|||
})
|
||||
|
||||
after(async function () {
|
||||
for (const sqlCommand of sqlCommands) {
|
||||
await sqlCommand.cleanup()
|
||||
}
|
||||
|
||||
await killallServers(servers)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
import { expect } from 'chai'
|
||||
import { basename } from 'path'
|
||||
import { ACTOR_IMAGES_SIZE } from '@server/initializers/constants'
|
||||
import { testFileExistsOrNot, testImage } from '@server/tests/shared'
|
||||
import { SQLCommand, testFileExistsOrNot, testImage } from '@server/tests/shared'
|
||||
import { wait } from '@shared/core-utils'
|
||||
import { ActorImageType, User, VideoChannel } from '@shared/models'
|
||||
import {
|
||||
|
@ -25,6 +25,8 @@ async function findChannel (server: PeerTubeServer, channelId: number) {
|
|||
|
||||
describe('Test video channels', function () {
|
||||
let servers: PeerTubeServer[]
|
||||
let sqlCommands: SQLCommand[]
|
||||
|
||||
let userInfo: User
|
||||
let secondVideoChannelId: number
|
||||
let totoChannel: number
|
||||
|
@ -45,6 +47,8 @@ describe('Test video channels', function () {
|
|||
await setDefaultAccountAvatar(servers)
|
||||
|
||||
await doubleFollow(servers[0], servers[1])
|
||||
|
||||
sqlCommands = servers.map(s => new SQLCommand(s))
|
||||
})
|
||||
|
||||
it('Should have one video channel (created with root)', async () => {
|
||||
|
@ -278,7 +282,9 @@ describe('Test video channels', function () {
|
|||
|
||||
await waitJobs(servers)
|
||||
|
||||
for (const server of servers) {
|
||||
for (let i = 0; i < servers.length; i++) {
|
||||
const server = servers[i]
|
||||
|
||||
const videoChannel = await findChannel(server, secondVideoChannelId)
|
||||
const expectedSizes = ACTOR_IMAGES_SIZE[ActorImageType.AVATAR]
|
||||
|
||||
|
@ -289,7 +295,7 @@ describe('Test video channels', function () {
|
|||
await testImage(server.url, `avatar-resized-${avatar.width}x${avatar.width}`, avatarPaths[server.port], '.png')
|
||||
await testFileExistsOrNot(server, 'avatars', basename(avatarPaths[server.port]), true)
|
||||
|
||||
const row = await server.sql.getActorImage(basename(avatarPaths[server.port]))
|
||||
const row = await sqlCommands[i].getActorImage(basename(avatarPaths[server.port]))
|
||||
|
||||
expect(expectedSizes.some(({ height, width }) => row.height === height && row.width === width)).to.equal(true)
|
||||
}
|
||||
|
@ -309,14 +315,16 @@ describe('Test video channels', function () {
|
|||
|
||||
await waitJobs(servers)
|
||||
|
||||
for (const server of servers) {
|
||||
for (let i = 0; i < servers.length; i++) {
|
||||
const server = servers[i]
|
||||
|
||||
const videoChannel = await server.channels.get({ channelName: 'second_video_channel@' + servers[0].host })
|
||||
|
||||
bannerPaths[server.port] = videoChannel.banners[0].path
|
||||
await testImage(server.url, 'banner-resized', bannerPaths[server.port])
|
||||
await testFileExistsOrNot(server, 'avatars', basename(bannerPaths[server.port]), true)
|
||||
|
||||
const row = await server.sql.getActorImage(basename(bannerPaths[server.port]))
|
||||
const row = await sqlCommands[i].getActorImage(basename(bannerPaths[server.port]))
|
||||
expect(row.height).to.equal(ACTOR_IMAGES_SIZE[ActorImageType.BANNER][0].height)
|
||||
expect(row.width).to.equal(ACTOR_IMAGES_SIZE[ActorImageType.BANNER][0].width)
|
||||
}
|
||||
|
@ -546,6 +554,10 @@ describe('Test video channels', function () {
|
|||
})
|
||||
|
||||
after(async function () {
|
||||
for (const sqlCommand of sqlCommands) {
|
||||
await sqlCommand.cleanup()
|
||||
}
|
||||
|
||||
await cleanupTests(servers)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
import { expect } from 'chai'
|
||||
import { decode } from 'magnet-uri'
|
||||
import { checkVideoFileTokenReinjection, expectStartWith } from '@server/tests/shared'
|
||||
import { checkVideoFileTokenReinjection, expectStartWith, parseTorrentVideo } from '@server/tests/shared'
|
||||
import { getAllFiles, wait } from '@shared/core-utils'
|
||||
import { HttpStatusCode, LiveVideo, VideoDetails, VideoPrivacy } from '@shared/models'
|
||||
import {
|
||||
|
@ -10,7 +10,6 @@ import {
|
|||
createSingleServer,
|
||||
findExternalSavedVideo,
|
||||
makeRawRequest,
|
||||
parseTorrentVideo,
|
||||
PeerTubeServer,
|
||||
sendRTMPStream,
|
||||
setAccessTokensToServers,
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { SQLCommand } from '@server/tests/shared'
|
||||
import { wait } from '@shared/core-utils'
|
||||
import {
|
||||
cleanupTests,
|
||||
|
@ -14,6 +15,7 @@ import {
|
|||
|
||||
describe('Test video views cleaner', function () {
|
||||
let servers: PeerTubeServer[]
|
||||
let sqlCommands: SQLCommand[]
|
||||
|
||||
let videoIdServer1: string
|
||||
let videoIdServer2: string
|
||||
|
@ -37,6 +39,8 @@ describe('Test video views cleaner', function () {
|
|||
await servers[1].views.simulateView({ id: videoIdServer2 })
|
||||
|
||||
await waitJobs(servers)
|
||||
|
||||
sqlCommands = servers.map(s => new SQLCommand(s))
|
||||
})
|
||||
|
||||
it('Should not clean old video views', async function () {
|
||||
|
@ -50,18 +54,14 @@ describe('Test video views cleaner', function () {
|
|||
|
||||
// Should still have views
|
||||
|
||||
{
|
||||
for (const server of servers) {
|
||||
const total = await server.sql.countVideoViewsOf(videoIdServer1)
|
||||
expect(total).to.equal(2, 'Server ' + server.serverNumber + ' does not have the correct amount of views')
|
||||
}
|
||||
for (let i = 0; i < servers.length; i++) {
|
||||
const total = await sqlCommands[i].countVideoViewsOf(videoIdServer1)
|
||||
expect(total).to.equal(2, 'Server ' + servers[i].serverNumber + ' does not have the correct amount of views')
|
||||
}
|
||||
|
||||
{
|
||||
for (const server of servers) {
|
||||
const total = await server.sql.countVideoViewsOf(videoIdServer2)
|
||||
expect(total).to.equal(2, 'Server ' + server.serverNumber + ' does not have the correct amount of views')
|
||||
}
|
||||
for (let i = 0; i < servers.length; i++) {
|
||||
const total = await sqlCommands[i].countVideoViewsOf(videoIdServer2)
|
||||
expect(total).to.equal(2, 'Server ' + servers[i].serverNumber + ' does not have the correct amount of views')
|
||||
}
|
||||
})
|
||||
|
||||
|
@ -76,23 +76,23 @@ describe('Test video views cleaner', function () {
|
|||
|
||||
// Should still have views
|
||||
|
||||
{
|
||||
for (const server of servers) {
|
||||
const total = await server.sql.countVideoViewsOf(videoIdServer1)
|
||||
expect(total).to.equal(2)
|
||||
}
|
||||
for (let i = 0; i < servers.length; i++) {
|
||||
const total = await sqlCommands[i].countVideoViewsOf(videoIdServer1)
|
||||
expect(total).to.equal(2)
|
||||
}
|
||||
|
||||
{
|
||||
const totalServer1 = await servers[0].sql.countVideoViewsOf(videoIdServer2)
|
||||
expect(totalServer1).to.equal(0)
|
||||
const totalServer1 = await sqlCommands[0].countVideoViewsOf(videoIdServer2)
|
||||
expect(totalServer1).to.equal(0)
|
||||
|
||||
const totalServer2 = await servers[1].sql.countVideoViewsOf(videoIdServer2)
|
||||
expect(totalServer2).to.equal(2)
|
||||
}
|
||||
const totalServer2 = await sqlCommands[1].countVideoViewsOf(videoIdServer2)
|
||||
expect(totalServer2).to.equal(2)
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
for (const sqlCommand of sqlCommands) {
|
||||
await sqlCommand.cleanup()
|
||||
}
|
||||
|
||||
await cleanupTests(servers)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -1,262 +0,0 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { areMockObjectStorageTestsDisabled } from '@shared/core-utils'
|
||||
import { HttpStatusCode, VideoFile } from '@shared/models'
|
||||
import {
|
||||
cleanupTests,
|
||||
createMultipleServers,
|
||||
doubleFollow,
|
||||
makeRawRequest,
|
||||
ObjectStorageCommand,
|
||||
PeerTubeServer,
|
||||
setAccessTokensToServers,
|
||||
waitJobs
|
||||
} from '@shared/server-commands'
|
||||
import { checkResolutionsInMasterPlaylist, expectStartWith } from '../shared'
|
||||
|
||||
async function checkFilesInObjectStorage (files: VideoFile[], type: 'webtorrent' | 'playlist') {
|
||||
for (const file of files) {
|
||||
const shouldStartWith = type === 'webtorrent'
|
||||
? ObjectStorageCommand.getMockWebTorrentBaseUrl()
|
||||
: ObjectStorageCommand.getMockPlaylistBaseUrl()
|
||||
|
||||
expectStartWith(file.fileUrl, shouldStartWith)
|
||||
|
||||
await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
}
|
||||
}
|
||||
|
||||
function runTests (objectStorage: boolean) {
|
||||
let servers: PeerTubeServer[] = []
|
||||
const videosUUID: string[] = []
|
||||
const publishedAt: string[] = []
|
||||
|
||||
before(async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const config = objectStorage
|
||||
? ObjectStorageCommand.getDefaultMockConfig()
|
||||
: {}
|
||||
|
||||
// Run server 2 to have transcoding enabled
|
||||
servers = await createMultipleServers(2, config)
|
||||
await setAccessTokensToServers(servers)
|
||||
|
||||
await servers[0].config.disableTranscoding()
|
||||
|
||||
await doubleFollow(servers[0], servers[1])
|
||||
|
||||
if (objectStorage) await ObjectStorageCommand.prepareDefaultMockBuckets()
|
||||
|
||||
for (let i = 1; i <= 5; i++) {
|
||||
const { uuid, shortUUID } = await servers[0].videos.upload({ attributes: { name: 'video' + i } })
|
||||
|
||||
await waitJobs(servers)
|
||||
|
||||
const video = await servers[0].videos.get({ id: uuid })
|
||||
publishedAt.push(video.publishedAt as string)
|
||||
|
||||
if (i > 2) {
|
||||
videosUUID.push(uuid)
|
||||
} else {
|
||||
videosUUID.push(shortUUID)
|
||||
}
|
||||
}
|
||||
|
||||
await waitJobs(servers)
|
||||
})
|
||||
|
||||
it('Should have two video files on each server', async function () {
|
||||
this.timeout(30000)
|
||||
|
||||
for (const server of servers) {
|
||||
const { data } = await server.videos.list()
|
||||
expect(data).to.have.lengthOf(videosUUID.length)
|
||||
|
||||
for (const video of data) {
|
||||
const videoDetail = await server.videos.get({ id: video.uuid })
|
||||
expect(videoDetail.files).to.have.lengthOf(1)
|
||||
expect(videoDetail.streamingPlaylists).to.have.lengthOf(0)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should run a transcoding job on video 2', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[1]}`)
|
||||
await waitJobs(servers)
|
||||
|
||||
for (const server of servers) {
|
||||
const { data } = await server.videos.list()
|
||||
|
||||
let infoHashes: { [id: number]: string }
|
||||
|
||||
for (const video of data) {
|
||||
const videoDetails = await server.videos.get({ id: video.uuid })
|
||||
|
||||
if (video.shortUUID === videosUUID[1] || video.uuid === videosUUID[1]) {
|
||||
expect(videoDetails.files).to.have.lengthOf(4)
|
||||
expect(videoDetails.streamingPlaylists).to.have.lengthOf(0)
|
||||
|
||||
if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent')
|
||||
|
||||
if (!infoHashes) {
|
||||
infoHashes = {}
|
||||
|
||||
for (const file of videoDetails.files) {
|
||||
infoHashes[file.resolution.id.toString()] = file.magnetUri
|
||||
}
|
||||
} else {
|
||||
for (const resolution of Object.keys(infoHashes)) {
|
||||
const file = videoDetails.files.find(f => f.resolution.id.toString() === resolution)
|
||||
expect(file.magnetUri).to.equal(infoHashes[resolution])
|
||||
}
|
||||
}
|
||||
} else {
|
||||
expect(videoDetails.files).to.have.lengthOf(1)
|
||||
expect(videoDetails.streamingPlaylists).to.have.lengthOf(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should run a transcoding job on video 1 with resolution', async function () {
|
||||
this.timeout(60000)
|
||||
|
||||
await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[0]} -r 480`)
|
||||
|
||||
await waitJobs(servers)
|
||||
|
||||
for (const server of servers) {
|
||||
const { data } = await server.videos.list()
|
||||
expect(data).to.have.lengthOf(videosUUID.length)
|
||||
|
||||
const videoDetails = await server.videos.get({ id: videosUUID[0] })
|
||||
|
||||
expect(videoDetails.files).to.have.lengthOf(2)
|
||||
expect(videoDetails.files[0].resolution.id).to.equal(720)
|
||||
expect(videoDetails.files[1].resolution.id).to.equal(480)
|
||||
|
||||
expect(videoDetails.streamingPlaylists).to.have.lengthOf(0)
|
||||
|
||||
if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent')
|
||||
}
|
||||
})
|
||||
|
||||
it('Should generate an HLS resolution', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[2]} --generate-hls -r 480`)
|
||||
|
||||
await waitJobs(servers)
|
||||
|
||||
for (const server of servers) {
|
||||
const videoDetails = await server.videos.get({ id: videosUUID[2] })
|
||||
|
||||
expect(videoDetails.files).to.have.lengthOf(1)
|
||||
if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent')
|
||||
|
||||
expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
|
||||
|
||||
const hlsPlaylist = videoDetails.streamingPlaylists[0]
|
||||
|
||||
const files = hlsPlaylist.files
|
||||
expect(files).to.have.lengthOf(1)
|
||||
expect(files[0].resolution.id).to.equal(480)
|
||||
|
||||
if (objectStorage) {
|
||||
await checkFilesInObjectStorage(files, 'playlist')
|
||||
|
||||
const resolutions = files.map(f => f.resolution.id)
|
||||
await checkResolutionsInMasterPlaylist({ server, playlistUrl: hlsPlaylist.playlistUrl, resolutions })
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should not duplicate an HLS resolution', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[2]} --generate-hls -r 480`)
|
||||
|
||||
await waitJobs(servers)
|
||||
|
||||
for (const server of servers) {
|
||||
const videoDetails = await server.videos.get({ id: videosUUID[2] })
|
||||
|
||||
const files = videoDetails.streamingPlaylists[0].files
|
||||
expect(files).to.have.lengthOf(1)
|
||||
expect(files[0].resolution.id).to.equal(480)
|
||||
|
||||
if (objectStorage) await checkFilesInObjectStorage(files, 'playlist')
|
||||
}
|
||||
})
|
||||
|
||||
it('Should generate all HLS resolutions', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[3]} --generate-hls`)
|
||||
|
||||
await waitJobs(servers)
|
||||
|
||||
for (const server of servers) {
|
||||
const videoDetails = await server.videos.get({ id: videosUUID[3] })
|
||||
|
||||
expect(videoDetails.files).to.have.lengthOf(1)
|
||||
expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
|
||||
|
||||
const files = videoDetails.streamingPlaylists[0].files
|
||||
expect(files).to.have.lengthOf(4)
|
||||
|
||||
if (objectStorage) await checkFilesInObjectStorage(files, 'playlist')
|
||||
}
|
||||
})
|
||||
|
||||
it('Should optimize the video file and generate HLS videos if enabled in config', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
await servers[0].config.enableTranscoding()
|
||||
await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[4]}`)
|
||||
|
||||
await waitJobs(servers)
|
||||
|
||||
for (const server of servers) {
|
||||
const videoDetails = await server.videos.get({ id: videosUUID[4] })
|
||||
|
||||
expect(videoDetails.files).to.have.lengthOf(5)
|
||||
expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
|
||||
expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(5)
|
||||
|
||||
if (objectStorage) {
|
||||
await checkFilesInObjectStorage(videoDetails.files, 'webtorrent')
|
||||
await checkFilesInObjectStorage(videoDetails.streamingPlaylists[0].files, 'playlist')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should not have updated publishedAt attributes', async function () {
|
||||
for (const id of videosUUID) {
|
||||
const video = await servers[0].videos.get({ id })
|
||||
|
||||
expect(publishedAt.some(p => video.publishedAt === p)).to.be.true
|
||||
}
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
await cleanupTests(servers)
|
||||
})
|
||||
}
|
||||
|
||||
describe('Test create transcoding jobs', function () {
|
||||
|
||||
describe('On filesystem', function () {
|
||||
runTests(false)
|
||||
})
|
||||
|
||||
describe('On object storage', function () {
|
||||
if (areMockObjectStorageTestsDisabled()) return
|
||||
|
||||
runTests(true)
|
||||
})
|
||||
})
|
|
@ -1,10 +1,8 @@
|
|||
// Order of the tests we want to execute
|
||||
import './create-import-video-file-job'
|
||||
import './create-transcoding-job'
|
||||
import './create-move-video-storage-job'
|
||||
import './peertube'
|
||||
import './plugins'
|
||||
import './print-transcode-command'
|
||||
import './prune-storage'
|
||||
import './regenerate-thumbnails'
|
||||
import './reset-password'
|
||||
|
|
|
@ -1,31 +0,0 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { buildAbsoluteFixturePath } from '@shared/core-utils'
|
||||
import { CLICommand } from '@shared/server-commands'
|
||||
import { VideoResolution } from '../../../shared/models/videos'
|
||||
|
||||
describe('Test print transcode jobs', function () {
|
||||
|
||||
it('Should print the correct command for each resolution', async function () {
|
||||
const fixturePath = buildAbsoluteFixturePath('video_short.webm')
|
||||
|
||||
for (const resolution of [
|
||||
VideoResolution.H_720P,
|
||||
VideoResolution.H_1080P
|
||||
]) {
|
||||
const command = await CLICommand.exec(`npm run print-transcode-command -- ${fixturePath} -r ${resolution}`)
|
||||
|
||||
expect(command).to.includes(`-vf scale=w=-2:h=${resolution}`)
|
||||
expect(command).to.includes(`-y -acodec aac -vcodec libx264`)
|
||||
expect(command).to.includes('-f mp4')
|
||||
expect(command).to.includes('-movflags faststart')
|
||||
expect(command).to.includes('-b:a 256k')
|
||||
expect(command).to.includes('-r 25')
|
||||
expect(command).to.includes('-level:v 3.1')
|
||||
expect(command).to.includes('-g:v 50')
|
||||
expect(command).to.includes(`-maxrate:v `)
|
||||
expect(command).to.includes(`-bufsize:v `)
|
||||
}
|
||||
})
|
||||
})
|
|
@ -7,11 +7,11 @@ import {
|
|||
createSingleServer,
|
||||
killallServers,
|
||||
makeActivityPubGetRequest,
|
||||
parseTorrentVideo,
|
||||
PeerTubeServer,
|
||||
setAccessTokensToServers,
|
||||
waitJobs
|
||||
} from '@shared/server-commands'
|
||||
import { parseTorrentVideo } from '../shared'
|
||||
|
||||
describe('Test update host scripts', function () {
|
||||
let server: PeerTubeServer
|
||||
|
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,14 @@
|
|||
#EXTM3U
|
||||
#EXT-X-VERSION:6
|
||||
#EXT-X-TARGETDURATION:2
|
||||
#EXT-X-MEDIA-SEQUENCE:68
|
||||
#EXT-X-INDEPENDENT-SEGMENTS
|
||||
#EXTINF:2.000000,
|
||||
#EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:39.019+0200
|
||||
0-000068.ts
|
||||
#EXTINF:2.000000,
|
||||
#EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:41.019+0200
|
||||
0-000069.ts
|
||||
#EXTINF:2.000000,
|
||||
#EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:43.019+0200
|
||||
0-000070.ts
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,14 @@
|
|||
#EXTM3U
|
||||
#EXT-X-VERSION:6
|
||||
#EXT-X-TARGETDURATION:2
|
||||
#EXT-X-MEDIA-SEQUENCE:68
|
||||
#EXT-X-INDEPENDENT-SEGMENTS
|
||||
#EXTINF:2.000000,
|
||||
#EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:39.019+0200
|
||||
1-000068.ts
|
||||
#EXTINF:2.000000,
|
||||
#EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:41.019+0200
|
||||
1-000069.ts
|
||||
#EXTINF:2.000000,
|
||||
#EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:43.019+0200
|
||||
1-000070.ts
|
|
@ -0,0 +1,8 @@
|
|||
#EXTM3U
|
||||
#EXT-X-VERSION:6
|
||||
#EXT-X-STREAM-INF:BANDWIDTH=1287342,RESOLUTION=640x360,CODECS="avc1.64001f,mp4a.40.2"
|
||||
0.m3u8
|
||||
|
||||
#EXT-X-STREAM-INF:BANDWIDTH=3051742,RESOLUTION=1280x720,CODECS="avc1.64001f,mp4a.40.2"
|
||||
1.m3u8
|
||||
|
Binary file not shown.
|
@ -0,0 +1,13 @@
|
|||
#EXTM3U
|
||||
#EXT-X-VERSION:7
|
||||
#EXT-X-TARGETDURATION:4
|
||||
#EXT-X-MEDIA-SEQUENCE:0
|
||||
#EXT-X-PLAYLIST-TYPE:VOD
|
||||
#EXT-X-MAP:URI="3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4",BYTERANGE="1375@0"
|
||||
#EXTINF:4.000000,
|
||||
#EXT-X-BYTERANGE:10518@1375
|
||||
3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4
|
||||
#EXTINF:1.000000,
|
||||
#EXT-X-BYTERANGE:3741@11893
|
||||
3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4
|
||||
#EXT-X-ENDLIST
|
Binary file not shown.
|
@ -0,0 +1,13 @@
|
|||
#EXTM3U
|
||||
#EXT-X-VERSION:7
|
||||
#EXT-X-TARGETDURATION:4
|
||||
#EXT-X-MEDIA-SEQUENCE:0
|
||||
#EXT-X-PLAYLIST-TYPE:VOD
|
||||
#EXT-X-MAP:URI="3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4",BYTERANGE="1375@0"
|
||||
#EXTINF:4.000000,
|
||||
#EXT-X-BYTERANGE:10518@1375
|
||||
3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4
|
||||
#EXTINF:1.000000,
|
||||
#EXT-X-BYTERANGE:3741@11893
|
||||
3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4
|
||||
#EXT-X-ENDLIST
|
Binary file not shown.
|
@ -0,0 +1,13 @@
|
|||
#EXTM3U
|
||||
#EXT-X-VERSION:7
|
||||
#EXT-X-TARGETDURATION:4
|
||||
#EXT-X-MEDIA-SEQUENCE:0
|
||||
#EXT-X-PLAYLIST-TYPE:VOD
|
||||
#EXT-X-MAP:URI="05c40acd-3e94-4d25-ade8-97f7ff2cf0ac-360-fragmented.mp4",BYTERANGE="1376@0"
|
||||
#EXTINF:4.000000,
|
||||
#EXT-X-BYTERANGE:19987@1376
|
||||
05c40acd-3e94-4d25-ade8-97f7ff2cf0ac-360-fragmented.mp4
|
||||
#EXTINF:1.000000,
|
||||
#EXT-X-BYTERANGE:9147@21363
|
||||
05c40acd-3e94-4d25-ade8-97f7ff2cf0ac-360-fragmented.mp4
|
||||
#EXT-X-ENDLIST
|
Binary file not shown.
|
@ -0,0 +1,13 @@
|
|||
#EXTM3U
|
||||
#EXT-X-VERSION:7
|
||||
#EXT-X-TARGETDURATION:4
|
||||
#EXT-X-MEDIA-SEQUENCE:0
|
||||
#EXT-X-PLAYLIST-TYPE:VOD
|
||||
#EXT-X-MAP:URI="f9377e69-d8f2-4de8-8087-ddbca6629829-480-fragmented.mp4",BYTERANGE="1376@0"
|
||||
#EXTINF:4.000000,
|
||||
#EXT-X-BYTERANGE:26042@1376
|
||||
f9377e69-d8f2-4de8-8087-ddbca6629829-480-fragmented.mp4
|
||||
#EXTINF:1.000000,
|
||||
#EXT-X-BYTERANGE:12353@27418
|
||||
f9377e69-d8f2-4de8-8087-ddbca6629829-480-fragmented.mp4
|
||||
#EXT-X-ENDLIST
|
Binary file not shown.
|
@ -0,0 +1,13 @@
|
|||
#EXTM3U
|
||||
#EXT-X-VERSION:7
|
||||
#EXT-X-TARGETDURATION:4
|
||||
#EXT-X-MEDIA-SEQUENCE:0
|
||||
#EXT-X-PLAYLIST-TYPE:VOD
|
||||
#EXT-X-MAP:URI="c1014aa4-d1f4-4b66-927b-c23d283fcae0-720-fragmented.mp4",BYTERANGE="1356@0"
|
||||
#EXTINF:4.000000,
|
||||
#EXT-X-BYTERANGE:39260@1356
|
||||
c1014aa4-d1f4-4b66-927b-c23d283fcae0-720-fragmented.mp4
|
||||
#EXTINF:1.000000,
|
||||
#EXT-X-BYTERANGE:18493@40616
|
||||
c1014aa4-d1f4-4b66-927b-c23d283fcae0-720-fragmented.mp4
|
||||
#EXT-X-ENDLIST
|
Binary file not shown.
|
@ -4,6 +4,7 @@ import './misc-endpoints'
|
|||
import './feeds/'
|
||||
import './cli/'
|
||||
import './api/'
|
||||
import './peertube-runner/'
|
||||
import './plugins/'
|
||||
import './helpers/'
|
||||
import './lib/'
|
||||
|
|
|
@ -63,7 +63,7 @@ describe('VideoConstantManagerFactory', function () {
|
|||
it('Should be able to add a video licence constant', () => {
|
||||
const successfullyAdded = videoLicenceManager.addConstant(42, 'European Union Public Licence')
|
||||
expect(successfullyAdded).to.be.true
|
||||
expect(videoLicenceManager.getConstantValue(42)).to.equal('European Union Public Licence')
|
||||
expect(videoLicenceManager.getConstantValue(42 as any)).to.equal('European Union Public Licence')
|
||||
})
|
||||
|
||||
it('Should be able to reset video licence constants', () => {
|
||||
|
@ -87,9 +87,9 @@ describe('VideoConstantManagerFactory', function () {
|
|||
})
|
||||
|
||||
it('Should be able to add a video playlist privacy constant', () => {
|
||||
const successfullyAdded = playlistPrivacyManager.addConstant(42, 'Friends only')
|
||||
const successfullyAdded = playlistPrivacyManager.addConstant(42 as any, 'Friends only')
|
||||
expect(successfullyAdded).to.be.true
|
||||
expect(playlistPrivacyManager.getConstantValue(42)).to.equal('Friends only')
|
||||
expect(playlistPrivacyManager.getConstantValue(42 as any)).to.equal('Friends only')
|
||||
})
|
||||
|
||||
it('Should be able to reset video playlist privacy constants', () => {
|
||||
|
@ -113,9 +113,9 @@ describe('VideoConstantManagerFactory', function () {
|
|||
})
|
||||
|
||||
it('Should be able to add a video privacy constant', () => {
|
||||
const successfullyAdded = videoPrivacyManager.addConstant(42, 'Friends only')
|
||||
const successfullyAdded = videoPrivacyManager.addConstant(42 as any, 'Friends only')
|
||||
expect(successfullyAdded).to.be.true
|
||||
expect(videoPrivacyManager.getConstantValue(42)).to.equal('Friends only')
|
||||
expect(videoPrivacyManager.getConstantValue(42 as any)).to.equal('Friends only')
|
||||
})
|
||||
|
||||
it('Should be able to reset video privacy constants', () => {
|
||||
|
|
|
@ -0,0 +1,71 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { PeerTubeRunnerProcess } from '@server/tests/shared'
|
||||
import { cleanupTests, createSingleServer, PeerTubeServer, setAccessTokensToServers, setDefaultVideoChannel } from '@shared/server-commands'
|
||||
|
||||
describe('Test peertube-runner program client CLI', function () {
|
||||
let server: PeerTubeServer
|
||||
let peertubeRunner: PeerTubeRunnerProcess
|
||||
|
||||
before(async function () {
|
||||
this.timeout(120_000)
|
||||
|
||||
server = await createSingleServer(1)
|
||||
|
||||
await setAccessTokensToServers([ server ])
|
||||
await setDefaultVideoChannel([ server ])
|
||||
|
||||
await server.config.enableRemoteTranscoding()
|
||||
|
||||
peertubeRunner = new PeerTubeRunnerProcess()
|
||||
await peertubeRunner.runServer()
|
||||
})
|
||||
|
||||
it('Should not have PeerTube instance listed', async function () {
|
||||
const data = await peertubeRunner.listRegisteredPeerTubeInstances()
|
||||
|
||||
expect(data).to.not.contain(server.url)
|
||||
})
|
||||
|
||||
it('Should register a new PeerTube instance', async function () {
|
||||
const registrationToken = await server.runnerRegistrationTokens.getFirstRegistrationToken()
|
||||
|
||||
await peertubeRunner.registerPeerTubeInstance({
|
||||
server,
|
||||
registrationToken,
|
||||
runnerName: 'my super runner',
|
||||
runnerDescription: 'super description'
|
||||
})
|
||||
})
|
||||
|
||||
it('Should list this new PeerTube instance', async function () {
|
||||
const data = await peertubeRunner.listRegisteredPeerTubeInstances()
|
||||
|
||||
expect(data).to.contain(server.url)
|
||||
expect(data).to.contain('my super runner')
|
||||
expect(data).to.contain('super description')
|
||||
})
|
||||
|
||||
it('Should still have the configuration after a restart', async function () {
|
||||
peertubeRunner.kill()
|
||||
|
||||
await peertubeRunner.runServer()
|
||||
})
|
||||
|
||||
it('Should unregister the PeerTube instance', async function () {
|
||||
await peertubeRunner.unregisterPeerTubeInstance({ server })
|
||||
})
|
||||
|
||||
it('Should not have PeerTube instance listed', async function () {
|
||||
const data = await peertubeRunner.listRegisteredPeerTubeInstances()
|
||||
|
||||
expect(data).to.not.contain(server.url)
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
await cleanupTests([ server ])
|
||||
|
||||
peertubeRunner.kill()
|
||||
})
|
||||
})
|
|
@ -0,0 +1,3 @@
|
|||
export * from './client-cli'
|
||||
export * from './live-transcoding'
|
||||
export * from './vod-transcoding'
|
|
@ -0,0 +1,178 @@
|
|||
import { expect } from 'chai'
|
||||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
import { expectStartWith, PeerTubeRunnerProcess, SQLCommand, testLiveVideoResolutions } from '@server/tests/shared'
|
||||
import { areMockObjectStorageTestsDisabled, wait } from '@shared/core-utils'
|
||||
import { HttpStatusCode, VideoPrivacy } from '@shared/models'
|
||||
import {
|
||||
cleanupTests,
|
||||
createMultipleServers,
|
||||
doubleFollow,
|
||||
findExternalSavedVideo,
|
||||
makeRawRequest,
|
||||
ObjectStorageCommand,
|
||||
PeerTubeServer,
|
||||
setAccessTokensToServers,
|
||||
setDefaultVideoChannel,
|
||||
stopFfmpeg,
|
||||
waitJobs,
|
||||
waitUntilLivePublishedOnAllServers,
|
||||
waitUntilLiveWaitingOnAllServers
|
||||
} from '@shared/server-commands'
|
||||
|
||||
describe('Test Live transcoding in peertube-runner program', function () {
|
||||
let servers: PeerTubeServer[] = []
|
||||
let peertubeRunner: PeerTubeRunnerProcess
|
||||
let sqlCommandServer1: SQLCommand
|
||||
|
||||
function runSuite (options: {
|
||||
objectStorage: boolean
|
||||
}) {
|
||||
const { objectStorage } = options
|
||||
|
||||
it('Should enable transcoding without additional resolutions', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const { video } = await servers[0].live.quickCreate({ permanentLive: true, saveReplay: false, privacy: VideoPrivacy.PUBLIC })
|
||||
|
||||
const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: video.uuid })
|
||||
await waitUntilLivePublishedOnAllServers(servers, video.uuid)
|
||||
await waitJobs(servers)
|
||||
|
||||
await testLiveVideoResolutions({
|
||||
originServer: servers[0],
|
||||
sqlCommand: sqlCommandServer1,
|
||||
servers,
|
||||
liveVideoId: video.uuid,
|
||||
resolutions: [ 720, 480, 360, 240, 144 ],
|
||||
objectStorage,
|
||||
transcoded: true
|
||||
})
|
||||
|
||||
await stopFfmpeg(ffmpegCommand)
|
||||
|
||||
await waitUntilLiveWaitingOnAllServers(servers, video.uuid)
|
||||
await servers[0].videos.remove({ id: video.id })
|
||||
})
|
||||
|
||||
it('Should transcode audio only RTMP stream', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const { video } = await servers[0].live.quickCreate({ permanentLive: true, saveReplay: false, privacy: VideoPrivacy.UNLISTED })
|
||||
|
||||
const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: video.uuid, fixtureName: 'video_short_no_audio.mp4' })
|
||||
await waitUntilLivePublishedOnAllServers(servers, video.uuid)
|
||||
await waitJobs(servers)
|
||||
|
||||
await stopFfmpeg(ffmpegCommand)
|
||||
|
||||
await waitUntilLiveWaitingOnAllServers(servers, video.uuid)
|
||||
await servers[0].videos.remove({ id: video.id })
|
||||
})
|
||||
|
||||
it('Should save a replay', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const { video } = await servers[0].live.quickCreate({ permanentLive: true, saveReplay: true })
|
||||
|
||||
const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: video.uuid })
|
||||
await waitUntilLivePublishedOnAllServers(servers, video.uuid)
|
||||
|
||||
await testLiveVideoResolutions({
|
||||
originServer: servers[0],
|
||||
sqlCommand: sqlCommandServer1,
|
||||
servers,
|
||||
liveVideoId: video.uuid,
|
||||
resolutions: [ 720, 480, 360, 240, 144 ],
|
||||
objectStorage,
|
||||
transcoded: true
|
||||
})
|
||||
|
||||
await stopFfmpeg(ffmpegCommand)
|
||||
|
||||
await waitUntilLiveWaitingOnAllServers(servers, video.uuid)
|
||||
await waitJobs(servers)
|
||||
|
||||
const session = await servers[0].live.findLatestSession({ videoId: video.uuid })
|
||||
expect(session.endingProcessed).to.be.true
|
||||
expect(session.endDate).to.exist
|
||||
expect(session.saveReplay).to.be.true
|
||||
|
||||
const videoLiveDetails = await servers[0].videos.get({ id: video.uuid })
|
||||
const replay = await findExternalSavedVideo(servers[0], videoLiveDetails)
|
||||
|
||||
for (const server of servers) {
|
||||
const video = await server.videos.get({ id: replay.uuid })
|
||||
|
||||
expect(video.files).to.have.lengthOf(0)
|
||||
expect(video.streamingPlaylists).to.have.lengthOf(1)
|
||||
|
||||
const files = video.streamingPlaylists[0].files
|
||||
expect(files).to.have.lengthOf(5)
|
||||
|
||||
for (const file of files) {
|
||||
if (objectStorage) {
|
||||
expectStartWith(file.fileUrl, ObjectStorageCommand.getMockPlaylistBaseUrl())
|
||||
}
|
||||
|
||||
await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
before(async function () {
|
||||
this.timeout(120_000)
|
||||
|
||||
servers = await createMultipleServers(2)
|
||||
|
||||
await setAccessTokensToServers(servers)
|
||||
await setDefaultVideoChannel(servers)
|
||||
|
||||
await doubleFollow(servers[0], servers[1])
|
||||
|
||||
sqlCommandServer1 = new SQLCommand(servers[0])
|
||||
|
||||
await servers[0].config.enableRemoteTranscoding()
|
||||
await servers[0].config.enableTranscoding(true, true, true)
|
||||
await servers[0].config.enableLive({ allowReplay: true, resolutions: 'max', transcoding: true })
|
||||
|
||||
const registrationToken = await servers[0].runnerRegistrationTokens.getFirstRegistrationToken()
|
||||
|
||||
peertubeRunner = new PeerTubeRunnerProcess()
|
||||
await peertubeRunner.runServer({ hideLogs: false })
|
||||
await peertubeRunner.registerPeerTubeInstance({ server: servers[0], registrationToken, runnerName: 'runner' })
|
||||
})
|
||||
|
||||
describe('With lives on local filesystem storage', function () {
|
||||
|
||||
before(async function () {
|
||||
await servers[0].config.enableTranscoding(true, false, true)
|
||||
})
|
||||
|
||||
runSuite({ objectStorage: false })
|
||||
})
|
||||
|
||||
describe('With lives on object storage', function () {
|
||||
if (areMockObjectStorageTestsDisabled()) return
|
||||
|
||||
before(async function () {
|
||||
await ObjectStorageCommand.prepareDefaultMockBuckets()
|
||||
|
||||
await servers[0].kill()
|
||||
|
||||
await servers[0].run(ObjectStorageCommand.getDefaultMockConfig())
|
||||
|
||||
// Wait for peertube runner socket reconnection
|
||||
await wait(1500)
|
||||
})
|
||||
|
||||
runSuite({ objectStorage: true })
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
await peertubeRunner.unregisterPeerTubeInstance({ server: servers[0] })
|
||||
peertubeRunner.kill()
|
||||
|
||||
await cleanupTests(servers)
|
||||
})
|
||||
})
|
|
@ -0,0 +1,330 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
import { expect } from 'chai'
|
||||
import { completeCheckHlsPlaylist, completeWebVideoFilesCheck, PeerTubeRunnerProcess } from '@server/tests/shared'
|
||||
import { areMockObjectStorageTestsDisabled, getAllFiles, wait } from '@shared/core-utils'
|
||||
import { VideoPrivacy } from '@shared/models'
|
||||
import {
|
||||
cleanupTests,
|
||||
createMultipleServers,
|
||||
doubleFollow,
|
||||
ObjectStorageCommand,
|
||||
PeerTubeServer,
|
||||
setAccessTokensToServers,
|
||||
setDefaultVideoChannel,
|
||||
waitJobs
|
||||
} from '@shared/server-commands'
|
||||
|
||||
describe('Test VOD transcoding in peertube-runner program', function () {
|
||||
let servers: PeerTubeServer[] = []
|
||||
let peertubeRunner: PeerTubeRunnerProcess
|
||||
|
||||
function runSuite (options: {
|
||||
webtorrentEnabled: boolean
|
||||
hlsEnabled: boolean
|
||||
objectStorage: boolean
|
||||
}) {
|
||||
const { webtorrentEnabled, hlsEnabled, objectStorage } = options
|
||||
|
||||
const objectStorageBaseUrlWebTorrent = objectStorage
|
||||
? ObjectStorageCommand.getMockWebTorrentBaseUrl()
|
||||
: undefined
|
||||
|
||||
const objectStorageBaseUrlHLS = objectStorage
|
||||
? ObjectStorageCommand.getMockPlaylistBaseUrl()
|
||||
: undefined
|
||||
|
||||
it('Should upload a classic video mp4 and transcode it', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const { uuid } = await servers[0].videos.quickUpload({ name: 'mp4', fixture: 'video_short.mp4' })
|
||||
|
||||
await waitJobs(servers, { runnerJobs: true })
|
||||
|
||||
for (const server of servers) {
|
||||
if (webtorrentEnabled) {
|
||||
await completeWebVideoFilesCheck({
|
||||
server,
|
||||
originServer: servers[0],
|
||||
fixture: 'video_short.mp4',
|
||||
videoUUID: uuid,
|
||||
objectStorageBaseUrl: objectStorageBaseUrlWebTorrent,
|
||||
files: [
|
||||
{ resolution: 0 },
|
||||
{ resolution: 144 },
|
||||
{ resolution: 240 },
|
||||
{ resolution: 360 },
|
||||
{ resolution: 480 },
|
||||
{ resolution: 720 }
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
if (hlsEnabled) {
|
||||
await completeCheckHlsPlaylist({
|
||||
hlsOnly: !webtorrentEnabled,
|
||||
servers,
|
||||
videoUUID: uuid,
|
||||
objectStorageBaseUrl: objectStorageBaseUrlHLS,
|
||||
resolutions: [ 720, 480, 360, 240, 144, 0 ]
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should upload a webm video and transcode it', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const { uuid } = await servers[0].videos.quickUpload({ name: 'mp4', fixture: 'video_short.webm' })
|
||||
|
||||
await waitJobs(servers, { runnerJobs: true })
|
||||
|
||||
for (const server of servers) {
|
||||
if (webtorrentEnabled) {
|
||||
await completeWebVideoFilesCheck({
|
||||
server,
|
||||
originServer: servers[0],
|
||||
fixture: 'video_short.webm',
|
||||
videoUUID: uuid,
|
||||
objectStorageBaseUrl: objectStorageBaseUrlWebTorrent,
|
||||
files: [
|
||||
{ resolution: 0 },
|
||||
{ resolution: 144 },
|
||||
{ resolution: 240 },
|
||||
{ resolution: 360 },
|
||||
{ resolution: 480 },
|
||||
{ resolution: 720 }
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
if (hlsEnabled) {
|
||||
await completeCheckHlsPlaylist({
|
||||
hlsOnly: !webtorrentEnabled,
|
||||
servers,
|
||||
videoUUID: uuid,
|
||||
objectStorageBaseUrl: objectStorageBaseUrlHLS,
|
||||
resolutions: [ 720, 480, 360, 240, 144, 0 ]
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should upload an audio only video and transcode it', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const attributes = { name: 'audio_without_preview', fixture: 'sample.ogg' }
|
||||
const { uuid } = await servers[0].videos.upload({ attributes, mode: 'resumable' })
|
||||
|
||||
await waitJobs(servers, { runnerJobs: true })
|
||||
|
||||
for (const server of servers) {
|
||||
if (webtorrentEnabled) {
|
||||
await completeWebVideoFilesCheck({
|
||||
server,
|
||||
originServer: servers[0],
|
||||
fixture: 'sample.ogg',
|
||||
videoUUID: uuid,
|
||||
objectStorageBaseUrl: objectStorageBaseUrlWebTorrent,
|
||||
files: [
|
||||
{ resolution: 0 },
|
||||
{ resolution: 144 },
|
||||
{ resolution: 240 },
|
||||
{ resolution: 360 },
|
||||
{ resolution: 480 }
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
if (hlsEnabled) {
|
||||
await completeCheckHlsPlaylist({
|
||||
hlsOnly: !webtorrentEnabled,
|
||||
servers,
|
||||
videoUUID: uuid,
|
||||
objectStorageBaseUrl: objectStorageBaseUrlHLS,
|
||||
resolutions: [ 480, 360, 240, 144, 0 ]
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Should upload a private video and transcode it', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
const { uuid } = await servers[0].videos.quickUpload({ name: 'mp4', fixture: 'video_short.mp4', privacy: VideoPrivacy.PRIVATE })
|
||||
|
||||
await waitJobs(servers, { runnerJobs: true })
|
||||
|
||||
if (webtorrentEnabled) {
|
||||
await completeWebVideoFilesCheck({
|
||||
server: servers[0],
|
||||
originServer: servers[0],
|
||||
fixture: 'video_short.mp4',
|
||||
videoUUID: uuid,
|
||||
objectStorageBaseUrl: objectStorageBaseUrlWebTorrent,
|
||||
files: [
|
||||
{ resolution: 0 },
|
||||
{ resolution: 144 },
|
||||
{ resolution: 240 },
|
||||
{ resolution: 360 },
|
||||
{ resolution: 480 },
|
||||
{ resolution: 720 }
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
if (hlsEnabled) {
|
||||
await completeCheckHlsPlaylist({
|
||||
hlsOnly: !webtorrentEnabled,
|
||||
servers: [ servers[0] ],
|
||||
videoUUID: uuid,
|
||||
objectStorageBaseUrl: objectStorageBaseUrlHLS,
|
||||
resolutions: [ 720, 480, 360, 240, 144, 0 ]
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
it('Should transcode videos on manual run', async function () {
|
||||
this.timeout(120000)
|
||||
|
||||
await servers[0].config.disableTranscoding()
|
||||
|
||||
const { uuid } = await servers[0].videos.quickUpload({ name: 'manual transcoding', fixture: 'video_short.mp4' })
|
||||
await waitJobs(servers, { runnerJobs: true })
|
||||
|
||||
{
|
||||
const video = await servers[0].videos.get({ id: uuid })
|
||||
expect(getAllFiles(video)).to.have.lengthOf(1)
|
||||
}
|
||||
|
||||
await servers[0].config.enableTranscoding(true, true, true)
|
||||
|
||||
await servers[0].videos.runTranscoding({ transcodingType: 'webtorrent', videoId: uuid })
|
||||
await waitJobs(servers, { runnerJobs: true })
|
||||
|
||||
await completeWebVideoFilesCheck({
|
||||
server: servers[0],
|
||||
originServer: servers[0],
|
||||
fixture: 'video_short.mp4',
|
||||
videoUUID: uuid,
|
||||
objectStorageBaseUrl: objectStorageBaseUrlWebTorrent,
|
||||
files: [
|
||||
{ resolution: 0 },
|
||||
{ resolution: 144 },
|
||||
{ resolution: 240 },
|
||||
{ resolution: 360 },
|
||||
{ resolution: 480 },
|
||||
{ resolution: 720 }
|
||||
]
|
||||
})
|
||||
|
||||
await servers[0].videos.runTranscoding({ transcodingType: 'hls', videoId: uuid })
|
||||
await waitJobs(servers, { runnerJobs: true })
|
||||
|
||||
await completeCheckHlsPlaylist({
|
||||
hlsOnly: false,
|
||||
servers: [ servers[0] ],
|
||||
videoUUID: uuid,
|
||||
objectStorageBaseUrl: objectStorageBaseUrlHLS,
|
||||
resolutions: [ 720, 480, 360, 240, 144, 0 ]
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
before(async function () {
|
||||
this.timeout(120_000)
|
||||
|
||||
servers = await createMultipleServers(2)
|
||||
|
||||
await setAccessTokensToServers(servers)
|
||||
await setDefaultVideoChannel(servers)
|
||||
|
||||
await doubleFollow(servers[0], servers[1])
|
||||
|
||||
await servers[0].config.enableRemoteTranscoding()
|
||||
|
||||
const registrationToken = await servers[0].runnerRegistrationTokens.getFirstRegistrationToken()
|
||||
|
||||
peertubeRunner = new PeerTubeRunnerProcess()
|
||||
await peertubeRunner.runServer()
|
||||
await peertubeRunner.registerPeerTubeInstance({ server: servers[0], registrationToken, runnerName: 'runner' })
|
||||
})
|
||||
|
||||
describe('With videos on local filesystem storage', function () {
|
||||
|
||||
describe('Web video only enabled', function () {
|
||||
|
||||
before(async function () {
|
||||
await servers[0].config.enableTranscoding(true, false, true)
|
||||
})
|
||||
|
||||
runSuite({ webtorrentEnabled: true, hlsEnabled: false, objectStorage: false })
|
||||
})
|
||||
|
||||
describe('HLS videos only enabled', function () {
|
||||
|
||||
before(async function () {
|
||||
await servers[0].config.enableTranscoding(false, true, true)
|
||||
})
|
||||
|
||||
runSuite({ webtorrentEnabled: false, hlsEnabled: true, objectStorage: false })
|
||||
})
|
||||
|
||||
describe('Web video & HLS enabled', function () {
|
||||
|
||||
before(async function () {
|
||||
await servers[0].config.enableTranscoding(true, true, true)
|
||||
})
|
||||
|
||||
runSuite({ webtorrentEnabled: true, hlsEnabled: true, objectStorage: false })
|
||||
})
|
||||
})
|
||||
|
||||
describe('With videos on object storage', function () {
|
||||
if (areMockObjectStorageTestsDisabled()) return
|
||||
|
||||
before(async function () {
|
||||
await ObjectStorageCommand.prepareDefaultMockBuckets()
|
||||
|
||||
await servers[0].kill()
|
||||
|
||||
await servers[0].run(ObjectStorageCommand.getDefaultMockConfig())
|
||||
|
||||
// Wait for peertube runner socket reconnection
|
||||
await wait(1500)
|
||||
})
|
||||
|
||||
describe('Web video only enabled', function () {
|
||||
|
||||
before(async function () {
|
||||
await servers[0].config.enableTranscoding(true, false, true)
|
||||
})
|
||||
|
||||
runSuite({ webtorrentEnabled: true, hlsEnabled: false, objectStorage: true })
|
||||
})
|
||||
|
||||
describe('HLS videos only enabled', function () {
|
||||
|
||||
before(async function () {
|
||||
await servers[0].config.enableTranscoding(false, true, true)
|
||||
})
|
||||
|
||||
runSuite({ webtorrentEnabled: false, hlsEnabled: true, objectStorage: true })
|
||||
})
|
||||
|
||||
describe('Web video & HLS enabled', function () {
|
||||
|
||||
before(async function () {
|
||||
await servers[0].config.enableTranscoding(true, true, true)
|
||||
})
|
||||
|
||||
runSuite({ webtorrentEnabled: true, hlsEnabled: true, objectStorage: true })
|
||||
})
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
await peertubeRunner.unregisterPeerTubeInstance({ server: servers[0] })
|
||||
peertubeRunner.kill()
|
||||
|
||||
await cleanupTests(servers)
|
||||
})
|
||||
})
|
|
@ -1,7 +1,7 @@
|
|||
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
||||
|
||||
import { expect } from 'chai'
|
||||
import { getAudioStream, getVideoStreamFPS, getVideoStream } from '@server/helpers/ffmpeg'
|
||||
import { getAudioStream, getVideoStream, getVideoStreamFPS } from '@shared/ffmpeg'
|
||||
import { VideoPrivacy } from '@shared/models'
|
||||
import {
|
||||
cleanupTests,
|
||||
|
|
|
@ -11,7 +11,7 @@ import { HttpStatusCode } from '@shared/models'
|
|||
import { makeGetRequest, PeerTubeServer } from '@shared/server-commands'
|
||||
|
||||
// Default interval -> 5 minutes
|
||||
function dateIsValid (dateString: string, interval = 300000) {
|
||||
function dateIsValid (dateString: string | Date, interval = 300000) {
|
||||
const dateToCheck = new Date(dateString)
|
||||
const now = new Date()
|
||||
|
||||
|
@ -90,6 +90,8 @@ async function testFileExistsOrNot (server: PeerTubeServer, directory: string, f
|
|||
expect(await pathExists(join(base, filePath))).to.equal(exist)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function checkBadStartPagination (url: string, path: string, token?: string, query = {}) {
|
||||
return makeGetRequest({
|
||||
url,
|
||||
|
|
|
@ -3,7 +3,7 @@ import ffmpeg from 'fluent-ffmpeg'
|
|||
import { ensureDir, pathExists } from 'fs-extra'
|
||||
import { dirname } from 'path'
|
||||
import { buildAbsoluteFixturePath, getMaxBitrate } from '@shared/core-utils'
|
||||
import { getVideoStreamBitrate, getVideoStreamFPS, getVideoStreamDimensionsInfo } from '@shared/extra-utils'
|
||||
import { getVideoStreamBitrate, getVideoStreamDimensionsInfo, getVideoStreamFPS } from '@shared/ffmpeg'
|
||||
|
||||
async function ensureHasTooBigBitrate (fixturePath: string) {
|
||||
const bitrate = await getVideoStreamBitrate(fixturePath)
|
||||
|
|
|
@ -6,11 +6,14 @@ export * from './directories'
|
|||
export * from './generate'
|
||||
export * from './live'
|
||||
export * from './notifications'
|
||||
export * from './peertube-runner-process'
|
||||
export * from './video-playlists'
|
||||
export * from './plugins'
|
||||
export * from './requests'
|
||||
export * from './sql-command'
|
||||
export * from './streaming-playlists'
|
||||
export * from './tests'
|
||||
export * from './tracker'
|
||||
export * from './videos'
|
||||
export * from './views'
|
||||
export * from './webtorrent'
|
||||
|
|
|
@ -6,6 +6,7 @@ import { join } from 'path'
|
|||
import { sha1 } from '@shared/extra-utils'
|
||||
import { LiveVideo, VideoStreamingPlaylistType } from '@shared/models'
|
||||
import { ObjectStorageCommand, PeerTubeServer } from '@shared/server-commands'
|
||||
import { SQLCommand } from './sql-command'
|
||||
import { checkLiveSegmentHash, checkResolutionsInMasterPlaylist } from './streaming-playlists'
|
||||
|
||||
async function checkLiveCleanup (options: {
|
||||
|
@ -36,8 +37,10 @@ async function checkLiveCleanup (options: {
|
|||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
async function testVideoResolutions (options: {
|
||||
async function testLiveVideoResolutions (options: {
|
||||
sqlCommand: SQLCommand
|
||||
originServer: PeerTubeServer
|
||||
|
||||
servers: PeerTubeServer[]
|
||||
liveVideoId: string
|
||||
resolutions: number[]
|
||||
|
@ -48,6 +51,7 @@ async function testVideoResolutions (options: {
|
|||
}) {
|
||||
const {
|
||||
originServer,
|
||||
sqlCommand,
|
||||
servers,
|
||||
liveVideoId,
|
||||
resolutions,
|
||||
|
@ -116,7 +120,7 @@ async function testVideoResolutions (options: {
|
|||
|
||||
if (originServer.internalServerNumber === server.internalServerNumber) {
|
||||
const infohash = sha1(`${2 + hlsPlaylist.playlistUrl}+V${i}`)
|
||||
const dbInfohashes = await originServer.sql.getPlaylistInfohash(hlsPlaylist.id)
|
||||
const dbInfohashes = await sqlCommand.getPlaylistInfohash(hlsPlaylist.id)
|
||||
|
||||
expect(dbInfohashes).to.include(infohash)
|
||||
}
|
||||
|
@ -128,7 +132,7 @@ async function testVideoResolutions (options: {
|
|||
|
||||
export {
|
||||
checkLiveCleanup,
|
||||
testVideoResolutions
|
||||
testLiveVideoResolutions
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
|
|
@ -0,0 +1,87 @@
|
|||
import { ChildProcess, fork } from 'child_process'
|
||||
import execa from 'execa'
|
||||
import { join } from 'path'
|
||||
import { root } from '@shared/core-utils'
|
||||
import { PeerTubeServer } from '@shared/server-commands'
|
||||
|
||||
export class PeerTubeRunnerProcess {
|
||||
private app?: ChildProcess
|
||||
|
||||
runServer (options: {
|
||||
hideLogs?: boolean // default true
|
||||
} = {}) {
|
||||
const { hideLogs = true } = options
|
||||
|
||||
return new Promise<void>((res, rej) => {
|
||||
const args = [ 'server', '--verbose', '--id', 'test' ]
|
||||
|
||||
const forkOptions = {
|
||||
detached: false,
|
||||
silent: true
|
||||
}
|
||||
this.app = fork(this.getRunnerPath(), args, forkOptions)
|
||||
|
||||
this.app.stdout.on('data', data => {
|
||||
const str = data.toString() as string
|
||||
|
||||
if (!hideLogs) {
|
||||
console.log(str)
|
||||
}
|
||||
})
|
||||
|
||||
res()
|
||||
})
|
||||
}
|
||||
|
||||
registerPeerTubeInstance (options: {
|
||||
server: PeerTubeServer
|
||||
registrationToken: string
|
||||
runnerName: string
|
||||
runnerDescription?: string
|
||||
}) {
|
||||
const { server, registrationToken, runnerName, runnerDescription } = options
|
||||
|
||||
const args = [
|
||||
'register',
|
||||
'--url', server.url,
|
||||
'--registration-token', registrationToken,
|
||||
'--runner-name', runnerName,
|
||||
'--id', 'test'
|
||||
]
|
||||
|
||||
if (runnerDescription) {
|
||||
args.push('--runner-description')
|
||||
args.push(runnerDescription)
|
||||
}
|
||||
|
||||
return execa.node(this.getRunnerPath(), args)
|
||||
}
|
||||
|
||||
unregisterPeerTubeInstance (options: {
|
||||
server: PeerTubeServer
|
||||
}) {
|
||||
const { server } = options
|
||||
|
||||
const args = [ 'unregister', '--url', server.url, '--id', 'test' ]
|
||||
return execa.node(this.getRunnerPath(), args)
|
||||
}
|
||||
|
||||
async listRegisteredPeerTubeInstances () {
|
||||
const args = [ 'list-registered', '--id', 'test' ]
|
||||
const { stdout } = await execa.node(this.getRunnerPath(), args)
|
||||
|
||||
return stdout
|
||||
}
|
||||
|
||||
kill () {
|
||||
if (!this.app) return
|
||||
|
||||
process.kill(this.app.pid)
|
||||
|
||||
this.app = null
|
||||
}
|
||||
|
||||
private getRunnerPath () {
|
||||
return join(root(), 'packages', 'peertube-runner', 'dist', 'peertube-runner.js')
|
||||
}
|
||||
}
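For reference, a minimal usage sketch of this helper, mirroring how the new client-cli test drives it (every call here appears in the diff above; the wrapper function name and the runner name string are illustrative only):
// Sketch only: exercise PeerTubeRunnerProcess the way the new tests do.
import { PeerTubeRunnerProcess } from '@server/tests/shared'
import { cleanupTests, createSingleServer, setAccessTokensToServers } from '@shared/server-commands'

async function runnerExample () { // illustrative wrapper, not part of the commit
  const server = await createSingleServer(1)
  await setAccessTokensToServers([ server ])
  await server.config.enableRemoteTranscoding()

  const runner = new PeerTubeRunnerProcess()
  await runner.runServer({ hideLogs: true })

  const registrationToken = await server.runnerRegistrationTokens.getFirstRegistrationToken()
  await runner.registerPeerTubeInstance({ server, registrationToken, runnerName: 'example runner' })

  // Prints the registered instances, including server.url and the runner name
  console.log(await runner.listRegisteredPeerTubeInstances())

  await runner.unregisterPeerTubeInstance({ server })
  runner.kill()

  await cleanupTests([ server ])
}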
|
|
@ -1,10 +1,14 @@
|
|||
import { QueryTypes, Sequelize } from 'sequelize'
|
||||
import { forceNumber } from '@shared/core-utils'
|
||||
import { AbstractCommand } from '../shared'
|
||||
import { PeerTubeServer } from '@shared/server-commands'
|
||||
|
||||
export class SQLCommand extends AbstractCommand {
|
||||
export class SQLCommand {
|
||||
private sequelize: Sequelize
|
||||
|
||||
constructor (private readonly server: PeerTubeServer) {
|
||||
|
||||
}
|
||||
|
||||
deleteAll (table: string) {
|
||||
const seq = this.getSequelize()
|
||||
|
|
@ -4,10 +4,11 @@ import { expect } from 'chai'
|
|||
import { basename, dirname, join } from 'path'
|
||||
import { removeFragmentedMP4Ext, uuidRegex } from '@shared/core-utils'
|
||||
import { sha256 } from '@shared/extra-utils'
|
||||
import { HttpStatusCode, VideoStreamingPlaylist, VideoStreamingPlaylistType } from '@shared/models'
|
||||
import { makeRawRequest, PeerTubeServer, webtorrentAdd } from '@shared/server-commands'
|
||||
import { HttpStatusCode, VideoPrivacy, VideoResolution, VideoStreamingPlaylist, VideoStreamingPlaylistType } from '@shared/models'
|
||||
import { makeRawRequest, PeerTubeServer } from '@shared/server-commands'
|
||||
import { expectStartWith } from './checks'
|
||||
import { hlsInfohashExist } from './tracker'
|
||||
import { checkWebTorrentWorks } from './webtorrent'
|
||||
|
||||
async function checkSegmentHash (options: {
|
||||
server: PeerTubeServer
|
||||
|
@ -15,14 +16,15 @@ async function checkSegmentHash (options: {
|
|||
baseUrlSegment: string
|
||||
resolution: number
|
||||
hlsPlaylist: VideoStreamingPlaylist
|
||||
token?: string
|
||||
}) {
|
||||
const { server, baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist } = options
|
||||
const { server, baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist, token } = options
|
||||
const command = server.streamingPlaylists
|
||||
|
||||
const file = hlsPlaylist.files.find(f => f.resolution.id === resolution)
|
||||
const videoName = basename(file.fileUrl)
|
||||
|
||||
const playlist = await command.get({ url: `${baseUrlPlaylist}/${removeFragmentedMP4Ext(videoName)}.m3u8` })
|
||||
const playlist = await command.get({ url: `${baseUrlPlaylist}/${removeFragmentedMP4Ext(videoName)}.m3u8`, token })
|
||||
|
||||
const matches = /#EXT-X-BYTERANGE:(\d+)@(\d+)/.exec(playlist)
|
||||
|
||||
|
@ -33,11 +35,12 @@ async function checkSegmentHash (options: {
|
|||
const segmentBody = await command.getFragmentedSegment({
|
||||
url: `${baseUrlSegment}/${videoName}`,
|
||||
expectedStatus: HttpStatusCode.PARTIAL_CONTENT_206,
|
||||
range: `bytes=${range}`
|
||||
range: `bytes=${range}`,
|
||||
token
|
||||
})
|
||||
|
||||
const shaBody = await command.getSegmentSha256({ url: hlsPlaylist.segmentsSha256Url })
|
||||
expect(sha256(segmentBody)).to.equal(shaBody[videoName][range])
|
||||
const shaBody = await command.getSegmentSha256({ url: hlsPlaylist.segmentsSha256Url, token })
|
||||
expect(sha256(segmentBody)).to.equal(shaBody[videoName][range], `Invalid sha256 result for ${videoName} range ${range}`)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
@ -64,19 +67,24 @@ async function checkResolutionsInMasterPlaylist (options: {
|
|||
server: PeerTubeServer
|
||||
playlistUrl: string
|
||||
resolutions: number[]
|
||||
token?: string
|
||||
transcoded?: boolean // default true
|
||||
withRetry?: boolean // default false
|
||||
}) {
|
||||
const { server, playlistUrl, resolutions, withRetry = false, transcoded = true } = options
|
||||
const { server, playlistUrl, resolutions, token, withRetry = false, transcoded = true } = options
|
||||
|
||||
const masterPlaylist = await server.streamingPlaylists.get({ url: playlistUrl, withRetry })
|
||||
const masterPlaylist = await server.streamingPlaylists.get({ url: playlistUrl, token, withRetry })
|
||||
|
||||
for (const resolution of resolutions) {
|
||||
const reg = transcoded
|
||||
? new RegExp('#EXT-X-STREAM-INF:BANDWIDTH=\\d+,RESOLUTION=\\d+x' + resolution + ',(FRAME-RATE=\\d+,)?CODECS="avc1.64001f,mp4a.40.2"')
|
||||
: new RegExp('#EXT-X-STREAM-INF:BANDWIDTH=\\d+,RESOLUTION=\\d+x' + resolution + '')
|
||||
const base = '#EXT-X-STREAM-INF:BANDWIDTH=\\d+,RESOLUTION=\\d+x' + resolution
|
||||
|
||||
expect(masterPlaylist).to.match(reg)
|
||||
if (resolution === VideoResolution.H_NOVIDEO) {
|
||||
expect(masterPlaylist).to.match(new RegExp(`${base},CODECS="mp4a.40.2"`))
|
||||
} else if (transcoded) {
|
||||
expect(masterPlaylist).to.match(new RegExp(`${base},(FRAME-RATE=\\d+,)?CODECS="avc1.64001f,mp4a.40.2"`))
|
||||
} else {
|
||||
expect(masterPlaylist).to.match(new RegExp(`${base}`))
|
||||
}
|
||||
}
|
||||
|
||||
const playlistsLength = masterPlaylist.split('\n').filter(line => line.startsWith('#EXT-X-STREAM-INF:BANDWIDTH='))
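To make the updated codec check concrete, a small sketch against the master.m3u8 fixture added in this commit (the sample line is copied verbatim from that fixture; the rest is illustrative and mirrors the regex construction above):
// Sketch only: the per-resolution regex applied to a fixture master playlist line.
const sampleLine = '#EXT-X-STREAM-INF:BANDWIDTH=1287342,RESOLUTION=640x360,CODECS="avc1.64001f,mp4a.40.2"'
const base = '#EXT-X-STREAM-INF:BANDWIDTH=\\d+,RESOLUTION=\\d+x' + 360

// Transcoded video stream: optional FRAME-RATE field, then the avc1/mp4a codec pair
console.log(new RegExp(`${base},(FRAME-RATE=\\d+,)?CODECS="avc1.64001f,mp4a.40.2"`).test(sampleLine)) // true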
|
||||
|
@ -89,14 +97,23 @@ async function completeCheckHlsPlaylist (options: {
|
|||
hlsOnly: boolean
|
||||
|
||||
resolutions?: number[]
|
||||
objectStorageBaseUrl: string
|
||||
objectStorageBaseUrl?: string
|
||||
}) {
|
||||
const { videoUUID, hlsOnly, objectStorageBaseUrl } = options
|
||||
|
||||
const resolutions = options.resolutions ?? [ 240, 360, 480, 720 ]
|
||||
|
||||
for (const server of options.servers) {
|
||||
const videoDetails = await server.videos.get({ id: videoUUID })
|
||||
const videoDetails = await server.videos.getWithToken({ id: videoUUID })
|
||||
const requiresAuth = videoDetails.privacy.id === VideoPrivacy.PRIVATE || videoDetails.privacy.id === VideoPrivacy.INTERNAL
|
||||
|
||||
const privatePath = requiresAuth
|
||||
? 'private/'
|
||||
: ''
|
||||
const token = requiresAuth
|
||||
? server.accessToken
|
||||
: undefined
|
||||
|
||||
const baseUrl = `http://${videoDetails.account.host}`
|
||||
|
||||
expect(videoDetails.streamingPlaylists).to.have.lengthOf(1)
|
||||
|
@ -115,35 +132,55 @@ async function completeCheckHlsPlaylist (options: {
|
|||
const file = hlsFiles.find(f => f.resolution.id === resolution)
|
||||
expect(file).to.not.be.undefined
|
||||
|
||||
expect(file.magnetUri).to.have.lengthOf.above(2)
|
||||
expect(file.torrentUrl).to.match(
|
||||
new RegExp(`${server.url}/lazy-static/torrents/${uuidRegex}-${file.resolution.id}-hls.torrent`)
|
||||
)
|
||||
|
||||
if (objectStorageBaseUrl) {
|
||||
expectStartWith(file.fileUrl, objectStorageBaseUrl)
|
||||
if (file.resolution.id === VideoResolution.H_NOVIDEO) {
|
||||
expect(file.resolution.label).to.equal('Audio')
|
||||
} else {
|
||||
expect(file.fileUrl).to.match(
|
||||
new RegExp(`${baseUrl}/static/streaming-playlists/hls/${videoDetails.uuid}/${uuidRegex}-${file.resolution.id}-fragmented.mp4`)
|
||||
)
|
||||
expect(file.resolution.label).to.equal(resolution + 'p')
|
||||
}
|
||||
|
||||
expect(file.resolution.label).to.equal(resolution + 'p')
|
||||
expect(file.magnetUri).to.have.lengthOf.above(2)
|
||||
await checkWebTorrentWorks(file.magnetUri)
|
||||
|
||||
await makeRawRequest({ url: file.torrentUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
{
|
||||
const nameReg = `${uuidRegex}-${file.resolution.id}`
|
||||
|
||||
const torrent = await webtorrentAdd(file.magnetUri, true)
|
||||
expect(torrent.files).to.be.an('array')
|
||||
expect(torrent.files.length).to.equal(1)
|
||||
expect(torrent.files[0].path).to.exist.and.to.not.equal('')
|
||||
expect(file.torrentUrl).to.match(new RegExp(`${server.url}/lazy-static/torrents/${nameReg}-hls.torrent`))
|
||||
|
||||
if (objectStorageBaseUrl && requiresAuth) {
|
||||
// eslint-disable-next-line max-len
|
||||
expect(file.fileUrl).to.match(new RegExp(`${server.url}/object-storage-proxy/streaming-playlists/hls/${privatePath}${videoDetails.uuid}/${nameReg}-fragmented.mp4`))
|
||||
} else if (objectStorageBaseUrl) {
|
||||
expectStartWith(file.fileUrl, objectStorageBaseUrl)
|
||||
} else {
|
||||
expect(file.fileUrl).to.match(
|
||||
new RegExp(`${baseUrl}/static/streaming-playlists/hls/${privatePath}${videoDetails.uuid}/${nameReg}-fragmented.mp4`)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
await Promise.all([
|
||||
makeRawRequest({ url: file.torrentUrl, token, expectedStatus: HttpStatusCode.OK_200 }),
|
||||
makeRawRequest({ url: file.torrentDownloadUrl, token, expectedStatus: HttpStatusCode.OK_200 }),
|
||||
makeRawRequest({ url: file.metadataUrl, token, expectedStatus: HttpStatusCode.OK_200 }),
|
||||
makeRawRequest({ url: file.fileUrl, token, expectedStatus: HttpStatusCode.OK_200 }),
|
||||
|
||||
makeRawRequest({
|
||||
url: file.fileDownloadUrl,
|
||||
token,
|
||||
expectedStatus: objectStorageBaseUrl
|
||||
? HttpStatusCode.FOUND_302
|
||||
: HttpStatusCode.OK_200
|
||||
})
|
||||
])
|
||||
}
|
||||
}
|
||||
|
||||
// Check master playlist
|
||||
{
|
||||
await checkResolutionsInMasterPlaylist({ server, playlistUrl: hlsPlaylist.playlistUrl, resolutions })
|
||||
await checkResolutionsInMasterPlaylist({ server, token, playlistUrl: hlsPlaylist.playlistUrl, resolutions })
|
||||
|
||||
const masterPlaylist = await server.streamingPlaylists.get({ url: hlsPlaylist.playlistUrl })
|
||||
const masterPlaylist = await server.streamingPlaylists.get({ url: hlsPlaylist.playlistUrl, token })
|
||||
|
||||
let i = 0
|
||||
for (const resolution of resolutions) {
|
||||
|
@ -163,11 +200,16 @@ async function completeCheckHlsPlaylist (options: {
|
|||
const file = hlsFiles.find(f => f.resolution.id === resolution)
|
||||
const playlistName = removeFragmentedMP4Ext(basename(file.fileUrl)) + '.m3u8'
|
||||
|
||||
const url = objectStorageBaseUrl
|
||||
? `${objectStorageBaseUrl}hls/${videoUUID}/${playlistName}`
|
||||
: `${baseUrl}/static/streaming-playlists/hls/${videoUUID}/${playlistName}`
|
||||
let url: string
|
||||
if (objectStorageBaseUrl && requiresAuth) {
|
||||
url = `${baseUrl}/object-storage-proxy/streaming-playlists/hls/${privatePath}${videoUUID}/${playlistName}`
|
||||
} else if (objectStorageBaseUrl) {
|
||||
url = `${objectStorageBaseUrl}hls/${videoUUID}/${playlistName}`
|
||||
} else {
|
||||
url = `${baseUrl}/static/streaming-playlists/hls/${privatePath}${videoUUID}/${playlistName}`
|
||||
}
|
||||
|
||||
const subPlaylist = await server.streamingPlaylists.get({ url })
|
||||
const subPlaylist = await server.streamingPlaylists.get({ url, token })
|
||||
|
||||
expect(subPlaylist).to.match(new RegExp(`${uuidRegex}-${resolution}-fragmented.mp4`))
|
||||
expect(subPlaylist).to.contain(basename(file.fileUrl))
|
||||
|
@ -175,13 +217,19 @@ async function completeCheckHlsPlaylist (options: {
|
|||
}
|
||||
|
||||
{
|
||||
const baseUrlAndPath = objectStorageBaseUrl
|
||||
? objectStorageBaseUrl + 'hls/' + videoUUID
|
||||
: baseUrl + '/static/streaming-playlists/hls/' + videoUUID
|
||||
let baseUrlAndPath: string
|
||||
if (objectStorageBaseUrl && requiresAuth) {
|
||||
baseUrlAndPath = `${baseUrl}/object-storage-proxy/streaming-playlists/hls/${privatePath}${videoUUID}`
|
||||
} else if (objectStorageBaseUrl) {
|
||||
baseUrlAndPath = `${objectStorageBaseUrl}hls/${videoUUID}`
|
||||
} else {
|
||||
baseUrlAndPath = `${baseUrl}/static/streaming-playlists/hls/${privatePath}${videoUUID}`
|
||||
}
|
||||
|
||||
for (const resolution of resolutions) {
|
||||
await checkSegmentHash({
|
||||
server,
|
||||
token,
|
||||
baseUrlPlaylist: baseUrlAndPath,
|
||||
baseUrlSegment: baseUrlAndPath,
|
||||
resolution,
|
||||
|
|
|
@ -4,16 +4,106 @@ import { expect } from 'chai'
|
|||
import { pathExists, readdir } from 'fs-extra'
|
||||
import { basename, join } from 'path'
|
||||
import { loadLanguages, VIDEO_CATEGORIES, VIDEO_LANGUAGES, VIDEO_LICENCES, VIDEO_PRIVACIES } from '@server/initializers/constants'
|
||||
import { getLowercaseExtension, uuidRegex } from '@shared/core-utils'
|
||||
import { HttpStatusCode, VideoCaption, VideoDetails } from '@shared/models'
|
||||
import { makeRawRequest, PeerTubeServer, VideoEdit, waitJobs, webtorrentAdd } from '@shared/server-commands'
|
||||
import { dateIsValid, testImage } from './checks'
|
||||
import { getLowercaseExtension, pick, uuidRegex } from '@shared/core-utils'
|
||||
import { HttpStatusCode, VideoCaption, VideoDetails, VideoPrivacy, VideoResolution } from '@shared/models'
|
||||
import { makeRawRequest, PeerTubeServer, VideoEdit, waitJobs } from '@shared/server-commands'
|
||||
import { dateIsValid, expectStartWith, testImage } from './checks'
|
||||
import { checkWebTorrentWorks } from './webtorrent'
|
||||
|
||||
loadLanguages()
|
||||
|
||||
async function completeVideoCheck (
|
||||
server: PeerTubeServer,
|
||||
video: any,
|
||||
async function completeWebVideoFilesCheck (options: {
|
||||
server: PeerTubeServer
|
||||
originServer: PeerTubeServer
|
||||
videoUUID: string
|
||||
fixture: string
|
||||
files: {
|
||||
resolution: number
|
||||
size?: number
|
||||
}[]
|
||||
objectStorageBaseUrl?: string
|
||||
}) {
|
||||
const { originServer, server, videoUUID, files, fixture, objectStorageBaseUrl } = options
|
||||
const video = await server.videos.getWithToken({ id: videoUUID })
|
||||
const serverConfig = await originServer.config.getConfig()
|
||||
const requiresAuth = video.privacy.id === VideoPrivacy.PRIVATE || video.privacy.id === VideoPrivacy.INTERNAL
|
||||
|
||||
const transcodingEnabled = serverConfig.transcoding.webtorrent.enabled
|
||||
|
||||
for (const attributeFile of files) {
|
||||
const file = video.files.find(f => f.resolution.id === attributeFile.resolution)
|
||||
expect(file, `resolution ${attributeFile.resolution} does not exist`).not.to.be.undefined
|
||||
|
||||
let extension = getLowercaseExtension(fixture)
|
||||
// Transcoding enabled: extension will always be .mp4
|
||||
if (transcodingEnabled) extension = '.mp4'
|
||||
|
||||
expect(file.id).to.exist
|
||||
expect(file.magnetUri).to.have.lengthOf.above(2)
|
||||
|
||||
{
|
||||
const privatePath = requiresAuth
|
||||
? 'private/'
|
||||
: ''
|
||||
const nameReg = `${uuidRegex}-${file.resolution.id}`
|
||||
|
||||
expect(file.torrentDownloadUrl).to.match(new RegExp(`${server.url}/download/torrents/${nameReg}.torrent`))
|
||||
expect(file.torrentUrl).to.match(new RegExp(`${server.url}/lazy-static/torrents/${nameReg}.torrent`))
|
||||
|
||||
if (objectStorageBaseUrl && requiresAuth) {
|
||||
expect(file.fileUrl).to.match(new RegExp(`${originServer.url}/object-storage-proxy/webseed/${privatePath}${nameReg}${extension}`))
|
||||
} else if (objectStorageBaseUrl) {
|
||||
expectStartWith(file.fileUrl, objectStorageBaseUrl)
|
||||
} else {
|
||||
expect(file.fileUrl).to.match(new RegExp(`${originServer.url}/static/webseed/${privatePath}${nameReg}${extension}`))
|
||||
}
|
||||
|
||||
expect(file.fileDownloadUrl).to.match(new RegExp(`${originServer.url}/download/videos/${nameReg}${extension}`))
|
||||
}
|
||||
|
||||
{
|
||||
const token = requiresAuth
|
||||
? server.accessToken
|
||||
: undefined
|
||||
|
||||
await Promise.all([
|
||||
makeRawRequest({ url: file.torrentUrl, token, expectedStatus: HttpStatusCode.OK_200 }),
|
||||
makeRawRequest({ url: file.torrentDownloadUrl, token, expectedStatus: HttpStatusCode.OK_200 }),
|
||||
makeRawRequest({ url: file.metadataUrl, token, expectedStatus: HttpStatusCode.OK_200 }),
|
||||
makeRawRequest({ url: file.fileUrl, token, expectedStatus: HttpStatusCode.OK_200 }),
|
||||
makeRawRequest({
|
||||
url: file.fileDownloadUrl,
|
||||
token,
|
||||
expectedStatus: objectStorageBaseUrl ? HttpStatusCode.FOUND_302 : HttpStatusCode.OK_200
|
||||
})
|
||||
])
|
||||
}
|
||||
|
||||
expect(file.resolution.id).to.equal(attributeFile.resolution)
|
||||
|
||||
if (file.resolution.id === VideoResolution.H_NOVIDEO) {
|
||||
expect(file.resolution.label).to.equal('Audio')
|
||||
} else {
|
||||
expect(file.resolution.label).to.equal(attributeFile.resolution + 'p')
|
||||
}
|
||||
|
||||
if (attributeFile.size) {
|
||||
const minSize = attributeFile.size - ((10 * attributeFile.size) / 100)
|
||||
const maxSize = attributeFile.size + ((10 * attributeFile.size) / 100)
|
||||
expect(
|
||||
file.size,
|
||||
'File size for resolution ' + file.resolution.label + ' outside confidence interval (' + minSize + '> size <' + maxSize + ')'
|
||||
).to.be.above(minSize).and.below(maxSize)
|
||||
}
|
||||
|
||||
await checkWebTorrentWorks(file.magnetUri)
|
||||
}
|
||||
}
|
||||
|
||||
async function completeVideoCheck (options: {
|
||||
server: PeerTubeServer
|
||||
originServer: PeerTubeServer
|
||||
videoUUID: string
|
||||
attributes: {
|
||||
name: string
|
||||
category: number
|
||||
|
@ -50,13 +140,14 @@ async function completeVideoCheck (
|
|||
thumbnailfile?: string
|
||||
previewfile?: string
|
||||
}
|
||||
) {
|
||||
}) {
|
||||
const { attributes, originServer, server, videoUUID } = options
|
||||
|
||||
const video = await server.videos.get({ id: videoUUID })
|
||||
|
||||
if (!attributes.likes) attributes.likes = 0
|
||||
if (!attributes.dislikes) attributes.dislikes = 0
|
||||
|
||||
const host = new URL(server.url).host
|
||||
const originHost = attributes.account.host
|
||||
|
||||
expect(video.name).to.equal(attributes.name)
|
||||
expect(video.category.id).to.equal(attributes.category)
|
||||
expect(video.category.label).to.equal(attributes.category !== null ? VIDEO_CATEGORIES[attributes.category] : 'Unknown')
|
||||
|
@ -77,7 +168,7 @@ async function completeVideoCheck (
|
|||
expect(video.dislikes).to.equal(attributes.dislikes)
|
||||
expect(video.isLocal).to.equal(attributes.isLocal)
|
||||
expect(video.duration).to.equal(attributes.duration)
|
||||
expect(video.url).to.contain(originHost)
|
||||
expect(video.url).to.contain(originServer.host)
|
||||
expect(dateIsValid(video.createdAt)).to.be.true
|
||||
expect(dateIsValid(video.publishedAt)).to.be.true
|
||||
expect(dateIsValid(video.updatedAt)).to.be.true
|
||||
|
@ -92,67 +183,28 @@ async function completeVideoCheck (
|
|||
expect(video.originallyPublishedAt).to.be.null
|
||||
}
|
||||
|
||||
const videoDetails = await server.videos.get({ id: video.uuid })
|
||||
|
||||
expect(videoDetails.files).to.have.lengthOf(attributes.files.length)
|
||||
expect(videoDetails.tags).to.deep.equal(attributes.tags)
|
||||
expect(videoDetails.account.name).to.equal(attributes.account.name)
|
||||
expect(videoDetails.account.host).to.equal(attributes.account.host)
|
||||
expect(video.files).to.have.lengthOf(attributes.files.length)
|
||||
expect(video.tags).to.deep.equal(attributes.tags)
|
||||
expect(video.account.name).to.equal(attributes.account.name)
|
||||
expect(video.account.host).to.equal(attributes.account.host)
|
||||
expect(video.channel.displayName).to.equal(attributes.channel.displayName)
|
||||
expect(video.channel.name).to.equal(attributes.channel.name)
|
||||
expect(videoDetails.channel.host).to.equal(attributes.account.host)
|
||||
expect(videoDetails.channel.isLocal).to.equal(attributes.channel.isLocal)
|
||||
expect(dateIsValid(videoDetails.channel.createdAt.toString())).to.be.true
|
||||
expect(dateIsValid(videoDetails.channel.updatedAt.toString())).to.be.true
|
||||
expect(videoDetails.commentsEnabled).to.equal(attributes.commentsEnabled)
|
||||
expect(videoDetails.downloadEnabled).to.equal(attributes.downloadEnabled)
|
||||
expect(video.channel.host).to.equal(attributes.account.host)
|
||||
expect(video.channel.isLocal).to.equal(attributes.channel.isLocal)
|
||||
expect(dateIsValid(video.channel.createdAt.toString())).to.be.true
|
||||
expect(dateIsValid(video.channel.updatedAt.toString())).to.be.true
|
||||
expect(video.commentsEnabled).to.equal(attributes.commentsEnabled)
|
||||
expect(video.downloadEnabled).to.equal(attributes.downloadEnabled)
|
||||
|
||||
for (const attributeFile of attributes.files) {
|
||||
const file = videoDetails.files.find(f => f.resolution.id === attributeFile.resolution)
|
||||
expect(file).not.to.be.undefined
|
||||
|
||||
let extension = getLowercaseExtension(attributes.fixture)
|
||||
// Transcoding enabled: extension will always be .mp4
|
||||
if (attributes.files.length > 1) extension = '.mp4'
|
||||
|
||||
expect(file.id).to.exist
|
||||
expect(file.magnetUri).to.have.lengthOf.above(2)
|
||||
|
||||
expect(file.torrentDownloadUrl).to.match(new RegExp(`http://${host}/download/torrents/${uuidRegex}-${file.resolution.id}.torrent`))
|
||||
expect(file.torrentUrl).to.match(new RegExp(`http://${host}/lazy-static/torrents/${uuidRegex}-${file.resolution.id}.torrent`))
|
||||
|
||||
expect(file.fileUrl).to.match(new RegExp(`http://${originHost}/static/webseed/${uuidRegex}-${file.resolution.id}${extension}`))
|
||||
expect(file.fileDownloadUrl).to.match(new RegExp(`http://${originHost}/download/videos/${uuidRegex}-${file.resolution.id}${extension}`))
|
||||
|
||||
await Promise.all([
|
||||
makeRawRequest({ url: file.torrentUrl, expectedStatus: HttpStatusCode.OK_200 }),
|
||||
makeRawRequest({ url: file.torrentDownloadUrl, expectedStatus: HttpStatusCode.OK_200 }),
|
||||
makeRawRequest({ url: file.metadataUrl, expectedStatus: HttpStatusCode.OK_200 })
|
||||
])
|
||||
|
||||
expect(file.resolution.id).to.equal(attributeFile.resolution)
|
||||
expect(file.resolution.label).to.equal(attributeFile.resolution + 'p')
|
||||
|
||||
const minSize = attributeFile.size - ((10 * attributeFile.size) / 100)
|
||||
const maxSize = attributeFile.size + ((10 * attributeFile.size) / 100)
|
||||
expect(
|
||||
file.size,
|
||||
'File size for resolution ' + file.resolution.label + ' outside confidence interval (' + minSize + '> size <' + maxSize + ')'
|
||||
).to.be.above(minSize).and.below(maxSize)
|
||||
|
||||
const torrent = await webtorrentAdd(file.magnetUri, true)
|
||||
expect(torrent.files).to.be.an('array')
|
||||
expect(torrent.files.length).to.equal(1)
|
||||
expect(torrent.files[0].path).to.exist.and.to.not.equal('')
|
||||
}
|
||||
|
||||
expect(videoDetails.thumbnailPath).to.exist
|
||||
await testImage(server.url, attributes.thumbnailfile || attributes.fixture, videoDetails.thumbnailPath)
|
||||
expect(video.thumbnailPath).to.exist
|
||||
await testImage(server.url, attributes.thumbnailfile || attributes.fixture, video.thumbnailPath)
|
||||
|
||||
if (attributes.previewfile) {
|
||||
expect(videoDetails.previewPath).to.exist
|
||||
await testImage(server.url, attributes.previewfile, videoDetails.previewPath)
|
||||
expect(video.previewPath).to.exist
|
||||
await testImage(server.url, attributes.previewfile, video.previewPath)
|
||||
}
|
||||
|
||||
await completeWebVideoFilesCheck({ server, originServer, videoUUID: video.uuid, ...pick(attributes, [ 'fixture', 'files' ]) })
|
||||
}
|
||||
|
||||
async function checkVideoFilesWereRemoved (options: {
|
||||
|
@ -245,6 +297,7 @@ async function uploadRandomVideoOnServers (
|
|||
|
||||
export {
|
||||
completeVideoCheck,
|
||||
completeWebVideoFilesCheck,
|
||||
checkUploadVideoParam,
|
||||
uploadRandomVideoOnServers,
|
||||
checkVideoFilesWereRemoved,
|
||||
|
|
|

@@ -1,12 +1,38 @@
import { expect } from 'chai'
import { readFile } from 'fs-extra'
import parseTorrent from 'parse-torrent'
import { basename, join } from 'path'
import * as WebTorrent from 'webtorrent'
import { VideoFile } from '@shared/models'
import { PeerTubeServer } from '../server'
import { PeerTubeServer } from '@shared/server-commands'

let webtorrent: WebTorrent.Instance

export async function checkWebTorrentWorks (magnetUri: string, pathMatch?: RegExp) {
  const torrent = await webtorrentAdd(magnetUri, true)

  expect(torrent.files).to.be.an('array')
  expect(torrent.files.length).to.equal(1)
  expect(torrent.files[0].path).to.exist.and.to.not.equal('')

  if (pathMatch) {
    expect(torrent.files[0].path).match(pathMatch)
  }
}

export async function parseTorrentVideo (server: PeerTubeServer, file: VideoFile) {
  const torrentName = basename(file.torrentUrl)
  const torrentPath = server.servers.buildDirectory(join('torrents', torrentName))

  const data = await readFile(torrentPath)

  return parseTorrent(data)
}

// ---------------------------------------------------------------------------
// Private
// ---------------------------------------------------------------------------

function webtorrentAdd (torrentId: string, refreshWebTorrent = false) {
  const WebTorrent = require('webtorrent')

@@ -30,17 +56,3 @@ function webtorrentAdd (torrentId: string, refreshWebTorrent = false) {
    })
  })
}

async function parseTorrentVideo (server: PeerTubeServer, file: VideoFile) {
  const torrentName = basename(file.torrentUrl)
  const torrentPath = server.servers.buildDirectory(join('torrents', torrentName))

  const data = await readFile(torrentPath)

  return parseTorrent(data)
}

export {
  webtorrentAdd,
  parseTorrentVideo
}
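
A minimal usage sketch (not part of the commit) of the new exported helper, assuming a test already has a `server: PeerTubeServer` and a `videoUUID` for an uploaded video; the regexp is illustrative.

import { checkWebTorrentWorks } from '@server/tests/shared'

it('Should have a working magnet URI', async function () {
  // Fetch the video previously uploaded in the test setup (assumed)
  const video = await server.videos.get({ id: videoUUID })

  // Download it through webtorrent and check the torrent contains exactly one file
  await checkWebTorrentWorks(video.files[0].magnetUri)
})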

@@ -7,6 +7,7 @@
    { "path": "../shared" }
  ],
  "exclude": [
    "tools/"
    "tools/",
    "tests/fixtures"
  ]
}

@@ -3,10 +3,10 @@ export * from './cli'
export * from './custom-pages'
export * from './feeds'
export * from './logs'
export * from './miscs'
export * from './moderation'
export * from './overviews'
export * from './requests'
export * from './runners'
export * from './search'
export * from './server'
export * from './socket'

@@ -1,2 +0,0 @@
export * from './sql-command'
export * from './webtorrent'

@@ -10,6 +10,7 @@ export type CommonRequestParams = {
  url: string
  path?: string
  contentType?: string
  responseType?: string
  range?: string
  redirects?: number
  accept?: string

@@ -27,16 +28,23 @@ function makeRawRequest (options: {
  expectedStatus?: HttpStatusCode
  range?: string
  query?: { [ id: string ]: string }
  method?: 'GET' | 'POST'
}) {
  const { host, protocol, pathname } = new URL(options.url)

  return makeGetRequest({
  const reqOptions = {
    url: `${protocol}//${host}`,
    path: pathname,
    contentType: undefined,

    ...pick(options, [ 'expectedStatus', 'range', 'token', 'query' ])
  })
  }

  if (options.method === 'POST') {
    return makePostBodyRequest(reqOptions)
  }

  return makeGetRequest(reqOptions)
}

function makeGetRequest (options: CommonRequestParams & {

@@ -135,6 +143,8 @@ function decodeQueryString (path: string) {
  return decode(path.split('?')[1])
}

// ---------------------------------------------------------------------------

function unwrapBody <T> (test: request.Test): Promise<T> {
  return test.then(res => res.body)
}

@@ -149,7 +159,16 @@ function unwrapBodyOrDecodeToJSON <T> (test: request.Test): Promise<T> {
    try {
      return JSON.parse(new TextDecoder().decode(res.body))
    } catch (err) {
      console.error('Cannot decode JSON.', res.body)
      console.error('Cannot decode JSON.', res.body instanceof Buffer ? res.body.toString() : res.body)
      throw err
    }
  }

  if (res.text) {
    try {
      return JSON.parse(res.text)
    } catch (err) {
      console.error('Cannot decode json', res.text)
      throw err
    }
  }

@@ -184,6 +203,7 @@ export {

function buildRequest (req: request.Test, options: CommonRequestParams) {
  if (options.contentType) req.set('Accept', options.contentType)
  if (options.responseType) req.responseType(options.responseType)
  if (options.token) req.set('Authorization', 'Bearer ' + options.token)
  if (options.range) req.set('Range', options.range)
  if (options.accept) req.set('Accept', options.accept)

@@ -196,13 +216,18 @@ function buildRequest (req: request.Test, options: CommonRequestParams) {
    req.set(name, options.headers[name])
  })

  return req.expect((res) => {
  return req.expect(res => {
    if (options.expectedStatus && res.status !== options.expectedStatus) {
      throw new Error(`Expected status ${options.expectedStatus}, got ${res.status}. ` +
      const err = new Error(`Expected status ${options.expectedStatus}, got ${res.status}. ` +
        `\nThe server responded: "${res.body?.error ?? res.text}".\n` +
        'You may take a closer look at the logs. To see how to do so, check out this page: ' +
        'https://github.com/Chocobozzz/PeerTube/blob/develop/support/doc/development/tests.md#debug-server-logs')
        'https://github.com/Chocobozzz/PeerTube/blob/develop/support/doc/development/tests.md#debug-server-logs');

      (err as any).res = res

      throw err
    }

    return res
  })
}
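
A sketch (not part of the commit) of what the new `method` option enables: raw requests against an absolute URL can now be sent as POST as well as GET. The URL below is purely illustrative.

// Assumed to run inside an existing test with makeRawRequest/HttpStatusCode imported
await makeRawRequest({
  url: 'http://localhost:9000/illustrative/endpoint',
  method: 'POST',
  expectedStatus: HttpStatusCode.OK_200
})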

@@ -0,0 +1,3 @@
export * from './runner-jobs-command'
export * from './runner-registration-tokens-command'
export * from './runners-command'
@@ -0,0 +1,279 @@
import { omit, pick, wait } from '@shared/core-utils'
import {
  AbortRunnerJobBody,
  AcceptRunnerJobBody,
  AcceptRunnerJobResult,
  ErrorRunnerJobBody,
  HttpStatusCode,
  isHLSTranscodingPayloadSuccess,
  isLiveRTMPHLSTranscodingUpdatePayload,
  isWebVideoOrAudioMergeTranscodingPayloadSuccess,
  RequestRunnerJobBody,
  RequestRunnerJobResult,
  ResultList,
  RunnerJobAdmin,
  RunnerJobLiveRTMPHLSTranscodingPayload,
  RunnerJobPayload,
  RunnerJobState,
  RunnerJobSuccessBody,
  RunnerJobSuccessPayload,
  RunnerJobType,
  RunnerJobUpdateBody,
  RunnerJobVODPayload
} from '@shared/models'
import { unwrapBody } from '../requests'
import { waitJobs } from '../server'
import { AbstractCommand, OverrideCommandOptions } from '../shared'

export class RunnerJobsCommand extends AbstractCommand {

  list (options: OverrideCommandOptions & {
    start?: number
    count?: number
    sort?: string
    search?: string
  } = {}) {
    const path = '/api/v1/runners/jobs'

    return this.getRequestBody<ResultList<RunnerJobAdmin>>({
      ...options,

      path,
      query: pick(options, [ 'start', 'count', 'sort', 'search' ]),
      implicitToken: true,
      defaultExpectedStatus: HttpStatusCode.OK_200
    })
  }

  cancelByAdmin (options: OverrideCommandOptions & { jobUUID: string }) {
    const path = '/api/v1/runners/jobs/' + options.jobUUID + '/cancel'

    return this.postBodyRequest({
      ...options,

      path,
      implicitToken: true,
      defaultExpectedStatus: HttpStatusCode.NO_CONTENT_204
    })
  }

  // ---------------------------------------------------------------------------

  request (options: OverrideCommandOptions & RequestRunnerJobBody) {
    const path = '/api/v1/runners/jobs/request'

    return unwrapBody<RequestRunnerJobResult>(this.postBodyRequest({
      ...options,

      path,
      fields: pick(options, [ 'runnerToken' ]),
      implicitToken: false,
      defaultExpectedStatus: HttpStatusCode.OK_200
    }))
  }

  async requestVOD (options: OverrideCommandOptions & RequestRunnerJobBody) {
    const vodTypes = new Set<RunnerJobType>([ 'vod-audio-merge-transcoding', 'vod-hls-transcoding', 'vod-web-video-transcoding' ])

    const { availableJobs } = await this.request(options)

    return {
      availableJobs: availableJobs.filter(j => vodTypes.has(j.type))
    } as RequestRunnerJobResult<RunnerJobVODPayload>
  }

  async requestLive (options: OverrideCommandOptions & RequestRunnerJobBody) {
    const vodTypes = new Set<RunnerJobType>([ 'live-rtmp-hls-transcoding' ])

    const { availableJobs } = await this.request(options)

    return {
      availableJobs: availableJobs.filter(j => vodTypes.has(j.type))
    } as RequestRunnerJobResult<RunnerJobLiveRTMPHLSTranscodingPayload>
  }

  // ---------------------------------------------------------------------------

  accept <T extends RunnerJobPayload = RunnerJobPayload> (options: OverrideCommandOptions & AcceptRunnerJobBody & { jobUUID: string }) {
    const path = '/api/v1/runners/jobs/' + options.jobUUID + '/accept'

    return unwrapBody<AcceptRunnerJobResult<T>>(this.postBodyRequest({
      ...options,

      path,
      fields: pick(options, [ 'runnerToken' ]),
      implicitToken: false,
      defaultExpectedStatus: HttpStatusCode.OK_200
    }))
  }

  abort (options: OverrideCommandOptions & AbortRunnerJobBody & { jobUUID: string }) {
    const path = '/api/v1/runners/jobs/' + options.jobUUID + '/abort'

    return this.postBodyRequest({
      ...options,

      path,
      fields: pick(options, [ 'reason', 'jobToken', 'runnerToken' ]),
      implicitToken: false,
      defaultExpectedStatus: HttpStatusCode.NO_CONTENT_204
    })
  }

  update (options: OverrideCommandOptions & RunnerJobUpdateBody & { jobUUID: string }) {
    const path = '/api/v1/runners/jobs/' + options.jobUUID + '/update'

    const { payload } = options
    const attaches: { [id: string]: any } = {}
    let payloadWithoutFiles = payload

    if (isLiveRTMPHLSTranscodingUpdatePayload(payload)) {
      if (payload.masterPlaylistFile) {
        attaches[`payload[masterPlaylistFile]`] = payload.masterPlaylistFile
      }

      attaches[`payload[resolutionPlaylistFile]`] = payload.resolutionPlaylistFile
      attaches[`payload[videoChunkFile]`] = payload.videoChunkFile

      payloadWithoutFiles = omit(payloadWithoutFiles as any, [ 'masterPlaylistFile', 'resolutionPlaylistFile', 'videoChunkFile' ])
    }

    return this.postUploadRequest({
      ...options,

      path,
      fields: {
        ...pick(options, [ 'progress', 'jobToken', 'runnerToken' ]),

        payload: payloadWithoutFiles
      },
      attaches,
      implicitToken: false,
      defaultExpectedStatus: HttpStatusCode.NO_CONTENT_204
    })
  }

  error (options: OverrideCommandOptions & ErrorRunnerJobBody & { jobUUID: string }) {
    const path = '/api/v1/runners/jobs/' + options.jobUUID + '/error'

    return this.postBodyRequest({
      ...options,

      path,
      fields: pick(options, [ 'message', 'jobToken', 'runnerToken' ]),
      implicitToken: false,
      defaultExpectedStatus: HttpStatusCode.NO_CONTENT_204
    })
  }

  success (options: OverrideCommandOptions & RunnerJobSuccessBody & { jobUUID: string }) {
    const { payload } = options

    const path = '/api/v1/runners/jobs/' + options.jobUUID + '/success'
    const attaches: { [id: string]: any } = {}
    let payloadWithoutFiles = payload

    if ((isWebVideoOrAudioMergeTranscodingPayloadSuccess(payload) || isHLSTranscodingPayloadSuccess(payload)) && payload.videoFile) {
      attaches[`payload[videoFile]`] = payload.videoFile

      payloadWithoutFiles = omit(payloadWithoutFiles as any, [ 'videoFile' ])
    }

    if (isHLSTranscodingPayloadSuccess(payload) && payload.resolutionPlaylistFile) {
      attaches[`payload[resolutionPlaylistFile]`] = payload.resolutionPlaylistFile

      payloadWithoutFiles = omit(payloadWithoutFiles as any, [ 'resolutionPlaylistFile' ])
    }

    return this.postUploadRequest({
      ...options,

      path,
      attaches,
      fields: {
        ...pick(options, [ 'jobToken', 'runnerToken' ]),

        payload: payloadWithoutFiles
      },
      implicitToken: false,
      defaultExpectedStatus: HttpStatusCode.NO_CONTENT_204
    })
  }

  getInputFile (options: OverrideCommandOptions & { url: string, jobToken: string, runnerToken: string }) {
    const { host, protocol, pathname } = new URL(options.url)

    return this.postBodyRequest({
      url: `${protocol}//${host}`,
      path: pathname,

      fields: pick(options, [ 'jobToken', 'runnerToken' ]),
      implicitToken: false,
      defaultExpectedStatus: HttpStatusCode.OK_200
    })
  }

  // ---------------------------------------------------------------------------

  async autoAccept (options: OverrideCommandOptions & RequestRunnerJobBody & { type?: RunnerJobType }) {
    const { availableJobs } = await this.request(options)

    const job = options.type
      ? availableJobs.find(j => j.type === options.type)
      : availableJobs[0]

    return this.accept({ ...options, jobUUID: job.uuid })
  }

  async autoProcessWebVideoJob (runnerToken: string, jobUUIDToProcess?: string) {
    let jobUUID = jobUUIDToProcess

    if (!jobUUID) {
      const { availableJobs } = await this.request({ runnerToken })
      jobUUID = availableJobs[0].uuid
    }

    const { job } = await this.accept({ runnerToken, jobUUID })
    const jobToken = job.jobToken

    const payload: RunnerJobSuccessPayload = { videoFile: 'video_short.mp4' }
    await this.success({ runnerToken, jobUUID, jobToken, payload })

    await waitJobs([ this.server ])

    return job
  }

  async cancelAllJobs (options: { state?: RunnerJobState } = {}) {
    const { state } = options

    const { data } = await this.list({ count: 100 })

    for (const job of data) {
      if (state && job.state.id !== state) continue

      await this.cancelByAdmin({ jobUUID: job.uuid })
    }
  }

  async getJob (options: OverrideCommandOptions & { uuid: string }) {
    const { data } = await this.list({ ...options, count: 100, sort: '-updatedAt' })

    return data.find(j => j.uuid === options.uuid)
  }

  async requestLiveJob (runnerToken: string) {
    let availableJobs: RequestRunnerJobResult<RunnerJobLiveRTMPHLSTranscodingPayload>['availableJobs'] = []

    while (availableJobs.length === 0) {
      const result = await this.requestLive({ runnerToken })
      availableJobs = result.availableJobs

      if (availableJobs.length === 1) break

      await wait(150)
    }

    return availableJobs[0]
  }
}
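
For context, a rough sketch (not part of the commit) of how a test could drive the full job lifecycle with this command, assuming `server` is a running PeerTubeServer and `runnerToken` was obtained via RunnersCommand.register.

// Request, accept, then report progress and success for a VOD job (sketch)
const { availableJobs } = await server.runnerJobs.requestVOD({ runnerToken })
const jobUUID = availableJobs[0].uuid

const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID })
const jobToken = job.jobToken

await server.runnerJobs.update({ runnerToken, jobUUID, jobToken, progress: 50 })
await server.runnerJobs.success({ runnerToken, jobUUID, jobToken, payload: { videoFile: 'video_short.mp4' } })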

@@ -0,0 +1,55 @@
import { pick } from '@shared/core-utils'
import { HttpStatusCode, ResultList, RunnerRegistrationToken } from '@shared/models'
import { AbstractCommand, OverrideCommandOptions } from '../shared'

export class RunnerRegistrationTokensCommand extends AbstractCommand {

  list (options: OverrideCommandOptions & {
    start?: number
    count?: number
    sort?: string
  } = {}) {
    const path = '/api/v1/runners/registration-tokens'

    return this.getRequestBody<ResultList<RunnerRegistrationToken>>({
      ...options,

      path,
      query: pick(options, [ 'start', 'count', 'sort' ]),
      implicitToken: true,
      defaultExpectedStatus: HttpStatusCode.OK_200
    })
  }

  generate (options: OverrideCommandOptions = {}) {
    const path = '/api/v1/runners/registration-tokens/generate'

    return this.postBodyRequest({
      ...options,

      path,
      implicitToken: true,
      defaultExpectedStatus: HttpStatusCode.NO_CONTENT_204
    })
  }

  delete (options: OverrideCommandOptions & {
    id: number
  }) {
    const path = '/api/v1/runners/registration-tokens/' + options.id

    return this.deleteRequest({
      ...options,

      path,
      implicitToken: true,
      defaultExpectedStatus: HttpStatusCode.NO_CONTENT_204
    })
  }

  async getFirstRegistrationToken (options: OverrideCommandOptions = {}) {
    const { data } = await this.list(options)

    return data[0].registrationToken
  }
}

@@ -0,0 +1,77 @@
import { pick } from '@shared/core-utils'
import { HttpStatusCode, RegisterRunnerBody, RegisterRunnerResult, ResultList, Runner, UnregisterRunnerBody } from '@shared/models'
import { unwrapBody } from '../requests'
import { AbstractCommand, OverrideCommandOptions } from '../shared'

export class RunnersCommand extends AbstractCommand {

  list (options: OverrideCommandOptions & {
    start?: number
    count?: number
    sort?: string
  } = {}) {
    const path = '/api/v1/runners'

    return this.getRequestBody<ResultList<Runner>>({
      ...options,

      path,
      query: pick(options, [ 'start', 'count', 'sort' ]),
      implicitToken: true,
      defaultExpectedStatus: HttpStatusCode.OK_200
    })
  }

  register (options: OverrideCommandOptions & RegisterRunnerBody) {
    const path = '/api/v1/runners/register'

    return unwrapBody<RegisterRunnerResult>(this.postBodyRequest({
      ...options,

      path,
      fields: pick(options, [ 'name', 'registrationToken', 'description' ]),
      implicitToken: true,
      defaultExpectedStatus: HttpStatusCode.OK_200
    }))
  }

  unregister (options: OverrideCommandOptions & UnregisterRunnerBody) {
    const path = '/api/v1/runners/unregister'

    return this.postBodyRequest({
      ...options,

      path,
      fields: pick(options, [ 'runnerToken' ]),
      implicitToken: false,
      defaultExpectedStatus: HttpStatusCode.NO_CONTENT_204
    })
  }

  delete (options: OverrideCommandOptions & {
    id: number
  }) {
    const path = '/api/v1/runners/' + options.id

    return this.deleteRequest({
      ...options,

      path,
      implicitToken: true,
      defaultExpectedStatus: HttpStatusCode.NO_CONTENT_204
    })
  }

  // ---------------------------------------------------------------------------

  async autoRegisterRunner () {
    const { data } = await this.server.runnerRegistrationTokens.list({ sort: 'createdAt' })

    const { runnerToken } = await this.register({
      name: 'runner',
      registrationToken: data[0].registrationToken
    })

    return runnerToken
  }
}
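
A sketch (not part of the commit) of a typical registration flow in a test, assuming `server` is a running PeerTubeServer with an admin access token set.

// Explicit registration using an existing registration token
const registrationToken = await server.runnerRegistrationTokens.getFirstRegistrationToken()

const { runnerToken } = await server.runners.register({
  name: 'my runner',
  description: 'remote runner used by the tests',
  registrationToken
})

// Or let the helper pick the first registration token itself
const otherRunnerToken = await server.runners.autoRegisterRunner()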

@@ -5,8 +5,9 @@ import { AbstractCommand, OverrideCommandOptions } from '../shared/abstract-comm

export class ConfigCommand extends AbstractCommand {

  static getCustomConfigResolutions (enabled: boolean) {
  static getCustomConfigResolutions (enabled: boolean, with0p = false) {
    return {
      '0p': enabled && with0p,
      '144p': enabled,
      '240p': enabled,
      '360p': enabled,

@@ -129,7 +130,8 @@ export class ConfigCommand extends AbstractCommand {
    })
  }

  enableTranscoding (webtorrent = true, hls = true) {
  // TODO: convert args to object
  enableTranscoding (webtorrent = true, hls = true, with0p = false) {
    return this.updateExistingSubConfig({
      newConfig: {
        transcoding: {

@@ -138,7 +140,7 @@ export class ConfigCommand extends AbstractCommand {
          allowAudioFiles: true,
          allowAdditionalExtensions: true,

          resolutions: ConfigCommand.getCustomConfigResolutions(true),
          resolutions: ConfigCommand.getCustomConfigResolutions(true, with0p),

          webtorrent: {
            enabled: webtorrent

@@ -151,6 +153,7 @@ export class ConfigCommand extends AbstractCommand {
    })
  }

  // TODO: convert args to object
  enableMinimumTranscoding (webtorrent = true, hls = true) {
    return this.updateExistingSubConfig({
      newConfig: {

@@ -173,6 +176,25 @@ export class ConfigCommand extends AbstractCommand {
    })
  }

  enableRemoteTranscoding () {
    return this.updateExistingSubConfig({
      newConfig: {
        transcoding: {
          remoteRunners: {
            enabled: true
          }
        },
        live: {
          transcoding: {
            remoteRunners: {
              enabled: true
            }
          }
        }
      }
    })
  }

  // ---------------------------------------------------------------------------

  enableStudio () {

@@ -363,6 +385,9 @@ export class ConfigCommand extends AbstractCommand {
      },
      transcoding: {
        enabled: true,
        remoteRunners: {
          enabled: false
        },
        allowAdditionalExtensions: true,
        allowAudioFiles: true,
        threads: 1,

@@ -398,6 +423,9 @@ export class ConfigCommand extends AbstractCommand {
      maxUserLives: 50,
      transcoding: {
        enabled: true,
        remoteRunners: {
          enabled: false
        },
        threads: 4,
        profile: 'default',
        resolutions: {
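
A sketch (not part of the commit) of how a runner test would typically combine these config helpers before registering a runner; `server` is assumed to be a running PeerTubeServer.

// Enable transcoding (including 0p via the new third argument) and hand it off to remote runners
await server.config.enableTranscoding(true, true, true)
await server.config.enableRemoteTranscoding()

const runnerToken = await server.runners.autoRegisterRunner()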

@@ -1,16 +1,17 @@

import { expect } from 'chai'
import { wait } from '@shared/core-utils'
import { JobState, JobType } from '../../models'
import { JobState, JobType, RunnerJobState } from '../../models'
import { PeerTubeServer } from './server'

async function waitJobs (
  serversArg: PeerTubeServer[] | PeerTubeServer,
  options: {
    skipDelayed?: boolean // default false
    runnerJobs?: boolean // default false
  } = {}
) {
  const { skipDelayed = false } = options
  const { skipDelayed = false, runnerJobs = false } = options

  const pendingJobWait = process.env.NODE_PENDING_JOB_WAIT
    ? parseInt(process.env.NODE_PENDING_JOB_WAIT, 10)

@@ -33,7 +34,8 @@ async function waitJobs (
    // Check if each server has pending request
    for (const server of servers) {
      for (const state of states) {
        const p = server.jobs.list({

        const jobPromise = server.jobs.list({
          state,
          start: 0,
          count: 10,

@@ -46,17 +48,29 @@ async function waitJobs (
          }
        })

        tasks.push(p)
        tasks.push(jobPromise)
      }

      const p = server.debug.getDebug()
      const debugPromise = server.debug.getDebug()
        .then(obj => {
          if (obj.activityPubMessagesWaiting !== 0) {
            pendingRequests = true
          }
        })
      tasks.push(debugPromise)

      if (runnerJobs) {
        const runnerJobsPromise = server.runnerJobs.list({ count: 100 })
          .then(({ data }) => {
            for (const job of data) {
              if (job.state.id !== RunnerJobState.COMPLETED) {
                pendingRequests = true
              }
            }
          })
        tasks.push(runnerJobsPromise)
      }

      tasks.push(p)
    }

    return tasks
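
A short sketch (not part of the commit) of the new option from a test's point of view: besides regular job queues and pending ActivityPub messages, the helper can now also wait until all runner jobs are COMPLETED.

// Wait for transcoding handled by remote runners to finish on every server
await waitJobs(servers, { runnerJobs: true })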

@@ -8,9 +8,9 @@ import { CLICommand } from '../cli'
import { CustomPagesCommand } from '../custom-pages'
import { FeedCommand } from '../feeds'
import { LogsCommand } from '../logs'
import { SQLCommand } from '../miscs'
import { AbusesCommand } from '../moderation'
import { OverviewsCommand } from '../overviews'
import { RunnerJobsCommand, RunnerRegistrationTokensCommand, RunnersCommand } from '../runners'
import { SearchCommand } from '../search'
import { SocketIOCommand } from '../socket'
import {

@@ -136,7 +136,6 @@ export class PeerTubeServer {
  streamingPlaylists?: StreamingPlaylistsCommand
  channels?: ChannelsCommand
  comments?: CommentsCommand
  sql?: SQLCommand
  notifications?: NotificationsCommand
  servers?: ServersCommand
  login?: LoginCommand

@@ -150,6 +149,10 @@ export class PeerTubeServer {
  videoToken?: VideoTokenCommand
  registrations?: RegistrationsCommand

  runners?: RunnersCommand
  runnerRegistrationTokens?: RunnerRegistrationTokensCommand
  runnerJobs?: RunnerJobsCommand

  constructor (options: { serverNumber: number } | { url: string }) {
    if ((options as any).url) {
      this.setUrl((options as any).url)

@@ -311,14 +314,14 @@ export class PeerTubeServer {
    })
  }

  async kill () {
    if (!this.app) return

    await this.sql.cleanup()
  kill () {
    if (!this.app) return Promise.resolve()

    process.kill(-this.app.pid)

    this.app = null

    return Promise.resolve()
  }

  private randomServer () {

@@ -420,7 +423,6 @@ export class PeerTubeServer {
    this.streamingPlaylists = new StreamingPlaylistsCommand(this)
    this.channels = new ChannelsCommand(this)
    this.comments = new CommentsCommand(this)
    this.sql = new SQLCommand(this)
    this.notifications = new NotificationsCommand(this)
    this.servers = new ServersCommand(this)
    this.login = new LoginCommand(this)

@@ -433,5 +435,9 @@ export class PeerTubeServer {
    this.twoFactor = new TwoFactorCommand(this)
    this.videoToken = new VideoTokenCommand(this)
    this.registrations = new RegistrationsCommand(this)

    this.runners = new RunnersCommand(this)
    this.runnerRegistrationTokens = new RunnerRegistrationTokensCommand(this)
    this.runnerJobs = new RunnerJobsCommand(this)
  }
}

@@ -20,7 +20,7 @@ function createMultipleServers (totalServers: number, configOverride?: object, o
  return Promise.all(serverPromises)
}

async function killallServers (servers: PeerTubeServer[]) {
function killallServers (servers: PeerTubeServer[]) {
  return Promise.all(servers.map(s => s.kill()))
}

@@ -33,6 +33,7 @@ interface InternalCommonCommandOptions extends OverrideCommandOptions {
  host?: string
  headers?: { [ name: string ]: string }
  requestType?: string
  responseType?: string
  xForwardedFor?: string
}

@@ -169,7 +170,7 @@ abstract class AbstractCommand {
  }

  protected buildCommonRequestOptions (options: InternalCommonCommandOptions) {
    const { url, path, redirects, contentType, accept, range, host, headers, requestType, xForwardedFor } = options
    const { url, path, redirects, contentType, accept, range, host, headers, requestType, xForwardedFor, responseType } = options

    return {
      url: url ?? this.server.url,

@@ -185,6 +186,7 @@ abstract class AbstractCommand {
      accept,
      headers,
      type: requestType,
      responseType,
      xForwardedFor
    }
  }

@@ -12,4 +12,13 @@ export class SocketIOCommand extends AbstractCommand {
  getLiveNotificationSocket () {
    return io(this.server.url + '/live-videos')
  }

  getRunnersSocket (options: {
    runnerToken: string
  }) {
    return io(this.server.url + '/runners', {
      reconnection: false,
      auth: { runnerToken: options.runnerToken }
    })
  }
}
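
A sketch (not part of the commit) of how a test might use the new runners socket; it assumes the command is exposed as `server.socketIO`, and the 'available-jobs' event name is an assumption here rather than something stated by this diff.

const socket = server.socketIO.getRunnersSocket({ runnerToken })

socket.on('available-jobs', () => {
  // A registered runner would request and accept the job from here (see RunnerJobsCommand above)
})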

@@ -121,7 +121,7 @@ export class LiveCommand extends AbstractCommand {
    permanentLive: boolean
    privacy?: VideoPrivacy
  }) {
    const { saveReplay, permanentLive, privacy } = options
    const { saveReplay, permanentLive, privacy = VideoPrivacy.PUBLIC } = options

    const { uuid } = await this.create({
      ...options,

@@ -13,7 +13,7 @@ export class StreamingPlaylistsCommand extends AbstractCommand {

    withRetry?: boolean // default false
    currentRetry?: number
  }) {
  }): Promise<string> {
    const { videoFileToken, reinjectVideoFileToken, withRetry, currentRetry = 1 } = options

    try {

@@ -54,6 +54,7 @@ export class StreamingPlaylistsCommand extends AbstractCommand {
      url: options.url,
      range: options.range,
      implicitToken: false,
      responseType: 'application/octet-stream',
      defaultExpectedStatus: HttpStatusCode.OK_200
    }))
  }

@@ -65,6 +66,7 @@ export class StreamingPlaylistsCommand extends AbstractCommand {
      ...options,

      url: options.url,
      contentType: 'application/json',
      implicitToken: false,
      defaultExpectedStatus: HttpStatusCode.OK_200
    }))