2021-05-10 04:13:41 -05:00
|
|
|
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
|
|
|
|
|
|
|
|
import 'mocha'
|
|
|
|
import * as chai from 'chai'
|
|
|
|
import { pathExists, readdir, stat } from 'fs-extra'
|
|
|
|
import { join } from 'path'
|
|
|
|
import {
|
|
|
|
buildAbsoluteFixturePath,
|
2021-07-01 09:47:14 -05:00
|
|
|
cleanupTests,
|
2021-07-16 02:47:51 -05:00
|
|
|
createSingleServer,
|
|
|
|
PeerTubeServer,
|
2021-05-10 04:13:41 -05:00
|
|
|
setAccessTokensToServers,
|
2021-07-13 07:23:01 -05:00
|
|
|
setDefaultVideoChannel
|
2021-05-10 04:13:41 -05:00
|
|
|
} from '@shared/extra-utils'
|
2021-07-16 07:27:30 -05:00
|
|
|
import { HttpStatusCode, VideoPrivacy } from '@shared/models'
|
2021-05-10 04:13:41 -05:00
|
|
|
|
|
|
|
// Shorthand used by every assertion in this suite
const expect = chai.expect

// Most classic resumable upload tests are done in other test suites
|
|
|
|
|
|
|
|
describe('Test resumable upload', function () {
|
|
|
|
// Fixture uploaded by every test in this suite
const defaultFixture = 'video_short.mp4'

// Server under test, created in before()
let server: PeerTubeServer
// Id of the root user, fetched in before() and used to adjust its quota
let rootId: number
|
|
|
|
|
|
|
|
async function buildSize (fixture: string, size?: number) {
|
|
|
|
if (size !== undefined) return size
|
|
|
|
|
|
|
|
const baseFixture = buildAbsoluteFixturePath(fixture)
|
|
|
|
return (await stat(baseFixture)).size
|
|
|
|
}
|
|
|
|
|
|
|
|
async function prepareUpload (sizeArg?: number) {
|
|
|
|
const size = await buildSize(defaultFixture, sizeArg)
|
|
|
|
|
|
|
|
const attributes = {
|
|
|
|
name: 'video',
|
2021-07-16 02:04:35 -05:00
|
|
|
channelId: server.store.channel.id,
|
2021-05-10 04:13:41 -05:00
|
|
|
privacy: VideoPrivacy.PUBLIC,
|
|
|
|
fixture: defaultFixture
|
|
|
|
}
|
|
|
|
|
|
|
|
const mimetype = 'video/mp4'
|
|
|
|
|
2021-07-16 02:04:35 -05:00
|
|
|
const res = await server.videos.prepareResumableUpload({ attributes, size, mimetype })
|
2021-05-10 04:13:41 -05:00
|
|
|
|
|
|
|
return res.header['location'].split('?')[1]
|
|
|
|
}
|
|
|
|
|
|
|
|
async function sendChunks (options: {
|
|
|
|
pathUploadId: string
|
|
|
|
size?: number
|
|
|
|
expectedStatus?: HttpStatusCode
|
|
|
|
contentLength?: number
|
|
|
|
contentRange?: string
|
|
|
|
contentRangeBuilder?: (start: number, chunk: any) => string
|
|
|
|
}) {
|
|
|
|
const { pathUploadId, expectedStatus, contentLength, contentRangeBuilder } = options
|
|
|
|
|
|
|
|
const size = await buildSize(defaultFixture, options.size)
|
|
|
|
const absoluteFilePath = buildAbsoluteFixturePath(defaultFixture)
|
|
|
|
|
2021-07-16 02:04:35 -05:00
|
|
|
return server.videos.sendResumableChunks({
|
2021-05-10 04:13:41 -05:00
|
|
|
pathUploadId,
|
|
|
|
videoFilePath: absoluteFilePath,
|
|
|
|
size,
|
|
|
|
contentLength,
|
|
|
|
contentRangeBuilder,
|
2021-07-15 03:02:54 -05:00
|
|
|
expectedStatus
|
2021-05-10 04:13:41 -05:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
async function checkFileSize (uploadIdArg: string, expectedSize: number | null) {
|
|
|
|
const uploadId = uploadIdArg.replace(/^upload_id=/, '')
|
|
|
|
|
|
|
|
const subPath = join('tmp', 'resumable-uploads', uploadId)
|
2021-07-16 02:04:35 -05:00
|
|
|
const filePath = server.servers.buildDirectory(subPath)
|
2021-05-10 04:13:41 -05:00
|
|
|
const exists = await pathExists(filePath)
|
|
|
|
|
|
|
|
if (expectedSize === null) {
|
|
|
|
expect(exists).to.be.false
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
expect(exists).to.be.true
|
|
|
|
|
|
|
|
expect((await stat(filePath)).size).to.equal(expectedSize)
|
|
|
|
}
|
|
|
|
|
|
|
|
async function countResumableUploads () {
|
|
|
|
const subPath = join('tmp', 'resumable-uploads')
|
2021-07-16 02:04:35 -05:00
|
|
|
const filePath = server.servers.buildDirectory(subPath)
|
2021-05-10 04:13:41 -05:00
|
|
|
|
|
|
|
const files = await readdir(filePath)
|
|
|
|
return files.length
|
|
|
|
}
|
|
|
|
|
|
|
|
before(async function () {
|
|
|
|
this.timeout(30000)
|
|
|
|
|
2021-07-16 02:47:51 -05:00
|
|
|
server = await createSingleServer(1)
|
2021-07-22 07:28:03 -05:00
|
|
|
await setAccessTokensToServers([ server ])
|
|
|
|
await setDefaultVideoChannel([ server ])
|
2021-05-10 04:13:41 -05:00
|
|
|
|
2021-07-16 02:04:35 -05:00
|
|
|
const body = await server.users.getMyInfo()
|
2021-07-13 07:23:01 -05:00
|
|
|
rootId = body.id
|
2021-05-10 04:13:41 -05:00
|
|
|
|
2021-07-16 02:04:35 -05:00
|
|
|
await server.users.update({ userId: rootId, videoQuota: 10_000_000 })
|
2021-05-10 04:13:41 -05:00
|
|
|
})
|
|
|
|
|
|
|
|
describe('Directory cleaning', function () {
|
|
|
|
|
|
|
|
it('Should correctly delete files after an upload', async function () {
|
|
|
|
const uploadId = await prepareUpload()
|
|
|
|
await sendChunks({ pathUploadId: uploadId })
|
2021-09-09 02:31:50 -05:00
|
|
|
await server.videos.endResumableUpload({ pathUploadId: uploadId })
|
2021-05-10 04:13:41 -05:00
|
|
|
|
|
|
|
expect(await countResumableUploads()).to.equal(0)
|
|
|
|
})
|
|
|
|
|
|
|
|
it('Should not delete files after an unfinished upload', async function () {
|
|
|
|
await prepareUpload()
|
|
|
|
|
|
|
|
expect(await countResumableUploads()).to.equal(2)
|
|
|
|
})
|
|
|
|
|
|
|
|
it('Should not delete recent uploads', async function () {
|
2021-07-16 02:04:35 -05:00
|
|
|
await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })
|
2021-05-10 04:13:41 -05:00
|
|
|
|
|
|
|
expect(await countResumableUploads()).to.equal(2)
|
|
|
|
})
|
|
|
|
|
|
|
|
it('Should delete old uploads', async function () {
|
2021-07-16 02:04:35 -05:00
|
|
|
await server.debug.sendCommand({ body: { command: 'remove-dandling-resumable-uploads' } })
|
2021-05-10 04:13:41 -05:00
|
|
|
|
|
|
|
expect(await countResumableUploads()).to.equal(0)
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
|
|
|
describe('Resumable upload and chunks', function () {
|
|
|
|
|
|
|
|
it('Should accept the same amount of chunks', async function () {
|
|
|
|
const uploadId = await prepareUpload()
|
|
|
|
await sendChunks({ pathUploadId: uploadId })
|
|
|
|
|
|
|
|
await checkFileSize(uploadId, null)
|
|
|
|
})
|
|
|
|
|
|
|
|
it('Should not accept more chunks than expected', async function () {
|
2021-07-22 07:58:25 -05:00
|
|
|
const uploadId = await prepareUpload(100)
|
2021-05-10 04:13:41 -05:00
|
|
|
|
|
|
|
await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409 })
|
|
|
|
await checkFileSize(uploadId, 0)
|
|
|
|
})
|
|
|
|
|
|
|
|
it('Should not accept more chunks than expected with an invalid content length/content range', async function () {
|
|
|
|
const uploadId = await prepareUpload(1500)
|
|
|
|
|
2021-07-22 07:58:25 -05:00
|
|
|
// Content length check seems to have changed in v16
|
|
|
|
if (process.version.startsWith('v16')) {
|
|
|
|
await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.CONFLICT_409, contentLength: 1000 })
|
|
|
|
await checkFileSize(uploadId, 1000)
|
|
|
|
} else {
|
|
|
|
await sendChunks({ pathUploadId: uploadId, expectedStatus: HttpStatusCode.BAD_REQUEST_400, contentLength: 1000 })
|
|
|
|
await checkFileSize(uploadId, 0)
|
|
|
|
}
|
2021-05-10 04:13:41 -05:00
|
|
|
})
|
|
|
|
|
|
|
|
it('Should not accept more chunks than expected with an invalid content length', async function () {
|
|
|
|
const uploadId = await prepareUpload(500)
|
|
|
|
|
|
|
|
const size = 1000
|
|
|
|
|
2021-07-23 04:20:00 -05:00
|
|
|
// Content length check seems to have changed in v16
|
|
|
|
const expectedStatus = process.version.startsWith('v16')
|
|
|
|
? HttpStatusCode.CONFLICT_409
|
|
|
|
: HttpStatusCode.BAD_REQUEST_400
|
|
|
|
|
2021-07-22 07:28:03 -05:00
|
|
|
const contentRangeBuilder = (start: number) => `bytes ${start}-${start + size - 1}/${size}`
|
2021-07-23 04:20:00 -05:00
|
|
|
await sendChunks({ pathUploadId: uploadId, expectedStatus, contentRangeBuilder, contentLength: size })
|
2021-05-10 04:13:41 -05:00
|
|
|
await checkFileSize(uploadId, 0)
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2021-07-01 09:47:14 -05:00
|
|
|
// Tear down the server started in before()
after(async function () {
  await cleanupTests([ server ])
})
|
2021-05-10 04:13:41 -05:00
|
|
|
})
|