import 'mocha'
import * as chai from 'chai'
-import * as xmlParser from 'fast-xml-parser'
-import { HttpStatusCode } from '@shared/core-utils'
+import { XMLParser, XMLValidator } from 'fast-xml-parser'
+import { HttpStatusCode, VideoPrivacy } from '@shared/models'
import {
cleanupTests,
+ createMultipleServers,
+ createSingleServer,
doubleFollow,
- flushAndRunMultipleServers,
- flushAndRunServer,
- ServerInfo,
+ makeGetRequest,
+ PeerTubeServer,
setAccessTokensToServers,
+ setDefaultChannelAvatar,
waitJobs
-} from '@shared/extra-utils'
-import { VideoPrivacy } from '@shared/models'
+} from '@shared/server-commands'
chai.use(require('chai-xml'))
chai.use(require('chai-json-schema'))
const expect = chai.expect
describe('Test syndication feeds', () => {
- let servers: ServerInfo[] = []
- let serverHLSOnly: ServerInfo
+ let servers: PeerTubeServer[] = []
+ let serverHLSOnly: PeerTubeServer
let userAccessToken: string
let rootAccountId: number
let rootChannelId: number
this.timeout(120000)
// Run servers
- servers = await flushAndRunMultipleServers(2)
- serverHLSOnly = await flushAndRunServer(3, {
+ servers = await createMultipleServers(2)
+ serverHLSOnly = await createSingleServer(3, {
transcoding: {
enabled: true,
webtorrent: { enabled: false },
})
await setAccessTokensToServers([ ...servers, serverHLSOnly ])
+ await setDefaultChannelAvatar(servers[0])
await doubleFollow(servers[0], servers[1])
{
}
{
- const attr = { username: 'john', password: 'password' }
- await servers[0].users.create({ username: attr.username, password: attr.password })
- userAccessToken = await servers[0].login.getAccessToken(attr)
+ userAccessToken = await servers[0].users.generateUserAndToken('john')
const user = await servers[0].users.getMyInfo({ token: userAccessToken })
userAccountId = user.account.id
expect(JSON.parse(jsonText)).to.be.jsonSchema({ type: 'object' })
}
})
+
+ // Baseline: the XML feed endpoint answers 200 to a plain GET with an
+ // XML Accept header (first, uncached hit).
+ it('Should serve the endpoint with a classic request', async function () {
+ await makeGetRequest({
+ url: servers[0].url,
+ path: '/feeds/videos.xml',
+ accept: 'application/xml',
+ expectedStatus: HttpStatusCode.OK_200
+ })
+ })
+
+ // Repeating the exact same request as the previous test: the response is
+ // expected to come from the server-side cache, signalled by the
+ // 'x-api-cache-cached' header being 'true' (NOTE(review): header name
+ // suggests the apicache middleware — confirm against server config).
+ it('Should serve the endpoint as a cached request', async function () {
+ const res = await makeGetRequest({
+ url: servers[0].url,
+ path: '/feeds/videos.xml',
+ accept: 'application/xml',
+ expectedStatus: HttpStatusCode.OK_200
+ })
+
+ expect(res.headers['x-api-cache-cached']).to.equal('true')
+ })
+
+ // Adding a query string (?v=186) produces a distinct cache key, so this
+ // first-time URL must NOT be served from cache: the cache-hit header is
+ // expected to be absent entirely.
+ it('Should not serve the endpoint as a cached request', async function () {
+ const res = await makeGetRequest({
+ url: servers[0].url,
+ path: '/feeds/videos.xml?v=186',
+ accept: 'application/xml',
+ expectedStatus: HttpStatusCode.OK_200
+ })
+
+ expect(res.headers['x-api-cache-cached']).to.not.exist
+ })
+
+ // Without an Accept header the server cannot negotiate a feed format and
+ // must reject the request with 406 Not Acceptable.
+ it('Should refuse to serve the endpoint without accept header', async function () {
+ await makeGetRequest({ url: servers[0].url, path: '/feeds/videos.xml', expectedStatus: HttpStatusCode.NOT_ACCEPTABLE_406 })
+ })
})
describe('Videos feed', function () {
it('Should contain a valid enclosure (covers RSS 2.0 endpoint)', async function () {
for (const server of servers) {
const rss = await server.feed.getXML({ feed: 'videos' })
- expect(xmlParser.validate(rss)).to.be.true
+ expect(XMLValidator.validate(rss)).to.be.true
- const xmlDoc = xmlParser.parse(rss, { parseAttributeValue: true, ignoreAttributes: false })
+ const parser = new XMLParser({ parseAttributeValue: true, ignoreAttributes: false })
+ const xmlDoc = parser.parse(rss)
const enclosure = xmlDoc.rss.channel.item[0].enclosure
expect(enclosure).to.exist
- expect(enclosure['@_type']).to.equal('application/x-bittorrent')
+
+ expect(enclosure['@_type']).to.equal('video/webm')
expect(enclosure['@_length']).to.equal(218910)
- expect(enclosure['@_url']).to.contain('720.torrent')
+ expect(enclosure['@_url']).to.contain('-720.webm')
}
})
const jsonObj = JSON.parse(json)
expect(jsonObj.items.length).to.be.equal(2)
- expect(jsonObj.items[0].html_content).to.equal('super comment 2')
- expect(jsonObj.items[1].html_content).to.equal('super comment 1')
+ expect(jsonObj.items[0].content_html).to.contain('<p>super comment 2</p>')
+ expect(jsonObj.items[1].content_html).to.contain('<p>super comment 1</p>')
}
})