author     Chocobozzz <me@florianbigard.com>    2018-06-08 20:34:37 +0200
committer  Chocobozzz <me@florianbigard.com>    2018-06-08 20:34:37 +0200
commit     fe3a55b071c99b346e9e9ab786f5d219e5a064cd (patch)
tree       55c6d0e9a253b20df738fd82820eb931044828e6 /server/tests/feeds/feeds.ts
parent     4a7591e1a8ec5ffdff85580c6be4b18d8b85b4d4 (diff)
Add video comments RSS
Diffstat (limited to 'server/tests/feeds/feeds.ts')
-rw-r--r--    server/tests/feeds/feeds.ts    120
1 file changed, 120 insertions, 0 deletions
diff --git a/server/tests/feeds/feeds.ts b/server/tests/feeds/feeds.ts
new file mode 100644
index 000000000..f65148f00
--- /dev/null
+++ b/server/tests/feeds/feeds.ts
@@ -0,0 +1,120 @@
/* tslint:disable:no-unused-expression */

import * as chai from 'chai'
import 'mocha'
import {
  doubleFollow,
  flushAndRunMultipleServers,
  flushTests,
  getJSONfeed,
  getXMLfeed,
  killallServers,
  ServerInfo,
  setAccessTokensToServers,
  uploadVideo,
  wait
} from '../utils'
import { join } from 'path'
import * as libxmljs from 'libxmljs'
import { addVideoCommentThread } from '../utils/videos/video-comments'

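// chai-xml provides the xml validity assertion and chai-json-schema the jsonSchema assertion used below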
chai.use(require('chai-xml'))
chai.use(require('chai-json-schema'))
chai.config.includeStack = true
const expect = chai.expect

describe('Test syndication feeds', () => {
  let servers: ServerInfo[] = []

  before(async function () {
    this.timeout(120000)

    // Run servers
    servers = await flushAndRunMultipleServers(2)

    await setAccessTokensToServers(servers)
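    // Make the two servers follow each other so the video and its comments federate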
    await doubleFollow(servers[0], servers[1])

    const videoAttributes = {
      name: 'my super name for server 1',
      description: 'my super description for server 1',
      fixture: 'video_short.webm'
    }
    const res = await uploadVideo(servers[0].url, servers[0].accessToken, videoAttributes)
    const videoId = res.body.video.id

    await addVideoCommentThread(servers[0].url, servers[0].accessToken, videoId, 'super comment 1')
    await addVideoCommentThread(servers[0].url, servers[0].accessToken, videoId, 'super comment 2')

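    // Give the uploaded video and its comments time to federate to the second server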
    await wait(10000)
  })

  describe('All feed', function () {

    it('Should be well formed XML (covers RSS 2.0 and ATOM 1.0 endpoints)', async function () {
      for (const feed of [ 'video-comments' as 'video-comments', 'videos' as 'videos' ]) {
        const rss = await getXMLfeed(servers[ 0 ].url, feed)
        expect(rss.text).xml.to.be.valid()

        const atom = await getXMLfeed(servers[ 0 ].url, feed, 'atom')
        expect(atom.text).xml.to.be.valid()
      }
    })

    it('Should be well formed JSON (covers JSON feed 1.0 endpoint)', async function () {
      for (const feed of [ 'video-comments' as 'video-comments', 'videos' as 'videos' ]) {
        const json = await getJSONfeed(servers[ 0 ].url, feed)
        expect(JSON.parse(json.text)).to.be.jsonSchema({ 'type': 'object' })
      }
    })
  })

  describe('Videos feed', function () {
    it('Should contain a valid enclosure (covers RSS 2.0 endpoint)', async function () {
      for (const server of servers) {
        const rss = await getXMLfeed(server.url, 'videos')
        const xmlDoc = libxmljs.parseXmlString(rss.text)
        const xmlEnclosure = xmlDoc.get('/rss/channel/item/enclosure')
        expect(xmlEnclosure).to.exist
        expect(xmlEnclosure.attr('type').value()).to.be.equal('application/x-bittorrent')
        expect(xmlEnclosure.attr('length').value()).to.be.equal('218910')
        expect(xmlEnclosure.attr('url').value()).to.contain('720.torrent')
      }
    })

    it('Should contain a valid \'attachments\' object (covers JSON feed 1.0 endpoint)', async function () {
      for (const server of servers) {
        const json = await getJSONfeed(server.url, 'videos')
        const jsonObj = JSON.parse(json.text)
        expect(jsonObj.items.length).to.be.equal(1)
        expect(jsonObj.items[ 0 ].attachments).to.exist
        expect(jsonObj.items[ 0 ].attachments.length).to.be.eq(1)
        expect(jsonObj.items[ 0 ].attachments[ 0 ].mime_type).to.be.eq('application/x-bittorrent')
        expect(jsonObj.items[ 0 ].attachments[ 0 ].size_in_bytes).to.be.eq(218910)
        expect(jsonObj.items[ 0 ].attachments[ 0 ].url).to.contain('720.torrent')
      }
    })
  })

  describe('Video comments feed', function () {
    it('Should contain valid comments (covers JSON feed 1.0 endpoint)', async function () {
      for (const server of servers) {
        const json = await getJSONfeed(server.url, 'video-comments')

        const jsonObj = JSON.parse(json.text)
        expect(jsonObj.items.length).to.be.equal(2)
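        // The most recent comment comes first in the feed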
        expect(jsonObj.items[ 0 ].html_content).to.equal('super comment 2')
        expect(jsonObj.items[ 1 ].html_content).to.equal('super comment 1')
      }
    })
  })

  after(async function () {
    killallServers(servers)

    // Keep the logs if the test failed
    if (this['ok']) {
      await flushTests()
    }
  })
})