/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import * as chai from 'chai'
import 'mocha'
import {
  cleanupTests,
  createUser,
  doubleFollow,
  flushAndRunMultipleServers,
  getJSONfeed,
  getMyUserInformation,
  getXMLfeed,
  ServerInfo,
  setAccessTokensToServers,
  uploadVideo,
  userLogin
} from '../../../shared/extra-utils'
import * as libxmljs from 'libxmljs'
import { addVideoCommentThread } from '../../../shared/extra-utils/videos/video-comments'
import { waitJobs } from '../../../shared/extra-utils/server/jobs'
import { User } from '../../../shared/models/users'

chai.use(require('chai-xml'))
chai.use(require('chai-json-schema'))
chai.config.includeStack = true
const expect = chai.expect

describe('Test syndication feeds', () => {
  let servers: ServerInfo[] = []
  let userAccessToken: string
  let rootAccountId: number
  let rootChannelId: number
  let userAccountId: number
  let userChannelId: number

  before(async function () {
    this.timeout(120000)

    // Run servers
    servers = await flushAndRunMultipleServers(2)

    await setAccessTokensToServers(servers)
    await doubleFollow(servers[0], servers[1])

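    // Fetch the root account id and its default channel id on server 1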
    {
      const res = await getMyUserInformation(servers[0].url, servers[0].accessToken)
      const user: User = res.body
      rootAccountId = user.account.id
      rootChannelId = user.videoChannels[0].id
    }

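    // Create a dedicated user on server 1 and fetch its account id and default channel id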
    {
      const attr = { username: 'john', password: 'password' }
      await createUser({ url: servers[0].url, accessToken: servers[0].accessToken, username: attr.username, password: attr.password })
      userAccessToken = await userLogin(servers[0], attr)

      const res = await getMyUserInformation(servers[0].url, userAccessToken)
      const user: User = res.body
      userAccountId = user.account.id
      userChannelId = user.videoChannels[0].id
    }

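    // Upload a video with the user account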
    {
      await uploadVideo(servers[0].url, userAccessToken, { name: 'user video' })
    }

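    // Upload a video with the root account and add two comment threads on it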
    {
      const videoAttributes = {
        name: 'my super name for server 1',
        description: 'my super description for server 1',
        fixture: 'video_short.webm'
      }
      const res = await uploadVideo(servers[0].url, servers[0].accessToken, videoAttributes)
      const videoId = res.body.video.id

      await addVideoCommentThread(servers[0].url, servers[0].accessToken, videoId, 'super comment 1')
      await addVideoCommentThread(servers[0].url, servers[0].accessToken, videoId, 'super comment 2')
    }

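    // Wait for the videos and comments to be processed and federated to the second server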
    await waitJobs(servers)
  })

  describe('All feed', function () {

    it('Should be well formed XML (covers RSS 2.0 and ATOM 1.0 endpoints)', async function () {
      for (const feed of [ 'video-comments' as 'video-comments', 'videos' as 'videos' ]) {
        const rss = await getXMLfeed(servers[0].url, feed)
        expect(rss.text).xml.to.be.valid()

        const atom = await getXMLfeed(servers[0].url, feed, 'atom')
        expect(atom.text).xml.to.be.valid()
      }
    })

    it('Should be well formed JSON (covers JSON feed 1.0 endpoint)', async function () {
      for (const feed of [ 'video-comments' as 'video-comments', 'videos' as 'videos' ]) {
        const json = await getJSONfeed(servers[0].url, feed)
        expect(JSON.parse(json.text)).to.be.jsonSchema({ type: 'object' })
      }
    })
  })

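  // The videos feed exposes each video torrent as an RSS enclosure / JSON Feed attachment
  // and can be filtered by account or video channel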
  describe('Videos feed', function () {
    it('Should contain a valid enclosure (covers RSS 2.0 endpoint)', async function () {
      for (const server of servers) {
        const rss = await getXMLfeed(server.url, 'videos')
        const xmlDoc = libxmljs.parseXmlString(rss.text)
        const xmlEnclosure = xmlDoc.get('/rss/channel/item/enclosure')
        expect(xmlEnclosure).to.exist
        expect(xmlEnclosure.attr('type').value()).to.be.equal('application/x-bittorrent')
        expect(xmlEnclosure.attr('length').value()).to.be.equal('218910')
        expect(xmlEnclosure.attr('url').value()).to.contain('720.torrent')
      }
    })

    it('Should contain a valid \'attachments\' object (covers JSON feed 1.0 endpoint)', async function () {
      for (const server of servers) {
        const json = await getJSONfeed(server.url, 'videos')
        const jsonObj = JSON.parse(json.text)
        expect(jsonObj.items.length).to.be.equal(2)
        expect(jsonObj.items[0].attachments).to.exist
        expect(jsonObj.items[0].attachments.length).to.be.eq(1)
        expect(jsonObj.items[0].attachments[0].mime_type).to.be.eq('application/x-bittorrent')
        expect(jsonObj.items[0].attachments[0].size_in_bytes).to.be.eq(218910)
        expect(jsonObj.items[0].attachments[0].url).to.contain('720.torrent')
      }
    })

    it('Should filter by account', async function () {
      {
        const json = await getJSONfeed(servers[0].url, 'videos', { accountId: rootAccountId })
        const jsonObj = JSON.parse(json.text)
        expect(jsonObj.items.length).to.be.equal(1)
        expect(jsonObj.items[0].title).to.equal('my super name for server 1')
        expect(jsonObj.items[0].author.name).to.equal('root')
      }

      {
        const json = await getJSONfeed(servers[0].url, 'videos', { accountId: userAccountId })
        const jsonObj = JSON.parse(json.text)
        expect(jsonObj.items.length).to.be.equal(1)
        expect(jsonObj.items[0].title).to.equal('user video')
        expect(jsonObj.items[0].author.name).to.equal('john')
      }

      for (const server of servers) {
        {
          const json = await getJSONfeed(server.url, 'videos', { accountName: 'root@localhost:' + servers[0].port })
          const jsonObj = JSON.parse(json.text)
          expect(jsonObj.items.length).to.be.equal(1)
          expect(jsonObj.items[0].title).to.equal('my super name for server 1')
        }

        {
          const json = await getJSONfeed(server.url, 'videos', { accountName: 'john@localhost:' + servers[0].port })
          const jsonObj = JSON.parse(json.text)
          expect(jsonObj.items.length).to.be.equal(1)
          expect(jsonObj.items[0].title).to.equal('user video')
        }
      }
    })

    it('Should filter by video channel', async function () {
      {
        const json = await getJSONfeed(servers[0].url, 'videos', { videoChannelId: rootChannelId })
        const jsonObj = JSON.parse(json.text)
        expect(jsonObj.items.length).to.be.equal(1)
        expect(jsonObj.items[0].title).to.equal('my super name for server 1')
        expect(jsonObj.items[0].author.name).to.equal('root')
      }

      {
        const json = await getJSONfeed(servers[0].url, 'videos', { videoChannelId: userChannelId })
        const jsonObj = JSON.parse(json.text)
        expect(jsonObj.items.length).to.be.equal(1)
        expect(jsonObj.items[0].title).to.equal('user video')
        expect(jsonObj.items[0].author.name).to.equal('john')
      }

      for (const server of servers) {
        {
          const json = await getJSONfeed(server.url, 'videos', { videoChannelName: 'root_channel@localhost:' + servers[0].port })
          const jsonObj = JSON.parse(json.text)
          expect(jsonObj.items.length).to.be.equal(1)
          expect(jsonObj.items[0].title).to.equal('my super name for server 1')
        }

        {
          const json = await getJSONfeed(server.url, 'videos', { videoChannelName: 'john_channel@localhost:' + servers[0].port })
          const jsonObj = JSON.parse(json.text)
          expect(jsonObj.items.length).to.be.equal(1)
          expect(jsonObj.items[0].title).to.equal('user video')
        }
      }
    })
  })

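  // The comments feed lists the two comment threads created in the before hook, newest first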
  describe('Video comments feed', function () {
    it('Should contain valid comments (covers JSON feed 1.0 endpoint)', async function () {
      for (const server of servers) {
        const json = await getJSONfeed(server.url, 'video-comments')

        const jsonObj = JSON.parse(json.text)
        expect(jsonObj.items.length).to.be.equal(2)
        expect(jsonObj.items[0].html_content).to.equal('super comment 2')
        expect(jsonObj.items[1].html_content).to.equal('super comment 1')
      }
    })
  })

  after(async function () {
    await cleanupTests(servers)
  })
})