author     Chocobozzz <me@florianbigard.com>  2018-06-08 20:34:37 +0200
committer  Chocobozzz <me@florianbigard.com>  2018-06-08 20:34:37 +0200
commit     fe3a55b071c99b346e9e9ab786f5d219e5a064cd
tree       55c6d0e9a253b20df738fd82820eb931044828e6 /server/tests
parent     4a7591e1a8ec5ffdff85580c6be4b18d8b85b4d4
Add video comments RSS
Diffstat (limited to 'server/tests')
 server/tests/api/feeds/instance-feed.ts |  91 -
 server/tests/api/index-slow.ts          |   1 -
 server/tests/feeds/feeds.ts             | 120 +
 server/tests/utils/feeds/feeds.ts       |  10 +-
 4 files changed, 126 insertions(+), 96 deletions(-)
diff --git a/server/tests/api/feeds/instance-feed.ts b/server/tests/api/feeds/instance-feed.ts
deleted file mode 100644
index e834e1db1..000000000
--- a/server/tests/api/feeds/instance-feed.ts
+++ /dev/null
@@ -1,91 +0,0 @@
-/* tslint:disable:no-unused-expression */
-
-import * as chai from 'chai'
-import 'mocha'
-import {
-  getOEmbed,
-  getXMLfeed,
-  getJSONfeed,
-  flushTests,
-  killallServers,
-  ServerInfo,
-  setAccessTokensToServers,
-  uploadVideo,
-  flushAndRunMultipleServers,
-  wait
-} from '../../utils'
-import { runServer } from '../../utils/server/servers'
-import { join } from 'path'
-import * as libxmljs from 'libxmljs'
-
-chai.use(require('chai-xml'))
-chai.use(require('chai-json-schema'))
-chai.config.includeStack = true
-const expect = chai.expect
-
-describe('Test instance-wide syndication feeds', () => {
-  let servers: ServerInfo[] = []
-
-  before(async function () {
-    this.timeout(30000)
-
-    // Run servers
-    servers = await flushAndRunMultipleServers(2)
-
-    await setAccessTokensToServers(servers)
-
-    this.timeout(60000)
-
-    const videoAttributes = {
-      name: 'my super name for server 1',
-      description: 'my super description for server 1',
-      fixture: 'video_short.webm'
-    }
-    await uploadVideo(servers[0].url, servers[0].accessToken, videoAttributes)
-
-    await wait(10000)
-  })
-
-  it('should be well formed XML (covers RSS 2.0 and ATOM 1.0 endpoints)', async function () {
-    const rss = await getXMLfeed(servers[0].url)
-    expect(rss.text).xml.to.be.valid()
-
-    const atom = await getXMLfeed(servers[0].url, 'atom')
-    expect(atom.text).xml.to.be.valid()
-  })
-
-  it('should be well formed JSON (covers JSON feed 1.0 endpoint)', async function () {
-    const json = await getJSONfeed(servers[0].url)
-    expect(JSON.parse(json.text)).to.be.jsonSchema({ 'type': 'object' })
-  })
-
-  it('should contain a valid enclosure (covers RSS 2.0 endpoint)', async function () {
-    const rss = await getXMLfeed(servers[0].url)
-    const xmlDoc = libxmljs.parseXmlString(rss.text)
-    const xmlEnclosure = xmlDoc.get('/rss/channel/item/enclosure')
-    expect(xmlEnclosure).to.exist
-    expect(xmlEnclosure.attr('type').value()).to.be.equal('application/x-bittorrent')
-    expect(xmlEnclosure.attr('length').value()).to.be.equal('218910')
-    expect(xmlEnclosure.attr('url').value()).to.contain('720.torrent')
-  })
-
-  it('should contain a valid \'attachments\' object (covers JSON feed 1.0 endpoint)', async function () {
-    const json = await getJSONfeed(servers[0].url)
-    const jsonObj = JSON.parse(json.text)
-    expect(jsonObj.items.length).to.be.equal(1)
-    expect(jsonObj.items[0].attachments).to.exist
-    expect(jsonObj.items[0].attachments.length).to.be.eq(1)
-    expect(jsonObj.items[0].attachments[0].mime_type).to.be.eq('application/x-bittorrent')
-    expect(jsonObj.items[0].attachments[0].size_in_bytes).to.be.eq(218910)
-    expect(jsonObj.items[0].attachments[0].url).to.contain('720.torrent')
-  })
-
-  after(async function () {
-    killallServers(servers)
-
-    // Keep the logs if the test failed
-    if (this['ok']) {
-      await flushTests()
-    }
-  })
-})
diff --git a/server/tests/api/index-slow.ts b/server/tests/api/index-slow.ts
index 5f2f26095..cde546856 100644
--- a/server/tests/api/index-slow.ts
+++ b/server/tests/api/index-slow.ts
@@ -1,6 +1,5 @@
 // Order of the tests we want to execute
 import './videos/video-transcoder'
-import './feeds/instance-feed'
 import './videos/multiple-servers'
 import './server/follows'
 import './server/jobs'
diff --git a/server/tests/feeds/feeds.ts b/server/tests/feeds/feeds.ts
new file mode 100644
index 000000000..f65148f00
--- /dev/null
+++ b/server/tests/feeds/feeds.ts
@@ -0,0 +1,120 @@
+/* tslint:disable:no-unused-expression */
+
+import * as chai from 'chai'
+import 'mocha'
+import {
+  doubleFollow,
+  flushAndRunMultipleServers,
+  flushTests,
+  getJSONfeed,
+  getXMLfeed,
+  killallServers,
+  ServerInfo,
+  setAccessTokensToServers,
+  uploadVideo,
+  wait
+} from '../utils'
+import { join } from 'path'
+import * as libxmljs from 'libxmljs'
+import { addVideoCommentThread } from '../utils/videos/video-comments'
+
+chai.use(require('chai-xml'))
+chai.use(require('chai-json-schema'))
+chai.config.includeStack = true
+const expect = chai.expect
+
+describe('Test syndication feeds', () => {
+  let servers: ServerInfo[] = []
+
+  before(async function () {
+    this.timeout(120000)
+
+    // Run servers
+    servers = await flushAndRunMultipleServers(2)
+
+    await setAccessTokensToServers(servers)
+    await doubleFollow(servers[0], servers[1])
+
+    const videoAttributes = {
+      name: 'my super name for server 1',
+      description: 'my super description for server 1',
+      fixture: 'video_short.webm'
+    }
+    const res = await uploadVideo(servers[0].url, servers[0].accessToken, videoAttributes)
+    const videoId = res.body.video.id
+
+    await addVideoCommentThread(servers[0].url, servers[0].accessToken, videoId, 'super comment 1')
+    await addVideoCommentThread(servers[0].url, servers[0].accessToken, videoId, 'super comment 2')
+
+    await wait(10000)
+  })
+
+  describe('All feed', function () {
+
+    it('Should be well formed XML (covers RSS 2.0 and ATOM 1.0 endpoints)', async function () {
+      for (const feed of [ 'video-comments' as 'video-comments', 'videos' as 'videos' ]) {
+        const rss = await getXMLfeed(servers[ 0 ].url, feed)
+        expect(rss.text).xml.to.be.valid()
+
+        const atom = await getXMLfeed(servers[ 0 ].url, feed, 'atom')
+        expect(atom.text).xml.to.be.valid()
+      }
+    })
+
+    it('Should be well formed JSON (covers JSON feed 1.0 endpoint)', async function () {
+      for (const feed of [ 'video-comments' as 'video-comments', 'videos' as 'videos' ]) {
+        const json = await getJSONfeed(servers[ 0 ].url, feed)
+        expect(JSON.parse(json.text)).to.be.jsonSchema({ 'type': 'object' })
+      }
+    })
+  })
+
+  describe('Videos feed', function () {
+    it('Should contain a valid enclosure (covers RSS 2.0 endpoint)', async function () {
+      for (const server of servers) {
+        const rss = await getXMLfeed(server.url, 'videos')
+        const xmlDoc = libxmljs.parseXmlString(rss.text)
+        const xmlEnclosure = xmlDoc.get('/rss/channel/item/enclosure')
+        expect(xmlEnclosure).to.exist
+        expect(xmlEnclosure.attr('type').value()).to.be.equal('application/x-bittorrent')
+        expect(xmlEnclosure.attr('length').value()).to.be.equal('218910')
+        expect(xmlEnclosure.attr('url').value()).to.contain('720.torrent')
+      }
+    })
+
+    it('Should contain a valid \'attachments\' object (covers JSON feed 1.0 endpoint)', async function () {
+      for (const server of servers) {
+        const json = await getJSONfeed(server.url, 'videos')
+        const jsonObj = JSON.parse(json.text)
+        expect(jsonObj.items.length).to.be.equal(1)
+        expect(jsonObj.items[ 0 ].attachments).to.exist
+        expect(jsonObj.items[ 0 ].attachments.length).to.be.eq(1)
+        expect(jsonObj.items[ 0 ].attachments[ 0 ].mime_type).to.be.eq('application/x-bittorrent')
+        expect(jsonObj.items[ 0 ].attachments[ 0 ].size_in_bytes).to.be.eq(218910)
+        expect(jsonObj.items[ 0 ].attachments[ 0 ].url).to.contain('720.torrent')
+      }
+    })
+  })
+
+  describe('Video comments feed', function () {
+    it('Should contain valid comments (covers JSON feed 1.0 endpoint)', async function () {
+      for (const server of servers) {
+        const json = await getJSONfeed(server.url, 'video-comments')
+
+        const jsonObj = JSON.parse(json.text)
+        expect(jsonObj.items.length).to.be.equal(2)
+        expect(jsonObj.items[ 0 ].html_content).to.equal('super comment 2')
+        expect(jsonObj.items[ 1 ].html_content).to.equal('super comment 1')
+      }
+    })
+  })
+
+  after(async function () {
+    killallServers(servers)
+
+    // Keep the logs if the test failed
+    if (this['ok']) {
+      await flushTests()
+    }
+  })
+})
diff --git a/server/tests/utils/feeds/feeds.ts b/server/tests/utils/feeds/feeds.ts
index 20e68cf3d..ffd23a1ad 100644
--- a/server/tests/utils/feeds/feeds.ts
+++ b/server/tests/utils/feeds/feeds.ts
@@ -1,8 +1,10 @@
 import * as request from 'supertest'
 import { readFileBufferPromise } from '../../../helpers/core-utils'
 
-function getXMLfeed (url: string, format?: string) {
-  const path = '/feeds/videos.xml'
+type FeedType = 'videos' | 'video-comments'
+
+function getXMLfeed (url: string, feed: FeedType, format?: string) {
+  const path = '/feeds/' + feed + '.xml'
 
   return request(url)
     .get(path)
@@ -12,8 +14,8 @@ function getXMLfeed (url: string, format?: string) {
     .expect('Content-Type', /xml/)
 }
 
-function getJSONfeed (url: string) {
-  const path = '/feeds/videos.json'
+function getJSONfeed (url: string, feed: FeedType) {
+  const path = '/feeds/' + feed + '.json'
 
   return request(url)
     .get(path)
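
For orientation, a minimal usage sketch of the reworked helpers once this commit is applied. The import path and the server URL parameter are illustrative placeholders rather than part of the commit; the endpoint mapping simply follows the '/feeds/' + feed + '.xml' / '.json' path construction shown in the diff above.

// Hedged sketch: calling the feed test helpers with the new FeedType parameter.
// The relative import path and serverUrl value are assumptions for illustration only.
import { getJSONfeed, getXMLfeed } from './server/tests/utils/feeds/feeds'

async function fetchCommentFeeds (serverUrl: string) {
  // JSON Feed 1.0 endpoint: GET /feeds/video-comments.json
  const json = await getJSONfeed(serverUrl, 'video-comments')
  const items = JSON.parse(json.text).items // each item carries the comment text as html_content

  // XML endpoints: GET /feeds/video-comments.xml (RSS 2.0 by default, Atom when 'atom' is requested)
  const rss = await getXMLfeed(serverUrl, 'video-comments')
  const atom = await getXMLfeed(serverUrl, 'video-comments', 'atom')

  return { items, rss: rss.text, atom: atom.text }
}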