Diffstat (limited to 'server/tests/api/feeds')
-rw-r--r-- | server/tests/api/feeds/instance-feed.ts | 91
1 file changed, 0 insertions, 91 deletions
diff --git a/server/tests/api/feeds/instance-feed.ts b/server/tests/api/feeds/instance-feed.ts
deleted file mode 100644
index e834e1db1..000000000
--- a/server/tests/api/feeds/instance-feed.ts
+++ /dev/null
@@ -1,91 +0,0 @@
/* tslint:disable:no-unused-expression */

import * as chai from 'chai'
import 'mocha'
import {
  getOEmbed,
  getXMLfeed,
  getJSONfeed,
  flushTests,
  killallServers,
  ServerInfo,
  setAccessTokensToServers,
  uploadVideo,
  flushAndRunMultipleServers,
  wait
} from '../../utils'
import { runServer } from '../../utils/server/servers'
import { join } from 'path'
import * as libxmljs from 'libxmljs'

chai.use(require('chai-xml'))
chai.use(require('chai-json-schema'))
chai.config.includeStack = true
const expect = chai.expect

describe('Test instance-wide syndication feeds', () => {
  let servers: ServerInfo[] = []

  before(async function () {
    this.timeout(30000)

    // Run servers
    servers = await flushAndRunMultipleServers(2)

    await setAccessTokensToServers(servers)

    this.timeout(60000)

    const videoAttributes = {
      name: 'my super name for server 1',
      description: 'my super description for server 1',
      fixture: 'video_short.webm'
    }
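    // Upload a single video on server 1 so the feeds have exactly one item to check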
    await uploadVideo(servers[0].url, servers[0].accessToken, videoAttributes)

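    // Give the server time to process the upload before the feeds are queried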
    await wait(10000)
  })

  it('should be well formed XML (covers RSS 2.0 and ATOM 1.0 endpoints)', async function () {
    const rss = await getXMLfeed(servers[0].url)
    expect(rss.text).xml.to.be.valid()

    const atom = await getXMLfeed(servers[0].url, 'atom')
    expect(atom.text).xml.to.be.valid()
  })

  it('should be well formed JSON (covers JSON feed 1.0 endpoint)', async function () {
    const json = await getJSONfeed(servers[0].url)
    expect(JSON.parse(json.text)).to.be.jsonSchema({ 'type': 'object' })
  })

  it('should contain a valid enclosure (covers RSS 2.0 endpoint)', async function () {
    const rss = await getXMLfeed(servers[0].url)
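    // The RSS item should expose the video's torrent as its enclosure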
    const xmlDoc = libxmljs.parseXmlString(rss.text)
    const xmlEnclosure = xmlDoc.get('/rss/channel/item/enclosure')
    expect(xmlEnclosure).to.exist
    expect(xmlEnclosure.attr('type').value()).to.be.equal('application/x-bittorrent')
    expect(xmlEnclosure.attr('length').value()).to.be.equal('218910')
    expect(xmlEnclosure.attr('url').value()).to.contain('720.torrent')
  })

  it('should contain a valid \'attachments\' object (covers JSON feed 1.0 endpoint)', async function () {
    const json = await getJSONfeed(servers[0].url)
    const jsonObj = JSON.parse(json.text)
    expect(jsonObj.items.length).to.be.equal(1)
    expect(jsonObj.items[0].attachments).to.exist
    expect(jsonObj.items[0].attachments.length).to.be.eq(1)
    expect(jsonObj.items[0].attachments[0].mime_type).to.be.eq('application/x-bittorrent')
    expect(jsonObj.items[0].attachments[0].size_in_bytes).to.be.eq(218910)
    expect(jsonObj.items[0].attachments[0].url).to.contain('720.torrent')
  })

  after(async function () {
    killallServers(servers)

    // Keep the logs if the test failed
    if (this['ok']) {
      await flushTests()
    }
  })
})