author    Chocobozzz <me@florianbigard.com>  2023-07-31 14:34:36 +0200
committer Chocobozzz <me@florianbigard.com>  2023-08-11 15:02:33 +0200
commit    3a4992633ee62d5edfbb484d9c6bcb3cf158489d (patch)
tree      e4510b39bdac9c318fdb4b47018d08f15368b8f0 /packages/tests/src/feeds/feeds.ts
parent    04d1da5621d25d59bd5fa1543b725c497bf5d9a8 (diff)
download  PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.tar.gz
          PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.tar.zst
          PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.zip
Migrate server to ESM
Sorry for the very big commit that may lead to git log issues and merge conflicts, but it's a major step forward:

 * The server can start faster because import() is async and we can easily lazy-import big modules
 * Angular doesn't seem to support ES imports (with the .js extension), so we had to reorganize PeerTube into a monorepo:
   * Use the yarn workspace feature
   * Use TypeScript project references for dependencies
   * Shared projects have been moved into "packages"; each one is now a node module (with a dedicated package.json/tsconfig.json)
   * server/tools has been moved into apps/ and is now a dedicated app, bundled and published on NPM so users don't have to build the PeerTube CLI tools manually
   * server/tests has been moved into packages/ so we don't compile the tests every time we want to run the server
 * Use the isolatedModules option:
   * Had to move from const enum to const objects (https://www.typescriptlang.org/docs/handbook/enums.html#objects-vs-enums)
   * Had to explicitly specify "type" imports when used in decorators
 * Prefer tsx (which uses esbuild under the hood) over ts-node to load TypeScript files (tests with mocha, or scripts):
   * To reduce test complexity, since esbuild doesn't support decorator metadata, we only test server files that do not import server models
   * We still build test files into JS files for a faster CI
 * Remove the unmaintained PeerTube CLI import script
 * Remove some barrels to speed up execution (fewer imports)
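
A minimal sketch of the "const enum to const objects" change mentioned above, following the objects-vs-enums pattern from the linked TypeScript handbook page. The VideoState name and its values are only illustrative and are not taken from this commit:

// Before: a const enum relies on the compiler inlining its values at every
// use site, which a per-file transpiler such as esbuild generally cannot do
// across module boundaries
//   export const enum VideoState { PUBLISHED = 1, TO_TRANSCODE = 2 }

// After: a plain const object plus a companion type that is the union of
// its values (here 1 | 2); call sites keep using VideoState.PUBLISHED
export const VideoState = {
  PUBLISHED: 1,
  TO_TRANSCODE: 2
} as const

export type VideoState = typeof VideoState[keyof typeof VideoState]

The value and the type can share the VideoState name because TypeScript keeps value and type namespaces separate, so only the declaration changes while the members become real runtime values that survive per-file transpilation.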
Diffstat (limited to 'packages/tests/src/feeds/feeds.ts')
-rw-r--r--  packages/tests/src/feeds/feeds.ts  697
1 file changed, 697 insertions, 0 deletions
diff --git a/packages/tests/src/feeds/feeds.ts b/packages/tests/src/feeds/feeds.ts
new file mode 100644
index 000000000..7587bb34e
--- /dev/null
+++ b/packages/tests/src/feeds/feeds.ts
@@ -0,0 +1,697 @@
1/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
2
3import * as chai from 'chai'
4import chaiJSONSChema from 'chai-json-schema'
5import chaiXML from 'chai-xml'
6import { XMLParser, XMLValidator } from 'fast-xml-parser'
7import { HttpStatusCode, VideoPrivacy } from '@peertube/peertube-models'
8import {
9 cleanupTests,
10 createMultipleServers,
11 createSingleServer,
12 doubleFollow,
13 makeGetRequest,
14 makeRawRequest,
15 PeerTubeServer,
16 PluginsCommand,
17 setAccessTokensToServers,
18 setDefaultChannelAvatar,
19 setDefaultVideoChannel,
20 stopFfmpeg,
21 waitJobs
22} from '@peertube/peertube-server-commands'
23
24chai.use(chaiXML)
25chai.use(chaiJSONSChema)
26chai.config.includeStack = true
27
28const expect = chai.expect
29
30describe('Test syndication feeds', () => {
31 let servers: PeerTubeServer[] = []
32 let serverHLSOnly: PeerTubeServer
33
34 let userAccessToken: string
35 let rootAccountId: number
36 let rootChannelId: number
37
38 let userAccountId: number
39 let userChannelId: number
40 let userFeedToken: string
41
42 let liveId: string
43
44 before(async function () {
45 this.timeout(120000)
46
47 // Run servers
48 servers = await createMultipleServers(2)
49 serverHLSOnly = await createSingleServer(3, {
50 transcoding: {
51 enabled: true,
52 web_videos: { enabled: false },
53 hls: { enabled: true }
54 }
55 })
56
57 await setAccessTokensToServers([ ...servers, serverHLSOnly ])
58 await setDefaultChannelAvatar(servers[0])
59 await setDefaultVideoChannel(servers)
60 await doubleFollow(servers[0], servers[1])
61
62 await servers[0].config.enableLive({ allowReplay: false, transcoding: false })
63
64 {
65 const user = await servers[0].users.getMyInfo()
66 rootAccountId = user.account.id
67 rootChannelId = user.videoChannels[0].id
68 }
69
70 {
71 userAccessToken = await servers[0].users.generateUserAndToken('john')
72
73 const user = await servers[0].users.getMyInfo({ token: userAccessToken })
74 userAccountId = user.account.id
75 userChannelId = user.videoChannels[0].id
76
77 const token = await servers[0].users.getMyScopedTokens({ token: userAccessToken })
78 userFeedToken = token.feedToken
79 }
80
81 {
82 await servers[0].videos.upload({ token: userAccessToken, attributes: { name: 'user video' } })
83 }
84
85 {
86 const attributes = {
87 name: 'my super name for server 1',
88 description: 'my super description for server 1',
89 fixture: 'video_short.webm'
90 }
91 const { id } = await servers[0].videos.upload({ attributes })
92
93 await servers[0].comments.createThread({ videoId: id, text: 'super comment 1' })
94 await servers[0].comments.createThread({ videoId: id, text: 'super comment 2' })
95 }
96
97 {
98 const attributes = { name: 'unlisted video', privacy: VideoPrivacy.UNLISTED }
99 const { id } = await servers[0].videos.upload({ attributes })
100
101 await servers[0].comments.createThread({ videoId: id, text: 'comment on unlisted video' })
102 }
103
104 {
105 const attributes = { name: 'password protected video', privacy: VideoPrivacy.PASSWORD_PROTECTED, videoPasswords: [ 'password' ] }
106 const { id } = await servers[0].videos.upload({ attributes })
107
108 await servers[0].comments.createThread({ videoId: id, text: 'comment on password protected video' })
109 }
110
111 await serverHLSOnly.videos.upload({ attributes: { name: 'hls only video' } })
112
113 await waitJobs([ ...servers, serverHLSOnly ])
114
115 await servers[0].plugins.install({ path: PluginsCommand.getPluginTestPath('-podcast-custom-tags') })
116 })
117
118 describe('All feed', function () {
119
120 it('Should be well formed XML (covers RSS 2.0 and ATOM 1.0 endpoints)', async function () {
121 for (const feed of [ 'video-comments' as 'video-comments', 'videos' as 'videos' ]) {
122 const rss = await servers[0].feed.getXML({ feed, ignoreCache: true })
123 expect(rss).xml.to.be.valid()
124
125 const atom = await servers[0].feed.getXML({ feed, format: 'atom', ignoreCache: true })
126 expect(atom).xml.to.be.valid()
127 }
128 })
129
130 it('Should be well formed XML (covers Podcast endpoint)', async function () {
131 const podcast = await servers[0].feed.getPodcastXML({ ignoreCache: true, channelId: rootChannelId })
132 expect(podcast).xml.to.be.valid()
133 })
134
135 it('Should be well formed JSON (covers JSON feed 1.0 endpoint)', async function () {
136 for (const feed of [ 'video-comments' as 'video-comments', 'videos' as 'videos' ]) {
137 const jsonText = await servers[0].feed.getJSON({ feed, ignoreCache: true })
138 expect(JSON.parse(jsonText)).to.be.jsonSchema({ type: 'object' })
139 }
140 })
141
142 it('Should serve the endpoint with a classic request', async function () {
143 await makeGetRequest({
144 url: servers[0].url,
145 path: '/feeds/videos.xml',
146 accept: 'application/xml',
147 expectedStatus: HttpStatusCode.OK_200
148 })
149 })
150
151 it('Should refuse to serve the endpoint without accept header', async function () {
152 await makeGetRequest({ url: servers[0].url, path: '/feeds/videos.xml', expectedStatus: HttpStatusCode.NOT_ACCEPTABLE_406 })
153 })
154 })
155
156 describe('Videos feed', function () {
157
158 describe('Podcast feed', function () {
159
160 it('Should contain a valid podcast:alternateEnclosure', async function () {
161 // Since podcast feeds should only work on the server they originate on,
162 // only test the first server where the videos reside
163 const rss = await servers[0].feed.getPodcastXML({ ignoreCache: false, channelId: rootChannelId })
164 expect(XMLValidator.validate(rss)).to.be.true
165
166 const parser = new XMLParser({ parseAttributeValue: true, ignoreAttributes: false })
167 const xmlDoc = parser.parse(rss)
168
169 const itemGuid = xmlDoc.rss.channel.item.guid
170 expect(itemGuid).to.exist
171 expect(itemGuid['@_isPermaLink']).to.equal(true)
172
173 const enclosure = xmlDoc.rss.channel.item.enclosure
174 expect(enclosure).to.exist
175 const alternateEnclosure = xmlDoc.rss.channel.item['podcast:alternateEnclosure']
176 expect(alternateEnclosure).to.exist
177
178 expect(alternateEnclosure['@_type']).to.equal('video/webm')
179 expect(alternateEnclosure['@_length']).to.equal(218910)
180 expect(alternateEnclosure['@_lang']).to.equal('zh')
181 expect(alternateEnclosure['@_title']).to.equal('720p')
182 expect(alternateEnclosure['@_default']).to.equal(true)
183
184 expect(alternateEnclosure['podcast:source'][0]['@_uri']).to.contain('-720.webm')
185 expect(alternateEnclosure['podcast:source'][0]['@_uri']).to.equal(enclosure['@_url'])
186 expect(alternateEnclosure['podcast:source'][1]['@_uri']).to.contain('-720.torrent')
187 expect(alternateEnclosure['podcast:source'][1]['@_contentType']).to.equal('application/x-bittorrent')
188 expect(alternateEnclosure['podcast:source'][2]['@_uri']).to.contain('magnet:?')
189 })
190
191 it('Should contain a valid podcast:alternateEnclosure with HLS only', async function () {
192 const rss = await serverHLSOnly.feed.getPodcastXML({ ignoreCache: false, channelId: rootChannelId })
193 expect(XMLValidator.validate(rss)).to.be.true
194
195 const parser = new XMLParser({ parseAttributeValue: true, ignoreAttributes: false })
196 const xmlDoc = parser.parse(rss)
197
198 const itemGuid = xmlDoc.rss.channel.item.guid
199 expect(itemGuid).to.exist
200 expect(itemGuid['@_isPermaLink']).to.equal(true)
201
202 const enclosure = xmlDoc.rss.channel.item.enclosure
203 const alternateEnclosure = xmlDoc.rss.channel.item['podcast:alternateEnclosure']
204 expect(alternateEnclosure).to.exist
205
206 expect(alternateEnclosure['@_type']).to.equal('application/x-mpegURL')
207 expect(alternateEnclosure['@_lang']).to.equal('zh')
208 expect(alternateEnclosure['@_title']).to.equal('HLS')
209 expect(alternateEnclosure['@_default']).to.equal(true)
210
211 expect(alternateEnclosure['podcast:source']['@_uri']).to.contain('-master.m3u8')
212 expect(alternateEnclosure['podcast:source']['@_uri']).to.equal(enclosure['@_url'])
213 })
214
215 it('Should contain a valid podcast:socialInteract', async function () {
216 const rss = await servers[0].feed.getPodcastXML({ ignoreCache: false, channelId: rootChannelId })
217 expect(XMLValidator.validate(rss)).to.be.true
218
219 const parser = new XMLParser({ parseAttributeValue: true, ignoreAttributes: false })
220 const xmlDoc = parser.parse(rss)
221
222 const item = xmlDoc.rss.channel.item
223 const socialInteract = item['podcast:socialInteract']
224 expect(socialInteract).to.exist
225 expect(socialInteract['@_protocol']).to.equal('activitypub')
226 expect(socialInteract['@_uri']).to.exist
227 expect(socialInteract['@_accountUrl']).to.exist
228 })
229
230      it('Should contain valid custom tags for plugins', async function () {
231 const rss = await servers[0].feed.getPodcastXML({ ignoreCache: false, channelId: userChannelId })
232 expect(XMLValidator.validate(rss)).to.be.true
233
234 const parser = new XMLParser({ parseAttributeValue: true, ignoreAttributes: false })
235 const xmlDoc = parser.parse(rss)
236
237 const fooTag = xmlDoc.rss.channel.fooTag
238 expect(fooTag).to.exist
239 expect(fooTag['@_bar']).to.equal('baz')
240 expect(fooTag['#text']).to.equal(42)
241
242 const bizzBuzzItem = xmlDoc.rss.channel['biz:buzzItem']
243 expect(bizzBuzzItem).to.exist
244
245 let nestedTag = bizzBuzzItem.nestedTag
246 expect(nestedTag).to.exist
247 expect(nestedTag).to.equal('example nested tag')
248
249 const item = xmlDoc.rss.channel.item
250 const fizzTag = item.fizzTag
251 expect(fizzTag).to.exist
252 expect(fizzTag['@_bar']).to.equal('baz')
253 expect(fizzTag['#text']).to.equal(21)
254
255 const bizzBuzz = item['biz:buzz']
256 expect(bizzBuzz).to.exist
257
258 nestedTag = bizzBuzz.nestedTag
259 expect(nestedTag).to.exist
260 expect(nestedTag).to.equal('example nested tag')
261 })
262
263 it('Should contain a valid podcast:liveItem for live streams', async function () {
264 this.timeout(120000)
265
266 const { uuid } = await servers[0].live.create({
267 fields: {
268 name: 'live-0',
269 privacy: VideoPrivacy.PUBLIC,
270 channelId: rootChannelId,
271 permanentLive: false
272 }
273 })
274 liveId = uuid
275
276 const ffmpeg = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveId, copyCodecs: true, fixtureName: 'video_short.mp4' })
277 await servers[0].live.waitUntilPublished({ videoId: liveId })
278
279 const rss = await servers[0].feed.getPodcastXML({ ignoreCache: false, channelId: rootChannelId })
280 expect(XMLValidator.validate(rss)).to.be.true
281
282 const parser = new XMLParser({ parseAttributeValue: true, ignoreAttributes: false })
283 const xmlDoc = parser.parse(rss)
284 const liveItem = xmlDoc.rss.channel['podcast:liveItem']
285 expect(liveItem.title).to.equal('live-0')
286 expect(liveItem.guid['@_isPermaLink']).to.equal(false)
287 expect(liveItem.guid['#text']).to.contain(`${uuid}_`)
288 expect(liveItem['@_status']).to.equal('live')
289
290 const enclosure = liveItem.enclosure
291 const alternateEnclosure = liveItem['podcast:alternateEnclosure']
292 expect(alternateEnclosure).to.exist
293 expect(alternateEnclosure['@_type']).to.equal('application/x-mpegURL')
294 expect(alternateEnclosure['@_title']).to.equal('HLS live stream')
295 expect(alternateEnclosure['@_default']).to.equal(true)
296
297 expect(alternateEnclosure['podcast:source']['@_uri']).to.contain('/master.m3u8')
298 expect(alternateEnclosure['podcast:source']['@_uri']).to.equal(enclosure['@_url'])
299
300 await stopFfmpeg(ffmpeg)
301
302 await servers[0].live.waitUntilEnded({ videoId: liveId })
303
304 await waitJobs(servers)
305 })
306 })
307
308 describe('JSON feed', function () {
309
310 it('Should contain a valid \'attachments\' object', async function () {
311 for (const server of servers) {
312 const json = await server.feed.getJSON({ feed: 'videos', ignoreCache: true })
313 const jsonObj = JSON.parse(json)
314 expect(jsonObj.items.length).to.be.equal(2)
315 expect(jsonObj.items[0].attachments).to.exist
316 expect(jsonObj.items[0].attachments.length).to.be.eq(1)
317 expect(jsonObj.items[0].attachments[0].mime_type).to.be.eq('application/x-bittorrent')
318 expect(jsonObj.items[0].attachments[0].size_in_bytes).to.be.eq(218910)
319 expect(jsonObj.items[0].attachments[0].url).to.contain('720.torrent')
320 }
321 })
322
323 it('Should filter by account', async function () {
324 {
325 const json = await servers[0].feed.getJSON({ feed: 'videos', query: { accountId: rootAccountId }, ignoreCache: true })
326 const jsonObj = JSON.parse(json)
327 expect(jsonObj.items.length).to.be.equal(1)
328 expect(jsonObj.items[0].title).to.equal('my super name for server 1')
329 expect(jsonObj.items[0].author.name).to.equal('Main root channel')
330 }
331
332 {
333 const json = await servers[0].feed.getJSON({ feed: 'videos', query: { accountId: userAccountId }, ignoreCache: true })
334 const jsonObj = JSON.parse(json)
335 expect(jsonObj.items.length).to.be.equal(1)
336 expect(jsonObj.items[0].title).to.equal('user video')
337 expect(jsonObj.items[0].author.name).to.equal('Main john channel')
338 }
339
340 for (const server of servers) {
341 {
342 const json = await server.feed.getJSON({ feed: 'videos', query: { accountName: 'root@' + servers[0].host }, ignoreCache: true })
343 const jsonObj = JSON.parse(json)
344 expect(jsonObj.items.length).to.be.equal(1)
345 expect(jsonObj.items[0].title).to.equal('my super name for server 1')
346 }
347
348 {
349 const json = await server.feed.getJSON({ feed: 'videos', query: { accountName: 'john@' + servers[0].host }, ignoreCache: true })
350 const jsonObj = JSON.parse(json)
351 expect(jsonObj.items.length).to.be.equal(1)
352 expect(jsonObj.items[0].title).to.equal('user video')
353 }
354 }
355 })
356
357 it('Should filter by video channel', async function () {
358 {
359 const json = await servers[0].feed.getJSON({ feed: 'videos', query: { videoChannelId: rootChannelId }, ignoreCache: true })
360 const jsonObj = JSON.parse(json)
361 expect(jsonObj.items.length).to.be.equal(1)
362 expect(jsonObj.items[0].title).to.equal('my super name for server 1')
363 expect(jsonObj.items[0].author.name).to.equal('Main root channel')
364 }
365
366 {
367 const json = await servers[0].feed.getJSON({ feed: 'videos', query: { videoChannelId: userChannelId }, ignoreCache: true })
368 const jsonObj = JSON.parse(json)
369 expect(jsonObj.items.length).to.be.equal(1)
370 expect(jsonObj.items[0].title).to.equal('user video')
371 expect(jsonObj.items[0].author.name).to.equal('Main john channel')
372 }
373
374 for (const server of servers) {
375 {
376 const query = { videoChannelName: 'root_channel@' + servers[0].host }
377 const json = await server.feed.getJSON({ feed: 'videos', query, ignoreCache: true })
378 const jsonObj = JSON.parse(json)
379 expect(jsonObj.items.length).to.be.equal(1)
380 expect(jsonObj.items[0].title).to.equal('my super name for server 1')
381 }
382
383 {
384 const query = { videoChannelName: 'john_channel@' + servers[0].host }
385 const json = await server.feed.getJSON({ feed: 'videos', query, ignoreCache: true })
386 const jsonObj = JSON.parse(json)
387 expect(jsonObj.items.length).to.be.equal(1)
388 expect(jsonObj.items[0].title).to.equal('user video')
389 }
390 }
391 })
392
393 it('Should correctly have videos feed with HLS only', async function () {
394 this.timeout(120000)
395
396 const json = await serverHLSOnly.feed.getJSON({ feed: 'videos', ignoreCache: true })
397 const jsonObj = JSON.parse(json)
398 expect(jsonObj.items.length).to.be.equal(1)
399 expect(jsonObj.items[0].attachments).to.exist
400 expect(jsonObj.items[0].attachments.length).to.be.eq(4)
401
402 for (let i = 0; i < 4; i++) {
403 expect(jsonObj.items[0].attachments[i].mime_type).to.be.eq('application/x-bittorrent')
404 expect(jsonObj.items[0].attachments[i].size_in_bytes).to.be.greaterThan(0)
405 expect(jsonObj.items[0].attachments[i].url).to.exist
406 }
407 })
408
409 it('Should not display waiting live videos', async function () {
410 const { uuid } = await servers[0].live.create({
411 fields: {
412 name: 'live',
413 privacy: VideoPrivacy.PUBLIC,
414 channelId: rootChannelId
415 }
416 })
417 liveId = uuid
418
419 const json = await servers[0].feed.getJSON({ feed: 'videos', ignoreCache: true })
420
421 const jsonObj = JSON.parse(json)
422 expect(jsonObj.items.length).to.be.equal(2)
423 expect(jsonObj.items[0].title).to.equal('my super name for server 1')
424 expect(jsonObj.items[1].title).to.equal('user video')
425 })
426
427 it('Should display published live videos', async function () {
428 this.timeout(120000)
429
430 const ffmpeg = await servers[0].live.sendRTMPStreamInVideo({ videoId: liveId, copyCodecs: true, fixtureName: 'video_short.mp4' })
431 await servers[0].live.waitUntilPublished({ videoId: liveId })
432
433 const json = await servers[0].feed.getJSON({ feed: 'videos', ignoreCache: true })
434
435 const jsonObj = JSON.parse(json)
436 expect(jsonObj.items.length).to.be.equal(3)
437 expect(jsonObj.items[0].title).to.equal('live')
438 expect(jsonObj.items[1].title).to.equal('my super name for server 1')
439 expect(jsonObj.items[2].title).to.equal('user video')
440
441 await stopFfmpeg(ffmpeg)
442 })
443
444 it('Should have the channel avatar as feed icon', async function () {
445 const json = await servers[0].feed.getJSON({ feed: 'videos', query: { videoChannelId: rootChannelId }, ignoreCache: true })
446
447 const jsonObj = JSON.parse(json)
448 const imageUrl = jsonObj.icon
449 expect(imageUrl).to.include('/lazy-static/avatars/')
450 await makeRawRequest({ url: imageUrl, expectedStatus: HttpStatusCode.OK_200 })
451 })
452 })
453 })
454
455 describe('Video comments feed', function () {
456
457 it('Should contain valid comments (covers JSON feed 1.0 endpoint) and not from unlisted/password protected videos', async function () {
458 for (const server of servers) {
459 const json = await server.feed.getJSON({ feed: 'video-comments', ignoreCache: true })
460
461 const jsonObj = JSON.parse(json)
462 expect(jsonObj.items.length).to.be.equal(2)
463 expect(jsonObj.items[0].content_html).to.contain('<p>super comment 2</p>')
464 expect(jsonObj.items[1].content_html).to.contain('<p>super comment 1</p>')
465 }
466 })
467
468 it('Should not list comments from muted accounts or instances', async function () {
469 this.timeout(30000)
470
471 const remoteHandle = 'root@' + servers[0].host
472
473 await servers[1].blocklist.addToServerBlocklist({ account: remoteHandle })
474
475 {
476 const json = await servers[1].feed.getJSON({ feed: 'video-comments', ignoreCache: true })
477 const jsonObj = JSON.parse(json)
478 expect(jsonObj.items.length).to.be.equal(0)
479 }
480
481 await servers[1].blocklist.removeFromServerBlocklist({ account: remoteHandle })
482
483 {
484 const videoUUID = (await servers[1].videos.quickUpload({ name: 'server 2' })).uuid
485 await waitJobs(servers)
486 await servers[0].comments.createThread({ videoId: videoUUID, text: 'super comment' })
487 await waitJobs(servers)
488
489 const json = await servers[1].feed.getJSON({ feed: 'video-comments', ignoreCache: true })
490 const jsonObj = JSON.parse(json)
491 expect(jsonObj.items.length).to.be.equal(3)
492 }
493
494 await servers[1].blocklist.addToMyBlocklist({ account: remoteHandle })
495
496 {
497 const json = await servers[1].feed.getJSON({ feed: 'video-comments', ignoreCache: true })
498 const jsonObj = JSON.parse(json)
499 expect(jsonObj.items.length).to.be.equal(2)
500 }
501 })
502 })
503
504 describe('Video feed from my subscriptions', function () {
505 let feeduserAccountId: number
506 let feeduserFeedToken: string
507
508 it('Should list no videos for a user with no videos and no subscriptions', async function () {
509 const attr = { username: 'feeduser', password: 'password' }
510 await servers[0].users.create({ username: attr.username, password: attr.password })
511 const feeduserAccessToken = await servers[0].login.getAccessToken(attr)
512
513 {
514 const user = await servers[0].users.getMyInfo({ token: feeduserAccessToken })
515 feeduserAccountId = user.account.id
516 }
517
518 {
519 const token = await servers[0].users.getMyScopedTokens({ token: feeduserAccessToken })
520 feeduserFeedToken = token.feedToken
521 }
522
523 {
524 const body = await servers[0].videos.listMySubscriptionVideos({ token: feeduserAccessToken })
525 expect(body.total).to.equal(0)
526
527 const query = { accountId: feeduserAccountId, token: feeduserFeedToken }
528 const json = await servers[0].feed.getJSON({ feed: 'subscriptions', query, ignoreCache: true })
529 const jsonObj = JSON.parse(json)
530 expect(jsonObj.items.length).to.be.equal(0) // no subscription, it should not list the instance's videos but list 0 videos
531 }
532 })
533
534 it('Should fail with an invalid token', async function () {
535 const query = { accountId: feeduserAccountId, token: 'toto' }
536 await servers[0].feed.getJSON({ feed: 'subscriptions', query, expectedStatus: HttpStatusCode.FORBIDDEN_403, ignoreCache: true })
537 })
538
539 it('Should fail with a token of another user', async function () {
540 const query = { accountId: feeduserAccountId, token: userFeedToken }
541 await servers[0].feed.getJSON({ feed: 'subscriptions', query, expectedStatus: HttpStatusCode.FORBIDDEN_403, ignoreCache: true })
542 })
543
544 it('Should list no videos for a user with videos but no subscriptions', async function () {
545 const body = await servers[0].videos.listMySubscriptionVideos({ token: userAccessToken })
546 expect(body.total).to.equal(0)
547
548 const query = { accountId: userAccountId, token: userFeedToken }
549 const json = await servers[0].feed.getJSON({ feed: 'subscriptions', query, ignoreCache: true })
550 const jsonObj = JSON.parse(json)
551 expect(jsonObj.items.length).to.be.equal(0) // no subscription, it should not list the instance's videos but list 0 videos
552 })
553
554 it('Should list self videos for a user with a subscription to themselves', async function () {
555 this.timeout(30000)
556
557 await servers[0].subscriptions.add({ token: userAccessToken, targetUri: 'john_channel@' + servers[0].host })
558 await waitJobs(servers)
559
560 {
561 const body = await servers[0].videos.listMySubscriptionVideos({ token: userAccessToken })
562 expect(body.total).to.equal(1)
563 expect(body.data[0].name).to.equal('user video')
564
565 const query = { accountId: userAccountId, token: userFeedToken }
566 const json = await servers[0].feed.getJSON({ feed: 'subscriptions', query, ignoreCache: true })
567 const jsonObj = JSON.parse(json)
568 expect(jsonObj.items.length).to.be.equal(1) // subscribed to self, it should not list the instance's videos but list john's
569 }
570 })
571
572 it('Should list videos of a user\'s subscription', async function () {
573 this.timeout(30000)
574
575 await servers[0].subscriptions.add({ token: userAccessToken, targetUri: 'root_channel@' + servers[0].host })
576 await waitJobs(servers)
577
578 {
579 const body = await servers[0].videos.listMySubscriptionVideos({ token: userAccessToken })
580 expect(body.total).to.equal(2, 'there should be 2 videos part of the subscription')
581
582 const query = { accountId: userAccountId, token: userFeedToken }
583 const json = await servers[0].feed.getJSON({ feed: 'subscriptions', query, ignoreCache: true })
584 const jsonObj = JSON.parse(json)
585 expect(jsonObj.items.length).to.be.equal(2) // subscribed to root, it should not list the instance's videos but list root/john's
586 }
587 })
588
589 it('Should renew the token, and so have an invalid old token', async function () {
590 await servers[0].users.renewMyScopedTokens({ token: userAccessToken })
591
592 const query = { accountId: userAccountId, token: userFeedToken }
593 await servers[0].feed.getJSON({ feed: 'subscriptions', query, expectedStatus: HttpStatusCode.FORBIDDEN_403, ignoreCache: true })
594 })
595
596 it('Should succeed with the new token', async function () {
597 const token = await servers[0].users.getMyScopedTokens({ token: userAccessToken })
598 userFeedToken = token.feedToken
599
600 const query = { accountId: userAccountId, token: userFeedToken }
601 await servers[0].feed.getJSON({ feed: 'subscriptions', query, ignoreCache: true })
602 })
603
604 })
605
606 describe('Cache', function () {
607 const uuids: string[] = []
608
609 function doPodcastRequest () {
610 return makeGetRequest({
611 url: servers[0].url,
612 path: '/feeds/podcast/videos.xml',
613 query: { videoChannelId: servers[0].store.channel.id },
614 accept: 'application/xml',
615 expectedStatus: HttpStatusCode.OK_200
616 })
617 }
618
619 function doVideosRequest (query: { [id: string]: string } = {}) {
620 return makeGetRequest({
621 url: servers[0].url,
622 path: '/feeds/videos.xml',
623 query,
624 accept: 'application/xml',
625 expectedStatus: HttpStatusCode.OK_200
626 })
627 }
628
629 before(async function () {
630 {
631 const { uuid } = await servers[0].videos.quickUpload({ name: 'cache 1' })
632 uuids.push(uuid)
633 }
634
635 {
636 const { uuid } = await servers[0].videos.quickUpload({ name: 'cache 2' })
637 uuids.push(uuid)
638 }
639 })
640
641 it('Should serve the videos endpoint as a cached request', async function () {
642 await doVideosRequest()
643
644 const res = await doVideosRequest()
645
646 expect(res.headers['x-api-cache-cached']).to.equal('true')
647 })
648
649 it('Should not serve the videos endpoint as a cached request', async function () {
650 const res = await doVideosRequest({ v: '186' })
651
652 expect(res.headers['x-api-cache-cached']).to.not.exist
653 })
654
655 it('Should invalidate the podcast feed cache after video deletion', async function () {
656 await doPodcastRequest()
657
658 {
659 const res = await doPodcastRequest()
660 expect(res.headers['x-api-cache-cached']).to.exist
661 }
662
663 await servers[0].videos.remove({ id: uuids[0] })
664
665 {
666 const res = await doPodcastRequest()
667 expect(res.headers['x-api-cache-cached']).to.not.exist
668 }
669 })
670
671 it('Should invalidate the podcast feed cache after video deletion, even after server restart', async function () {
672 this.timeout(120000)
673
674 await doPodcastRequest()
675
676 {
677 const res = await doPodcastRequest()
678 expect(res.headers['x-api-cache-cached']).to.exist
679 }
680
681 await servers[0].kill()
682 await servers[0].run()
683
684 await servers[0].videos.remove({ id: uuids[1] })
685
686 const res = await doPodcastRequest()
687 expect(res.headers['x-api-cache-cached']).to.not.exist
688 })
689
690 })
691
692 after(async function () {
693 await servers[0].plugins.uninstall({ npmName: 'peertube-plugin-test-podcast-custom-tags' })
694
695 await cleanupTests([ ...servers, serverHLSOnly ])
696 })
697})