| author | Chocobozzz <me@florianbigard.com> | 2018-05-14 10:57:07 +0200 |
|---|---|---|
| committer | Chocobozzz <me@florianbigard.com> | 2018-05-15 08:39:02 +0200 |
| commit | efda99c30f2c04702bf57cc150cdfdd0acccc178 (patch) | |
| tree | 2d7a0807189f75ed75365f86d85e57211c8d3e74 /client/src/assets | |
| parent | 4de2fafcad4b56449e3817825d4b317c5ed4696c (diff) | |
Store webtorrent chunks in IndexedDB
Diffstat (limited to 'client/src/assets')

| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | client/src/assets/player/peertube-chunk-store.ts | 217 |
| -rw-r--r-- | client/src/assets/player/peertube-videojs-plugin.ts | 19 |

2 files changed, 227 insertions, 9 deletions
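The commit swaps webtorrent's default in-memory chunk storage for an IndexedDB-backed store. For context, webtorrent accepts any object that follows the abstract-chunk-store contract (callback-based put/get/close/destroy); the PeertubeChunkStore added below implements that contract on top of Dexie. The sketch below is a minimal, purely in-memory illustration of the shape of that contract; it is not PeerTube code, and the class name is made up.

```typescript
// Hypothetical minimal illustration of the abstract-chunk-store contract that
// webtorrent expects from a custom store. Purely in-memory; the real
// PeertubeChunkStore below persists chunks to IndexedDB instead.
class MemoryChunkStore {
  private chunks: { [ id: number ]: Buffer } = {}

  constructor (public chunkLength: number) {}

  put (index: number, buf: Buffer, cb: (err?: Error) => void) {
    this.chunks[index] = buf
    process.nextTick(() => cb())
  }

  get (index: number, opts: { offset?: number, length?: number } | null, cb: (err: Error | null, buf?: Buffer) => void) {
    const buf = this.chunks[index]
    if (!buf) return process.nextTick(() => cb(new Error('Chunk not stored')))

    const offset = (opts && opts.offset) || 0
    const length = (opts && opts.length) || (buf.length - offset)
    return process.nextTick(() => cb(null, buf.slice(offset, offset + length)))
  }

  close (cb: (err?: Error) => void) { process.nextTick(() => cb()) }

  destroy (cb: (err?: Error) => void) {
    this.chunks = {}
    process.nextTick(() => cb())
  }
}
```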
diff --git a/client/src/assets/player/peertube-chunk-store.ts b/client/src/assets/player/peertube-chunk-store.ts
new file mode 100644
index 000000000..005e98a81
--- /dev/null
+++ b/client/src/assets/player/peertube-chunk-store.ts
@@ -0,0 +1,217 @@
+// From https://github.com/MinEduTDF/idb-chunk-store
+// We use a temporary IndexedDB database (all data are removed on destroy) to avoid RAM issues
+// Thanks @santiagogil and @Feross
+
+import { EventEmitter } from 'events'
+import Dexie from 'dexie'
+
+class ChunkDatabase extends Dexie {
+  chunks: Dexie.Table<{ id: number, buf: Buffer }, number>
+
+  constructor (dbname: string) {
+    super(dbname)
+
+    this.version(1).stores({
+      chunks: 'id'
+    })
+  }
+}
+
+class ExpirationDatabase extends Dexie {
+  databases: Dexie.Table<{ name: string, expiration: number }, number>
+
+  constructor () {
+    super('webtorrent-expiration')
+
+    this.version(1).stores({
+      databases: 'name,expiration'
+    })
+  }
+}
+
+export class PeertubeChunkStore extends EventEmitter {
+  private static readonly BUFFERING_PUT_MS = 1000
+  private static readonly CLEANER_INTERVAL_MS = 1000 * 60 // 1 minute
+  private static readonly CLEANER_EXPIRATION_MS = 1000 * 60 * 5 // 5 minutes
+
+  chunkLength: number
+
+  private pendingPut: { id: number, buf: Buffer, cb: Function }[] = []
+  // If the store is full
+  private memoryChunks: { [ id: number ]: Buffer | true } = {}
+  private databaseName: string
+  private putBulkTimeout
+  private cleanerInterval
+  private db: ChunkDatabase
+  private expirationDB: ExpirationDatabase
+  private readonly length: number
+  private readonly lastChunkLength: number
+  private readonly lastChunkIndex: number
+
+  constructor (chunkLength: number, opts) {
+    super()
+
+    this.databaseName = 'webtorrent-chunks-'
+
+    if (!opts) opts = {}
+    if (opts.torrent && opts.torrent.infoHash) this.databaseName += opts.torrent.infoHash
+    else this.databaseName += '-default'
+
+    this.setMaxListeners(100)
+
+    this.chunkLength = Number(chunkLength)
+    if (!this.chunkLength) throw new Error('First argument must be a chunk length')
+
+    this.length = Number(opts.length) || Infinity
+
+    if (this.length !== Infinity) {
+      this.lastChunkLength = (this.length % this.chunkLength) || this.chunkLength
+      this.lastChunkIndex = Math.ceil(this.length / this.chunkLength) - 1
+    }
+
+    this.db = new ChunkDatabase(this.databaseName)
+    // Track databases that expired
+    this.expirationDB = new ExpirationDatabase()
+
+    this.runCleaner()
+  }
+
+  put (index: number, buf: Buffer, cb: Function) {
+    const isLastChunk = (index === this.lastChunkIndex)
+    if (isLastChunk && buf.length !== this.lastChunkLength) {
+      return this.nextTick(cb, new Error('Last chunk length must be ' + this.lastChunkLength))
+    }
+    if (!isLastChunk && buf.length !== this.chunkLength) {
+      return this.nextTick(cb, new Error('Chunk length must be ' + this.chunkLength))
+    }
+
+    // Specify we have this chunk
+    this.memoryChunks[index] = true
+
+    // Add it to the pending put
+    this.pendingPut.push({ id: index, buf, cb })
+    // If it's already planned, return
+    if (this.putBulkTimeout) return
+
+    // Plan a future bulk insert
+    this.putBulkTimeout = setTimeout(async () => {
+      const processing = this.pendingPut
+      this.pendingPut = []
+      this.putBulkTimeout = undefined
+
+      try {
+        await this.db.transaction('rw', this.db.chunks, () => {
+          return this.db.chunks.bulkPut(processing.map(p => ({ id: p.id, buf: p.buf })))
+        })
+      } catch (err) {
+        console.log('Cannot bulk insert chunks. Store them in memory.', { err })
+
+        processing.forEach(p => this.memoryChunks[ p.id ] = p.buf)
+      } finally {
+        processing.forEach(p => p.cb())
+      }
+    }, PeertubeChunkStore.BUFFERING_PUT_MS)
+  }
+
+  get (index: number, opts, cb) {
+    if (typeof opts === 'function') return this.get(index, null, opts)
+
+    // IndexedDB could be slow, use our memory index first
+    const memoryChunk = this.memoryChunks[index]
+    if (memoryChunk === undefined) return cb(null, new Buffer(0))
+    // Chunk in memory
+    if (memoryChunk !== true) return cb(null, memoryChunk)
+
+    // Chunk in store
+    this.db.transaction('r', this.db.chunks, async () => {
+      const result = await this.db.chunks.get({ id: index })
+      if (result === undefined) return cb(null, new Buffer(0))
+
+      const buf = result.buf
+      if (!opts) return this.nextTick(cb, null, buf)
+
+      const offset = opts.offset || 0
+      const len = opts.length || (buf.length - offset)
+      return cb(null, buf.slice(offset, len + offset))
+    })
+    .catch(err => {
+      console.error(err)
+      return cb(err)
+    })
+  }
+
+  close (db) {
+    return this.destroy(db)
+  }
+
+  async destroy (cb) {
+    try {
+      if (this.pendingPut) {
+        clearTimeout(this.putBulkTimeout)
+        this.pendingPut = null
+      }
+      if (this.cleanerInterval) {
+        clearInterval(this.cleanerInterval)
+        this.cleanerInterval = null
+      }
+
+      if (this.expirationDB) {
+        await this.expirationDB.close()
+        this.expirationDB = null
+      }
+
+      if (this.db) {
+        console.log('Destroying IndexedDB database %s.', this.databaseName)
+        await this.db.close()
+        await this.db.delete()
+      }
+
+      return cb()
+    } catch (err) {
+      console.error('Cannot destroy peertube chunk store.', err)
+      return cb(err)
+    }
+  }
+
+  private runCleaner () {
+    this.checkExpiration()
+
+    this.cleanerInterval = setInterval(async () => {
+      this.checkExpiration()
+    }, PeertubeChunkStore.CLEANER_INTERVAL_MS)
+  }
+
+  private checkExpiration () {
+    this.expirationDB.transaction('rw', this.expirationDB.databases, async () => {
+      try {
+        // Update our database expiration since we are alive
+        await this.expirationDB.databases.put({
+          name: this.databaseName,
+          expiration: new Date().getTime() + PeertubeChunkStore.CLEANER_EXPIRATION_MS
+        })
+
+        const now = new Date().getTime()
+        const databasesToDeleteInfo = await this.expirationDB.databases.where('expiration').below(now).toArray()
+
+        for (const databaseToDeleteInfo of databasesToDeleteInfo) {
+          await this.dropDatabase(databaseToDeleteInfo.name)
+
+          await this.expirationDB.databases.where({ name: databaseToDeleteInfo.name }).delete()
+        }
+      } catch (err) {
+        console.error('Cannot check expiration.', err)
+      }
+    })
+  }
+
+  private dropDatabase (databaseName: string) {
+    const dbToDelete = new ChunkDatabase(databaseName)
+
+    console.log('Deleting %s.', databaseName)
+    return dbToDelete.delete()
+  }
+
+  private nextTick (cb, err, val?) {
+    process.nextTick(() => cb(err, val), undefined)
+  }
+}
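To make the store's behaviour concrete, here is a hypothetical standalone use of it outside webtorrent; the chunk length, total length and info hash are made-up values. put() callbacks only fire after the buffered bulkPut runs (BUFFERING_PUT_MS later), and get() consults the in-memory index before touching IndexedDB:

```typescript
import { PeertubeChunkStore } from './peertube-chunk-store'

// Hypothetical values: 4-byte chunks, 8 bytes total, fake info hash
const store = new PeertubeChunkStore(4, { length: 8, torrent: { infoHash: 'abcd' } })

store.put(0, Buffer.from('aaaa'), (err?: Error) => {
  if (err) return console.error(err)

  // Fires after the buffered bulkPut, roughly BUFFERING_PUT_MS later
  store.get(0, null, (getErr: Error | null, chunk?: Buffer) => {
    if (getErr) return console.error(getErr)
    console.log(chunk && chunk.toString()) // 'aaaa'
  })
})

// Deletes the temporary 'webtorrent-chunks-abcd' database when done
// store.destroy(() => console.log('Store destroyed'))
```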
diff --git a/client/src/assets/player/peertube-videojs-plugin.ts b/client/src/assets/player/peertube-videojs-plugin.ts
index a4f99559c..ac04421a7 100644
--- a/client/src/assets/player/peertube-videojs-plugin.ts
+++ b/client/src/assets/player/peertube-videojs-plugin.ts
@@ -4,16 +4,11 @@ import { VideoFile } from '../../../../shared/models/videos/video.model'
 import { renderVideo } from './video-renderer'
 import './settings-menu-button'
 import { PeertubePluginOptions, VideoJSComponentInterface, videojsUntyped } from './peertube-videojs-typings'
-import {
-  getAverageBandwidth,
-  getStoredMute,
-  getStoredVolume,
-  saveAverageBandwidth,
-  saveMuteInStore,
-  saveVolumeInStore
-} from './utils'
+import { getAverageBandwidth, getStoredMute, getStoredVolume, saveAverageBandwidth, saveMuteInStore, saveVolumeInStore } from './utils'
 import minBy from 'lodash-es/minBy'
 import maxBy from 'lodash-es/maxBy'
+import * as CacheChunkStore from 'cache-chunk-store'
+import { PeertubeChunkStore } from './peertube-chunk-store'
 
 const webtorrent = new WebTorrent({
   tracker: {
@@ -169,7 +164,13 @@ class PeerTubePlugin extends Plugin {
     console.log('Adding ' + magnetOrTorrentUrl + '.')
 
     const oldTorrent = this.torrent
-    this.torrent = webtorrent.add(magnetOrTorrentUrl, torrent => {
+    const options = {
+      store: (chunkLength, storeOpts) => new CacheChunkStore(new PeertubeChunkStore(chunkLength, storeOpts), {
+        max: 100
+      })
+    }
+
+    this.torrent = webtorrent.add(magnetOrTorrentUrl, options, torrent => {
      console.log('Added ' + magnetOrTorrentUrl + '.')
 
       // Pause the old torrent
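The player change above wraps the IndexedDB store in cache-chunk-store, so the most recently used chunks stay in RAM while the full set lives in the temporary database. A rough standalone sketch of that composition follows; the chunk length is arbitrary and the snippet is illustrative, not part of the commit:

```typescript
import * as CacheChunkStore from 'cache-chunk-store'
import { PeertubeChunkStore } from './peertube-chunk-store'

// Keep at most 100 chunks in the in-memory LRU, matching the player options above
const cachedStore = new CacheChunkStore(new PeertubeChunkStore(16384, {}), { max: 100 })

cachedStore.put(0, Buffer.alloc(16384), (err?: Error) => {
  if (err) return console.error(err)

  // Reads go through the LRU cache before falling back to IndexedDB
  cachedStore.get(0, (getErr: Error | null, chunk?: Buffer) => {
    if (getErr) return console.error(getErr)
    console.log(chunk && chunk.length) // 16384
  })
})
```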