// From https://github.com/MinEduTDF/idb-chunk-store
// We use a temporary IndexedDB database (all data is removed on destroy) to avoid RAM issues
// Thanks @santiagogil and @Feross

import { EventEmitter } from 'events'
import Dexie from 'dexie'
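// Per-torrent Dexie (IndexedDB) database: a single 'chunks' table storing each chunk Buffer, keyed by chunk index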
class ChunkDatabase extends Dexie {
  chunks: Dexie.Table<{ id: number, buf: Buffer }, number>

  constructor (dbname: string) {
    super(dbname)

    this.version(1).stores({
      chunks: 'id'
    })
  }
}
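// Shared database recording an expiration timestamp for every chunk database,
// so that databases left behind by closed or crashed tabs can be garbage collected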
class ExpirationDatabase extends Dexie {
  databases: Dexie.Table<{ name: string, expiration: number }, number>

  constructor () {
    super('webtorrent-expiration')

    this.version(1).stores({
      databases: 'name,expiration'
    })
  }
}
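// Chunk store backed by IndexedDB (via Dexie) instead of RAM. It follows the
// put/get/close/destroy contract of chunk stores such as idb-chunk-store (which it is
// based on), buffers writes into periodic bulk inserts, and drops expired databases.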
export class PeertubeChunkStore extends EventEmitter {
  private static readonly BUFFERING_PUT_MS = 1000
  private static readonly CLEANER_INTERVAL_MS = 1000 * 60 // 1 minute
  private static readonly CLEANER_EXPIRATION_MS = 1000 * 60 * 5 // 5 minutes

  chunkLength: number

  private pendingPut: { id: number, buf: Buffer, cb: Function }[] = []
  // Memory index of the chunks we have: `true` when the chunk lives in IndexedDB,
  // or the chunk itself when it had to be kept in memory (for example if the store is full)
  private memoryChunks: { [ id: number ]: Buffer | true } = {}
  private databaseName: string
  private putBulkTimeout
  private cleanerInterval
  private db: ChunkDatabase
  private expirationDB: ExpirationDatabase
  private readonly length: number
  private readonly lastChunkLength: number
  private readonly lastChunkIndex: number

  constructor (chunkLength: number, opts) {
    super()

    this.databaseName = 'webtorrent-chunks-'

    if (!opts) opts = {}
    if (opts.torrent && opts.torrent.infoHash) this.databaseName += opts.torrent.infoHash
    else this.databaseName += '-default'

    this.setMaxListeners(100)

    this.chunkLength = Number(chunkLength)
    if (!this.chunkLength) throw new Error('First argument must be a chunk length')

    this.length = Number(opts.length) || Infinity

    if (this.length !== Infinity) {
      this.lastChunkLength = (this.length % this.chunkLength) || this.chunkLength
      this.lastChunkIndex = Math.ceil(this.length / this.chunkLength) - 1
    }

    this.db = new ChunkDatabase(this.databaseName)
    // Track database expirations, so that stale databases can be dropped
    this.expirationDB = new ExpirationDatabase()

    this.runCleaner()
  }
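  // Store a chunk: validate its length, mark it as present in the memory index,
  // then queue it so pending chunks are written to IndexedDB in a single bulk insert.
  // If the bulk insert fails, the chunks are kept in memory instead.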
  put (index: number, buf: Buffer, cb: Function) {
    const isLastChunk = (index === this.lastChunkIndex)
    if (isLastChunk && buf.length !== this.lastChunkLength) {
      return this.nextTick(cb, new Error('Last chunk length must be ' + this.lastChunkLength))
    }
    if (!isLastChunk && buf.length !== this.chunkLength) {
      return this.nextTick(cb, new Error('Chunk length must be ' + this.chunkLength))
    }

    // Specify we have this chunk
    this.memoryChunks[index] = true

    // Add it to the pending puts
    this.pendingPut.push({ id: index, buf, cb })
    // If a bulk insert is already planned, return
    if (this.putBulkTimeout) return

    // Plan a future bulk insert
    this.putBulkTimeout = setTimeout(async () => {
      const processing = this.pendingPut
      this.pendingPut = []
      this.putBulkTimeout = undefined

      try {
        await this.db.transaction('rw', this.db.chunks, () => {
          return this.db.chunks.bulkPut(processing.map(p => ({ id: p.id, buf: p.buf })))
        })
      } catch (err) {
        console.log('Cannot bulk insert chunks. Storing them in memory.', { err })

        processing.forEach(p => this.memoryChunks[ p.id ] = p.buf)
      } finally {
        processing.forEach(p => p.cb())
      }
    }, PeertubeChunkStore.BUFFERING_PUT_MS)
  }
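  // Read a chunk, from the memory index if possible, otherwise from IndexedDB.
  // `opts.offset` and `opts.length` allow returning a slice of the chunk.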
  get (index: number, opts, cb) {
    if (typeof opts === 'function') return this.get(index, null, opts)

    // IndexedDB could be slow, use our memory index first
    const memoryChunk = this.memoryChunks[index]
    if (memoryChunk === undefined) return cb(null, Buffer.alloc(0))
    // Chunk in memory
    if (memoryChunk !== true) return cb(null, memoryChunk)

    // Chunk in store
    this.db.transaction('r', this.db.chunks, async () => {
      const result = await this.db.chunks.get({ id: index })
      if (result === undefined) return cb(null, Buffer.alloc(0))

      const buf = result.buf
      if (!opts) return this.nextTick(cb, null, buf)

      const offset = opts.offset || 0
      const len = opts.length || (buf.length - offset)
      return cb(null, buf.slice(offset, len + offset))
    })
      .catch(err => {
        console.error(err)
        return cb(err)
      })
  }
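  // The store is temporary, so closing it simply destroys it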
  close (db) {
    return this.destroy(db)
  }
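  // Cancel pending writes and the cleaner, then close and drop this store's database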
  async destroy (cb) {
    try {
      if (this.pendingPut) {
        clearTimeout(this.putBulkTimeout)
        this.pendingPut = null
      }
      if (this.cleanerInterval) {
        clearInterval(this.cleanerInterval)
        this.cleanerInterval = null
      }

      if (this.db) {
        await this.db.close()

        await this.dropDatabase(this.databaseName)
      }

      if (this.expirationDB) {
        await this.expirationDB.close()
        this.expirationDB = null
      }

      return cb()
    } catch (err) {
      console.error('Cannot destroy peertube chunk store.', err)
      return cb(err)
    }
  }
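  // Run the expiration check immediately, then every CLEANER_INTERVAL_MS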
  private runCleaner () {
    this.checkExpiration()

    this.cleanerInterval = setInterval(async () => {
      this.checkExpiration()
    }, PeertubeChunkStore.CLEANER_INTERVAL_MS)
  }
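  // Heartbeat: renew our own expiration entry, then collect the databases whose
  // expiration has passed (typically left behind by closed tabs) and drop them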
  private async checkExpiration () {
    let databasesToDeleteInfo: { name: string }[] = []

    try {
      await this.expirationDB.transaction('rw', this.expirationDB.databases, async () => {
        // Update our database expiration since we are alive
        await this.expirationDB.databases.put({
          name: this.databaseName,
          expiration: new Date().getTime() + PeertubeChunkStore.CLEANER_EXPIRATION_MS
        })

        const now = new Date().getTime()
        databasesToDeleteInfo = await this.expirationDB.databases.where('expiration').below(now).toArray()
      })
    } catch (err) {
      console.error('Cannot update expiration or fetch expired databases.', err)
    }

    for (const databaseToDeleteInfo of databasesToDeleteInfo) {
      await this.dropDatabase(databaseToDeleteInfo.name)
    }
  }
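  // Delete a chunk database and remove its entry from the expiration database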
  private async dropDatabase (databaseName: string) {
    const dbToDelete = new ChunkDatabase(databaseName)
    console.log('Destroying IndexedDB database %s.', databaseName)

    try {
      await dbToDelete.delete()

      await this.expirationDB.transaction('rw', this.expirationDB.databases, () => {
        return this.expirationDB.databases.where({ name: databaseName }).delete()
      })
    } catch (err) {
      console.error('Cannot delete %s.', databaseName, err)
    }
  }
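  // Always invoke the callback asynchronously, on the next tick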
  private nextTick (cb, err, val?) {
    process.nextTick(() => cb(err, val), undefined)
  }
}
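// Usage sketch (an assumption, not part of this file): WebTorrent accepts a custom chunk
// store class through the `store` option when adding a torrent, which is how a store with
// this constructor signature would typically be plugged in:
//
//   import * as WebTorrent from 'webtorrent'
//
//   const client = new WebTorrent()
//   client.add(magnetUri, { store: PeertubeChunkStore }, torrent => {
//     // WebTorrent instantiates the store itself, roughly as
//     // new PeertubeChunkStore(chunkLength, { torrent, length: torrent.length })
//   })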