29 changed files with 7027 additions and 13 deletions
-
3src/views/components/jsmpeg/index.js
-
7src/views/components/jsmpeg/modules/audio-output/index.js
-
157src/views/components/jsmpeg/modules/audio-output/webaudio.js
-
191src/views/components/jsmpeg/modules/buffer.js
-
114src/views/components/jsmpeg/modules/decoder/decoder.js
-
15src/views/components/jsmpeg/modules/decoder/index.js
-
134src/views/components/jsmpeg/modules/decoder/mp2-wasm.js
-
831src/views/components/jsmpeg/modules/decoder/mp2.js
-
155src/views/components/jsmpeg/modules/decoder/mpeg1-wasm.js
-
2846src/views/components/jsmpeg/modules/decoder/mpeg1.js
-
7src/views/components/jsmpeg/modules/demuxer/index.js
-
228src/views/components/jsmpeg/modules/demuxer/ts.js
-
94src/views/components/jsmpeg/modules/jsmpeg.js
-
728src/views/components/jsmpeg/modules/player.js
-
130src/views/components/jsmpeg/modules/renderer/canvas2d.js
-
9src/views/components/jsmpeg/modules/renderer/index.js
-
307src/views/components/jsmpeg/modules/renderer/webgl.js
-
133src/views/components/jsmpeg/modules/source/ajax-progressive.js
-
68src/views/components/jsmpeg/modules/source/ajax.js
-
80src/views/components/jsmpeg/modules/source/fetch.js
-
13src/views/components/jsmpeg/modules/source/index.js
-
237src/views/components/jsmpeg/modules/source/websocket.js
-
178src/views/components/jsmpeg/modules/video-element.js
-
159src/views/components/jsmpeg/modules/wasm-module.js
-
94src/views/components/jsmpeg/types/index.d.ts
-
70src/views/components/jsmpeg/utils/index.js
-
26src/views/storeManage/warehouse3D/index.vue
-
22src/views/storeManage/warehouse3D/module/video.vue
-
4src/views/storeManage/warehouse3D/readRoom/index.vue
@ -0,0 +1,3 @@ |
|||
import JSMpeg from './modules/jsmpeg' |
|||
|
|||
export default JSMpeg |
@ -0,0 +1,7 @@ |
|||
import WebAudioOut from './webaudio' |
|||
|
|||
const AudioOutput = { |
|||
WebAudio: WebAudioOut |
|||
} |
|||
|
|||
export default AudioOutput |
@ -0,0 +1,157 @@ |
|||
import { Now } from '../../utils' |
|||
|
|||
export default class WebAudioOut { |
|||
/** @type {AudioContext} */ |
|||
context |
|||
/** @type {GainNode} */ |
|||
gain |
|||
/** @type {GainNode} */ |
|||
destination |
|||
/** @type {number} */ |
|||
startTime |
|||
buffer |
|||
/** @type {number} */ |
|||
wallclockStartTime |
|||
/** @type {number} */ |
|||
volume |
|||
/** @type {boolean} */ |
|||
enabled |
|||
/** @type {boolean} */ |
|||
unlocked |
|||
constructor(options) { |
|||
this.context = WebAudioOut.CachedContext = |
|||
WebAudioOut.CachedContext || |
|||
new (window.AudioContext || window.webkitAudioContext)() |
|||
|
|||
this.gain = this.context.createGain() |
|||
this.destination = this.gain |
|||
|
|||
// Keep track of the number of connections to this AudioContext, so we
|
|||
// can safely close() it when we're the only one connected to it.
|
|||
this.gain.connect(this.context.destination) |
|||
this.context._connections = (this.context._connections || 0) + 1 |
|||
|
|||
this.startTime = 0 |
|||
this.buffer = null |
|||
this.wallclockStartTime = 0 |
|||
this.volume = 1 |
|||
this.enabled = true |
|||
|
|||
this.unlocked = !WebAudioOut.NeedsUnlocking() |
|||
|
|||
Object.defineProperty(this, 'enqueuedTime', { get: this.getEnqueuedTime }) |
|||
} |
|||
|
|||
destroy() { |
|||
this.gain.disconnect() |
|||
this.context._connections-- |
|||
|
|||
if (this.context._connections === 0) { |
|||
this.context.close() |
|||
WebAudioOut.CachedContext = null |
|||
} |
|||
} |
|||
|
|||
play(sampleRate, left, right) { |
|||
if (!this.enabled) { |
|||
return |
|||
} |
|||
|
|||
// If the context is not unlocked yet, we simply advance the start time
|
|||
// to "fake" actually playing audio. This will keep the video in sync.
|
|||
if (!this.unlocked) { |
|||
const ts = Now() |
|||
if (this.wallclockStartTime < ts) { |
|||
this.wallclockStartTime = ts |
|||
} |
|||
this.wallclockStartTime += left.length / sampleRate |
|||
return |
|||
} |
|||
|
|||
this.gain.gain.value = this.volume |
|||
|
|||
const buffer = this.context.createBuffer(2, left.length, sampleRate) |
|||
buffer.getChannelData(0).set(left) |
|||
buffer.getChannelData(1).set(right) |
|||
|
|||
const source = this.context.createBufferSource() |
|||
source.buffer = buffer |
|||
source.connect(this.destination) |
|||
|
|||
const now = this.context.currentTime |
|||
const duration = buffer.duration |
|||
if (this.startTime < now) { |
|||
this.startTime = now |
|||
this.wallclockStartTime = Now() |
|||
} |
|||
|
|||
source.start(this.startTime) |
|||
this.startTime += duration |
|||
this.wallclockStartTime += duration |
|||
} |
|||
|
|||
stop() { |
|||
// Meh; there seems to be no simple way to get a list of currently
|
|||
// active source nodes from the Audio Context, and maintaining this
|
|||
// list ourselfs would be a pain, so we just set the gain to 0
|
|||
// to cut off all enqueued audio instantly.
|
|||
this.gain.gain.value = 0 |
|||
} |
|||
|
|||
getEnqueuedTime() { |
|||
// The AudioContext.currentTime is only updated every so often, so if we
|
|||
// want to get exact timing, we need to rely on the system time.
|
|||
return Math.max(this.wallclockStartTime - Now(), 0) |
|||
} |
|||
|
|||
resetEnqueuedTime() { |
|||
this.startTime = this.context.currentTime |
|||
this.wallclockStartTime = Now() |
|||
} |
|||
|
|||
unlock(callback) { |
|||
if (this.unlocked) { |
|||
if (callback) { |
|||
callback() |
|||
} |
|||
return |
|||
} |
|||
|
|||
this.unlockCallback = callback |
|||
|
|||
// Create empty buffer and play it
|
|||
const buffer = this.context.createBuffer(1, 1, 22050) |
|||
const source = this.context.createBufferSource() |
|||
source.buffer = buffer |
|||
source.connect(this.destination) |
|||
source.start(0) |
|||
|
|||
setTimeout(this.checkIfUnlocked.bind(this, source, 0), 0) |
|||
} |
|||
|
|||
checkIfUnlocked(source, attempt) { |
|||
if ( |
|||
source.playbackState === source.PLAYING_STATE || |
|||
source.playbackState === source.FINISHED_STATE |
|||
) { |
|||
this.unlocked = true |
|||
if (this.unlockCallback) { |
|||
this.unlockCallback() |
|||
this.unlockCallback = null |
|||
} |
|||
} else if (attempt < 10) { |
|||
// Jeez, what a shit show. Thanks iOS!
|
|||
setTimeout(this.checkIfUnlocked.bind(this, source, attempt + 1), 100) |
|||
} |
|||
} |
|||
|
|||
static NeedsUnlocking() { |
|||
return /iPhone|iPad|iPod/i.test(navigator.userAgent) |
|||
} |
|||
|
|||
static IsSupported() { |
|||
return window.AudioContext || window.webkitAudioContext |
|||
} |
|||
|
|||
static CachedContext = null |
|||
} |
@ -0,0 +1,191 @@ |
|||
/* eslint-disable */ |
|||
'use strict' |
|||
export default class BitBuffer { |
|||
/** @type {Uint8Array} */ |
|||
bytes |
|||
/** @type {number} */ |
|||
byteLength |
|||
/** @type {1|2} */ |
|||
mode |
|||
/** @type {number} */ |
|||
index |
|||
constructor(bufferOrLength, mode = BitBuffer.MODE.EXPAND) { |
|||
if (typeof bufferOrLength === 'object') { |
|||
this.bytes = |
|||
bufferOrLength instanceof Uint8Array |
|||
? bufferOrLength |
|||
: new Uint8Array(bufferOrLength) |
|||
|
|||
this.byteLength = this.bytes.length |
|||
} else { |
|||
this.bytes = new Uint8Array(bufferOrLength || 1024 * 1024) |
|||
this.byteLength = 0 |
|||
} |
|||
|
|||
this.mode = mode |
|||
this.index = 0 |
|||
} |
|||
|
|||
resize(size) { |
|||
const newBytes = new Uint8Array(size) |
|||
if (this.byteLength !== 0) { |
|||
this.byteLength = Math.min(this.byteLength, size) |
|||
newBytes.set(this.bytes, 0, this.byteLength) |
|||
} |
|||
this.bytes = newBytes |
|||
this.index = Math.min(this.index, this.byteLength << 3) |
|||
} |
|||
|
|||
evict(sizeNeeded) { |
|||
const bytePos = this.index >> 3 |
|||
const available = this.bytes.length - this.byteLength |
|||
|
|||
// If the current index is the write position, we can simply reset both
|
|||
// to 0. Also reset (and throw away yet unread data) if we won't be able
|
|||
// to fit the new data in even after a normal eviction.
|
|||
if ( |
|||
this.index === this.byteLength << 3 || |
|||
sizeNeeded > available + bytePos // emergency evac
|
|||
) { |
|||
this.byteLength = 0 |
|||
this.index = 0 |
|||
return |
|||
} else if (bytePos === 0) { |
|||
// Nothing read yet - we can't evict anything
|
|||
return |
|||
} |
|||
|
|||
// Some browsers don't support copyWithin() yet - we may have to do
|
|||
// it manually using set and a subarray
|
|||
if (this.bytes.copyWithin) { |
|||
this.bytes.copyWithin(0, bytePos, this.byteLength) |
|||
} else { |
|||
this.bytes.set(this.bytes.subarray(bytePos, this.byteLength)) |
|||
} |
|||
|
|||
this.byteLength = this.byteLength - bytePos |
|||
this.index -= bytePos << 3 |
|||
return |
|||
} |
|||
|
|||
write(buffers) { |
|||
const isArrayOfBuffers = typeof buffers[0] === 'object' |
|||
let totalLength = 0 |
|||
const available = this.bytes.length - this.byteLength |
|||
|
|||
// Calculate total byte length
|
|||
if (isArrayOfBuffers) { |
|||
// let totalLength = 0
|
|||
for (let i = 0; i < buffers.length; i++) { |
|||
totalLength += buffers[i].byteLength |
|||
} |
|||
} else { |
|||
totalLength = buffers.byteLength |
|||
} |
|||
|
|||
// Do we need to resize or evict?
|
|||
if (totalLength > available) { |
|||
if (this.mode === BitBuffer.MODE.EXPAND) { |
|||
const newSize = Math.max(this.bytes.length * 2, totalLength - available) |
|||
this.resize(newSize) |
|||
} else { |
|||
this.evict(totalLength) |
|||
} |
|||
} |
|||
|
|||
if (isArrayOfBuffers) { |
|||
for (let i = 0; i < buffers.length; i++) { |
|||
this.appendSingleBuffer(buffers[i]) |
|||
} |
|||
} else { |
|||
this.appendSingleBuffer(buffers) |
|||
} |
|||
|
|||
return totalLength |
|||
} |
|||
|
|||
appendSingleBuffer(buffer) { |
|||
buffer = buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer) |
|||
|
|||
this.bytes.set(buffer, this.byteLength) |
|||
this.byteLength += buffer.length |
|||
} |
|||
|
|||
findNextStartCode() { |
|||
for (let i = (this.index + 7) >> 3; i < this.byteLength; i++) { |
|||
if ( |
|||
this.bytes[i] === 0x00 && |
|||
this.bytes[i + 1] === 0x00 && |
|||
this.bytes[i + 2] === 0x01 |
|||
) { |
|||
this.index = (i + 4) << 3 |
|||
return this.bytes[i + 3] |
|||
} |
|||
} |
|||
this.index = this.byteLength << 3 |
|||
return -1 |
|||
} |
|||
|
|||
findStartCode(code) { |
|||
let current = 0 |
|||
while (true) { |
|||
current = this.findNextStartCode() |
|||
if (current === code || current === -1) { |
|||
return current |
|||
} |
|||
} |
|||
return -1 |
|||
} |
|||
|
|||
nextBytesAreStartCode() { |
|||
const i = (this.index + 7) >> 3 |
|||
return ( |
|||
i >= this.byteLength || |
|||
(this.bytes[i] === 0x00 && |
|||
this.bytes[i + 1] === 0x00 && |
|||
this.bytes[i + 2] === 0x01) |
|||
) |
|||
} |
|||
|
|||
peek(count) { |
|||
let offset = this.index |
|||
let value = 0 |
|||
while (count) { |
|||
const currentByte = this.bytes[offset >> 3] |
|||
const remaining = 8 - (offset & 7) // remaining bits in byte
|
|||
const read = remaining < count ? remaining : count // bits in this run
|
|||
const shift = remaining - read |
|||
const mask = 0xff >> (8 - read) |
|||
|
|||
value = (value << read) | ((currentByte & (mask << shift)) >> shift) |
|||
|
|||
offset += read |
|||
count -= read |
|||
} |
|||
|
|||
return value |
|||
} |
|||
|
|||
read(count) { |
|||
const value = this.peek(count) |
|||
this.index += count |
|||
return value |
|||
} |
|||
|
|||
skip(count) { |
|||
return (this.index += count) |
|||
} |
|||
|
|||
rewind(count) { |
|||
this.index = Math.max(this.index - count, 0) |
|||
} |
|||
|
|||
has(count) { |
|||
return (this.byteLength << 3) - this.index >= count |
|||
} |
|||
|
|||
static MODE = { |
|||
EVICT: 1, |
|||
EXPAND: 2 |
|||
} |
|||
} |
@ -0,0 +1,114 @@ |
|||
/* eslint-disable */ |
|||
import WebAudioOut from '../audio-output/webaudio' |
|||
import CanvasRenderer from '../renderer/canvas2d' |
|||
import WebGLRenderer from '../renderer/webgl' |
|||
|
|||
export default class BaseDecoder { |
|||
/** |
|||
* @type {WebGLRenderer|CanvasRenderer|WebAudioOut} |
|||
*/ |
|||
destination |
|||
constructor(options) { |
|||
this.destination = null |
|||
this.canPlay = false |
|||
|
|||
this.collectTimestamps = !options.streaming |
|||
this.bytesWritten = 0 |
|||
this.timestamps = [] |
|||
this.timestampIndex = 0 |
|||
|
|||
this.startTime = 0 |
|||
this.decodedTime = 0 |
|||
|
|||
Object.defineProperty(this, 'currentTime', { get: this.getCurrentTime }) |
|||
} |
|||
|
|||
destroy() {} |
|||
|
|||
connect(destination) { |
|||
this.destination = destination |
|||
} |
|||
|
|||
bufferGetIndex() { |
|||
return this.bits.index |
|||
} |
|||
|
|||
bufferSetIndex(index) { |
|||
this.bits.index = index |
|||
} |
|||
|
|||
bufferWrite(buffers) { |
|||
return this.bits.write(buffers) |
|||
} |
|||
|
|||
write(pts, buffers) { |
|||
if (this.collectTimestamps) { |
|||
if (this.timestamps.length === 0) { |
|||
this.startTime = pts |
|||
this.decodedTime = pts |
|||
} |
|||
this.timestamps.push({ index: this.bytesWritten << 3, time: pts }) |
|||
} |
|||
|
|||
this.bytesWritten += this.bufferWrite(buffers) |
|||
this.canPlay = true |
|||
} |
|||
|
|||
seek(time) { |
|||
if (!this.collectTimestamps) { |
|||
return |
|||
} |
|||
|
|||
this.timestampIndex = 0 |
|||
for (let i = 0; i < this.timestamps.length; i++) { |
|||
if (this.timestamps[i].time > time) { |
|||
break |
|||
} |
|||
this.timestampIndex = i |
|||
} |
|||
|
|||
const ts = this.timestamps[this.timestampIndex] |
|||
if (ts) { |
|||
this.bufferSetIndex(ts.index) |
|||
this.decodedTime = ts.time |
|||
} else { |
|||
this.bufferSetIndex(0) |
|||
this.decodedTime = this.startTime |
|||
} |
|||
} |
|||
|
|||
decode() { |
|||
this.advanceDecodedTime(0) |
|||
} |
|||
|
|||
advanceDecodedTime(seconds) { |
|||
if (this.collectTimestamps) { |
|||
let newTimestampIndex = -1 |
|||
const currentIndex = this.bufferGetIndex() |
|||
for (let i = this.timestampIndex; i < this.timestamps.length; i++) { |
|||
if (this.timestamps[i].index > currentIndex) { |
|||
break |
|||
} |
|||
newTimestampIndex = i |
|||
} |
|||
|
|||
// Did we find a new PTS, different from the last? If so, we don't have
|
|||
// to advance the decoded time manually and can instead sync it exactly
|
|||
// to the PTS.
|
|||
if ( |
|||
newTimestampIndex !== -1 && |
|||
newTimestampIndex !== this.timestampIndex |
|||
) { |
|||
this.timestampIndex = newTimestampIndex |
|||
this.decodedTime = this.timestamps[this.timestampIndex].time |
|||
return |
|||
} |
|||
} |
|||
|
|||
this.decodedTime += seconds |
|||
} |
|||
|
|||
getCurrentTime() { |
|||
return this.decodedTime |
|||
} |
|||
} |
@ -0,0 +1,15 @@ |
|||
import BaseDecoder from './decoder' |
|||
import MPEG1 from './mpeg1' |
|||
import MPEG1WASM from './mpeg1-wasm' |
|||
import MP2 from './mp2' |
|||
import MP2WASM from './mp2-wasm' |
|||
|
|||
const Decoder = { |
|||
Base: BaseDecoder, |
|||
MPEG1Video: MPEG1, |
|||
MPEG1VideoWASM: MPEG1WASM, |
|||
MP2Audio: MP2, |
|||
MP2AudioWASM: MP2WASM |
|||
} |
|||
|
|||
export default Decoder |
@ -0,0 +1,134 @@ |
|||
import { Now } from '../../utils' |
|||
import BitBuffer from '../buffer' |
|||
import BaseDecoder from './decoder' |
|||
|
|||
export default class MP2WASM extends BaseDecoder { |
|||
constructor(options) { |
|||
super(options) |
|||
|
|||
this.onDecodeCallback = options.onAudioDecode |
|||
this.module = options.wasmModule |
|||
|
|||
this.bufferSize = options.audioBufferSize || 128 * 1024 |
|||
this.bufferMode = options.streaming |
|||
? BitBuffer.MODE.EVICT |
|||
: BitBuffer.MODE.EXPAND |
|||
|
|||
this.sampleRate = 0 |
|||
} |
|||
|
|||
initializeWasmDecoder() { |
|||
if (!this.module.instance) { |
|||
console.warn('JSMpeg: WASM module not compiled yet') |
|||
return |
|||
} |
|||
this.instance = this.module.instance |
|||
this.functions = this.module.instance.exports |
|||
this.decoder = this.functions._mp2_decoder_create( |
|||
this.bufferSize, |
|||
this.bufferMode |
|||
) |
|||
} |
|||
|
|||
destroy() { |
|||
if (!this.decoder) { |
|||
return |
|||
} |
|||
this.functions._mp2_decoder_destroy(this.decoder) |
|||
} |
|||
|
|||
bufferGetIndex() { |
|||
if (!this.decoder) { |
|||
return |
|||
} |
|||
return this.functions._mp2_decoder_get_index(this.decoder) |
|||
} |
|||
|
|||
bufferSetIndex(index) { |
|||
if (!this.decoder) { |
|||
return |
|||
} |
|||
this.functions._mp2_decoder_set_index(this.decoder, index) |
|||
} |
|||
|
|||
bufferWrite(buffers) { |
|||
if (!this.decoder) { |
|||
this.initializeWasmDecoder() |
|||
} |
|||
|
|||
let totalLength = 0 |
|||
for (let i = 0; i < buffers.length; i++) { |
|||
totalLength += buffers[i].length |
|||
} |
|||
|
|||
let ptr = this.functions._mp2_decoder_get_write_ptr( |
|||
this.decoder, |
|||
totalLength |
|||
) |
|||
for (let i = 0; i < buffers.length; i++) { |
|||
this.instance.heapU8.set(buffers[i], ptr) |
|||
ptr += buffers[i].length |
|||
} |
|||
|
|||
this.functions._mp2_decoder_did_write(this.decoder, totalLength) |
|||
return totalLength |
|||
} |
|||
|
|||
decode() { |
|||
const startTime = Now() |
|||
|
|||
if (!this.decoder) { |
|||
return false |
|||
} |
|||
|
|||
const decodedBytes = this.functions._mp2_decoder_decode(this.decoder) |
|||
if (decodedBytes === 0) { |
|||
return false |
|||
} |
|||
|
|||
if (!this.sampleRate) { |
|||
this.sampleRate = this.functions._mp2_decoder_get_sample_rate( |
|||
this.decoder |
|||
) |
|||
} |
|||
|
|||
if (this.destination) { |
|||
// Create a Float32 View into the modules output channel data
|
|||
const leftPtr = this.functions._mp2_decoder_get_left_channel_ptr( |
|||
this.decoder |
|||
) |
|||
const rightPtr = this.functions._mp2_decoder_get_right_channel_ptr( |
|||
this.decoder |
|||
) |
|||
|
|||
const leftOffset = leftPtr / Float32Array.BYTES_PER_ELEMENT |
|||
const rightOffset = rightPtr / Float32Array.BYTES_PER_ELEMENT |
|||
|
|||
const left = this.instance.heapF32.subarray( |
|||
leftOffset, |
|||
leftOffset + MP2WASM.SAMPLES_PER_FRAME |
|||
) |
|||
const right = this.instance.heapF32.subarray( |
|||
rightOffset, |
|||
rightOffset + MP2WASM.SAMPLES_PER_FRAME |
|||
) |
|||
|
|||
this.destination.play(this.sampleRate, left, right) |
|||
} |
|||
|
|||
this.advanceDecodedTime(MP2WASM.SAMPLES_PER_FRAME / this.sampleRate) |
|||
|
|||
const elapsedTime = Now() - startTime |
|||
if (this.onDecodeCallback) { |
|||
this.onDecodeCallback(this, elapsedTime) |
|||
} |
|||
return true |
|||
} |
|||
|
|||
getCurrentTime() { |
|||
const enqueuedTime = this.destination ? this.destination.enqueuedTime : 0 |
|||
return this.decodedTime - enqueuedTime |
|||
} |
|||
|
|||
static SAMPLES_PER_FRAME = 1152 |
|||
} |
@ -0,0 +1,831 @@ |
|||
'use strict' |
|||
|
|||
import { Fill, Now } from '../../utils' |
|||
import BitBuffer from '../buffer' |
|||
import BaseDecoder from './decoder' |
|||
|
|||
/** |
|||
* Based on kjmp2 by Martin J. Fiedler |
|||
* http://keyj.emphy.de/kjmp2/
|
|||
*/ |
|||
export default class MP2 extends BaseDecoder { |
|||
constructor(options) { |
|||
super(options) |
|||
|
|||
this.onDecodeCallback = options.onAudioDecode |
|||
|
|||
const bufferSize = options.audioBufferSize || 128 * 1024 |
|||
const bufferMode = options.streaming |
|||
? BitBuffer.MODE.EVICT |
|||
: BitBuffer.MODE.EXPAND |
|||
|
|||
this.bits = new BitBuffer(bufferSize, bufferMode) |
|||
|
|||
this.left = new Float32Array(1152) |
|||
this.right = new Float32Array(1152) |
|||
this.sampleRate = 44100 |
|||
|
|||
this.D = new Float32Array(1024) |
|||
this.D.set(MP2.SYNTHESIS_WINDOW, 0) |
|||
this.D.set(MP2.SYNTHESIS_WINDOW, 512) |
|||
this.V = [new Float32Array(1024), new Float32Array(1024)] |
|||
this.U = new Int32Array(32) |
|||
this.VPos = 0 |
|||
|
|||
this.allocation = [new Array(32), new Array(32)] |
|||
this.scaleFactorInfo = [new Uint8Array(32), new Uint8Array(32)] |
|||
this.scaleFactor = [new Array(32), new Array(32)] |
|||
this.sample = [new Array(32), new Array(32)] |
|||
|
|||
for (let j = 0; j < 2; j++) { |
|||
for (let i = 0; i < 32; i++) { |
|||
this.scaleFactor[j][i] = [0, 0, 0] |
|||
this.sample[j][i] = [0, 0, 0] |
|||
} |
|||
} |
|||
} |
|||
|
|||
decode() { |
|||
const startTime = Now() |
|||
|
|||
const pos = this.bits.index >> 3 |
|||
if (pos >= this.bits.byteLength) { |
|||
return false |
|||
} |
|||
|
|||
const decoded = this.decodeFrame(this.left, this.right) |
|||
this.bits.index = (pos + decoded) << 3 |
|||
if (!decoded) { |
|||
return false |
|||
} |
|||
|
|||
if (this.destination) { |
|||
this.destination.play(this.sampleRate, this.left, this.right) |
|||
} |
|||
|
|||
this.advanceDecodedTime(this.left.length / this.sampleRate) |
|||
|
|||
const elapsedTime = Now() - startTime |
|||
if (this.onDecodeCallback) { |
|||
this.onDecodeCallback(this, elapsedTime) |
|||
} |
|||
return true |
|||
} |
|||
|
|||
getCurrentTime() { |
|||
const enqueuedTime = this.destination ? this.destination.enqueuedTime : 0 |
|||
return this.decodedTime - enqueuedTime |
|||
} |
|||
|
|||
decodeFrame(left, right) { |
|||
// Check for valid header: syncword OK, MPEG-Audio Layer 2
|
|||
const sync = this.bits.read(11) |
|||
const version = this.bits.read(2) |
|||
const layer = this.bits.read(2) |
|||
const hasCRC = !this.bits.read(1) |
|||
|
|||
if ( |
|||
sync !== MP2.FRAME_SYNC || |
|||
version !== MP2.VERSION.MPEG_1 || |
|||
layer !== MP2.LAYER.II |
|||
) { |
|||
return 0 // Invalid header or unsupported version
|
|||
} |
|||
|
|||
let bitrateIndex = this.bits.read(4) - 1 |
|||
if (bitrateIndex > 13) { |
|||
return 0 // Invalid bit rate or 'free format'
|
|||
} |
|||
|
|||
let sampleRateIndex = this.bits.read(2) |
|||
let sampleRate = MP2.SAMPLE_RATE[sampleRateIndex] |
|||
if (sampleRateIndex === 3) { |
|||
return 0 // Invalid sample rate
|
|||
} |
|||
if (version === MP2.VERSION.MPEG_2) { |
|||
sampleRateIndex += 4 |
|||
bitrateIndex += 14 |
|||
} |
|||
const padding = this.bits.read(1) |
|||
// const privat = this.bits.read(1)
|
|||
const mode = this.bits.read(2) |
|||
|
|||
// Parse the mode_extension, set up the stereo bound
|
|||
let bound = 0 |
|||
if (mode === MP2.MODE.JOINT_STEREO) { |
|||
bound = (this.bits.read(2) + 1) << 2 |
|||
} else { |
|||
this.bits.skip(2) |
|||
bound = mode === MP2.MODE.MONO ? 0 : 32 |
|||
} |
|||
|
|||
// Discard the last 4 bits of the header and the CRC value, if present
|
|||
this.bits.skip(4) |
|||
if (hasCRC) { |
|||
this.bits.skip(16) |
|||
} |
|||
|
|||
// Compute the frame size
|
|||
sampleRate = MP2.SAMPLE_RATE[sampleRateIndex] |
|||
const bitrate = MP2.BIT_RATE[bitrateIndex] |
|||
const frameSize = ((144000 * bitrate) / sampleRate + padding) | 0 |
|||
|
|||
// Prepare the quantizer table lookups
|
|||
let tab3 = 0 |
|||
let sblimit = 0 |
|||
if (version === MP2.VERSION.MPEG_2) { |
|||
// MPEG-2 (LSR)
|
|||
tab3 = 2 |
|||
sblimit = 30 |
|||
} else { |
|||
// MPEG-1
|
|||
const tab1 = mode === MP2.MODE.MONO ? 0 : 1 |
|||
const tab2 = MP2.QUANT_LUT_STEP_1[tab1][bitrateIndex] |
|||
tab3 = MP2.QUANT_LUT_STEP_2[tab2][sampleRateIndex] |
|||
sblimit = tab3 & 63 |
|||
tab3 >>= 6 |
|||
} |
|||
|
|||
if (bound > sblimit) { |
|||
bound = sblimit |
|||
} |
|||
|
|||
// Read the allocation information
|
|||
for (let sb = 0; sb < bound; sb++) { |
|||
this.allocation[0][sb] = this.readAllocation(sb, tab3) |
|||
this.allocation[1][sb] = this.readAllocation(sb, tab3) |
|||
} |
|||
|
|||
for (let sb = bound; sb < sblimit; sb++) { |
|||
this.allocation[0][sb] = this.allocation[1][sb] = this.readAllocation( |
|||
sb, |
|||
tab3 |
|||
) |
|||
} |
|||
|
|||
// Read scale factor selector information
|
|||
const channels = mode === MP2.MODE.MONO ? 1 : 2 |
|||
for (let sb = 0; sb < sblimit; sb++) { |
|||
for (let ch = 0; ch < channels; ch++) { |
|||
if (this.allocation[ch][sb]) { |
|||
this.scaleFactorInfo[ch][sb] = this.bits.read(2) |
|||
} |
|||
} |
|||
if (mode === MP2.MODE.MONO) { |
|||
this.scaleFactorInfo[1][sb] = this.scaleFactorInfo[0][sb] |
|||
} |
|||
} |
|||
|
|||
// Read scale factors
|
|||
for (let sb = 0; sb < sblimit; sb++) { |
|||
for (let ch = 0; ch < channels; ch++) { |
|||
if (this.allocation[ch][sb]) { |
|||
const sf = this.scaleFactor[ch][sb] |
|||
switch (this.scaleFactorInfo[ch][sb]) { |
|||
case 0: |
|||
sf[0] = this.bits.read(6) |
|||
sf[1] = this.bits.read(6) |
|||
sf[2] = this.bits.read(6) |
|||
break |
|||
case 1: |
|||
sf[0] = sf[1] = this.bits.read(6) |
|||
sf[2] = this.bits.read(6) |
|||
break |
|||
case 2: |
|||
sf[0] = sf[1] = sf[2] = this.bits.read(6) |
|||
break |
|||
case 3: |
|||
sf[0] = this.bits.read(6) |
|||
sf[1] = sf[2] = this.bits.read(6) |
|||
break |
|||
} |
|||
} |
|||
} |
|||
if (mode === MP2.MODE.MONO) { |
|||
this.scaleFactor[1][sb][0] = this.scaleFactor[0][sb][0] |
|||
this.scaleFactor[1][sb][1] = this.scaleFactor[0][sb][1] |
|||
this.scaleFactor[1][sb][2] = this.scaleFactor[0][sb][2] |
|||
} |
|||
} |
|||
|
|||
// Coefficient input and reconstruction
|
|||
let outPos = 0 |
|||
for (let part = 0; part < 3; part++) { |
|||
for (let granule = 0; granule < 4; granule++) { |
|||
// Read the samples
|
|||
for (let sb = 0; sb < bound; sb++) { |
|||
this.readSamples(0, sb, part) |
|||
this.readSamples(1, sb, part) |
|||
} |
|||
for (let sb = bound; sb < sblimit; sb++) { |
|||
this.readSamples(0, sb, part) |
|||
this.sample[1][sb][0] = this.sample[0][sb][0] |
|||
this.sample[1][sb][1] = this.sample[0][sb][1] |
|||
this.sample[1][sb][2] = this.sample[0][sb][2] |
|||
} |
|||
for (let sb = sblimit; sb < 32; sb++) { |
|||
this.sample[0][sb][0] = 0 |
|||
this.sample[0][sb][1] = 0 |
|||
this.sample[0][sb][2] = 0 |
|||
this.sample[1][sb][0] = 0 |
|||
this.sample[1][sb][1] = 0 |
|||
this.sample[1][sb][2] = 0 |
|||
} |
|||
|
|||
// Synthesis loop
|
|||
for (let p = 0; p < 3; p++) { |
|||
// Shifting step
|
|||
this.VPos = (this.VPos - 64) & 1023 |
|||
|
|||
for (let ch = 0; ch < 2; ch++) { |
|||
MP2.MatrixTransform(this.sample[ch], p, this.V[ch], this.VPos) |
|||
|
|||
// Build U, windowing, calculate output
|
|||
Fill(this.U, 0) |
|||
|
|||
let dIndex = 512 - (this.VPos >> 1) |
|||
let vIndex = this.VPos % 128 >> 1 |
|||
while (vIndex < 1024) { |
|||
for (let i = 0; i < 32; ++i) { |
|||
this.U[i] += this.D[dIndex++] * this.V[ch][vIndex++] |
|||
} |
|||
|
|||
vIndex += 128 - 32 |
|||
dIndex += 64 - 32 |
|||
} |
|||
|
|||
vIndex = 128 - 32 + 1024 - vIndex |
|||
dIndex -= 512 - 32 |
|||
while (vIndex < 1024) { |
|||
for (let i = 0; i < 32; ++i) { |
|||
this.U[i] += this.D[dIndex++] * this.V[ch][vIndex++] |
|||
} |
|||
|
|||
vIndex += 128 - 32 |
|||
dIndex += 64 - 32 |
|||
} |
|||
|
|||
// Output samples
|
|||
const outChannel = ch === 0 ? left : right |
|||
for (let j = 0; j < 32; j++) { |
|||
outChannel[outPos + j] = this.U[j] / 2147418112 |
|||
} |
|||
} // End of synthesis channel loop
|
|||
outPos += 32 |
|||
} // End of synthesis sub-block loop
|
|||
} // Decoding of the granule finished
|
|||
} |
|||
|
|||
this.sampleRate = sampleRate |
|||
return frameSize |
|||
} |
|||
|
|||
readAllocation(sb, tab3) { |
|||
const tab4 = MP2.QUANT_LUT_STEP_3[tab3][sb] |
|||
const qtab = MP2.QUANT_LUT_STEP4[tab4 & 15][this.bits.read(tab4 >> 4)] |
|||
return qtab ? MP2.QUANT_TAB[qtab - 1] : 0 |
|||
} |
|||
|
|||
readSamples(ch, sb, part) { |
|||
const q = this.allocation[ch][sb] |
|||
let sf = this.scaleFactor[ch][sb][part] |
|||
const sample = this.sample[ch][sb] |
|||
let val = 0 |
|||
|
|||
if (!q) { |
|||
// No bits allocated for this subband
|
|||
sample[0] = sample[1] = sample[2] = 0 |
|||
return |
|||
} |
|||
|
|||
// Resolve scalefactor
|
|||
if (sf === 63) { |
|||
sf = 0 |
|||
} else { |
|||
const shift = (sf / 3) | 0 |
|||
sf = (MP2.SCALEFACTOR_BASE[sf % 3] + ((1 << shift) >> 1)) >> shift |
|||
} |
|||
|
|||
// Decode samples
|
|||
let adj = q.levels |
|||
if (q.group) { |
|||
// Decode grouped samples
|
|||
val = this.bits.read(q.bits) |
|||
sample[0] = val % adj |
|||
val = (val / adj) | 0 |
|||
sample[1] = val % adj |
|||
sample[2] = (val / adj) | 0 |
|||
} else { |
|||
// Decode direct samples
|
|||
sample[0] = this.bits.read(q.bits) |
|||
sample[1] = this.bits.read(q.bits) |
|||
sample[2] = this.bits.read(q.bits) |
|||
} |
|||
|
|||
// Postmultiply samples
|
|||
const scale = (65536 / (adj + 1)) | 0 |
|||
adj = ((adj + 1) >> 1) - 1 |
|||
|
|||
val = (adj - sample[0]) * scale |
|||
sample[0] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12 |
|||
|
|||
val = (adj - sample[1]) * scale |
|||
sample[1] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12 |
|||
|
|||
val = (adj - sample[2]) * scale |
|||
sample[2] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12 |
|||
} |
|||
|
|||
static MatrixTransform(s, ss, d, dp) { |
|||
let t01, |
|||
t02, |
|||
t03, |
|||
t04, |
|||
t05, |
|||
t06, |
|||
t07, |
|||
t08, |
|||
t09, |
|||
t10, |
|||
t11, |
|||
t12, |
|||
t13, |
|||
t14, |
|||
t15, |
|||
t16, |
|||
t17, |
|||
t18, |
|||
t19, |
|||
t20, |
|||
t21, |
|||
t22, |
|||
t23, |
|||
t24, |
|||
t25, |
|||
t26, |
|||
t27, |
|||
t28, |
|||
t29, |
|||
t30, |
|||
t31, |
|||
t32, |
|||
t33 |
|||
|
|||
t01 = s[0][ss] + s[31][ss] |
|||
t02 = (s[0][ss] - s[31][ss]) * 0.500602998235 |
|||
t03 = s[1][ss] + s[30][ss] |
|||
t04 = (s[1][ss] - s[30][ss]) * 0.505470959898 |
|||
t05 = s[2][ss] + s[29][ss] |
|||
t06 = (s[2][ss] - s[29][ss]) * 0.515447309923 |
|||
t07 = s[3][ss] + s[28][ss] |
|||
t08 = (s[3][ss] - s[28][ss]) * 0.53104259109 |
|||
t09 = s[4][ss] + s[27][ss] |
|||
t10 = (s[4][ss] - s[27][ss]) * 0.553103896034 |
|||
t11 = s[5][ss] + s[26][ss] |
|||
t12 = (s[5][ss] - s[26][ss]) * 0.582934968206 |
|||
t13 = s[6][ss] + s[25][ss] |
|||
t14 = (s[6][ss] - s[25][ss]) * 0.622504123036 |
|||
t15 = s[7][ss] + s[24][ss] |
|||
t16 = (s[7][ss] - s[24][ss]) * 0.674808341455 |
|||
t17 = s[8][ss] + s[23][ss] |
|||
t18 = (s[8][ss] - s[23][ss]) * 0.744536271002 |
|||
t19 = s[9][ss] + s[22][ss] |
|||
t20 = (s[9][ss] - s[22][ss]) * 0.839349645416 |
|||
t21 = s[10][ss] + s[21][ss] |
|||
t22 = (s[10][ss] - s[21][ss]) * 0.972568237862 |
|||
t23 = s[11][ss] + s[20][ss] |
|||
t24 = (s[11][ss] - s[20][ss]) * 1.16943993343 |
|||
t25 = s[12][ss] + s[19][ss] |
|||
t26 = (s[12][ss] - s[19][ss]) * 1.48416461631 |
|||
t27 = s[13][ss] + s[18][ss] |
|||
t28 = (s[13][ss] - s[18][ss]) * 2.05778100995 |
|||
t29 = s[14][ss] + s[17][ss] |
|||
t30 = (s[14][ss] - s[17][ss]) * 3.40760841847 |
|||
t31 = s[15][ss] + s[16][ss] |
|||
t32 = (s[15][ss] - s[16][ss]) * 10.1900081235 |
|||
|
|||
t33 = t01 + t31 |
|||
t31 = (t01 - t31) * 0.502419286188 |
|||
t01 = t03 + t29 |
|||
t29 = (t03 - t29) * 0.52249861494 |
|||
t03 = t05 + t27 |
|||
t27 = (t05 - t27) * 0.566944034816 |
|||
t05 = t07 + t25 |
|||
t25 = (t07 - t25) * 0.64682178336 |
|||
t07 = t09 + t23 |
|||
t23 = (t09 - t23) * 0.788154623451 |
|||
t09 = t11 + t21 |
|||
t21 = (t11 - t21) * 1.06067768599 |
|||
t11 = t13 + t19 |
|||
t19 = (t13 - t19) * 1.72244709824 |
|||
t13 = t15 + t17 |
|||
t17 = (t15 - t17) * 5.10114861869 |
|||
t15 = t33 + t13 |
|||
t13 = (t33 - t13) * 0.509795579104 |
|||
t33 = t01 + t11 |
|||
t01 = (t01 - t11) * 0.601344886935 |
|||
t11 = t03 + t09 |
|||
t09 = (t03 - t09) * 0.899976223136 |
|||
t03 = t05 + t07 |
|||
t07 = (t05 - t07) * 2.56291544774 |
|||
t05 = t15 + t03 |
|||
t15 = (t15 - t03) * 0.541196100146 |
|||
t03 = t33 + t11 |
|||
t11 = (t33 - t11) * 1.30656296488 |
|||
t33 = t05 + t03 |
|||
t05 = (t05 - t03) * 0.707106781187 |
|||
t03 = t15 + t11 |
|||
t15 = (t15 - t11) * 0.707106781187 |
|||
t03 += t15 |
|||
t11 = t13 + t07 |
|||
t13 = (t13 - t07) * 0.541196100146 |
|||
t07 = t01 + t09 |
|||
t09 = (t01 - t09) * 1.30656296488 |
|||
t01 = t11 + t07 |
|||
t07 = (t11 - t07) * 0.707106781187 |
|||
t11 = t13 + t09 |
|||
t13 = (t13 - t09) * 0.707106781187 |
|||
t11 += t13 |
|||
t01 += t11 |
|||
t11 += t07 |
|||
t07 += t13 |
|||
t09 = t31 + t17 |
|||
t31 = (t31 - t17) * 0.509795579104 |
|||
t17 = t29 + t19 |
|||
t29 = (t29 - t19) * 0.601344886935 |
|||
t19 = t27 + t21 |
|||
t21 = (t27 - t21) * 0.899976223136 |
|||
t27 = t25 + t23 |
|||
t23 = (t25 - t23) * 2.56291544774 |
|||
t25 = t09 + t27 |
|||
t09 = (t09 - t27) * 0.541196100146 |
|||
t27 = t17 + t19 |
|||
t19 = (t17 - t19) * 1.30656296488 |
|||
t17 = t25 + t27 |
|||
t27 = (t25 - t27) * 0.707106781187 |
|||
t25 = t09 + t19 |
|||
t19 = (t09 - t19) * 0.707106781187 |
|||
t25 += t19 |
|||
t09 = t31 + t23 |
|||
t31 = (t31 - t23) * 0.541196100146 |
|||
t23 = t29 + t21 |
|||
t21 = (t29 - t21) * 1.30656296488 |
|||
t29 = t09 + t23 |
|||
t23 = (t09 - t23) * 0.707106781187 |
|||
t09 = t31 + t21 |
|||
t31 = (t31 - t21) * 0.707106781187 |
|||
t09 += t31 |
|||
t29 += t09 |
|||
t09 += t23 |
|||
t23 += t31 |
|||
t17 += t29 |
|||
t29 += t25 |
|||
t25 += t09 |
|||
t09 += t27 |
|||
t27 += t23 |
|||
t23 += t19 |
|||
t19 += t31 |
|||
t21 = t02 + t32 |
|||
t02 = (t02 - t32) * 0.502419286188 |
|||
t32 = t04 + t30 |
|||
t04 = (t04 - t30) * 0.52249861494 |
|||
t30 = t06 + t28 |
|||
t28 = (t06 - t28) * 0.566944034816 |
|||
t06 = t08 + t26 |
|||
t08 = (t08 - t26) * 0.64682178336 |
|||
t26 = t10 + t24 |
|||
t10 = (t10 - t24) * 0.788154623451 |
|||
t24 = t12 + t22 |
|||
t22 = (t12 - t22) * 1.06067768599 |
|||
t12 = t14 + t20 |
|||
t20 = (t14 - t20) * 1.72244709824 |
|||
t14 = t16 + t18 |
|||
t16 = (t16 - t18) * 5.10114861869 |
|||
t18 = t21 + t14 |
|||
t14 = (t21 - t14) * 0.509795579104 |
|||
t21 = t32 + t12 |
|||
t32 = (t32 - t12) * 0.601344886935 |
|||
t12 = t30 + t24 |
|||
t24 = (t30 - t24) * 0.899976223136 |
|||
t30 = t06 + t26 |
|||
t26 = (t06 - t26) * 2.56291544774 |
|||
t06 = t18 + t30 |
|||
t18 = (t18 - t30) * 0.541196100146 |
|||
t30 = t21 + t12 |
|||
t12 = (t21 - t12) * 1.30656296488 |
|||
t21 = t06 + t30 |
|||
t30 = (t06 - t30) * 0.707106781187 |
|||
t06 = t18 + t12 |
|||
t12 = (t18 - t12) * 0.707106781187 |
|||
t06 += t12 |
|||
t18 = t14 + t26 |
|||
t26 = (t14 - t26) * 0.541196100146 |
|||
t14 = t32 + t24 |
|||
t24 = (t32 - t24) * 1.30656296488 |
|||
t32 = t18 + t14 |
|||
t14 = (t18 - t14) * 0.707106781187 |
|||
t18 = t26 + t24 |
|||
t24 = (t26 - t24) * 0.707106781187 |
|||
t18 += t24 |
|||
t32 += t18 |
|||
t18 += t14 |
|||
t26 = t14 + t24 |
|||
t14 = t02 + t16 |
|||
t02 = (t02 - t16) * 0.509795579104 |
|||
t16 = t04 + t20 |
|||
t04 = (t04 - t20) * 0.601344886935 |
|||
t20 = t28 + t22 |
|||
t22 = (t28 - t22) * 0.899976223136 |
|||
t28 = t08 + t10 |
|||
t10 = (t08 - t10) * 2.56291544774 |
|||
t08 = t14 + t28 |
|||
t14 = (t14 - t28) * 0.541196100146 |
|||
t28 = t16 + t20 |
|||
t20 = (t16 - t20) * 1.30656296488 |
|||
t16 = t08 + t28 |
|||
t28 = (t08 - t28) * 0.707106781187 |
|||
t08 = t14 + t20 |
|||
t20 = (t14 - t20) * 0.707106781187 |
|||
t08 += t20 |
|||
t14 = t02 + t10 |
|||
t02 = (t02 - t10) * 0.541196100146 |
|||
t10 = t04 + t22 |
|||
t22 = (t04 - t22) * 1.30656296488 |
|||
t04 = t14 + t10 |
|||
t10 = (t14 - t10) * 0.707106781187 |
|||
t14 = t02 + t22 |
|||
t02 = (t02 - t22) * 0.707106781187 |
|||
t14 += t02 |
|||
t04 += t14 |
|||
t14 += t10 |
|||
t10 += t02 |
|||
t16 += t04 |
|||
t04 += t08 |
|||
t08 += t14 |
|||
t14 += t28 |
|||
t28 += t10 |
|||
t10 += t20 |
|||
t20 += t02 |
|||
t21 += t16 |
|||
t16 += t32 |
|||
t32 += t04 |
|||
t04 += t06 |
|||
t06 += t08 |
|||
t08 += t18 |
|||
t18 += t14 |
|||
t14 += t30 |
|||
t30 += t28 |
|||
t28 += t26 |
|||
t26 += t10 |
|||
t10 += t12 |
|||
t12 += t20 |
|||
t20 += t24 |
|||
t24 += t02 |
|||
|
|||
d[dp + 48] = -t33 |
|||
d[dp + 49] = d[dp + 47] = -t21 |
|||
d[dp + 50] = d[dp + 46] = -t17 |
|||
d[dp + 51] = d[dp + 45] = -t16 |
|||
d[dp + 52] = d[dp + 44] = -t01 |
|||
d[dp + 53] = d[dp + 43] = -t32 |
|||
d[dp + 54] = d[dp + 42] = -t29 |
|||
d[dp + 55] = d[dp + 41] = -t04 |
|||
d[dp + 56] = d[dp + 40] = -t03 |
|||
d[dp + 57] = d[dp + 39] = -t06 |
|||
d[dp + 58] = d[dp + 38] = -t25 |
|||
d[dp + 59] = d[dp + 37] = -t08 |
|||
d[dp + 60] = d[dp + 36] = -t11 |
|||
d[dp + 61] = d[dp + 35] = -t18 |
|||
d[dp + 62] = d[dp + 34] = -t09 |
|||
d[dp + 63] = d[dp + 33] = -t14 |
|||
d[dp + 32] = -t05 |
|||
d[dp + 0] = t05 |
|||
d[dp + 31] = -t30 |
|||
d[dp + 1] = t30 |
|||
d[dp + 30] = -t27 |
|||
d[dp + 2] = t27 |
|||
d[dp + 29] = -t28 |
|||
d[dp + 3] = t28 |
|||
d[dp + 28] = -t07 |
|||
d[dp + 4] = t07 |
|||
d[dp + 27] = -t26 |
|||
d[dp + 5] = t26 |
|||
d[dp + 26] = -t23 |
|||
d[dp + 6] = t23 |
|||
d[dp + 25] = -t10 |
|||
d[dp + 7] = t10 |
|||
d[dp + 24] = -t15 |
|||
d[dp + 8] = t15 |
|||
d[dp + 23] = -t12 |
|||
d[dp + 9] = t12 |
|||
d[dp + 22] = -t19 |
|||
d[dp + 10] = t19 |
|||
d[dp + 21] = -t20 |
|||
d[dp + 11] = t20 |
|||
d[dp + 20] = -t13 |
|||
d[dp + 12] = t13 |
|||
d[dp + 19] = -t24 |
|||
d[dp + 13] = t24 |
|||
d[dp + 18] = -t31 |
|||
d[dp + 14] = t31 |
|||
d[dp + 17] = -t02 |
|||
d[dp + 15] = t02 |
|||
d[dp + 16] = 0.0 |
|||
} |
|||
|
|||
static FRAME_SYNC = 0x7ff |
|||
|
|||
static VERSION = { |
|||
MPEG_2_5: 0x0, |
|||
MPEG_2: 0x2, |
|||
MPEG_1: 0x3 |
|||
} |
|||
|
|||
static LAYER = { |
|||
III: 0x1, |
|||
II: 0x2, |
|||
I: 0x3 |
|||
} |
|||
|
|||
static MODE = { |
|||
STEREO: 0x0, |
|||
JOINT_STEREO: 0x1, |
|||
DUAL_CHANNEL: 0x2, |
|||
MONO: 0x3 |
|||
} |
|||
|
|||
static SAMPLE_RATE = new Uint16Array([ |
|||
44100, |
|||
48000, |
|||
32000, |
|||
0, // MPEG-1
|
|||
22050, |
|||
24000, |
|||
16000, |
|||
0 // MPEG-2
|
|||
]) |
|||
|
|||
static BIT_RATE = new Uint16Array([ |
|||
32, |
|||
48, |
|||
56, |
|||
64, |
|||
80, |
|||
96, |
|||
112, |
|||
128, |
|||
160, |
|||
192, |
|||
224, |
|||
256, |
|||
320, |
|||
384, // MPEG-1
|
|||
8, |
|||
16, |
|||
24, |
|||
32, |
|||
40, |
|||
48, |
|||
56, |
|||
64, |
|||
80, |
|||
96, |
|||
112, |
|||
128, |
|||
144, |
|||
160 // MPEG-2
|
|||
]) |
|||
|
|||
static SCALEFACTOR_BASE = new Uint32Array([ |
|||
0x02000000, 0x01965fea, 0x01428a30 |
|||
]) |
|||
|
|||
static SYNTHESIS_WINDOW = new Float32Array([ |
|||
0.0, -0.5, -0.5, -0.5, -0.5, -0.5, -0.5, -1.0, -1.0, -1.0, -1.0, -1.5, -1.5, |
|||
-2.0, -2.0, -2.5, -2.5, -3.0, -3.5, -3.5, -4.0, -4.5, -5.0, -5.5, -6.5, |
|||
-7.0, -8.0, -8.5, -9.5, -10.5, -12.0, -13.0, -14.5, -15.5, -17.5, -19.0, |
|||
-20.5, -22.5, -24.5, -26.5, -29.0, -31.5, -34.0, -36.5, -39.5, -42.5, -45.5, |
|||
-48.5, -52.0, -55.5, -58.5, -62.5, -66.0, -69.5, -73.5, -77.0, -80.5, -84.5, |
|||
-88.0, -91.5, -95.0, -98.0, -101.0, -104.0, 106.5, 109.0, 111.0, 112.5, |
|||
113.5, 114.0, 114.0, 113.5, 112.0, 110.5, 107.5, 104.0, 100.0, 94.5, 88.5, |
|||
81.5, 73.0, 63.5, 53.0, 41.5, 28.5, 14.5, -1.0, -18.0, -36.0, -55.5, -76.5, |
|||
-98.5, -122.0, -147.0, -173.5, -200.5, -229.5, -259.5, -290.5, -322.5, |
|||
-355.5, -389.5, -424.0, -459.5, -495.5, -532.0, -568.5, -605.0, -641.5, |
|||
-678.0, -714.0, -749.0, -783.5, -817.0, -849.0, -879.5, -908.5, -935.0, |
|||
-959.5, -981.0, -1000.5, -1016.0, -1028.5, -1037.5, -1042.5, -1043.5, |
|||
-1040.0, -1031.5, 1018.5, 1000.0, 976.0, 946.5, 911.0, 869.5, 822.0, 767.5, |
|||
707.0, 640.0, 565.5, 485.0, 397.0, 302.5, 201.0, 92.5, -22.5, -144.0, |
|||
-272.5, -407.0, -547.5, -694.0, -846.0, -1003.0, -1165.0, -1331.5, -1502.0, |
|||
-1675.5, -1852.5, -2031.5, -2212.5, -2394.0, -2576.5, -2758.5, -2939.5, |
|||
-3118.5, -3294.5, -3467.5, -3635.5, -3798.5, -3955.0, -4104.5, -4245.5, |
|||
-4377.5, -4499.0, -4609.5, -4708.0, -4792.5, -4863.5, -4919.0, -4958.0, |
|||
-4979.5, -4983.0, -4967.5, -4931.5, -4875.0, -4796.0, -4694.5, -4569.5, |
|||
-4420.0, -4246.0, -4046.0, -3820.0, -3567.0, 3287.0, 2979.5, 2644.0, 2280.5, |
|||
1888.0, 1467.5, 1018.5, 541.0, 35.0, -499.0, -1061.0, -1650.0, -2266.5, |
|||
-2909.0, -3577.0, -4270.0, -4987.5, -5727.5, -6490.0, -7274.0, -8077.5, |
|||
-8899.5, -9739.0, -10594.5, -11464.5, -12347.0, -13241.0, -14144.5, |
|||
-15056.0, -15973.5, -16895.5, -17820.0, -18744.5, -19668.0, -20588.0, |
|||
-21503.0, -22410.5, -23308.5, -24195.0, -25068.5, -25926.5, -26767.0, |
|||
-27589.0, -28389.0, -29166.5, -29919.0, -30644.5, -31342.0, -32009.5, |
|||
-32645.0, -33247.0, -33814.5, -34346.0, -34839.5, -35295.0, -35710.0, |
|||
-36084.5, -36417.5, -36707.5, -36954.0, -37156.5, -37315.0, -37428.0, |
|||
-37496.0, 37519.0, 37496.0, 37428.0, 37315.0, 37156.5, 36954.0, 36707.5, |
|||
36417.5, 36084.5, 35710.0, 35295.0, 34839.5, 34346.0, 33814.5, 33247.0, |
|||
32645.0, 32009.5, 31342.0, 30644.5, 29919.0, 29166.5, 28389.0, 27589.0, |
|||
26767.0, 25926.5, 25068.5, 24195.0, 23308.5, 22410.5, 21503.0, 20588.0, |
|||
19668.0, 18744.5, 17820.0, 16895.5, 15973.5, 15056.0, 14144.5, 13241.0, |
|||
12347.0, 11464.5, 10594.5, 9739.0, 8899.5, 8077.5, 7274.0, 6490.0, 5727.5, |
|||
4987.5, 4270.0, 3577.0, 2909.0, 2266.5, 1650.0, 1061.0, 499.0, -35.0, |
|||
-541.0, -1018.5, -1467.5, -1888.0, -2280.5, -2644.0, -2979.5, 3287.0, |
|||
3567.0, 3820.0, 4046.0, 4246.0, 4420.0, 4569.5, 4694.5, 4796.0, 4875.0, |
|||
4931.5, 4967.5, 4983.0, 4979.5, 4958.0, 4919.0, 4863.5, 4792.5, 4708.0, |
|||
4609.5, 4499.0, 4377.5, 4245.5, 4104.5, 3955.0, 3798.5, 3635.5, 3467.5, |
|||
3294.5, 3118.5, 2939.5, 2758.5, 2576.5, 2394.0, 2212.5, 2031.5, 1852.5, |
|||
1675.5, 1502.0, 1331.5, 1165.0, 1003.0, 846.0, 694.0, 547.5, 407.0, 272.5, |
|||
144.0, 22.5, -92.5, -201.0, -302.5, -397.0, -485.0, -565.5, -640.0, -707.0, |
|||
-767.5, -822.0, -869.5, -911.0, -946.5, -976.0, -1000.0, 1018.5, 1031.5, |
|||
1040.0, 1043.5, 1042.5, 1037.5, 1028.5, 1016.0, 1000.5, 981.0, 959.5, 935.0, |
|||
908.5, 879.5, 849.0, 817.0, 783.5, 749.0, 714.0, 678.0, 641.5, 605.0, 568.5, |
|||
532.0, 495.5, 459.5, 424.0, 389.5, 355.5, 322.5, 290.5, 259.5, 229.5, 200.5, |
|||
173.5, 147.0, 122.0, 98.5, 76.5, 55.5, 36.0, 18.0, 1.0, -14.5, -28.5, -41.5, |
|||
-53.0, -63.5, -73.0, -81.5, -88.5, -94.5, -100.0, -104.0, -107.5, -110.5, |
|||
-112.0, -113.5, -114.0, -114.0, -113.5, -112.5, -111.0, -109.0, 106.5, |
|||
104.0, 101.0, 98.0, 95.0, 91.5, 88.0, 84.5, 80.5, 77.0, 73.5, 69.5, 66.0, |
|||
62.5, 58.5, 55.5, 52.0, 48.5, 45.5, 42.5, 39.5, 36.5, 34.0, 31.5, 29.0, |
|||
26.5, 24.5, 22.5, 20.5, 19.0, 17.5, 15.5, 14.5, 13.0, 12.0, 10.5, 9.5, 8.5, |
|||
8.0, 7.0, 6.5, 5.5, 5.0, 4.5, 4.0, 3.5, 3.5, 3.0, 2.5, 2.5, 2.0, 2.0, 1.5, |
|||
1.5, 1.0, 1.0, 1.0, 1.0, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5 |
|||
]) |
|||
|
|||
// Quantizer lookup, step 1: bitrate classes
|
|||
static QUANT_LUT_STEP_1 = [ |
|||
// 32, 48, 56, 64, 80, 96,112,128,160,192,224,256,320,384 <- bitrate
|
|||
[0, 0, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2], // mono
|
|||
// 16, 24, 28, 32, 40, 48, 56, 64, 80, 96,112,128,160,192 <- bitrate / chan
|
|||
[0, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 2] // stereo
|
|||
] |
|||
|
|||
// Quantizer lookup, step 2: bitrate class, sample rate -> B2 table idx, sblimit
|
|||
static QUANT_TAB = { |
|||
A: 27 | 64, // Table 3-B.2a: high-rate, sblimit = 27
|
|||
B: 30 | 64, // Table 3-B.2b: high-rate, sblimit = 30
|
|||
C: 8, // Table 3-B.2c: low-rate, sblimit = 8
|
|||
D: 12 // Table 3-B.2d: low-rate, sblimit = 12
|
|||
} |
|||
|
|||
static QUANT_LUT_STEP_2 = [ |
|||
// 44.1 kHz, 48 kHz, 32 kHz
|
|||
[MP2.QUANT_TAB.C, MP2.QUANT_TAB.C, MP2.QUANT_TAB.D], // 32 - 48 kbit/sec/ch
|
|||
[MP2.QUANT_TAB.A, MP2.QUANT_TAB.A, MP2.QUANT_TAB.A], // 56 - 80 kbit/sec/ch
|
|||
[MP2.QUANT_TAB.B, MP2.QUANT_TAB.A, MP2.QUANT_TAB.B] // 96+ kbit/sec/ch
|
|||
] |
|||
|
|||
// Quantizer lookup, step 3: B2 table, subband -> nbal, row index
|
|||
// (upper 4 bits: nbal, lower 4 bits: row index)
|
|||
static QUANT_LUT_STEP_3 = [ |
|||
// Low-rate table (3-B.2c and 3-B.2d)
|
|||
[0x44, 0x44, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34], |
|||
// High-rate table (3-B.2a and 3-B.2b)
|
|||
[ |
|||
0x43, 0x43, 0x43, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x31, |
|||
0x31, 0x31, 0x31, 0x31, 0x31, 0x31, 0x31, 0x31, 0x31, 0x31, 0x31, 0x20, |
|||
0x20, 0x20, 0x20, 0x20, 0x20, 0x20 |
|||
], |
|||
// MPEG-2 LSR table (B.2 in ISO 13818-3)
|
|||
[ |
|||
0x45, 0x45, 0x45, 0x45, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x24, |
|||
0x24, 0x24, 0x24, 0x24, 0x24, 0x24, 0x24, 0x24, 0x24, 0x24, 0x24, 0x24, |
|||
0x24, 0x24, 0x24, 0x24, 0x24, 0x24 |
|||
] |
|||
] |
|||
|
|||
// Quantizer lookup, step 4: table row, allocation[] value -> quant table index
|
|||
static QUANT_LUT_STEP4 = [ |
|||
[0, 1, 2, 17], |
|||
[0, 1, 2, 3, 4, 5, 6, 17], |
|||
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 17], |
|||
[0, 1, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], |
|||
[0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17], |
|||
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] |
|||
] |
|||
|
|||
static QUANT_TAB = [ |
|||
{ levels: 3, group: 1, bits: 5 }, // 1
|
|||
{ levels: 5, group: 1, bits: 7 }, // 2
|
|||
{ levels: 7, group: 0, bits: 3 }, // 3
|
|||
{ levels: 9, group: 1, bits: 10 }, // 4
|
|||
{ levels: 15, group: 0, bits: 4 }, // 5
|
|||
{ levels: 31, group: 0, bits: 5 }, // 6
|
|||
{ levels: 63, group: 0, bits: 6 }, // 7
|
|||
{ levels: 127, group: 0, bits: 7 }, // 8
|
|||
{ levels: 255, group: 0, bits: 8 }, // 9
|
|||
{ levels: 511, group: 0, bits: 9 }, // 10
|
|||
{ levels: 1023, group: 0, bits: 10 }, // 11
|
|||
{ levels: 2047, group: 0, bits: 11 }, // 12
|
|||
{ levels: 4095, group: 0, bits: 12 }, // 13
|
|||
{ levels: 8191, group: 0, bits: 13 }, // 14
|
|||
{ levels: 16383, group: 0, bits: 14 }, // 15
|
|||
{ levels: 32767, group: 0, bits: 15 }, // 16
|
|||
{ levels: 65535, group: 0, bits: 16 } // 17
|
|||
] |
|||
} |
@ -0,0 +1,155 @@ |
|||
import { Now } from '../../utils' |
|||
import BitBuffer from '../buffer' |
|||
import BaseDecoder from './decoder' |
|||
|
|||
export default class MPEG1WASM extends BaseDecoder { |
|||
options = null |
|||
/** 分辨率 */ |
|||
resolution = { |
|||
width: 0, |
|||
height: 0 |
|||
} |
|||
constructor(options) { |
|||
super(options) |
|||
|
|||
this.onDecodeCallback = options.onVideoDecode |
|||
this.module = options.wasmModule |
|||
|
|||
this.bufferSize = options.videoBufferSize || 512 * 1024 |
|||
this.bufferMode = options.streaming |
|||
? BitBuffer.MODE.EVICT |
|||
: BitBuffer.MODE.EXPAND |
|||
|
|||
this.decodeFirstFrame = options.decodeFirstFrame !== false |
|||
this.hasSequenceHeader = false |
|||
this.options = options |
|||
} |
|||
|
|||
initializeWasmDecoder() { |
|||
if (!this.module.instance) { |
|||
console.warn('JSMpeg: WASM module not compiled yet') |
|||
return |
|||
} |
|||
|
|||
this.instance = this.module.instance |
|||
this.functions = this.module.instance.exports |
|||
this.decoder = this.functions._mpeg1_decoder_create( |
|||
this.bufferSize, |
|||
this.bufferMode |
|||
) |
|||
} |
|||
|
|||
destroy() { |
|||
if (!this.decoder) { |
|||
return |
|||
} |
|||
this.functions._mpeg1_decoder_destroy(this.decoder) |
|||
} |
|||
|
|||
bufferGetIndex() { |
|||
if (!this.decoder) { |
|||
return |
|||
} |
|||
return this.functions._mpeg1_decoder_get_index(this.decoder) |
|||
} |
|||
|
|||
bufferSetIndex(index) { |
|||
if (!this.decoder) { |
|||
return |
|||
} |
|||
this.functions._mpeg1_decoder_set_index(this.decoder, index) |
|||
} |
|||
|
|||
bufferWrite(buffers) { |
|||
if (!this.decoder) { |
|||
this.initializeWasmDecoder() |
|||
} |
|||
|
|||
let totalLength = 0 |
|||
for (let i = 0; i < buffers.length; i++) { |
|||
totalLength += buffers[i].length |
|||
} |
|||
|
|||
let ptr = this.functions._mpeg1_decoder_get_write_ptr( |
|||
this.decoder, |
|||
totalLength |
|||
) |
|||
for (let i = 0; i < buffers.length; i++) { |
|||
this.instance.heapU8.set(buffers[i], ptr) |
|||
ptr += buffers[i].length |
|||
} |
|||
|
|||
this.functions._mpeg1_decoder_did_write(this.decoder, totalLength) |
|||
return totalLength |
|||
} |
|||
|
|||
write(pts, buffers) { |
|||
super.write(pts, buffers) |
|||
|
|||
if ( |
|||
!this.hasSequenceHeader && |
|||
this.functions._mpeg1_decoder_has_sequence_header(this.decoder) |
|||
) { |
|||
this.loadSequnceHeader() |
|||
} |
|||
} |
|||
|
|||
loadSequnceHeader() { |
|||
this.hasSequenceHeader = true |
|||
this.frameRate = this.functions._mpeg1_decoder_get_frame_rate(this.decoder) |
|||
this.codedSize = this.functions._mpeg1_decoder_get_coded_size(this.decoder) |
|||
|
|||
if (this.destination) { |
|||
const w = this.functions._mpeg1_decoder_get_width(this.decoder) |
|||
const h = this.functions._mpeg1_decoder_get_height(this.decoder) |
|||
this.destination.resize(w, h) |
|||
this.resolution.width = w |
|||
this.resolution.height = h |
|||
this.options.onResolutionDecode?.(w, h) |
|||
} |
|||
|
|||
if (this.decodeFirstFrame) { |
|||
this.decode() |
|||
} |
|||
} |
|||
|
|||
decode() { |
|||
const startTime = Now() |
|||
|
|||
if (!this.decoder) { |
|||
return false |
|||
} |
|||
|
|||
const didDecode = this.functions._mpeg1_decoder_decode(this.decoder) |
|||
if (!didDecode) { |
|||
return false |
|||
} |
|||
|
|||
// Invoke decode callbacks
|
|||
if (this.destination) { |
|||
const ptrY = this.functions._mpeg1_decoder_get_y_ptr(this.decoder) |
|||
const ptrCr = this.functions._mpeg1_decoder_get_cr_ptr(this.decoder) |
|||
const ptrCb = this.functions._mpeg1_decoder_get_cb_ptr(this.decoder) |
|||
|
|||
const dy = this.instance.heapU8.subarray(ptrY, ptrY + this.codedSize) |
|||
const dcr = this.instance.heapU8.subarray( |
|||
ptrCr, |
|||
ptrCr + (this.codedSize >> 2) |
|||
) |
|||
const dcb = this.instance.heapU8.subarray( |
|||
ptrCb, |
|||
ptrCb + (this.codedSize >> 2) |
|||
) |
|||
|
|||
this.destination.render(dy, dcr, dcb, false) |
|||
} |
|||
|
|||
this.advanceDecodedTime(1 / this.frameRate) |
|||
|
|||
const elapsedTime = Now() - startTime |
|||
if (this.onDecodeCallback) { |
|||
this.onDecodeCallback(this, elapsedTime) |
|||
} |
|||
return true |
|||
} |
|||
} |
2846
src/views/components/jsmpeg/modules/decoder/mpeg1.js
File diff suppressed because it is too large
View File
File diff suppressed because it is too large
View File
@ -0,0 +1,7 @@ |
|||
import TS from './ts' |
|||
|
|||
const Demuxer = { |
|||
TS |
|||
} |
|||
|
|||
export default Demuxer |
@ -0,0 +1,228 @@ |
|||
/* eslint-disable */ |
|||
import BitBuffer from '../buffer' |
|||
import MP2 from '../decoder/mp2' |
|||
import MP2WASM from '../decoder/mp2-wasm' |
|||
import MPEG1 from '../decoder/mpeg1' |
|||
import MPEG1WASM from '../decoder/mpeg1-wasm' |
|||
|
|||
export default class TS { |
|||
/** @type {BitBuffer} */ |
|||
bits |
|||
/** @type {{[key:string]: {destination: MPEG1|MPEG1WASM|MP2|MP2WASM,currentLength: number,totalLength: number, pts: number, buffers: BitBuffer}}} */ |
|||
pesPacketInfo |
|||
constructor(options) { |
|||
this.bits = null |
|||
this.leftoverBytes = null |
|||
|
|||
this.guessVideoFrameEnd = true |
|||
this.pidsToStreamIds = {} |
|||
|
|||
this.pesPacketInfo = {} |
|||
this.startTime = 0 |
|||
this.currentTime = 0 |
|||
} |
|||
|
|||
connect(streamId, destination) { |
|||
this.pesPacketInfo[streamId] = { |
|||
destination: destination, |
|||
currentLength: 0, |
|||
totalLength: 0, |
|||
pts: 0, |
|||
buffers: [] |
|||
} |
|||
} |
|||
|
|||
write(buffer) { |
|||
if (this.leftoverBytes) { |
|||
const totalLength = buffer.byteLength + this.leftoverBytes.byteLength |
|||
this.bits = new BitBuffer(totalLength) |
|||
this.bits.write([this.leftoverBytes, buffer]) |
|||
} else { |
|||
this.bits = new BitBuffer(buffer) |
|||
} |
|||
|
|||
while (this.bits.has(188 << 3) && this.parsePacket()) {} |
|||
|
|||
const leftoverCount = this.bits.byteLength - (this.bits.index >> 3) |
|||
this.leftoverBytes = |
|||
leftoverCount > 0 ? this.bits.bytes.subarray(this.bits.index >> 3) : null |
|||
} |
|||
|
|||
parsePacket() { |
|||
// Check if we're in sync with packet boundaries; attempt to resync if not.
|
|||
if (this.bits.read(8) !== 0x47) { |
|||
if (!this.resync()) { |
|||
// Couldn't resync; maybe next time...
|
|||
return false |
|||
} |
|||
} |
|||
|
|||
const end = (this.bits.index >> 3) + 187 |
|||
const transportError = this.bits.read(1) |
|||
const payloadStart = this.bits.read(1) |
|||
const transportPriority = this.bits.read(1) |
|||
const pid = this.bits.read(13) |
|||
const transportScrambling = this.bits.read(2) |
|||
const adaptationField = this.bits.read(2) |
|||
const continuityCounter = this.bits.read(4) |
|||
|
|||
// If this is the start of a new payload; signal the end of the previous
|
|||
// frame, if we didn't do so already.
|
|||
let streamId = this.pidsToStreamIds[pid] |
|||
if (payloadStart && streamId) { |
|||
const pi = this.pesPacketInfo[streamId] |
|||
if (pi && pi.currentLength) { |
|||
this.packetComplete(pi) |
|||
} |
|||
} |
|||
|
|||
// Extract current payload
|
|||
if (adaptationField & 0x1) { |
|||
if (adaptationField & 0x2) { |
|||
const adaptationFieldLength = this.bits.read(8) |
|||
this.bits.skip(adaptationFieldLength << 3) |
|||
} |
|||
|
|||
if (payloadStart && this.bits.nextBytesAreStartCode()) { |
|||
this.bits.skip(24) |
|||
streamId = this.bits.read(8) |
|||
this.pidsToStreamIds[pid] = streamId |
|||
|
|||
const packetLength = this.bits.read(16) |
|||
this.bits.skip(8) |
|||
const ptsDtsFlag = this.bits.read(2) |
|||
this.bits.skip(6) |
|||
const headerLength = this.bits.read(8) |
|||
const payloadBeginIndex = this.bits.index + (headerLength << 3) |
|||
|
|||
const pi = this.pesPacketInfo[streamId] |
|||
if (pi) { |
|||
let pts = 0 |
|||
if (ptsDtsFlag & 0x2) { |
|||
// The Presentation Timestamp is encoded as 33(!) bit
|
|||
// integer, but has a "marker bit" inserted at weird places
|
|||
// in between, making the whole thing 5 bytes in size.
|
|||
// You can't make this shit up...
|
|||
this.bits.skip(4) |
|||
const p32_30 = this.bits.read(3) |
|||
this.bits.skip(1) |
|||
const p29_15 = this.bits.read(15) |
|||
this.bits.skip(1) |
|||
const p14_0 = this.bits.read(15) |
|||
this.bits.skip(1) |
|||
|
|||
// Can't use bit shifts here; we need 33 bits of precision,
|
|||
// so we're using JavaScript's double number type. Also
|
|||
// divide by the 90khz clock to get the pts in seconds.
|
|||
pts = (p32_30 * 1073741824 + p29_15 * 32768 + p14_0) / 90000 |
|||
|
|||
this.currentTime = pts |
|||
if (this.startTime === -1) { |
|||
this.startTime = pts |
|||
} |
|||
} |
|||
|
|||
const payloadLength = packetLength ? packetLength - headerLength - 3 : 0 |
|||
this.packetStart(pi, pts, payloadLength) |
|||
} |
|||
|
|||
// Skip the rest of the header without parsing it
|
|||
this.bits.index = payloadBeginIndex |
|||
} |
|||
|
|||
if (streamId) { |
|||
// Attempt to detect if the PES packet is complete. For Audio (and
|
|||
// other) packets, we received a total packet length with the PES
|
|||
// header, so we can check the current length.
|
|||
|
|||
// For Video packets, we have to guess the end by detecting if this
|
|||
// TS packet was padded - there's no good reason to pad a TS packet
|
|||
// in between, but it might just fit exactly. If this fails, we can
|
|||
// only wait for the next PES header for that stream.
|
|||
|
|||
const pi = this.pesPacketInfo[streamId] |
|||
if (pi) { |
|||
const start = this.bits.index >> 3 |
|||
const complete = this.packetAddData(pi, start, end) |
|||
|
|||
const hasPadding = !payloadStart && adaptationField & 0x2 |
|||
if (complete || (this.guessVideoFrameEnd && hasPadding)) { |
|||
this.packetComplete(pi) |
|||
} |
|||
} |
|||
} |
|||
} |
|||
|
|||
this.bits.index = end << 3 |
|||
return true |
|||
} |
|||
|
|||
resync() { |
|||
// Check if we have enough data to attempt a resync. We need 5 full packets.
|
|||
if (!this.bits.has((188 * 6) << 3)) { |
|||
return false |
|||
} |
|||
|
|||
const byteIndex = this.bits.index >> 3 |
|||
|
|||
// Look for the first sync token in the first 187 bytes
|
|||
for (let i = 0; i < 187; i++) { |
|||
if (this.bits.bytes[byteIndex + i] === 0x47) { |
|||
// Look for 4 more sync tokens, each 188 bytes appart
|
|||
let foundSync = true |
|||
for (let j = 1; j < 5; j++) { |
|||
if (this.bits.bytes[byteIndex + i + 188 * j] !== 0x47) { |
|||
foundSync = false |
|||
break |
|||
} |
|||
} |
|||
|
|||
if (foundSync) { |
|||
this.bits.index = (byteIndex + i + 1) << 3 |
|||
return true |
|||
} |
|||
} |
|||
} |
|||
|
|||
// In theory, we shouldn't arrive here. If we do, we had enough data but
|
|||
// still didn't find sync - this can only happen if we were fed garbage
|
|||
// data. Check your source!
|
|||
// console.warn('JSMpeg: Possible garbage data. Skipping.')
|
|||
this.bits.skip(187 << 3) |
|||
return false |
|||
} |
|||
|
|||
packetStart(pi, pts, payloadLength) { |
|||
pi.totalLength = payloadLength |
|||
pi.currentLength = 0 |
|||
pi.pts = pts |
|||
} |
|||
|
|||
packetAddData(pi, start, end) { |
|||
pi.buffers.push(this.bits.bytes.subarray(start, end)) |
|||
pi.currentLength += end - start |
|||
|
|||
const complete = pi.totalLength !== 0 && pi.currentLength >= pi.totalLength |
|||
return complete |
|||
} |
|||
|
|||
packetComplete(pi) { |
|||
// 在这里将视频流写入了解码器
|
|||
pi.destination.write(pi.pts, pi.buffers) |
|||
pi.totalLength = 0 |
|||
pi.currentLength = 0 |
|||
pi.buffers = [] |
|||
} |
|||
|
|||
static STREAM = { |
|||
PACK_HEADER: 0xba, |
|||
SYSTEM_HEADER: 0xbb, |
|||
PROGRAM_MAP: 0xbc, |
|||
PRIVATE_1: 0xbd, |
|||
PADDING: 0xbe, |
|||
PRIVATE_2: 0xbf, |
|||
AUDIO_1: 0xc0, |
|||
VIDEO_1: 0xe0, |
|||
DIRECTORY: 0xff |
|||
} |
|||
} |
@ -0,0 +1,94 @@ |
|||
/*! jsmpeg v1.0 | (c) Dominic Szablewski | MIT license */ |
|||
|
|||
import AudioOutput from './audio-output' |
|||
import BitBuffer from './buffer' |
|||
import Decoder from './decoder' |
|||
import Demuxer from './demuxer' |
|||
import Player from './player' |
|||
import Renderer from './renderer' |
|||
import Source from './source' |
|||
import VideoElement from './video-element' |
|||
|
|||
// This sets up the JSMpeg "namespace". It exposes the individual modules as static
|
|||
// members and provides CreateVideoElements() for declarative <div class="jsmpeg"> use.
|
|||
export default class JSMpeg { |
|||
// The Player sets up the connections between source, demuxer, decoders,
|
|||
// renderer and audio output. It ties everything together, is responsible
|
|||
// for scheduling decoding and provides some convenience methods for
|
|||
// external users.
|
|||
static Player = Player |
|||
|
|||
// A Video Element wraps the Player, shows HTML controls to start/pause
|
|||
// the video and handles Audio unlocking on iOS. VideoElements can be
|
|||
// created directly in HTML using the <div class="jsmpeg"/> tag.
|
|||
static VideoElement = VideoElement |
|||
|
|||
// The BitBuffer wraps a Uint8Array and allows reading an arbitrary number
|
|||
// of bits at a time. On writing, the BitBuffer either expands its
|
|||
// internal buffer (for static files) or deletes old data (for streaming).
|
|||
static BitBuffer = BitBuffer |
|||
|
|||
// A Source provides raw data from HTTP, a WebSocket connection or any
|
|||
// other means. Sources must support the following API:
|
|||
// .connect(destinationNode)
|
|||
// .write(buffer)
|
|||
// .start() - start reading
|
|||
// .resume(headroom) - continue reading; headroom to play pos in seconds
|
|||
// .established - boolean, true after connection is established
|
|||
// .completed - boolean, true if the source is completely loaded
|
|||
// .progress - float 0-1
|
|||
static Source = Source |
|||
|
|||
// A Demuxer may sit between a Source and a Decoder. It separates the
|
|||
// incoming raw data into Video, Audio and other Streams. API:
|
|||
// .connect(streamId, destinationNode)
|
|||
// .write(buffer)
|
|||
// .currentTime – float, in seconds
|
|||
// .startTime - float, in seconds
|
|||
static Demuxer = Demuxer |
|||
|
|||
// A Decoder accepts an incoming Stream of raw Audio or Video data, buffers
|
|||
// it and upon `.decode()` decodes a single frame of data. Video decoders
|
|||
// call `destinationNode.render(Y, Cr, Cb)` with the decoded pixel data;
|
|||
// Audio decoders call `destinationNode.play(left, right)` with the decoded
|
|||
// PCM data. API:
|
|||
// .connect(destinationNode)
|
|||
// .write(pts, buffer)
|
|||
// .decode()
|
|||
// .seek(time)
|
|||
// .currentTime - float, in seconds
|
|||
// .startTime - float, in seconds
|
|||
static Decoder = Decoder |
|||
|
|||
// A Renderer accepts raw YCrCb data in 3 separate buffers via the render()
|
|||
// method. Renderers typically convert the data into the RGBA color space
|
|||
// and draw it on a Canvas, but other output - such as writing PNGs - would
|
|||
// be conceivable. API:
|
|||
// .render(y, cr, cb) - pixel data as Uint8Arrays
|
|||
// .enabled - whether the renderer does anything upon receiving data
|
|||
static Renderer = Renderer |
|||
|
|||
// Audio Outputs accept raw Stereo PCM data in 2 separate buffers via the
|
|||
// play() method. Outputs typically play the audio on the user's device.
|
|||
// API:
|
|||
// .play(sampleRate, left, right) - rate in hertz; PCM data as Uint8Arrays
|
|||
// .stop()
|
|||
// .enqueuedTime - float, in seconds
|
|||
// .enabled - whether the output does anything upon receiving data
|
|||
static AudioOutput = AudioOutput |
|||
|
|||
static CreateVideoElements() { |
|||
const elements = document.querySelectorAll('.jsmpeg') |
|||
for (let i = 0; i < elements.length; i++) { |
|||
new VideoElement(elements[i]) |
|||
} |
|||
} |
|||
} |
|||
|
|||
// Automatically create players for all found <div class="jsmpeg"/> elements.
|
|||
// if (document.readyState === 'complete') {
|
|||
// JSMpeg.CreateVideoElements();
|
|||
// }
|
|||
// else {
|
|||
// document.addEventListener('DOMContentLoaded', JSMpeg.CreateVideoElements);
|
|||
// }
|
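// Minimal usage sketch for the namespace above (illustrative; the import path and the
// ws:// URL are placeholders, and the options mirror types/index.d.ts):
import JSMpeg from '@/views/components/jsmpeg'

const player = new JSMpeg.Player('ws://localhost:9999/stream', {
  canvas: document.querySelector('canvas'),
  audio: false,
  onSourceEstablished: () => console.log('stream established')
})
// Call player.destroy() when the hosting component is torn down.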
728
src/views/components/jsmpeg/modules/player.js
File diff suppressed because it is too large
@ -0,0 +1,130 @@ |
|||
import { Fill } from '../../utils' |
|||
|
|||
export default class CanvasRenderer { |
|||
/** @type {HTMLCanvasElement} */ |
|||
canvas |
|||
/** |
|||
* |
|||
* @param {import('../../types').PlayerOptions} options |
|||
*/ |
|||
constructor(options) { |
|||
this.canvas = options.canvas ?? document.createElement('canvas') |
|||
this.width = this.canvas.width |
|||
this.height = this.canvas.height |
|||
this.enabled = true |
|||
|
|||
this.context = this.canvas.getContext('2d') |
|||
} |
|||
|
|||
destroy() { |
|||
// Nothing to do here
|
|||
} |
|||
|
|||
clear() { |
|||
if (!this.context) return |
|||
|
|||
const w = this.canvas.width |
|||
const h = this.canvas.height |
|||
|
|||
this.context.fillStyle = '#000' |
|||
this.context.fillRect(0, 0, w, h) |
|||
} |
|||
|
|||
resize(width, height) { |
|||
this.width = width | 0 |
|||
this.height = height | 0 |
|||
|
|||
this.canvas.width = this.width |
|||
this.canvas.height = this.height |
|||
|
|||
this.imageData = this.context.getImageData(0, 0, this.width, this.height) |
|||
Fill(this.imageData.data, 255) |
|||
} |
|||
|
|||
renderProgress(progress) { |
|||
const w = this.canvas.width |
|||
const h = this.canvas.height |
|||
const ctx = this.context |
|||
|
|||
ctx.fillStyle = '#222' |
|||
ctx.fillRect(0, 0, w, h) |
|||
ctx.fillStyle = '#fff' |
|||
ctx.fillRect(0, h - h * progress, w, h * progress) |
|||
} |
|||
|
|||
render(y, cb, cr) { |
|||
this.YCbCrToRGBA(y, cb, cr, this.imageData.data) |
|||
this.context.putImageData(this.imageData, 0, 0) |
|||
} |
|||
|
|||
YCbCrToRGBA(y, cb, cr, rgba) { |
|||
if (!this.enabled) { |
|||
return |
|||
} |
|||
|
|||
// Chroma values are the same for each block of 4 pixels, so we process
|
|||
// 2 lines at a time, 2 neighboring pixels each.
|
|||
// I wish we could use 32bit writes to the RGBA buffer instead of writing
|
|||
// each byte separately, but we need the automatic clamping of the RGBA
|
|||
// buffer.
|
|||
|
|||
const w = ((this.width + 15) >> 4) << 4 |
|||
const w2 = w >> 1 |
|||
|
|||
let yIndex1 = 0 |
|||
let yIndex2 = w |
|||
const yNext2Lines = w + (w - this.width) |
|||
|
|||
let cIndex = 0 |
|||
const cNextLine = w2 - (this.width >> 1) |
|||
|
|||
let rgbaIndex1 = 0 |
|||
let rgbaIndex2 = this.width * 4 |
|||
const rgbaNext2Lines = this.width * 4 |
|||
|
|||
const cols = this.width >> 1 |
|||
const rows = this.height >> 1 |
|||
|
|||
let ccb, ccr, r, g, b |
|||
|
|||
for (let row = 0; row < rows; row++) { |
|||
for (let col = 0; col < cols; col++) { |
|||
ccb = cb[cIndex] |
|||
ccr = cr[cIndex] |
|||
cIndex++ |
|||
|
|||
r = ccb + ((ccb * 103) >> 8) - 179 |
|||
g = ((ccr * 88) >> 8) - 44 + ((ccb * 183) >> 8) - 91 |
|||
b = ccr + ((ccr * 198) >> 8) - 227 |
|||
|
|||
// Line 1
|
|||
const y1 = y[yIndex1++] |
|||
const y2 = y[yIndex1++] |
|||
rgba[rgbaIndex1] = y1 + r |
|||
rgba[rgbaIndex1 + 1] = y1 - g |
|||
rgba[rgbaIndex1 + 2] = y1 + b |
|||
rgba[rgbaIndex1 + 4] = y2 + r |
|||
rgba[rgbaIndex1 + 5] = y2 - g |
|||
rgba[rgbaIndex1 + 6] = y2 + b |
|||
rgbaIndex1 += 8 |
|||
|
|||
// Line 2
|
|||
const y3 = y[yIndex2++] |
|||
const y4 = y[yIndex2++] |
|||
rgba[rgbaIndex2] = y3 + r |
|||
rgba[rgbaIndex2 + 1] = y3 - g |
|||
rgba[rgbaIndex2 + 2] = y3 + b |
|||
rgba[rgbaIndex2 + 4] = y4 + r |
|||
rgba[rgbaIndex2 + 5] = y4 - g |
|||
rgba[rgbaIndex2 + 6] = y4 + b |
|||
rgbaIndex2 += 8 |
|||
} |
|||
|
|||
yIndex1 += yNext2Lines |
|||
yIndex2 += yNext2Lines |
|||
rgbaIndex1 += rgbaNext2Lines |
|||
rgbaIndex2 += rgbaNext2Lines |
|||
cIndex += cNextLine |
|||
} |
|||
} |
|||
} |
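// For reference (illustrative, not part of the changeset): the shift-based chroma terms
// in YCbCrToRGBA are fixed-point forms of the usual Rec.601-style coefficients, e.g.
// c + ((c * 103) >> 8) ≈ 1.402 * c with the constant 179 ≈ 1.402 * 128 folded out, so
// each term is effectively coefficient * (chroma - 128) without per-pixel float math.
function approxChromaCoefficient(k, addOne = true) {
  // k is the integer multiplier used in the code above (103, 183, 88 or 198)
  return (addOne ? 1 : 0) + k / 256
}
console.log(approxChromaCoefficient(103).toFixed(3)) // "1.402"
console.log(approxChromaCoefficient(198).toFixed(3)) // "1.773"
console.log(approxChromaCoefficient(88, false).toFixed(3)) // "0.344"
console.log(approxChromaCoefficient(183, false).toFixed(3)) // "0.715"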
@ -0,0 +1,9 @@ |
|||
import CanvasRenderer from './canvas2d' |
|||
import WebGLRenderer from './webgl' |
|||
|
|||
const Renderer = { |
|||
Canvas2D: CanvasRenderer, |
|||
WebGL: WebGLRenderer |
|||
} |
|||
|
|||
export default Renderer |
@ -0,0 +1,307 @@ |
|||
export default class WebGLRenderer { |
|||
/** @type {HTMLCanvasElement} */ |
|||
canvas |
|||
/** @type {WebGLRenderingContext} */ |
|||
gl |
|||
constructor(options) { |
|||
this.canvas = options.canvas ?? document.createElement('canvas') |
|||
this.width = this.canvas.width |
|||
this.height = this.canvas.height |
|||
this.enabled = true |
|||
|
|||
this.hasTextureData = {} |
|||
|
|||
const contextCreateOptions = { |
|||
preserveDrawingBuffer: !!options.preserveDrawingBuffer, |
|||
alpha: false, |
|||
depth: false, |
|||
stencil: false, |
|||
antialias: false, |
|||
premultipliedAlpha: false |
|||
} |
|||
|
|||
this.gl = |
|||
this.canvas.getContext('webgl', contextCreateOptions) || |
|||
this.canvas.getContext('experimental-webgl', contextCreateOptions) |
|||
|
|||
if (!this.gl) { |
|||
throw new Error('Failed to get WebGL Context') |
|||
} |
|||
|
|||
const gl = this.gl |
|||
let vertexAttr = null |
|||
|
|||
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false) |
|||
|
|||
// Init buffers
|
|||
this.vertexBuffer = gl.createBuffer() |
|||
const vertexCoords = new Float32Array([0, 0, 0, 1, 1, 0, 1, 1]) |
|||
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer) |
|||
gl.bufferData(gl.ARRAY_BUFFER, vertexCoords, gl.STATIC_DRAW) |
|||
|
|||
// Setup the main YCrCbToRGBA shader
|
|||
this.program = this.createProgram( |
|||
WebGLRenderer.SHADER.VERTEX_IDENTITY, |
|||
WebGLRenderer.SHADER.FRAGMENT_YCRCB_TO_RGBA |
|||
) |
|||
vertexAttr = gl.getAttribLocation(this.program, 'vertex') |
|||
gl.enableVertexAttribArray(vertexAttr) |
|||
gl.vertexAttribPointer(vertexAttr, 2, gl.FLOAT, false, 0, 0) |
|||
|
|||
this.textureY = this.createTexture(0, 'textureY') |
|||
this.textureCb = this.createTexture(1, 'textureCb') |
|||
this.textureCr = this.createTexture(2, 'textureCr') |
|||
|
|||
// Setup the loading animation shader
|
|||
this.loadingProgram = this.createProgram( |
|||
WebGLRenderer.SHADER.VERTEX_IDENTITY, |
|||
WebGLRenderer.SHADER.FRAGMENT_LOADING |
|||
) |
|||
vertexAttr = gl.getAttribLocation(this.loadingProgram, 'vertex') |
|||
gl.enableVertexAttribArray(vertexAttr) |
|||
gl.vertexAttribPointer(vertexAttr, 2, gl.FLOAT, false, 0, 0) |
|||
|
|||
this.shouldCreateUnclampedViews = !this.allowsClampedTextureData() |
|||
} |
|||
|
|||
destroy(removeCanvas = true) { |
|||
const gl = this.gl |
|||
|
|||
this.deleteTexture(gl.TEXTURE0, this.textureY) |
|||
this.deleteTexture(gl.TEXTURE1, this.textureCb) |
|||
this.deleteTexture(gl.TEXTURE2, this.textureCr) |
|||
|
|||
gl.useProgram(null) |
|||
gl.deleteProgram(this.program) |
|||
gl.deleteProgram(this.loadingProgram) |
|||
|
|||
gl.bindBuffer(gl.ARRAY_BUFFER, null) |
|||
gl.deleteBuffer(this.vertexBuffer) |
|||
|
|||
gl.getExtension('WEBGL_lose_context')?.loseContext() |
|||
// gl.clear()
|
|||
|
|||
if (removeCanvas) { |
|||
// Remove the canvas from the DOM (removeCanvas defaults to true)
|
|||
this.canvas.remove() |
|||
} |
|||
} |
|||
|
|||
clear() { |
|||
// Clear to an opaque black background
|
|||
this.gl?.clearColor(0, 0, 0, 1) |
|||
this.gl?.clear(this.gl.COLOR_BUFFER_BIT) |
|||
} |
|||
|
|||
resize(width, height) { |
|||
this.width = width | 0 |
|||
this.height = height | 0 |
|||
|
|||
this.canvas.width = this.width |
|||
this.canvas.height = this.height |
|||
|
|||
this.gl.useProgram(this.program) |
|||
|
|||
const codedWidth = ((this.width + 15) >> 4) << 4 |
|||
this.gl.viewport(0, 0, codedWidth, this.height) |
|||
} |
|||
|
|||
createTexture(index, name) { |
|||
const gl = this.gl |
|||
const texture = gl.createTexture() |
|||
|
|||
gl.bindTexture(gl.TEXTURE_2D, texture) |
|||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR) |
|||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR) |
|||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE) |
|||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE) |
|||
gl.uniform1i(gl.getUniformLocation(this.program, name), index) |
|||
|
|||
return texture |
|||
} |
|||
|
|||
createProgram(vsh, fsh) { |
|||
const gl = this.gl |
|||
const program = gl.createProgram() |
|||
|
|||
gl.attachShader(program, this.compileShader(gl.VERTEX_SHADER, vsh)) |
|||
gl.attachShader(program, this.compileShader(gl.FRAGMENT_SHADER, fsh)) |
|||
gl.linkProgram(program) |
|||
gl.useProgram(program) |
|||
|
|||
return program |
|||
} |
|||
|
|||
compileShader(type, source) { |
|||
const gl = this.gl |
|||
const shader = gl.createShader(type) |
|||
gl.shaderSource(shader, source) |
|||
gl.compileShader(shader) |
|||
|
|||
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { |
|||
throw new Error(gl.getShaderInfoLog(shader)) |
|||
} |
|||
|
|||
return shader |
|||
} |
|||
|
|||
allowsClampedTextureData() { |
|||
const gl = this.gl |
|||
const texture = gl.createTexture() |
|||
|
|||
gl.bindTexture(gl.TEXTURE_2D, texture) |
|||
gl.texImage2D( |
|||
gl.TEXTURE_2D, |
|||
0, |
|||
gl.LUMINANCE, |
|||
1, |
|||
1, |
|||
0, |
|||
gl.LUMINANCE, |
|||
gl.UNSIGNED_BYTE, |
|||
new Uint8ClampedArray([0]) |
|||
) |
|||
return gl.getError() === 0 |
|||
} |
|||
|
|||
renderProgress(progress) { |
|||
const gl = this.gl |
|||
|
|||
gl.useProgram(this.loadingProgram) |
|||
|
|||
const loc = gl.getUniformLocation(this.loadingProgram, 'progress') |
|||
gl.uniform1f(loc, progress) |
|||
|
|||
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4) |
|||
} |
|||
|
|||
render(y, cb, cr, isClampedArray) { |
|||
if (!this.enabled) { |
|||
return |
|||
} |
|||
|
|||
const gl = this.gl |
|||
const w = ((this.width + 15) >> 4) << 4 |
|||
const h = this.height |
|||
const w2 = w >> 1 |
|||
const h2 = h >> 1 |
|||
|
|||
// In some browsers WebGL doesn't like Uint8ClampedArrays (this is a bug
|
|||
// and should be fixed soon-ish), so we have to create a Uint8Array view
|
|||
// for each plane.
|
|||
if (isClampedArray && this.shouldCreateUnclampedViews) { |
|||
y = new Uint8Array(y.buffer) |
|||
cb = new Uint8Array(cb.buffer) |
|||
cr = new Uint8Array(cr.buffer) |
|||
} |
|||
|
|||
gl.useProgram(this.program) |
|||
|
|||
this.updateTexture(gl.TEXTURE0, this.textureY, w, h, y) |
|||
this.updateTexture(gl.TEXTURE1, this.textureCb, w2, h2, cb) |
|||
this.updateTexture(gl.TEXTURE2, this.textureCr, w2, h2, cr) |
|||
|
|||
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4) |
|||
} |
|||
|
|||
updateTexture(unit, texture, w, h, data) { |
|||
const gl = this.gl |
|||
gl.activeTexture(unit) |
|||
gl.bindTexture(gl.TEXTURE_2D, texture) |
|||
|
|||
if (this.hasTextureData[unit]) { |
|||
gl.texSubImage2D( |
|||
gl.TEXTURE_2D, |
|||
0, |
|||
0, |
|||
0, |
|||
w, |
|||
h, |
|||
gl.LUMINANCE, |
|||
gl.UNSIGNED_BYTE, |
|||
data |
|||
) |
|||
} else { |
|||
this.hasTextureData[unit] = true |
|||
gl.texImage2D( |
|||
gl.TEXTURE_2D, |
|||
0, |
|||
gl.LUMINANCE, |
|||
w, |
|||
h, |
|||
0, |
|||
gl.LUMINANCE, |
|||
gl.UNSIGNED_BYTE, |
|||
data |
|||
) |
|||
} |
|||
} |
|||
|
|||
deleteTexture(unit, texture) { |
|||
const gl = this.gl |
|||
gl.activeTexture(unit) |
|||
gl.bindTexture(gl.TEXTURE_2D, null) |
|||
gl.deleteTexture(texture) |
|||
} |
|||
|
|||
static IsSupported() { |
|||
try { |
|||
if (!window.WebGLRenderingContext) { |
|||
return false |
|||
} |
|||
|
|||
const canvas = document.createElement('canvas') |
|||
return !!( |
|||
canvas.getContext('webgl') || canvas.getContext('experimental-webgl') |
|||
) |
|||
} catch (err) { |
|||
return false |
|||
} |
|||
} |
|||
|
|||
static SHADER = { |
|||
FRAGMENT_YCRCB_TO_RGBA: [ |
|||
'precision mediump float;', |
|||
'uniform sampler2D textureY;', |
|||
'uniform sampler2D textureCb;', |
|||
'uniform sampler2D textureCr;', |
|||
'varying vec2 texCoord;', |
|||
|
|||
'mat4 rec601 = mat4(', |
|||
'1.16438, 0.00000, 1.59603, -0.87079,', |
|||
'1.16438, -0.39176, -0.81297, 0.52959,', |
|||
'1.16438, 2.01723, 0.00000, -1.08139,', |
|||
'0, 0, 0, 1', |
|||
');', |
|||
|
|||
'void main() {', |
|||
'float y = texture2D(textureY, texCoord).r;', |
|||
'float cb = texture2D(textureCb, texCoord).r;', |
|||
'float cr = texture2D(textureCr, texCoord).r;', |
|||
|
|||
'gl_FragColor = vec4(y, cr, cb, 1.0) * rec601;', |
|||
'}' |
|||
].join('\n'), |
|||
|
|||
FRAGMENT_LOADING: [ |
|||
'precision mediump float;', |
|||
'uniform float progress;', |
|||
'varying vec2 texCoord;', |
|||
|
|||
'void main() {', |
|||
'float c = ceil(progress-(1.0-texCoord.y));', |
|||
'gl_FragColor = vec4(c,c,c,1);', |
|||
'}' |
|||
].join('\n'), |
|||
|
|||
VERTEX_IDENTITY: [ |
|||
'attribute vec2 vertex;', |
|||
'varying vec2 texCoord;', |
|||
|
|||
'void main() {', |
|||
'texCoord = vertex;', |
|||
'gl_Position = vec4((vertex * 2.0 - 1.0) * vec2(1, -1), 0.0, 1.0);', |
|||
'}' |
|||
].join('\n') |
|||
} |
|||
} |
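// Sketch (illustration only): what the vec4(y, cr, cb, 1.0) * rec601 product in the
// FRAGMENT_YCRCB_TO_RGBA shader works out to per channel. GLSL mat4 literals are
// column-major and v * M dots v with each column, so the BT.601 offsets sit in the
// last entry of each column; y/cb/cr are in 0..1 as texture2D() returns them.
function rec601ShaderEquivalent(y, cr, cb) {
  const r = 1.16438 * y + 1.59603 * cb - 0.87079
  const g = 1.16438 * y - 0.39176 * cr - 0.81297 * cb + 0.52959
  const b = 1.16438 * y + 2.01723 * cr - 1.08139
  return [r, g, b] // clamped to 0..1 when written to gl_FragColor
}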
@ -0,0 +1,133 @@ |
|||
import { Now } from '../../utils' |
|||
|
|||
export default class AjaxProgressiveSource { |
|||
constructor(url, options) { |
|||
this.url = url |
|||
this.destination = null |
|||
this.request = null |
|||
this.streaming = false |
|||
|
|||
this.completed = false |
|||
this.established = false |
|||
this.progress = 0 |
|||
|
|||
this.fileSize = 0 |
|||
this.loadedSize = 0 |
|||
this.chunkSize = options.chunkSize || 1024 * 1024 |
|||
|
|||
this.isLoading = false |
|||
this.loadFails = 0 |
|||
this.loadStartTime = 0 |
|||
this.throttled = options.throttled !== false |
|||
this.aborted = false |
|||
|
|||
this.onEstablishedCallback = options.onSourceEstablished |
|||
this.onCompletedCallback = options.onSourceCompleted |
|||
} |
|||
|
|||
connect(destination) { |
|||
this.destination = destination |
|||
} |
|||
|
|||
start() { |
|||
this.request = new XMLHttpRequest() |
|||
|
|||
this.request.onreadystatechange = function() { |
|||
if (this.request.readyState === this.request.DONE) { |
|||
this.fileSize = parseInt( |
|||
this.request.getResponseHeader('Content-Length') |
|||
) |
|||
this.loadNextChunk() |
|||
} |
|||
}.bind(this) |
|||
|
|||
this.request.onprogress = this.onProgress.bind(this) |
|||
this.request.open('HEAD', this.url) |
|||
this.request.send() |
|||
} |
|||
|
|||
resume(secondsHeadroom) { |
|||
if (this.isLoading || !this.throttled) { |
|||
return |
|||
} |
|||
|
|||
// Guess the worst case loading time with lots of safety margin. This is
|
|||
// somewhat arbitrary...
|
|||
const worstCaseLoadingTime = this.loadTime * 8 + 2 |
|||
if (worstCaseLoadingTime > secondsHeadroom) { |
|||
this.loadNextChunk() |
|||
} |
|||
} |
|||
|
|||
destroy() { |
|||
this.request.abort() |
|||
this.aborted = true |
|||
} |
|||
|
|||
loadNextChunk() { |
|||
const start = this.loadedSize |
|||
const end = Math.min(this.loadedSize + this.chunkSize - 1, this.fileSize - 1) |
|||
|
|||
if (start >= this.fileSize || this.aborted) { |
|||
this.completed = true |
|||
if (this.onCompletedCallback) { |
|||
this.onCompletedCallback(this) |
|||
} |
|||
return |
|||
} |
|||
|
|||
this.isLoading = true |
|||
this.loadStartTime = Now() |
|||
this.request = new XMLHttpRequest() |
|||
|
|||
this.request.onreadystatechange = function() { |
|||
if ( |
|||
this.request.readyState === this.request.DONE && |
|||
this.request.status >= 200 && |
|||
this.request.status < 300 |
|||
) { |
|||
this.onChunkLoad(this.request.response) |
|||
} else if (this.request.readyState === this.request.DONE) { |
|||
// Retry?
|
|||
if (this.loadFails++ < 3) { |
|||
this.loadNextChunk() |
|||
} |
|||
} |
|||
}.bind(this) |
|||
|
|||
if (start === 0) { |
|||
this.request.onprogress = this.onProgress.bind(this) |
|||
} |
|||
|
|||
this.request.open('GET', this.url + '?' + start + '-' + end) |
|||
this.request.setRequestHeader('Range', 'bytes=' + start + '-' + end) |
|||
this.request.responseType = 'arraybuffer' |
|||
this.request.send() |
|||
} |
|||
|
|||
onProgress(ev) { |
|||
this.progress = ev.loaded / ev.total |
|||
} |
|||
|
|||
onChunkLoad(data) { |
|||
const isFirstChunk = !this.established |
|||
this.established = true |
|||
this.progress = 1 |
|||
|
|||
this.loadedSize += data.byteLength |
|||
this.loadFails = 0 |
|||
this.isLoading = false |
|||
|
|||
if (isFirstChunk && this.onEstablishedCallback) { |
|||
this.onEstablishedCallback(this) |
|||
} |
|||
|
|||
if (this.destination) { |
|||
this.destination.write(data) |
|||
} |
|||
|
|||
this.loadTime = Now() - this.loadStartTime |
|||
if (!this.throttled) { |
|||
this.loadNextChunk() |
|||
} |
|||
} |
|||
} |
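// Usage sketch (illustrative; the file URL and destination are placeholders): the source
// above first issues a HEAD request for Content-Length, then pulls the file in Range
// chunks, throttled by resume(headroom) unless `throttled` is disabled.
import AjaxProgressiveSource from './ajax-progressive'

const source = new AjaxProgressiveSource('/videos/demo.ts', {
  chunkSize: 1024 * 1024, // one Range request per megabyte
  throttled: true,
  onSourceCompleted: () => console.log('file fully loaded')
})
source.connect({ write: chunk => console.log('received', chunk.byteLength, 'bytes') })
source.start()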
@ -0,0 +1,68 @@ |
|||
'use strict' |
|||
|
|||
export default class AjaxSource { |
|||
constructor(url, options) { |
|||
this.url = url |
|||
this.destination = null |
|||
this.request = null |
|||
this.streaming = false |
|||
|
|||
this.completed = false |
|||
this.established = false |
|||
this.progress = 0 |
|||
|
|||
this.onEstablishedCallback = options.onSourceEstablished |
|||
this.onCompletedCallback = options.onSourceCompleted |
|||
} |
|||
|
|||
connect(destination) { |
|||
this.destination = destination |
|||
} |
|||
|
|||
start() { |
|||
this.request = new XMLHttpRequest() |
|||
|
|||
this.request.onreadystatechange = function() { |
|||
if ( |
|||
this.request.readyState === this.request.DONE && |
|||
this.request.status === 200 |
|||
) { |
|||
this.onLoad(this.request.response) |
|||
} |
|||
}.bind(this) |
|||
|
|||
this.request.onprogress = this.onProgress.bind(this) |
|||
this.request.open('GET', this.url) |
|||
this.request.responseType = 'arraybuffer' |
|||
this.request.send() |
|||
} |
|||
|
|||
resume(secondsHeadroom) { |
|||
// Nothing to do here
|
|||
} |
|||
|
|||
destroy() { |
|||
this.request.abort() |
|||
} |
|||
|
|||
onProgress(ev) { |
|||
this.progress = ev.loaded / ev.total |
|||
} |
|||
|
|||
onLoad(data) { |
|||
this.established = true |
|||
this.completed = true |
|||
this.progress = 1 |
|||
|
|||
if (this.onEstablishedCallback) { |
|||
this.onEstablishedCallback(this) |
|||
} |
|||
if (this.onCompletedCallback) { |
|||
this.onCompletedCallback(this) |
|||
} |
|||
|
|||
if (this.destination) { |
|||
this.destination.write(data) |
|||
} |
|||
} |
|||
} |
@ -0,0 +1,80 @@ |
|||
'use strict' |
|||
|
|||
export default class FetchSource { |
|||
constructor(url, options) { |
|||
this.url = url |
|||
this.destination = null |
|||
this.request = null |
|||
this.streaming = true |
|||
|
|||
this.completed = false |
|||
this.established = false |
|||
this.progress = 0 |
|||
this.aborted = false |
|||
|
|||
this.onEstablishedCallback = options.onSourceEstablished |
|||
this.onCompletedCallback = options.onSourceCompleted |
|||
} |
|||
|
|||
connect(destination) { |
|||
this.destination = destination |
|||
} |
|||
|
|||
start() { |
|||
const params = { |
|||
method: 'GET', |
|||
headers: new Headers(), |
|||
cache: 'default' |
|||
} |
|||
|
|||
self |
|||
.fetch(this.url, params) |
|||
.then( |
|||
function(res) { |
|||
if (res.ok && res.status >= 200 && res.status <= 299) { |
|||
this.progress = 1 |
|||
this.established = true |
|||
return this.pump(res.body.getReader()) |
|||
} else { |
|||
// error
|
|||
} |
|||
}.bind(this) |
|||
) |
|||
.catch(function(err) { |
|||
throw err |
|||
}) |
|||
} |
|||
|
|||
pump(reader) { |
|||
return reader |
|||
.read() |
|||
.then( |
|||
function(result) { |
|||
if (result.done) { |
|||
this.completed = true |
|||
} else { |
|||
if (this.aborted) { |
|||
return reader.cancel() |
|||
} |
|||
|
|||
if (this.destination) { |
|||
this.destination.write(result.value.buffer) |
|||
} |
|||
|
|||
return this.pump(reader) |
|||
} |
|||
}.bind(this) |
|||
) |
|||
.catch(function(err) { |
|||
throw err |
|||
}) |
|||
} |
|||
|
|||
resume(secondsHeadroom) { |
|||
// Nothing to do here
|
|||
} |
|||
|
|||
abort() { |
|||
this.aborted = true |
|||
} |
|||
} |
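// The pump() recursion above is the standard ReadableStream read loop. An equivalent
// standalone sketch using async/await (illustration only):
async function drainBody(response, onChunk) {
  const reader = response.body.getReader()
  for (;;) {
    const { done, value } = await reader.read()
    if (done) return
    onChunk(value.buffer) // same hand-off FetchSource does via destination.write()
  }
}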
@ -0,0 +1,13 @@ |
|||
import AjaxSource from './ajax' |
|||
import AjaxProgressiveSource from './ajax-progressive' |
|||
import FetchSource from './fetch' |
|||
import WSSource from './websocket' |
|||
|
|||
const Source = { |
|||
Ajax: AjaxSource, |
|||
AjaxProgressive: AjaxProgressiveSource, |
|||
Fetch: FetchSource, |
|||
WebSocket: WSSource |
|||
} |
|||
|
|||
export default Source |
@ -0,0 +1,237 @@ |
|||
|
|||
/* eslint-disable */ |
|||
'use strict' |
|||
|
|||
import TS from '../demuxer/ts' |
|||
|
|||
export default class WSSource { |
|||
timer = { |
|||
heartbeat: null, |
|||
streamInterrupt: null |
|||
} |
|||
reconnectInterval |
|||
shouldAttemptReconnect |
|||
progress = 0 |
|||
reconnectTimeoutId = 0 |
|||
reconnectCount = 0 |
|||
callbacks = { connect: [], data: [] } |
|||
streaming = true |
|||
completed = false |
|||
established = false |
|||
isPaused = false |
|||
isStreamInterrupt = false |
|||
/** @type {TS} */ |
|||
destination |
|||
/** @type {WebSocket} */ |
|||
socket |
|||
/** @type {string} */ |
|||
url |
|||
onEstablishedCallback |
|||
onCompletedCallback |
|||
onClosedCallback |
|||
onStreamInterruptCallback |
|||
onConnectedCallback |
|||
onStreamTimeoutFirstReceiveCallback |
|||
/** |
|||
* |
|||
* @param {string} url |
|||
* @param {import('../../types').PlayerOptions} options |
|||
*/ |
|||
constructor(url, options) { |
|||
this.url = url |
|||
this.options = options |
|||
|
|||
this.reconnectInterval = |
|||
options.reconnectInterval !== undefined ? options.reconnectInterval : 5 |
|||
this.shouldAttemptReconnect = !!this.reconnectInterval |
|||
|
|||
this.onEstablishedCallback = options.onSourceEstablished |
|||
this.onCompletedCallback = options.onSourceCompleted // Never used
|
|||
this.onClosedCallback = options.onSourceClosed |
|||
this.onConnectedCallback = options.onSourceConnected |
|||
this.onStreamInterruptCallback = options.onSourceStreamInterrupt |
|||
this.onStreamContinueCallback = options.onSourceStreamContinue |
|||
} |
|||
|
|||
connect(destination) { |
|||
this.destination = destination |
|||
} |
|||
|
|||
changeUrl(url = '') { |
|||
clearTimeout(this.timer.streamInterrupt) |
|||
|
|||
if (typeof url === 'string' && url !== '') { |
|||
if (this.url !== url) { |
|||
this.destroy() |
|||
this.url = url |
|||
this.start() |
|||
} |
|||
} else { |
|||
this.destroy() |
|||
this.url = '' |
|||
} |
|||
} |
|||
|
|||
reload() { |
|||
this.destroy() |
|||
this.start() |
|||
} |
|||
|
|||
destroy() { |
|||
clearTimeout(this.reconnectTimeoutId) |
|||
this.reconnectTimeoutId = 0 |
|||
this.shouldAttemptReconnect = false |
|||
this.socket && this.socket.close() |
|||
if (this.socket) { |
|||
this.socket.onmessage = null |
|||
this.socket.onopen = null |
|||
this.socket.onerror = null |
|||
this.socket.onclose = null |
|||
this.socket.onmessage = null |
|||
this.socket = null |
|||
} |
|||
} |
|||
|
|||
start() { |
|||
this.reconnectTimeoutId = 0 |
|||
this.reconnectCount = 0 |
|||
this.shouldAttemptReconnect = !!this.reconnectInterval |
|||
this.progress = 0 |
|||
this.established = false |
|||
this.isPaused = false |
|||
|
|||
this.wsConnect() |
|||
} |
|||
|
|||
wsConnect() { |
|||
if (!this.url) return |
|||
// When connecting to a Java WebSocket server, the second argument must either carry a value or be omitted entirely; passing null makes the connection fail repeatedly
|
|||
try { |
|||
this.socket = new WebSocket(this.url, this.options?.protocols) |
|||
this.socket.binaryType = 'arraybuffer' |
|||
this.socket.onmessage = this.onMessage.bind(this) |
|||
this.socket.onopen = this.onOpen.bind(this) |
|||
this.socket.onerror = this.onError.bind(this) |
|||
this.socket.onclose = this.onClose.bind(this) |
|||
} catch (error) { |
|||
console.error('websocket connect error: ', error) |
|||
} |
|||
} |
|||
|
|||
pause() { |
|||
if (!this.isPaused) { |
|||
clearTimeout(this.timer.streamInterrupt) |
|||
this.isPaused = true |
|||
if (this.socket?.readyState === WebSocket.OPEN) { |
|||
this.socket.onmessage = null |
|||
} |
|||
} |
|||
// if (this.reconnectTimeoutId) {
|
|||
// clearTimeout(this.reconnectTimeoutId)
|
|||
// this.reconnectTimeoutId = null
|
|||
// }
|
|||
} |
|||
|
|||
continue() { |
|||
// Resume receiving messages after a pause()
|
|||
if (this.isPaused) { |
|||
this.isPaused = false |
|||
if (this.socket == null) { |
|||
this.start() |
|||
} else if (this.socket?.readyState === WebSocket.OPEN) { |
|||
this.socket.onmessage = this.onMessage.bind(this) |
|||
this.startStreamTimeoutTimer() |
|||
} |
|||
} |
|||
} |
|||
|
|||
onOpen() { |
|||
this.progress = 1 |
|||
this.reconnectTimeoutId = 0 |
|||
this.reconnectCount = 0 |
|||
this.isOpened = true |
|||
if (this.onConnectedCallback) { |
|||
this.onConnectedCallback(this) |
|||
} |
|||
this.startStreamTimeoutTimer() |
|||
} |
|||
|
|||
onError(err) { |
|||
// console.error(err)
|
|||
} |
|||
|
|||
onClose() { |
|||
this.established = false |
|||
if (this.progress >= 1) { |
|||
// progress >= 1 means the connection had been established before this close
|
|||
this.progress = 0 |
|||
if (this.onClosedCallback) { |
|||
this.onClosedCallback(this) |
|||
} |
|||
clearTimeout(this.reconnectTimeoutId) |
|||
this.reconnectTimeoutId = setTimeout(this.start.bind(this), 5000) |
|||
return |
|||
} |
|||
|
|||
if (this.shouldAttemptReconnect && this.reconnectCount < 10) { |
|||
// Reconnect at most 10 times
|
|||
clearTimeout(this.reconnectTimeoutId) |
|||
this.reconnectTimeoutId = setTimeout( |
|||
this.wsConnect.bind(this), |
|||
this.reconnectInterval * 1000 |
|||
) |
|||
this.reconnectCount += 1 |
|||
console.log('websocket reconnect attempt: ', this.reconnectCount) |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* |
|||
* @param {MessageEvent} ev |
|||
*/ |
|||
onMessage(ev) { |
|||
this.startStreamTimeoutTimer() |
|||
try { |
|||
if (!this.established) { |
|||
this.established = true |
|||
this.isStreamInterrupt = false |
|||
this.onEstablishedCallback?.(this) |
|||
console.log(ev) |
|||
} else if (this.isStreamInterrupt) { |
|||
this.isStreamInterrupt = false |
|||
this.onStreamContinueCallback?.(this) |
|||
} |
|||
|
|||
if (this.destination) { |
|||
this.destination.write(ev.data) |
|||
} |
|||
} catch (error) { |
|||
if (error.message?.indexOf('memory access out of bounds') > -1) { |
|||
this.reload() |
|||
} else { |
|||
console.error(error) |
|||
} |
|||
} |
|||
if (this.recorder) { |
|||
try { |
|||
this.recorder.write?.(ev.data) |
|||
} catch (error) { |
|||
this.recorder = null |
|||
} |
|||
} |
|||
} |
|||
|
|||
startStreamTimeoutTimer() { |
|||
if (this.timer.streamInterrupt) { |
|||
clearTimeout(this.timer.streamInterrupt) |
|||
} |
|||
this.timer.streamInterrupt = setTimeout(() => { |
|||
console.warn('[JSMpeg]: timed out waiting for the video stream') |
|||
this.timer.streamInterrupt = null |
|||
this.isStreamInterrupt = true |
|||
if (this.onStreamInterruptCallback) { |
|||
this.onStreamInterruptCallback() |
|||
} |
|||
}, 5000) |
|||
} |
|||
} |
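// Usage sketch (illustrative; URL and callbacks are placeholders, option names follow
// types/index.d.ts): the source reconnects up to 10 times on close and reports a stream
// interruption when no message arrives within 5 seconds.
import WSSource from './websocket'

const ws = new WSSource('ws://localhost:9999/stream', {
  reconnectInterval: 5, // seconds between reconnect attempts
  onSourceConnected: () => console.log('socket open'),
  onSourceStreamInterrupt: () => console.warn('no stream data for 5s'),
  onSourceStreamContinue: () => console.log('stream resumed'),
  onSourceClosed: () => console.log('socket closed')
})
ws.connect({ write: data => { /* feed the TS demuxer here */ } })
ws.start()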
@ -0,0 +1,178 @@ |
|||
/* eslint-disable */ |
|||
import Player from './player' |
|||
|
|||
export default class VideoElement { |
|||
constructor(element) { |
|||
const url = element.dataset.url |
|||
|
|||
if (!url) { |
|||
throw 'VideoElement has no `data-url` attribute' |
|||
} |
|||
|
|||
// Setup the div container, canvas and play button
|
|||
function addStyles(element, styles) { |
|||
for (const name in styles) { |
|||
element.style[name] = styles[name] |
|||
} |
|||
} |
|||
|
|||
this.container = element |
|||
addStyles(this.container, { |
|||
display: 'inline-block', |
|||
position: 'relative', |
|||
minWidth: '80px', |
|||
minHeight: '80px' |
|||
}) |
|||
|
|||
this.canvas = document.createElement('canvas') |
|||
this.canvas.width = 960 |
|||
this.canvas.height = 540 |
|||
addStyles(this.canvas, { |
|||
display: 'block', |
|||
width: '100%' |
|||
}) |
|||
this.container.appendChild(this.canvas) |
|||
|
|||
this.playButton = document.createElement('div') |
|||
this.playButton.innerHTML = VideoElement.PLAY_BUTTON |
|||
addStyles(this.playButton, { |
|||
zIndex: 2, |
|||
position: 'absolute', |
|||
top: '0', |
|||
bottom: '0', |
|||
left: '0', |
|||
right: '0', |
|||
maxWidth: '75px', |
|||
maxHeight: '75px', |
|||
margin: 'auto', |
|||
opacity: '0.7', |
|||
cursor: 'pointer' |
|||
}) |
|||
this.container.appendChild(this.playButton) |
|||
|
|||
// Parse the data-options - we try to decode the values as json. This way
|
|||
// we can get proper boolean and number values. If JSON.parse() fails,
|
|||
// treat it as a string.
|
|||
const options = { canvas: this.canvas } |
|||
for (const option in element.dataset) { |
|||
try { |
|||
options[option] = JSON.parse(element.dataset[option]) |
|||
} catch (err) { |
|||
options[option] = element.dataset[option] |
|||
} |
|||
} |
|||
|
|||
// Create the player instance
|
|||
this.player = new Player(url, options) |
|||
element.playerInstance = this.player |
|||
|
|||
// Setup the poster element, if any
|
|||
if (options.poster && !options.autoplay && !this.player.options.streaming) { |
|||
options.decodeFirstFrame = false |
|||
this.poster = new Image() |
|||
this.poster.src = options.poster |
|||
this.poster.addEventListener('load', this.posterLoaded) |
|||
addStyles(this.poster, { |
|||
display: 'block', |
|||
zIndex: 1, |
|||
position: 'absolute', |
|||
top: 0, |
|||
left: 0, |
|||
bottom: 0, |
|||
right: 0 |
|||
}) |
|||
this.container.appendChild(this.poster) |
|||
} |
|||
|
|||
// Add the click handler if this video is pausable
|
|||
if (!this.player.options.streaming) { |
|||
this.container.addEventListener('click', this.onClick.bind(this)) |
|||
} |
|||
|
|||
// Hide the play button if this video immediately begins playing
|
|||
if (options.autoplay || this.player.options.streaming) { |
|||
this.playButton.style.display = 'none' |
|||
} |
|||
|
|||
// Set up the unlock audio button for iOS devices. iOS only allows us to
|
|||
// play audio after a user action has initiated playing. For autoplay or
|
|||
// streaming players we set up a muted speaker icon as the button. For all
|
|||
// others, we can simply use the play button.
|
|||
if (this.player.audioOut && !this.player.audioOut.unlocked) { |
|||
let unlockAudioElement = this.container |
|||
|
|||
if (options.autoplay || this.player.options.streaming) { |
|||
this.unmuteButton = document.createElement('div') |
|||
this.unmuteButton.innerHTML = VideoElement.UNMUTE_BUTTON |
|||
addStyles(this.unmuteButton, { |
|||
zIndex: 2, |
|||
position: 'absolute', |
|||
bottom: '10px', |
|||
right: '20px', |
|||
width: '75px', |
|||
height: '75px', |
|||
margin: 'auto', |
|||
opacity: '0.7', |
|||
cursor: 'pointer' |
|||
}) |
|||
this.container.appendChild(this.unmuteButton) |
|||
unlockAudioElement = this.unmuteButton |
|||
} |
|||
|
|||
this.unlockAudioBound = this.onUnlockAudio.bind(this, unlockAudioElement) |
|||
unlockAudioElement.addEventListener( |
|||
'touchstart', |
|||
this.unlockAudioBound, |
|||
false |
|||
) |
|||
unlockAudioElement.addEventListener('click', this.unlockAudioBound, true) |
|||
} |
|||
} |
|||
|
|||
onUnlockAudio(element, ev) { |
|||
if (this.unmuteButton) { |
|||
ev.preventDefault() |
|||
ev.stopPropagation() |
|||
} |
|||
this.player.audioOut.unlock( |
|||
function() { |
|||
if (this.unmuteButton) { |
|||
this.unmuteButton.style.display = 'none' |
|||
} |
|||
element.removeEventListener('touchstart', this.unlockAudioBound) |
|||
element.removeEventListener('click', this.unlockAudioBound) |
|||
}.bind(this) |
|||
) |
|||
} |
|||
|
|||
onClick(ev) { |
|||
if (this.player.isPlaying) { |
|||
this.player.pause() |
|||
this.playButton.style.display = 'block' |
|||
} else { |
|||
this.player.play() |
|||
this.playButton.style.display = 'none' |
|||
if (this.poster) { |
|||
this.poster.style.display = 'none' |
|||
} |
|||
} |
|||
} |
|||
|
|||
static PLAY_BUTTON = |
|||
'<svg style="max-width: 75px; max-height: 75px;" ' + |
|||
'viewBox="0 0 200 200" alt="Play video">' + |
|||
'<circle cx="100" cy="100" r="90" fill="none" ' + |
|||
'stroke-width="15" stroke="#fff"/>' + |
|||
'<polygon points="70, 55 70, 145 145, 100" fill="#fff"/>' + |
|||
'</svg>' |
|||
|
|||
static UNMUTE_BUTTON = |
|||
'<svg style="max-width: 75px; max-height: 75px;" viewBox="0 0 75 75">' + |
|||
'<polygon class="audio-speaker" stroke="none" fill="#fff" ' + |
|||
'points="39,13 22,28 6,28 6,47 21,47 39,62 39,13"/>' + |
|||
'<g stroke="#fff" stroke-width="5">' + |
|||
'<path d="M 49,50 69,26"/>' + |
|||
'<path d="M 69,50 49,26"/>' + |
|||
'</g>' + |
|||
'</svg>' |
|||
} |
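// Usage sketch (illustrative; the stream URL is a placeholder): VideoElement reads its
// configuration from data-* attributes, JSON.parse()ing each value so booleans and
// numbers survive, and is normally created via JSMpeg.CreateVideoElements().
const el = document.createElement('div')
el.className = 'jsmpeg'
el.dataset.url = 'ws://localhost:9999/stream'
el.dataset.autoplay = 'true'
document.body.appendChild(el)
new VideoElement(el) // or call JSMpeg.CreateVideoElements() once the markup exists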
@ -0,0 +1,159 @@ |
|||
|
|||
/* eslint-disable */ |
|||
import Source from './source' |
|||
|
|||
export default class WASM { |
|||
constructor() { |
|||
this.stackSize = 5 * 1024 * 1024 // emscripten default
|
|||
this.pageSize = 64 * 1024 // wasm page size
|
|||
this.onInitCallback = null |
|||
this.ready = false |
|||
} |
|||
|
|||
write(buffer) { |
|||
this.loadFromBuffer(buffer, this.onInitCallback) |
|||
} |
|||
|
|||
loadFromFile(url, callback) { |
|||
this.onInitCallback = callback |
|||
const ajax = new Source.Ajax(url, {}) |
|||
ajax.connect(this) |
|||
ajax.start() |
|||
} |
|||
|
|||
loadFromBuffer(buffer, callback) { |
|||
this.moduleInfo = this.readDylinkSection(buffer) |
|||
if (!this.moduleInfo) { |
|||
callback && callback(null) |
|||
return |
|||
} |
|||
|
|||
this.memory = new WebAssembly.Memory({ initial: 256 }) |
|||
const env = { |
|||
memory: this.memory, |
|||
memoryBase: 0, |
|||
__memory_base: 0, |
|||
table: new WebAssembly.Table({ |
|||
initial: this.moduleInfo.tableSize, |
|||
element: 'anyfunc' |
|||
}), |
|||
tableBase: 0, |
|||
__table_base: 0, |
|||
abort: this.c_abort.bind(this), |
|||
___assert_fail: this.c_assertFail.bind(this), |
|||
_sbrk: this.c_sbrk.bind(this) |
|||
} |
|||
|
|||
this.brk = this.align(this.moduleInfo.memorySize + this.stackSize) |
|||
WebAssembly.instantiate(buffer, { env: env }).then( |
|||
function(results) { |
|||
this.instance = results.instance |
|||
if (this.instance.exports.__post_instantiate) { |
|||
this.instance.exports.__post_instantiate() |
|||
} |
|||
this.createHeapViews() |
|||
this.ready = true |
|||
callback && callback(this) |
|||
}.bind(this) |
|||
) |
|||
} |
|||
|
|||
createHeapViews() { |
|||
this.instance.heapU8 = new Uint8Array(this.memory.buffer) |
|||
this.instance.heapU32 = new Uint32Array(this.memory.buffer) |
|||
this.instance.heapF32 = new Float32Array(this.memory.buffer) |
|||
} |
|||
|
|||
align(addr) { |
|||
const a = Math.pow(2, this.moduleInfo.memoryAlignment) |
|||
return Math.ceil(addr / a) * a |
|||
} |
|||
|
|||
c_sbrk(size) { |
|||
const previousBrk = this.brk |
|||
this.brk += size |
|||
|
|||
if (this.brk > this.memory.buffer.byteLength) { |
|||
const bytesNeeded = this.brk - this.memory.buffer.byteLength |
|||
const pagesNeeded = Math.ceil(bytesNeeded / this.pageSize) |
|||
this.memory.grow(pagesNeeded) |
|||
this.createHeapViews() |
|||
} |
|||
return previousBrk |
|||
} |
|||
|
|||
c_abort(size) { |
|||
console.warn('JSMPeg: WASM abort', arguments) |
|||
} |
|||
|
|||
c_assertFail(size) { |
|||
console.warn('JSMPeg: WASM ___assert_fail', arguments) |
|||
} |
|||
|
|||
readDylinkSection(buffer) { |
|||
// Read the WASM header and dylink section of the .wasm binary data
|
|||
// to get the needed table size and static data size.
|
|||
|
|||
// https://github.com/WebAssembly/tool-conventions/blob/master/DynamicLinking.md
|
|||
// https://github.com/kripken/emscripten/blob/20602efb955a7c6c20865a495932427e205651d2/src/support.js
|
|||
|
|||
const bytes = new Uint8Array(buffer) |
|||
let next = 0 |
|||
|
|||
function readVarUint() { |
|||
let ret = 0 |
|||
let mul = 1 |
|||
while (1) { |
|||
const byte = bytes[next++] |
|||
ret += (byte & 0x7f) * mul |
|||
mul *= 0x80 |
|||
if (!(byte & 0x80)) { |
|||
return ret |
|||
} |
|||
} |
|||
} |
|||
|
|||
function matchNextBytes(expected) { |
|||
for (let i = 0; i < expected.length; i++) { |
|||
const b = |
|||
typeof expected[i] === 'string' |
|||
? expected[i].charCodeAt(0) |
|||
: expected[i] |
|||
if (bytes[next++] !== b) { |
|||
return false |
|||
} |
|||
} |
|||
return true |
|||
} |
|||
|
|||
// Make sure we have a wasm header
|
|||
if (!matchNextBytes([0, 'a', 's', 'm'])) { |
|||
console.warn('JSMpeg: WASM header not found') |
|||
return null |
|||
} |
|||
|
|||
// Make sure we have a dylink section
|
|||
next = 9 |
|||
const sectionSize = readVarUint() |
|||
if (!matchNextBytes([6, 'd', 'y', 'l', 'i', 'n', 'k'])) { |
|||
console.warn('JSMpeg: No dylink section found in WASM') |
|||
return null |
|||
} |
|||
|
|||
return { |
|||
memorySize: readVarUint(), |
|||
memoryAlignment: readVarUint(), |
|||
tableSize: readVarUint(), |
|||
tableAlignment: readVarUint() |
|||
} |
|||
} |
|||
|
|||
static IsSupported() { |
|||
return !!window.WebAssembly |
|||
} |
|||
|
|||
static GetModule() { |
|||
WASM.CACHED_MODULE = WASM.CACHED_MODULE || new WASM() |
|||
return WASM.CACHED_MODULE |
|||
} |
|||
} |
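// readVarUint() above decodes unsigned LEB128, the variable-length integer encoding the
// WASM binary format uses: 7 payload bits per byte, high bit set while more bytes follow.
// A standalone sketch of the same decoding (illustration only):
function decodeLeb128(bytes, offset = 0) {
  let value = 0
  let multiplier = 1
  let pos = offset
  for (;;) {
    const byte = bytes[pos++]
    value += (byte & 0x7f) * multiplier // multiply instead of shifting to stay safe past 32 bits
    multiplier *= 0x80
    if ((byte & 0x80) === 0) return { value, next: pos }
  }
}
// decodeLeb128(new Uint8Array([0xe5, 0x8e, 0x26])).value === 624485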
@ -0,0 +1,94 @@ |
|||
import WSSource from '../modules/source/websocket' |
|||
|
|||
export interface JSMpeg { |
|||
Player(url, options: PlayerOptions): JSMpegPlayer |
|||
} |
|||
|
|||
export interface PlayerOptions { |
|||
/** Container element or selector string */ |
|||
contianer: HTMLElement | string |
|||
/** HTML canvas element used for video rendering. If none is given, the renderer creates its own canvas element. */ |
|||
canvas?: HTMLCanvasElement |
|||
/** Whether to start playback immediately. Default false */ |
|||
autoplay?: boolean |
|||
/** Whether to decode audio. Default true */ |
|||
audio?: boolean |
|||
/** Whether to decode video. Default true */ |
|||
video?: boolean |
|||
/** URL of an image to show as a poster before the video starts playing. */ |
|||
poster?: string |
|||
/** Whether to pause playback while the tab is inactive. Note that browsers usually throttle JS in inactive tabs anyway. Default true */ |
|||
pauseWhenHidden?: boolean |
|||
/** Whether to disable WebGL and always use the Canvas2D renderer. Default false */ |
|||
disableGl?: boolean |
|||
/** Whether to disable WebAssembly and always use the JavaScript decoders. Default false */ |
|||
disableWebAssembly?: boolean |
|||
/** Whether the WebGL context is created with preserveDrawingBuffer - needed for "screenshots" via canvas.toDataURL(). Default false */ |
|||
preserveDrawingBuffer?: boolean |
|||
/** Whether to load data in chunks (static files only). When enabled, playback can begin before the source is fully loaded */ |
|||
progressive?: boolean |
|||
/** When using progressive loading, whether to defer loading chunks while they are not needed for playback. Default = progressive */ |
|||
throttled?: boolean |
|||
/** When using progressive loading, the chunk size in bytes. Default 1024*1024 (1 MB) */ |
|||
chunkSize?: number |
|||
/** Whether to decode and display the first frame of the video. Useful for setting the canvas size and using the frame as a "poster" image. Has no effect when autoplay or a streaming source is used. Default true */ |
|||
decodeFirstFrame?: boolean |
|||
/** When streaming, the maximum length of queued audio in seconds. */ |
|||
maxAudioLag?: number |
|||
/** When streaming, the size of the video decode buffer in bytes. Default 512 * 1024 (512 KB). You may have to increase this for very high bitrates. */ |
|||
videoBufferSize?: number |
|||
/** When streaming, the size of the audio decode buffer in bytes. Default 128 * 1024 (128 KB). You may have to increase this for very high bitrates. */ |
|||
audioBufferSize?: number |
|||
/** Callback invoked after each video frame is decoded and rendered */ |
|||
onVideoDecode?: (decoder, time: number) => void |
|||
/** Callback invoked after each audio frame is decoded */ |
|||
onAudioDecode?: (decoder, time: number) => void |
|||
/** Callback invoked whenever playback starts */ |
|||
onPlay?: (player: JSMpegPlayer) => void |
|||
/** Callback invoked when playback pauses (e.g. when .pause() is called or the source ends). */ |
|||
onPause?: (player: JSMpegPlayer) => void |
|||
/** Callback invoked when playback reaches the end of the source (only called when loop is false) */ |
|||
onEnded?: (player: JSMpegPlayer) => void |
|||
/** Callback invoked when there is not enough data for playback */ |
|||
onStalled?: (player: JSMpegPlayer) => void |
|||
/** Callback invoked when the source first receives data */ |
|||
onSourceEstablished?: (source: WSSource) => void |
|||
/** Callback invoked when the source has received all data */ |
|||
onSourceCompleted: (source: WSSource) => void |
|||
/** Fired the first time the websocket receives stream data again after onSourceStreamInterrupt */ |
|||
onSourceStreamContinue: (source: WSSource) => void |
|||
/** Fired when the websocket has not received stream data for a certain amount of time */ |
|||
onSourceStreamInterrupt: (source: WSSource) => void |
|||
/** Fired when the websocket connects to the server */ |
|||
onSourceConnected: (source: WSSource) => void |
|||
/** Fired after the websocket connection closes */ |
|||
onSourceClosed: (source: WSSource) => void |
|||
/** Fired when the video resolution has been decoded */ |
|||
onResolutionDecode: (width: number, height: number) => void |
|||
} |
|||
|
|||
export interface JSMpegPlayer { |
|||
/** Read-only: whether playback is paused */ |
|||
readonly paused: boolean |
|||
/** Get or set the audio volume (0-1) */ |
|||
volume: number |
|||
/** Get or set the current playback position in seconds */ |
|||
currentTime: number |
|||
startTime: number |
|||
/** Start playback */ |
|||
play(): void |
|||
/** Pause playback */ |
|||
pause(): void |
|||
/** Stop playback and seek back to the start */ |
|||
stop(): void |
|||
/** Advance playback by one video frame. This does not decode audio. Only succeeds when enough data is available */ |
|||
nextFrame(): void |
|||
/** Stop playback, disconnect the source and clean up the WebGL and WebAudio state. The player can no longer be used afterwards. */ |
|||
destroy(): void |
|||
video |
|||
source |
|||
renderer |
|||
wasmModule |
|||
audioOut |
|||
audio |
|||
} |
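// Sketch of consuming these typings from plain JS via JSDoc (the relative import path is
// an assumption; note the `contianer` spelling comes from the declaration above):
/** @type {Partial<import('./index').PlayerOptions>} */
const playerOptions = {
  contianer: '#video-wrapper',
  audio: false,
  videoBufferSize: 2 * 1024 * 1024 // bytes
}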
@ -0,0 +1,70 @@ |
|||
export function Now() { |
|||
return window.performance |
|||
? window.performance.now() / 1000 |
|||
: Date.now() / 1000 |
|||
} |
|||
|
|||
export function Fill(array, value) { |
|||
if (array.fill) { |
|||
array.fill(value) |
|||
} else { |
|||
for (let i = 0; i < array.length; i++) { |
|||
array[i] = value |
|||
} |
|||
} |
|||
} |
|||
|
|||
export function Base64ToArrayBuffer(base64) { |
|||
const binary = window.atob(base64) |
|||
const length = binary.length |
|||
const bytes = new Uint8Array(length) |
|||
for (let i = 0; i < length; i++) { |
|||
bytes[i] = binary.charCodeAt(i) |
|||
} |
|||
return bytes.buffer |
|||
} |
|||
|
|||
/** |
|||
 * Trigger a local download of the given data. |
|||
 * @param {Blob|string|Array} blob Data to save: a Blob, an object/data URL string, or raw data to wrap in a Blob |
|||
 * @param {string} name Download file name |
|||
 * @param {string} mimeType MIME type used when raw data is wrapped in a Blob |
|||
 */ |
|||
export function saveToLocal( |
|||
blob, |
|||
name = 'JSMpeg_' + Date.now(), |
|||
mimeType = '' |
|||
) { |
|||
if (!blob) return |
|||
|
|||
const a = document.createElement('a') |
|||
a.style.display = 'none' |
|||
a.download = name |
|||
if (typeof blob === 'string') { |
|||
a.href = blob |
|||
} else { |
|||
blob = |
|||
blob instanceof Blob |
|||
? blob |
|||
: new Blob(blob instanceof Array ? blob : [blob], { |
|||
type: mimeType |
|||
}) |
|||
a.href = URL.createObjectURL(blob) |
|||
} |
|||
|
|||
setTimeout(() => { |
|||
a.click() |
|||
}, 0) |
|||
setTimeout(() => { |
|||
a.remove() |
|||
}, 1) |
|||
|
|||
if (blob instanceof Blob) { |
|||
setTimeout(() => { |
|||
URL.revokeObjectURL(a.href) |
|||
}, 1000) |
|||
} |
|||
} |
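// Usage sketch for the helpers above (illustrative): decode a payload and trigger a
// download; raw data is wrapped in a Blob internally using the given MIME type.
const bytes = Base64ToArrayBuffer('AAAA') // placeholder payload (three zero bytes)
saveToLocal(bytes, 'capture_' + Date.now() + '.ts', 'video/mp2t')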