JSMpeg.Player = (function () { "use strict";

/**
 * JSMpeg Player: wires a byte source (WebSocket / progressive AJAX / plain
 * AJAX) through the MPEG-TS demuxer into the MPEG1 video and MP2 audio
 * decoders, and drives decoding/rendering from requestAnimationFrame.
 *
 * NOTE(review): the middle of this file had been destroyed by what looks like
 * HTML tag-stripping — everything between the literal '<' in the `i<eNum`
 * loop condition and the next '>' (in `enqueuedTime >` inside
 * updateForStreaming) was deleted, leaving a syntax error. The lost methods
 * (play/pause/stop/destroy/seek/getCurrentTime/setCurrentTime/update and the
 * head of updateForStreaming) have been restored from the upstream jsmpeg
 * player (phoboslab/jsmpeg, src/player.js), which the surviving fragments
 * match exactly. The fork-specific event hooks (onended/oncanplay/onpreload/
 * onplay) were re-wired at the call sites implied by their documented
 * meanings — confirm firing order against the fork's original source.
 *
 * @param {string} url      Media URL; a ws:// or wss:// scheme selects the
 *                          WebSocket source.
 * @param {Object} options  Player options; individual keys are read below.
 */
var Player = function (url, options) {
  options = options || {};
  this.options = options;

  // Pick a source: explicit source class > WebSocket (by URL scheme) >
  // progressive AJAX (the default) > plain AJAX.
  if (options.source) {
    this.source = new options.source(url, options);
    options.streaming = !!this.source.streaming;
  }
  else if (url.match(/^wss?:\/\//)) {
    this.source = new JSMpeg.Source.WebSocket(url, options);
    options.streaming = true;
  }
  else if (options.progressive !== false) {
    this.source = new JSMpeg.Source.AjaxProgressive(url, options);
    options.streaming = false;
  }
  else {
    this.source = new JSMpeg.Source.Ajax(url, options);
    options.streaming = false;
  }

  // Seconds of already-enqueued audio tolerated before output is reset while
  // streaming (see updateForStreaming).
  this.maxAudioLag = options.maxAudioLag || 0.25;
  this.loop = options.loop !== false;
  this.autoplay = !!options.autoplay || options.streaming;

  this.demuxer = new JSMpeg.Demuxer.TS(options);
  this.source.connect(this.demuxer);

  if (options.video !== false) {
    this.video = new JSMpeg.Decoder.MPEG1Video(options);
    this.renderer = !options.disableGl && JSMpeg.Renderer.WebGL.IsSupported()
      ? new JSMpeg.Renderer.WebGL(options)
      : new JSMpeg.Renderer.Canvas2D(options);
    this.demuxer.connect(JSMpeg.Demuxer.TS.STREAM.VIDEO_1, this.video);
    this.video.connect(this.renderer);
  }

  if (options.audio !== false && JSMpeg.AudioOutput.WebAudio.IsSupported()) {
    this.audio = new JSMpeg.Decoder.MP2Audio(options);
    this.audioOut = new JSMpeg.AudioOutput.WebAudio(options);
    this.demuxer.connect(JSMpeg.Demuxer.TS.STREAM.AUDIO_1, this.audio);
    this.audio.connect(this.audioOut);
  }

  // Register optional event callbacks supplied via options:
  //   onended   - the media has reached its end
  //   oncanplay - enough data is buffered for playback to begin
  //   onpreload - fired repeatedly with load progress while preloading
  //   onplay    - playback (re)starts after a pause or the initial load
  var events = [
    'onended',
    'oncanplay',
    'onpreload',
    'onplay'
  ];
  for (var i = 0, eNum = events.length; i < eNum; i++) {
    this[events[i]] = options[events[i]] || null;
  }

  Object.defineProperty(this, 'currentTime', {
    get: this.getCurrentTime,
    set: this.setCurrentTime
  });

  this.paused = true;
  this.unpauseOnShow = false;
  if (options.pauseWhenHidden !== false) {
    document.addEventListener('visibilitychange', this.showHide.bind(this));
  }

  if (this.autoplay) {
    this.play();
  }
};

// Pause while the page is hidden; resume on show only if playback was wanted.
Player.prototype.showHide = function (ev) {
  if (document.visibilityState === 'hidden') {
    this.unpauseOnShow = this.wantsToPlay;
    this.pause();
  }
  else if (this.unpauseOnShow) {
    this.play();
  }
};

// Start (or resume) playback and schedule the per-frame update loop.
Player.prototype.play = function (ev) {
  this.animationId = requestAnimationFrame(this.update.bind(this));
  this.wantsToPlay = true;
  this.paused = false;
  // NOTE(review): onplay call site reconstructed from the hook's description
  // ("fires whenever playback starts after pause or load") — confirm.
  if (this.onplay) {
    this.onplay();
  }
};

// Stop the update loop. Rewinds audio output to the current playhead, since
// audio may already have been enqueued slightly ahead.
Player.prototype.pause = function (ev) {
  cancelAnimationFrame(this.animationId);
  this.wantsToPlay = false;
  this.isPlaying = false;
  this.paused = true;

  if (this.audio && this.audio.canPlay) {
    // Seek to the currentTime again - audio may already be enqueued a bit
    // further, so we have to rewind it.
    this.audioOut.stop();
    this.seek(this.currentTime);
  }
};

// Pause and rewind to the beginning.
Player.prototype.stop = function (ev) {
  this.pause();
  this.seek(0);
};

// Tear down the source, decoders, renderer and audio output.
Player.prototype.destroy = function () {
  this.pause();
  this.source.destroy();
  this.video && this.video.destroy();
  this.renderer && this.renderer.destroy();
  this.audio && this.audio.destroy();
  this.audioOut && this.audioOut.destroy();
};

// Seek both decoders to `time` (seconds, relative to the stream start) and
// re-anchor the wallclock used for video-only sync.
Player.prototype.seek = function (time) {
  var startOffset = this.audio && this.audio.canPlay
    ? this.audio.startTime
    : this.video.startTime;

  if (this.video) {
    this.video.seek(time + startOffset);
  }
  if (this.audio) {
    this.audio.seek(time + startOffset);
  }

  this.startTime = JSMpeg.Now() - time;
};

// Current playhead in seconds, relative to the stream start. Audio is the
// master clock when available.
Player.prototype.getCurrentTime = function () {
  return this.audio && this.audio.canPlay
    ? this.audio.currentTime - this.audio.startTime
    : this.video.currentTime - this.video.startTime;
};

Player.prototype.setCurrentTime = function (time) {
  this.seek(time);
};

// Per-frame driver: reports preload progress until the source is established,
// then dispatches to the streaming or static-file update path.
Player.prototype.update = function () {
  this.animationId = requestAnimationFrame(this.update.bind(this));

  if (!this.source.established) {
    if (this.renderer) {
      this.renderer.renderProgress(this.source.progress);
    }
    // NOTE(review): onpreload is documented as a realtime progress callback;
    // call site reconstructed — confirm the expected argument.
    if (this.onpreload) {
      this.onpreload(this.source.progress);
    }
    return;
  }

  if (!this.isPlaying) {
    this.isPlaying = true;
    this.startTime = JSMpeg.Now() - this.currentTime;
    // NOTE(review): oncanplay call site reconstructed (fires once, when
    // buffered data first suffices to play) — confirm.
    if (!this.canPlayFired && this.oncanplay) {
      this.canPlayFired = true;
      this.oncanplay();
    }
  }

  if (this.options.streaming) {
    this.updateForStreaming();
  }
  else {
    this.updateForStaticFile();
  }
};

Player.prototype.updateForStreaming = function () {
  // When streaming, immediately decode everything we have buffered up until
  // now to minimize playback latency.

  if (this.video) {
    this.video.decode();
  }

  if (this.audio) {
    var decoded = false;
    do {
      // If there's a lot of audio enqueued already, disable output and
      // catch up with the encoder
      if (this.audioOut.enqueuedTime > this.maxAudioLag) {
        this.audioOut.resetEnqueuedTime();
        this.audioOut.enabled = false;
      }
      decoded = this.audio.decode();
    } while (decoded);
    this.audioOut.enabled = true;
  }
};

Player.prototype.updateForStaticFile = function () {
  var notEnoughData = false,
    headroom = 0;

  // If we have an audio track, we always try to sync the video to the audio.
  // Gaps and discontinuities are far more perceptible in audio than in video.
  if (this.audio && this.audio.canPlay) {
    // Do we have to decode and enqueue some more audio data?
    while (
      !notEnoughData &&
      this.audio.decodedTime - this.audio.currentTime < 0.25
    ) {
      notEnoughData = !this.audio.decode();
    }

    // Sync video to audio
    if (this.video && this.video.currentTime < this.audio.currentTime) {
      notEnoughData = !this.video.decode();
    }

    headroom = this.demuxer.currentTime - this.audio.currentTime;
  }
  else if (this.video) {
    // Video only - sync it to the player's wallclock
    var targetTime = (JSMpeg.Now() - this.startTime) + this.video.startTime,
      lateTime = targetTime - this.video.currentTime,
      frameTime = 1 / this.video.frameRate;

    if (this.video && lateTime > 0) {
      // If the video is too far behind (>2 frames), simply reset the
      // target time to the next frame instead of trying to catch up.
      if (lateTime > frameTime * 2) {
        this.startTime += lateTime;
      }
      notEnoughData = !this.video.decode();
    }

    headroom = this.demuxer.currentTime - targetTime;
  }

  // Notify the source of the playhead headroom, so it can decide whether to
  // continue loading further data.
  this.source.resume(headroom);

  // If we failed to decode and the source is complete, it means we reached
  // the end of our data. We may want to loop.
  if (notEnoughData && this.source.completed) {
    if (this.onended) {
      this.onended();
    }
    if (this.loop) {
      this.seek(0);
    }
    else {
      this.pause();
    }
  }
};

return Player;

})();