diff --git a/contrib/akamai/controlbar/ControlBar.js b/contrib/akamai/controlbar/ControlBar.js index b9c09c4b9d..08353979f9 100644 --- a/contrib/akamai/controlbar/ControlBar.js +++ b/contrib/akamai/controlbar/ControlBar.js @@ -27,7 +27,7 @@ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. - */ + * /** * @module ControlBar @@ -47,6 +47,7 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { var seeking = false; var videoControllerVisibleTimeout = 0; var liveThresholdSecs = 12; + var textTrackList = {}; var video, videoContainer, videoController, @@ -66,7 +67,8 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { thumbnailElem, thumbnailTimeLabel, idSuffix, - startedPlaying; + startedPlaying, + seekbarBufferInterval; //************************************************************************************ // THUMBNAIL CONSTANTS @@ -102,21 +104,25 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { self.player.on(dashjs.MediaPlayer.events.PLAYBACK_STARTED, onPlayStart, this); self.player.on(dashjs.MediaPlayer.events.PLAYBACK_PAUSED, onPlaybackPaused, this); self.player.on(dashjs.MediaPlayer.events.PLAYBACK_TIME_UPDATED, onPlayTimeUpdate, this); - self.player.on(dashjs.MediaPlayer.events.TEXT_TRACKS_ADDED, onTracksAdded, this); - self.player.on(dashjs.MediaPlayer.events.STREAM_INITIALIZED, onStreamInitialized, this); + self.player.on(dashjs.MediaPlayer.events.STREAM_ACTIVATED, onStreamActivated, this); + self.player.on(dashjs.MediaPlayer.events.STREAM_DEACTIVATED, onStreamDeactivated, this); self.player.on(dashjs.MediaPlayer.events.STREAM_TEARDOWN_COMPLETE, onStreamTeardownComplete, this); self.player.on(dashjs.MediaPlayer.events.SOURCE_INITIALIZED, onSourceInitialized, this); - } + self.player.on(dashjs.MediaPlayer.events.TEXT_TRACKS_ADDED, onTracksAdded, this); + self.player.on(dashjs.MediaPlayer.events.BUFFER_LEVEL_UPDATED, _onBufferLevelUpdated, this); + }; var removePlayerEventsListeners = function () { self.player.off(dashjs.MediaPlayer.events.PLAYBACK_STARTED, onPlayStart, this); self.player.off(dashjs.MediaPlayer.events.PLAYBACK_PAUSED, onPlaybackPaused, this); self.player.off(dashjs.MediaPlayer.events.PLAYBACK_TIME_UPDATED, onPlayTimeUpdate, this); - self.player.off(dashjs.MediaPlayer.events.TEXT_TRACKS_ADDED, onTracksAdded, this); - self.player.off(dashjs.MediaPlayer.events.STREAM_INITIALIZED, onStreamInitialized, this); + self.player.off(dashjs.MediaPlayer.events.STREAM_ACTIVATED, onStreamActivated, this); + self.player.off(dashjs.MediaPlayer.events.STREAM_DEACTIVATED, onStreamDeactivated, this); self.player.off(dashjs.MediaPlayer.events.STREAM_TEARDOWN_COMPLETE, onStreamTeardownComplete, this); self.player.off(dashjs.MediaPlayer.events.SOURCE_INITIALIZED, onSourceInitialized, this); - } + self.player.off(dashjs.MediaPlayer.events.TEXT_TRACKS_ADDED, onTracksAdded, this); + self.player.off(dashjs.MediaPlayer.events.BUFFER_LEVEL_UPDATED, _onBufferLevelUpdated, this); + }; var getControlId = function (id) { return id + (idSuffix ? idSuffix : ''); @@ -171,25 +177,21 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { setTime(displayUTCTimeCodes ? 
self.player.timeAsUTC() : self.player.time()); updateDuration(); togglePlayPauseBtnState(); + if (seekbarBufferInterval) { + clearInterval(seekbarBufferInterval); + } }; var onPlayTimeUpdate = function (/*e*/) { updateDuration(); if (!seeking) { - setTime(displayUTCTimeCodes ? self.player.timeAsUTC() : self.player.time()); + setTime(displayUTCTimeCodes ? player.timeAsUTC() : player.time()); if (seekbarPlay) { - if (self.player.isDynamic() && (self.player.duration() - self.player.time() < liveThresholdSecs)) { - seekbarPlay.style.width = '100%'; - } else { - seekbarPlay.style.width = (self.player.time() / self.player.duration() * 100) + '%'; - } - } - if (seekbarBuffer) { - seekbarBuffer.style.width = ((self.player.time() + getBufferLevel()) / self.player.duration() * 100) + '%'; + seekbarPlay.style.width = (player.time() / player.duration() * 100) + '%'; } if (seekbar.getAttribute('type') === 'range') { - seekbar.value = self.player.time(); + seekbar.value = player.time(); } } @@ -353,19 +355,6 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { thumbnailContainer.style.display = 'none'; }; - var getScrollOffset = function () { - if (window.pageXOffset) { - return { - x: window.pageXOffset, - y: window.pageYOffset - }; - } - return { - x: document.documentElement.scrollLeft, - y: document.documentElement.scrollTop - }; - }; - var seekLive = function () { self.player.seek(self.player.duration()); }; @@ -471,7 +460,7 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { if (document.fullscreenElement) { document.exitFullscreen(); } else if (document.exitFullscreen) { - document.exitFullscreen(); + document.exitFullscreen(); } else if (document.mozCancelFullScreen) { document.mozCancelFullScreen(); } else if (document.msExitFullscreen) { @@ -503,48 +492,42 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { // Audio Video MENU //************************************************************************************ - var onTracksAdded = function (e) { - // Subtitles/Captions Menu //XXX we need to add two layers for captions & subtitles if present. 
- if (!captionMenu) { - var contentFunc = function (element, index) { - if (isNaN(index)) { - return 'OFF'; - } - - var label = getLabelForLocale(element.labels); - if (label) { - return label + ' : ' + element.kind; - } - - return element.lang + ' : ' + element.kind; - }; - captionMenu = createMenu({ menuType: 'caption', arr: e.tracks }, contentFunc); + var onSourceInitialized = function () { + startedPlaying = false; + }; - var func = function () { - onMenuClick(captionMenu, captionBtn); - } - menuHandlersList.push(func); - captionBtn.addEventListener('click', func); - captionBtn.classList.remove('hide'); - } else if (e.index !== undefined) { - setMenuItemsState(e.index + 1, 'caption-list'); + var onStreamDeactivated = function (e) { + if (e.streamInfo && textTrackList[e.streamInfo.id]) { + delete textTrackList[e.streamInfo.id]; } }; - var onSourceInitialized = function () { - startedPlaying = false; - }; + var onStreamActivated = function (e) { + var streamInfo = e.streamInfo; - var onStreamInitialized = function (/*e*/) { updateDuration(); - var contentFunc; + //Bitrate Menu + createBitrateSwitchMenu(); + + //Track Switch Menu + createTrackSwitchMenu(); + + //Text Switch Menu + createCaptionSwitchMenu(streamInfo); + }; + + var createBitrateSwitchMenu = function () { + var contentFunc; + if (bitrateListBtn) { - destroyBitrateMenu(); + destroyMenu(bitrateListMenu, bitrateListBtn); + bitrateListMenu = null; var availableBitrates = { menuType: 'bitrate' }; availableBitrates.audio = self.player.getBitrateInfoListFor && self.player.getBitrateInfoListFor('audio') || []; availableBitrates.video = self.player.getBitrateInfoListFor && self.player.getBitrateInfoListFor('video') || []; availableBitrates.images = self.player.getBitrateInfoListFor && self.player.getBitrateInfoListFor('image') || []; + if (availableBitrates.audio.length > 1 || availableBitrates.video.length > 1 || availableBitrates.images.length > 1) { contentFunc = function (element, index) { var result = isNaN(index) ? ' Auto Switch' : Math.floor(element.bitrate / 1000) + ' kbps'; @@ -564,8 +547,16 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { bitrateListBtn.classList.add('hide'); } } - //Track Switch Menu - if (!trackSwitchMenu && trackSwitchBtn) { + }; + + var createTrackSwitchMenu = function () { + var contentFunc; + + if (trackSwitchBtn) { + + destroyMenu(trackSwitchMenu, trackSwitchBtn); + trackSwitchMenu = null; + var availableTracks = { menuType: 'track' }; availableTracks.audio = self.player.getTracksFor('audio'); availableTracks.video = self.player.getTracksFor('video'); // these return empty arrays so no need to check for null @@ -585,6 +576,57 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { } }; + var createCaptionSwitchMenu = function (streamId) { + // Subtitles/Captions Menu //XXX we need to add two layers for captions & subtitles if present. 
+ var activeStreamInfo = player.getActiveStream().getStreamInfo(); + + if (captionBtn && (!activeStreamInfo.id || activeStreamInfo.id === streamId)) { + + destroyMenu(captionMenu, captionBtn); + captionMenu = null; + + var tracks = textTrackList[streamId] || []; + var contentFunc = function (element, index) { + if (isNaN(index)) { + return 'OFF'; + } + + var label = getLabelForLocale(element.labels); + if (label) { + return label + ' : ' + element.type; + } + + return element.lang + ' : ' + element.kind; + }; + captionMenu = createMenu({ menuType: 'caption', arr: tracks }, contentFunc); + + var func = function () { + onMenuClick(captionMenu, captionBtn); + }; + + menuHandlersList.push(func); + captionBtn.addEventListener('click', func); + captionBtn.classList.remove('hide'); + } + + }; + + var onTracksAdded = function (e) { + // Subtitles/Captions Menu //XXX we need to add two layers for captions & subtitles if present. + if (!textTrackList[e.streamId]) { + textTrackList[e.streamId] = []; + } + + textTrackList[e.streamId] = textTrackList[e.streamId].concat(e.tracks); + createCaptionSwitchMenu(e.streamId); + }; + + var _onBufferLevelUpdated = function () { + if (seekbarBuffer) { + seekbarBuffer.style.width = ((player.time() + getBufferLevel()) / player.duration() * 100) + '%'; + } + }; + var onStreamTeardownComplete = function (/*e*/) { setPlayBtn(); timeDisplay.textContent = '00:00'; @@ -761,52 +803,57 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { }; var setMenuItemsState = function (value, type) { - var item = typeof value === 'number' ? document.getElementById(type + 'Item_' + value) : this; - var nodes = item.parentElement.children; - - for (var i = 0; i < nodes.length; i++) { - nodes[i].selected = false; - nodes[i].classList.remove('menu-item-selected'); - nodes[i].classList.add('menu-item-unselected'); - } - item.selected = true; - item.classList.remove('menu-item-over'); - item.classList.remove('menu-item-unselected'); - item.classList.add('menu-item-selected'); - - if (type === undefined) { // User clicked so type is part of item binding. - switch (item.name) { - case 'video-bitrate-list': - case 'audio-bitrate-list': - var cfg = { - 'streaming': { - 'abr': { - 'autoSwitchBitrate': { + try { + var item = typeof value === 'number' ? document.getElementById(type + 'Item_' + value) : this; + if (item) { + var nodes = item.parentElement.children; + + for (var i = 0; i < nodes.length; i++) { + nodes[i].selected = false; + nodes[i].classList.remove('menu-item-selected'); + nodes[i].classList.add('menu-item-unselected'); + } + item.selected = true; + item.classList.remove('menu-item-over'); + item.classList.remove('menu-item-unselected'); + item.classList.add('menu-item-selected'); + + if (type === undefined) { // User clicked so type is part of item binding. 
+ switch (item.name) { + case 'video-bitrate-list': + case 'audio-bitrate-list': + var cfg = { + 'streaming': { + 'abr': { + 'autoSwitchBitrate': {} + } } + }; + + if (item.index > 0) { + cfg.streaming.abr.autoSwitchBitrate[item.mediaType] = false; + self.player.updateSettings(cfg); + self.player.setQualityFor(item.mediaType, item.index - 1); + } else { + cfg.streaming.abr.autoSwitchBitrate[item.mediaType] = true; + self.player.updateSettings(cfg); } - } - }; - - if (item.index > 0) { - cfg.streaming.abr.autoSwitchBitrate[item.mediaType] = false; - self.player.updateSettings(cfg); - self.player.setQualityFor(item.mediaType, item.index - 1); - } else { - cfg.streaming.abr.autoSwitchBitrate[item.mediaType] = true; - self.player.updateSettings(cfg); + break; + case 'image-bitrate-list': + player.setQualityFor(self.mediaType, self.index); + break; + case 'caption-list': + self.player.setTextTrack(item.index - 1); + break; + case 'video-track-list': + case 'audio-track-list': + self.player.setCurrentTrack(self.player.getTracksFor(item.mediaType)[item.index]); + break; } - break; - case 'image-bitrate-list': - player.setQualityFor(self.mediaType, self.index); - break; - case 'caption-list': - self.player.setTextTrack(item.index - 1); - break; - case 'video-track-list': - case 'audio-track-list': - self.player.setCurrentTrack(self.player.getTracksFor(item.mediaType)[item.index]); - break; + } } + } catch (e) { + console.error(e); } }; @@ -833,13 +880,26 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { menu.style.top = menu_y + 'px'; }; - var destroyBitrateMenu = function () { - if (bitrateListMenu) { - menuHandlersList.forEach(function (item) { - bitrateListBtn.removeEventListener('click', item); - }); - videoController.removeChild(bitrateListMenu); - bitrateListMenu = null; + var destroyMenu = function (menu, btn) { + try { + if (menu && videoController) { + menuHandlersList.forEach(function (item) { + btn.removeEventListener('click', item); + }); + videoController.removeChild(menu); + } + } catch (e) { + } + }; + + var removeMenu = function (menu, btn) { + try { + if (menu) { + videoController.removeChild(menu); + menu = null; + btn.classList.add('hide'); + } + } catch (e) { } }; @@ -875,7 +935,6 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { return !!navigator.userAgent.match(/Trident.*rv[ :]*11\./); }; - //************************************************************************************ // PUBLIC API //************************************************************************************ @@ -885,6 +944,7 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { setDuration: setDuration, setTime: setTime, setPlayer: setPlayer, + removeMenu: removeMenu, initialize: function (suffix) { @@ -945,23 +1005,25 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { videoController.classList.remove('disable'); }, - reset: function () { - window.removeEventListener('resize', handleMenuPositionOnResize); - destroyBitrateMenu(); + resetSelectionMenus: function () { + destroyMenu(); menuHandlersList.forEach(function (item) { if (trackSwitchBtn) trackSwitchBtn.removeEventListener('click', item); if (captionBtn) captionBtn.removeEventListener('click', item); }); if (captionMenu) { - videoController.removeChild(captionMenu); - captionMenu = null; - captionBtn.classList.add('hide'); + this.removeMenu(captionMenu, captionBtn); } if (trackSwitchMenu) { - videoController.removeChild(trackSwitchMenu); - trackSwitchMenu = null; - 
trackSwitchBtn.classList.add('hide'); + this.removeMenu(trackSwitchMenu, trackSwitchBtn); } + }, + + reset: function () { + window.removeEventListener('resize', handleMenuPositionOnResize); + + this.resetSelectionMenus(); + menuHandlersList = []; seeking = false; diff --git a/index.d.ts b/index.d.ts index e8242e648e..19eff9c216 100644 --- a/index.d.ts +++ b/index.d.ts @@ -4,9 +4,13 @@ export as namespace dashjs; declare namespace dashjs { interface Logger { debug(...params: any[]): void; + info(...params: any[]): void; + warn(...params: any[]): void; + error(...params: any[]): void; + fatal(...params: any[]): void; } @@ -21,7 +25,9 @@ declare namespace dashjs { interface Debug { getLogger(): Logger; + setLogTimestampVisible(flag: boolean): void; + setCalleeNameVisible(flag: boolean): void; } @@ -31,22 +37,36 @@ declare namespace dashjs { data: unknown | null; } - interface VideoModel { } + interface VideoModel { + } interface ProtectionController { initializeForMedia(mediaInfo: ProtectionMediaInfo): void; + createKeySession(initData: ArrayBuffer, cdmData: Uint8Array): void; + removeKeySession(session: SessionToken): void; + closeKeySession(session: SessionToken): void; + setServerCertificate(serverCertificate: ArrayBuffer): void; + setMediaElement(element: HTMLMediaElement): void; + setSessionType(type: string): void; + setRobustnessLevel(level: string): void; + setProtectionData(protDataSet: ProtectionDataSet): void; + getSupportedKeySystemsFromContentProtection(cps: any[]): SupportedKeySystem[]; + getKeySystems(): KeySystem[]; + setKeySystems(keySystems: KeySystem[]): void; + stop(): void; + reset(): void; } @@ -60,14 +80,23 @@ declare namespace dashjs { interface OfflineController { loadRecordsFromStorage(): Promise; + getAllRecords(): OfflineRecord[]; + createRecord(manifestURL: string): Promise; + startRecord(id: string, mediaInfos: MediaInfo[]): void; + stopRecord(id: string): void; + resumeRecord(id: string): void; + deleteRecord(id: string): void; + getRecordProgression(id: string): number; + resetRecords(): void; + reset(): void; } @@ -112,34 +141,43 @@ declare namespace dashjs { dispatchEvent?: boolean; }; streaming?: { - metricsMaxListDepth?: number; - abandonLoadTimeout?: number; - liveDelayFragmentCount?: number; - liveDelay?: number; - scheduleWhilePaused?: boolean; - fastSwitchEnabled?: boolean; - flushBufferAtTrackSwitch?: boolean; - reuseExistingSourceBuffers?: boolean; + metricsMaxListDepth?: number, + abandonLoadTimeout?: number, calcSegmentAvailabilityRangeFromTimeline?: boolean, - bufferPruningInterval?: number; - bufferToKeep?: number; - jumpGaps?: boolean; - jumpLargeGaps?: boolean; - smallGapLimit?: number; - stableBufferTime?: number; - bufferTimeAtTopQuality?: number; - bufferTimeAtTopQualityLongForm?: number; - longFormContentDurationThreshold?: number; - wallclockTimeUpdateInterval?: number; - lowLatencyEnabled?: boolean; - keepProtectionMediaKeys?: boolean; - useManifestDateHeaderTimeSource?: boolean; - useSuggestedPresentationDelay?: boolean; - useAppendWindow?: boolean, - manifestUpdateRetryInterval?: number; - stallThreshold?: number; - filterUnsupportedEssentialProperties?: boolean; - eventControllerRefreshDelay?: number; + wallclockTimeUpdateInterval?: number, + lowLatencyEnabled?: boolean, + useManifestDateHeaderTimeSource?: boolean, + manifestUpdateRetryInterval?: number, + filterUnsupportedEssentialProperties?: boolean, + cacheInitSegments?: boolean, + eventControllerRefreshDelay?: number, + delay?: { + liveDelayFragmentCount?: number, + liveDelay?: 
number, + useSuggestedPresentationDelay?: boolean + }, + buffer?: { + fastSwitchEnabled?: boolean, + flushBufferAtTrackSwitch?: boolean, + reuseExistingSourceBuffers?: boolean, + bufferPruningInterval?: number, + bufferToKeep?: number, + bufferTimeAtTopQuality?: number, + bufferTimeAtTopQualityLongForm?: number, + initialBufferLevel?: number, + stableBufferTime?: number, + longFormContentDurationThreshold?: number, + stallThreshold?: number, + useAppendWindow?: boolean, + }, + protection?: { + keepProtectionMediaKeys?: boolean, + } + gaps?: { + jumpGaps?: boolean, + jumpLargeGaps?: boolean, + smallGapLimit?: number, + }, utcSynchronization?: { backgroundAttempts?: number, timeBetweenSyncAttempts?: number, @@ -153,6 +191,14 @@ declare namespace dashjs { value?: string } }, + scheduling?: { + defaultTimeout?: number, + lowLatencyTimeout?: number, + scheduleWhilePaused?: boolean + }, + text?: { + defaultEnabled?: boolean + }, liveCatchup?: { minDrift?: number; maxDrift?: number; @@ -181,26 +227,26 @@ declare namespace dashjs { selectionModeForInitialTrack?: TrackSelectionMode fragmentRequestTimeout?: number; retryIntervals?: { - 'MPD'?: number; - 'XLinkExpansion'?: number; - 'MediaSegment'?: number; - 'InitializationSegment'?: number; + 'MPD'?: number; + 'XLinkExpansion'?: number; + 'MediaSegment'?: number; + 'InitializationSegment'?: number; 'BitstreamSwitchingSegment'?: number; - 'IndexSegment'?: number; - 'FragmentInfoSegment'?: number; - 'other'?: number; - 'lowLatencyReductionFactor'?: number; + 'IndexSegment'?: number; + 'FragmentInfoSegment'?: number; + 'other'?: number; + 'lowLatencyReductionFactor'?: number; }; retryAttempts?: { - 'MPD'?: number; - 'XLinkExpansion'?: number; - 'MediaSegment'?: number; - 'InitializationSegment'?: number; + 'MPD'?: number; + 'XLinkExpansion'?: number; + 'MediaSegment'?: number; + 'InitializationSegment'?: number; 'BitstreamSwitchingSegment'?: number; - 'IndexSegment'?: number; - 'FragmentInfoSegment'?: number; - 'other'?: number; - 'lowLatencyMultiplyFactor'?: number; + 'IndexSegment'?: number; + 'FragmentInfoSegment'?: number; + 'other'?: number; + 'lowLatencyMultiplyFactor'?: number; }; abr?: { movingAverageMethod?: 'slidingWindow' | 'ewma'; @@ -263,137 +309,261 @@ declare namespace dashjs { export interface MediaPlayerClass { initialize(view?: HTMLElement, source?: string, autoPlay?: boolean): void; + on(type: AstInFutureEvent['type'], listener: (e: AstInFutureEvent) => void, scope?: object): void; + on(type: BufferEvent['type'], listener: (e: BufferEvent) => void, scope?: object): void; + on(type: CaptionRenderedEvent['type'], listener: (e: CaptionRenderedEvent) => void, scope?: object): void; + on(type: CaptionContainerResizeEvent['type'], listener: (e: CaptionContainerResizeEvent) => void, scope?: object): void; + on(type: DynamicToStaticEvent['type'], listener: (e: DynamicToStaticEvent) => void, scope?: object): void; + on(type: ErrorEvent['type'], listener: (e: ErrorEvent) => void, scope?: object): void; + on(type: FragmentLoadingCompletedEvent['type'], listener: (e: FragmentLoadingCompletedEvent) => void, scope?: object): void; + on(type: FragmentLoadingAbandonedEvent['type'], listener: (e: FragmentLoadingAbandonedEvent) => void, scope?: object): void; + on(type: KeyErrorEvent['type'], listener: (e: KeyErrorEvent) => void, scope?: object): void; + on(type: KeyMessageEvent['type'], listener: (e: KeyMessageEvent) => void, scope?: object): void; + on(type: KeySessionClosedEvent['type'], listener: (e: KeySessionClosedEvent) => void, scope?: 
object): void; + on(type: KeySessionEvent['type'], listener: (e: KeySessionEvent) => void, scope?: object): void; + on(type: KeyStatusesChangedEvent['type'], listener: (e: KeyStatusesChangedEvent) => void, scope?: object): void; + on(type: KeySystemSelectedEvent['type'], listener: (e: KeySystemSelectedEvent) => void, scope?: object): void; + on(type: LicenseRequestCompleteEvent['type'], listener: (e: LicenseRequestCompleteEvent) => void, scope?: object): void; + on(type: LogEvent['type'], listener: (e: LogEvent) => void, scope?: object): void; + on(type: ManifestLoadedEvent['type'], listener: (e: ManifestLoadedEvent) => void, scope?: object): void; + on(type: MetricEvent['type'], listener: (e: MetricEvent) => void, scope?: object): void; + on(type: MetricChangedEvent['type'], listener: (e: MetricChangedEvent) => void, scope?: object): void; + on(type: OfflineRecordEvent['type'], listener: (e: OfflineRecordEvent) => void, scope?: object): void; + on(type: OfflineRecordLoademetadataEvent['type'], listener: (e: OfflineRecordLoademetadataEvent) => void, scope?: object): void; + on(type: PeriodSwitchEvent['type'], listener: (e: PeriodSwitchEvent) => void, scope?: object): void; + on(type: PlaybackErrorEvent['type'], listener: (e: PlaybackErrorEvent) => void, scope?: object): void; + on(type: PlaybackPausedEvent['type'], listener: (e: PlaybackPausedEvent) => void, scope?: object): void; + on(type: PlaybackPlayingEvent['type'], listener: (e: PlaybackPlayingEvent) => void, scope?: object): void; + on(type: PlaybackRateChangedEvent['type'], listener: (e: PlaybackRateChangedEvent) => void, scope?: object): void; + on(type: PlaybackSeekingEvent['type'], listener: (e: PlaybackSeekingEvent) => void, scope?: object): void; + on(type: PlaybackStartedEvent['type'], listener: (e: PlaybackStartedEvent) => void, scope?: object): void; + on(type: PlaybackTimeUpdatedEvent['type'], listener: (e: PlaybackTimeUpdatedEvent) => void, scope?: object): void; + on(type: ProtectionCreatedEvent['type'], listener: (e: ProtectionCreatedEvent) => void, scope?: object): void; + on(type: ProtectionDestroyedEvent['type'], listener: (e: ProtectionDestroyedEvent) => void, scope?: object): void; + on(type: QualityChangeRenderedEvent['type'], listener: (e: QualityChangeRenderedEvent) => void, scope?: object): void; + on(type: QualityChangeRequestedEvent['type'], listener: (e: QualityChangeRequestedEvent) => void, scope?: object): void; + on(type: StreamInitializedEvent['type'], listener: (e: StreamInitializedEvent) => void, scope?: object): void; + on(type: TextTracksAddedEvent['type'], listener: (e: TextTracksAddedEvent) => void, scope?: object): void; + on(type: TtmlParsedEvent['type'], listener: (e: TtmlParsedEvent) => void, scope?: object): void; + on(type: TtmlToParseEvent['type'], listener: (e: TtmlToParseEvent) => void, scope?: object): void; + on(type: string, listener: (e: Event) => void, scope?: object): void; + off(type: string, listener: (e: any) => void, scope?: object): void; + extend(parentNameString: string, childInstance: object, override: boolean): void; + attachView(element: HTMLElement): void; + attachSource(urlOrManifest: string | object): void; + isReady(): boolean; + play(): void; + isPaused(): boolean; + pause(): void; + isSeeking(): boolean; + isDynamic(): boolean; + seek(value: number): void; - setPlaybackRate(value:number): void; + + setPlaybackRate(value: number): void; + getPlaybackRate(): number; + setMute(value: boolean): void; + isMuted(): boolean; + setVolume(value: number): void; + getVolume(): 
number; + time(streamId?: string): number; + duration(): number; + timeAsUTC(): number; + durationAsUTC(): number; + getActiveStream(): Stream | null; + getDVRWindowSize(): number; + getDVRSeekOffset(value: number): number; + convertToTimeCode(value: number): string; + formatUTC(time: number, locales: string, hour12: boolean, withDate?: boolean): string; + getVersion(): string; + getDebug(): Debug; + getBufferLength(type: MediaType): number; + getVideoModel(): VideoModel; + getTTMLRenderingDiv(): HTMLDivElement | null; + getVideoElement(): HTMLVideoElement; + getSource(): string | object; + getTopBitrateInfoFor(type: MediaType): BitrateInfo; + setAutoPlay(value: boolean): void; + getAutoPlay(): boolean; + getDashMetrics(): DashMetrics; + getDashAdapter(): DashAdapter; + getQualityFor(type: MediaType): number; + setQualityFor(type: MediaType, value: number): void; + updatePortalSize(): void; + enableText(enable: boolean): void; + setTextTrack(idx: number): void; + getTextDefaultLanguage(): string | undefined; + setTextDefaultLanguage(lang: string): void; + getTextDefaultEnabled(): boolean | undefined; + setTextDefaultEnabled(enable: boolean): void; + provideThumbnail(time: number, callback: (thumbnail: Thumbnail | null) => void): void; + getBitrateInfoListFor(type: MediaType): BitrateInfo[]; + getStreamsFromManifest(manifest: object): StreamInfo[]; + getTracksFor(type: MediaType): MediaInfo[]; + getTracksForTypeFromManifest(type: MediaType, manifest: object, streamInfo: StreamInfo): MediaInfo[]; + getCurrentTrackFor(type: MediaType): MediaInfo | null; + setInitialMediaSettingsFor(type: MediaType, value: MediaSettings): void; + getInitialMediaSettingsFor(type: MediaType): MediaSettings; + setCurrentTrack(track: MediaInfo): void; - getTrackSwitchModeFor(type: MediaType): TrackSwitchMode; - setTrackSwitchModeFor(type: MediaType, mode: TrackSwitchMode): void; - setSelectionModeForInitialTrack(mode: TrackSelectionMode): void; - getSelectionModeForInitialTrack(): TrackSelectionMode; + retrieveManifest(url: string, callback: (manifest: object | null, error: any) => void): void; + addUTCTimingSource(schemeIdUri: string, value: string): void; + removeUTCTimingSource(schemeIdUri: string, value: string): void; + clearDefaultUTCTimingSources(): void; + restoreDefaultUTCTimingSources(): void; + setXHRWithCredentialsForType(type: string, value: boolean): void; + getXHRWithCredentialsForType(type: string): boolean; + getProtectionController(): ProtectionController; + attachProtectionController(value: ProtectionController): void; + setProtectionData(value: ProtectionDataSet): void; + registerLicenseRequestFilter(filter: RequestFilter): void, + registerLicenseResponseFilter(filter: ResponseFilter): void, + unregisterLicenseRequestFilter(filter: RequestFilter): void, + unregisterLicenseResponseFilter(filter: ResponseFilter): void, + registerCustomCapabilitiesFilter(filter: CapabilitiesFilter): void, + unregisterCustomCapabilitiesFilter(filter: CapabilitiesFilter): void, + getOfflineController(): OfflineController; + enableManifestDateHeaderTimeSource(value: boolean): void; + displayCaptionsOnTop(value: boolean): void; + attachTTMLRenderingDiv(div: HTMLDivElement): void; + getCurrentTextTrackIndex(): number; + preload(): void; + reset(): void; + destroy(): void; + addABRCustomRule(type: string, rulename: string, rule: object): void; + removeABRCustomRule(rulename: string): void; + removeAllABRCustomRule(): void; + getCurrentLiveLatency(): number; + enableForcedTextStreaming(value: boolean): void; + 
isTextEnabled(): boolean; + getAverageThroughput(value: number): void; + getSettings(): MediaPlayerSettingClass; + updateSettings(settings: MediaPlayerSettingClass): void; + resetSettings(): void; } @@ -409,63 +579,63 @@ declare namespace dashjs { } interface MediaPlayerErrors { - MANIFEST_LOADER_PARSING_FAILURE_ERROR_CODE: 10; - MANIFEST_LOADER_LOADING_FAILURE_ERROR_CODE: 11; - XLINK_LOADER_LOADING_FAILURE_ERROR_CODE: 12; - SEGMENTS_UPDATE_FAILED_ERROR_CODE: 13; - SEGMENTS_UNAVAILABLE_ERROR_CODE: 14; - SEGMENT_BASE_LOADER_ERROR_CODE: 15; - TIME_SYNC_FAILED_ERROR_CODE: 16; - FRAGMENT_LOADER_LOADING_FAILURE_ERROR_CODE: 17; - FRAGMENT_LOADER_NULL_REQUEST_ERROR_CODE: 18; - URL_RESOLUTION_FAILED_GENERIC_ERROR_CODE: 19; - APPEND_ERROR_CODE: 20; - REMOVE_ERROR_CODE: 21; - DATA_UPDATE_FAILED_ERROR_CODE: 22; - CAPABILITY_MEDIASOURCE_ERROR_CODE: 23; - CAPABILITY_MEDIAKEYS_ERROR_CODE: 24; - DOWNLOAD_ERROR_ID_MANIFEST_CODE: 25; - DOWNLOAD_ERROR_ID_CONTENT_CODE: 27; - DOWNLOAD_ERROR_ID_INITIALIZATION_CODE: 28; - DOWNLOAD_ERROR_ID_XLINK_CODE: 29; - MANIFEST_ERROR_ID_PARSE_CODE: 31; - MANIFEST_ERROR_ID_NOSTREAMS_CODE: 32; - TIMED_TEXT_ERROR_ID_PARSE_CODE: 33; - MANIFEST_ERROR_ID_MULTIPLEXED_CODE: 34; - MEDIASOURCE_TYPE_UNSUPPORTED_CODE: 35; - MEDIA_KEYERR_CODE: 100; - MEDIA_KEYERR_UNKNOWN_CODE: 101; - MEDIA_KEYERR_CLIENT_CODE: 102; - MEDIA_KEYERR_SERVICE_CODE: 103; - MEDIA_KEYERR_OUTPUT_CODE: 104; - MEDIA_KEYERR_HARDWARECHANGE_CODE: 105; - MEDIA_KEYERR_DOMAIN_CODE: 106; - MEDIA_KEY_MESSAGE_ERROR_CODE: 107; - MEDIA_KEY_MESSAGE_NO_CHALLENGE_ERROR_CODE: 108; - SERVER_CERTIFICATE_UPDATED_ERROR_CODE: 109; - KEY_STATUS_CHANGED_EXPIRED_ERROR_CODE: 110; + MANIFEST_LOADER_PARSING_FAILURE_ERROR_CODE: 10; + MANIFEST_LOADER_LOADING_FAILURE_ERROR_CODE: 11; + XLINK_LOADER_LOADING_FAILURE_ERROR_CODE: 12; + SEGMENTS_UPDATE_FAILED_ERROR_CODE: 13; + SEGMENTS_UNAVAILABLE_ERROR_CODE: 14; + SEGMENT_BASE_LOADER_ERROR_CODE: 15; + TIME_SYNC_FAILED_ERROR_CODE: 16; + FRAGMENT_LOADER_LOADING_FAILURE_ERROR_CODE: 17; + FRAGMENT_LOADER_NULL_REQUEST_ERROR_CODE: 18; + URL_RESOLUTION_FAILED_GENERIC_ERROR_CODE: 19; + APPEND_ERROR_CODE: 20; + REMOVE_ERROR_CODE: 21; + DATA_UPDATE_FAILED_ERROR_CODE: 22; + CAPABILITY_MEDIASOURCE_ERROR_CODE: 23; + CAPABILITY_MEDIAKEYS_ERROR_CODE: 24; + DOWNLOAD_ERROR_ID_MANIFEST_CODE: 25; + DOWNLOAD_ERROR_ID_CONTENT_CODE: 27; + DOWNLOAD_ERROR_ID_INITIALIZATION_CODE: 28; + DOWNLOAD_ERROR_ID_XLINK_CODE: 29; + MANIFEST_ERROR_ID_PARSE_CODE: 31; + MANIFEST_ERROR_ID_NOSTREAMS_CODE: 32; + TIMED_TEXT_ERROR_ID_PARSE_CODE: 33; + MANIFEST_ERROR_ID_MULTIPLEXED_CODE: 34; + MEDIASOURCE_TYPE_UNSUPPORTED_CODE: 35; + MEDIA_KEYERR_CODE: 100; + MEDIA_KEYERR_UNKNOWN_CODE: 101; + MEDIA_KEYERR_CLIENT_CODE: 102; + MEDIA_KEYERR_SERVICE_CODE: 103; + MEDIA_KEYERR_OUTPUT_CODE: 104; + MEDIA_KEYERR_HARDWARECHANGE_CODE: 105; + MEDIA_KEYERR_DOMAIN_CODE: 106; + MEDIA_KEY_MESSAGE_ERROR_CODE: 107; + MEDIA_KEY_MESSAGE_NO_CHALLENGE_ERROR_CODE: 108; + SERVER_CERTIFICATE_UPDATED_ERROR_CODE: 109; + KEY_STATUS_CHANGED_EXPIRED_ERROR_CODE: 110; MEDIA_KEY_MESSAGE_NO_LICENSE_SERVER_URL_ERROR_CODE: 111; - KEY_SYSTEM_ACCESS_DENIED_ERROR_CODE: 112; - KEY_SESSION_CREATED_ERROR_CODE: 113; - MEDIA_KEY_MESSAGE_LICENSER_ERROR_CODE: 114; + KEY_SYSTEM_ACCESS_DENIED_ERROR_CODE: 112; + KEY_SESSION_CREATED_ERROR_CODE: 113; + MEDIA_KEY_MESSAGE_LICENSER_ERROR_CODE: 114; // MSS errors - MSS_NO_TFRF_CODE: 200; - MSS_UNSUPPORTED_CODEC_CODE: 201; + MSS_NO_TFRF_CODE: 200; + MSS_UNSUPPORTED_CODEC_CODE: 201; // Offline errors - OFFLINE_ERROR: 11000; - 
INDEXEDDB_QUOTA_EXCEED_ERROR: 11001; - INDEXEDDB_INVALID_STATE_ERROR: 11002; - INDEXEDDB_NOT_READABLE_ERROR: 11003; - INDEXEDDB_NOT_FOUND_ERROR: 11004; - INDEXEDDB_NETWORK_ERROR: 11005; - INDEXEDDB_DATA_ERROR: 11006; - INDEXEDDB_TRANSACTION_INACTIVE_ERROR: 11007; - INDEXEDDB_NOT_ALLOWED_ERROR: 11008; - INDEXEDDB_NOT_SUPPORTED_ERROR: 11009; - INDEXEDDB_VERSION_ERROR: 11010; - INDEXEDDB_TIMEOUT_ERROR: 11011; - INDEXEDDB_ABORT_ERROR: 11012; - INDEXEDDB_UNKNOWN_ERROR: 11013; + OFFLINE_ERROR: 11000; + INDEXEDDB_QUOTA_EXCEED_ERROR: 11001; + INDEXEDDB_INVALID_STATE_ERROR: 11002; + INDEXEDDB_NOT_READABLE_ERROR: 11003; + INDEXEDDB_NOT_FOUND_ERROR: 11004; + INDEXEDDB_NETWORK_ERROR: 11005; + INDEXEDDB_DATA_ERROR: 11006; + INDEXEDDB_TRANSACTION_INACTIVE_ERROR: 11007; + INDEXEDDB_NOT_ALLOWED_ERROR: 11008; + INDEXEDDB_NOT_SUPPORTED_ERROR: 11009; + INDEXEDDB_VERSION_ERROR: 11010; + INDEXEDDB_TIMEOUT_ERROR: 11011; + INDEXEDDB_ABORT_ERROR: 11012; + INDEXEDDB_UNKNOWN_ERROR: 11013; } interface MediaPlayerEvents { @@ -592,69 +762,74 @@ declare namespace dashjs { type: MediaPlayerEvents['ERROR']; error: { code: MediaPlayerErrors['MANIFEST_LOADER_PARSING_FAILURE_ERROR_CODE'] | - MediaPlayerErrors['MANIFEST_LOADER_LOADING_FAILURE_ERROR_CODE'] | - MediaPlayerErrors['XLINK_LOADER_LOADING_FAILURE_ERROR_CODE'] | - MediaPlayerErrors['SEGMENTS_UPDATE_FAILED_ERROR_CODE'] | - MediaPlayerErrors['SEGMENTS_UNAVAILABLE_ERROR_CODE'] | - MediaPlayerErrors['SEGMENT_BASE_LOADER_ERROR_CODE'] | - MediaPlayerErrors['TIME_SYNC_FAILED_ERROR_CODE'] | - MediaPlayerErrors['FRAGMENT_LOADER_LOADING_FAILURE_ERROR_CODE'] | - MediaPlayerErrors['FRAGMENT_LOADER_NULL_REQUEST_ERROR_CODE'] | - MediaPlayerErrors['URL_RESOLUTION_FAILED_GENERIC_ERROR_CODE'] | - MediaPlayerErrors['APPEND_ERROR_CODE'] | - MediaPlayerErrors['REMOVE_ERROR_CODE'] | - MediaPlayerErrors['DATA_UPDATE_FAILED_ERROR_CODE'] | - MediaPlayerErrors['CAPABILITY_MEDIASOURCE_ERROR_CODE'] | - MediaPlayerErrors['CAPABILITY_MEDIAKEYS_ERROR_CODE'] | - MediaPlayerErrors['DOWNLOAD_ERROR_ID_MANIFEST_CODE'] | - MediaPlayerErrors['DOWNLOAD_ERROR_ID_CONTENT_CODE'] | - MediaPlayerErrors['DOWNLOAD_ERROR_ID_INITIALIZATION_CODE'] | - MediaPlayerErrors['DOWNLOAD_ERROR_ID_XLINK_CODE'] | - MediaPlayerErrors['MANIFEST_ERROR_ID_PARSE_CODE'] | - MediaPlayerErrors['MANIFEST_ERROR_ID_NOSTREAMS_CODE'] | - MediaPlayerErrors['TIMED_TEXT_ERROR_ID_PARSE_CODE'] | - MediaPlayerErrors['MANIFEST_ERROR_ID_MULTIPLEXED_CODE'] | - MediaPlayerErrors['MEDIASOURCE_TYPE_UNSUPPORTED_CODE'] | - // Protection errors - MediaPlayerErrors['MEDIA_KEYERR_CODE'] | - MediaPlayerErrors['MEDIA_KEYERR_UNKNOWN_CODE'] | - MediaPlayerErrors['MEDIA_KEYERR_CLIENT_CODE'] | - MediaPlayerErrors['MEDIA_KEYERR_SERVICE_CODE'] | - MediaPlayerErrors['MEDIA_KEYERR_OUTPUT_CODE'] | - MediaPlayerErrors['MEDIA_KEYERR_HARDWARECHANGE_CODE'] | - MediaPlayerErrors['MEDIA_KEYERR_DOMAIN_CODE'] | - MediaPlayerErrors['MEDIA_KEY_MESSAGE_ERROR_CODE'] | - MediaPlayerErrors['MEDIA_KEY_MESSAGE_NO_CHALLENGE_ERROR_CODE'] | - MediaPlayerErrors['SERVER_CERTIFICATE_UPDATED_ERROR_CODE'] | - MediaPlayerErrors['KEY_STATUS_CHANGED_EXPIRED_ERROR_CODE'] | - MediaPlayerErrors['MEDIA_KEY_MESSAGE_NO_LICENSE_SERVER_URL_ERROR_CODE'] | - MediaPlayerErrors['KEY_SYSTEM_ACCESS_DENIED_ERROR_CODE'] | - MediaPlayerErrors['KEY_SESSION_CREATED_ERROR_CODE'] | - MediaPlayerErrors['MEDIA_KEY_MESSAGE_LICENSER_ERROR_CODE'] | - // Offline errors - MediaPlayerErrors['OFFLINE_ERROR'] | - MediaPlayerErrors['INDEXEDDB_QUOTA_EXCEED_ERROR'] | - MediaPlayerErrors['INDEXEDDB_INVALID_STATE_ERROR'] | - 
MediaPlayerErrors['INDEXEDDB_NOT_READABLE_ERROR'] | - MediaPlayerErrors['INDEXEDDB_NOT_FOUND_ERROR'] | - MediaPlayerErrors['INDEXEDDB_NETWORK_ERROR'] | - MediaPlayerErrors['INDEXEDDB_DATA_ERROR'] | - MediaPlayerErrors['INDEXEDDB_TRANSACTION_INACTIVE_ERROR'] | - MediaPlayerErrors['INDEXEDDB_NOT_ALLOWED_ERROR'] | - MediaPlayerErrors['INDEXEDDB_NOT_SUPPORTED_ERROR'] | - MediaPlayerErrors['INDEXEDDB_VERSION_ERROR'] | - MediaPlayerErrors['INDEXEDDB_TIMEOUT_ERROR'] | - MediaPlayerErrors['INDEXEDDB_ABORT_ERROR'] | - MediaPlayerErrors['INDEXEDDB_UNKNOWN_ERROR'] | - // MSS errors - MediaPlayerErrors['MSS_NO_TFRF_CODE'] | - MediaPlayerErrors['MSS_UNSUPPORTED_CODEC_CODE'], + MediaPlayerErrors['MANIFEST_LOADER_LOADING_FAILURE_ERROR_CODE'] | + MediaPlayerErrors['XLINK_LOADER_LOADING_FAILURE_ERROR_CODE'] | + MediaPlayerErrors['SEGMENTS_UPDATE_FAILED_ERROR_CODE'] | + MediaPlayerErrors['SEGMENTS_UNAVAILABLE_ERROR_CODE'] | + MediaPlayerErrors['SEGMENT_BASE_LOADER_ERROR_CODE'] | + MediaPlayerErrors['TIME_SYNC_FAILED_ERROR_CODE'] | + MediaPlayerErrors['FRAGMENT_LOADER_LOADING_FAILURE_ERROR_CODE'] | + MediaPlayerErrors['FRAGMENT_LOADER_NULL_REQUEST_ERROR_CODE'] | + MediaPlayerErrors['URL_RESOLUTION_FAILED_GENERIC_ERROR_CODE'] | + MediaPlayerErrors['APPEND_ERROR_CODE'] | + MediaPlayerErrors['REMOVE_ERROR_CODE'] | + MediaPlayerErrors['DATA_UPDATE_FAILED_ERROR_CODE'] | + MediaPlayerErrors['CAPABILITY_MEDIASOURCE_ERROR_CODE'] | + MediaPlayerErrors['CAPABILITY_MEDIAKEYS_ERROR_CODE'] | + MediaPlayerErrors['DOWNLOAD_ERROR_ID_MANIFEST_CODE'] | + MediaPlayerErrors['DOWNLOAD_ERROR_ID_CONTENT_CODE'] | + MediaPlayerErrors['DOWNLOAD_ERROR_ID_INITIALIZATION_CODE'] | + MediaPlayerErrors['DOWNLOAD_ERROR_ID_XLINK_CODE'] | + MediaPlayerErrors['MANIFEST_ERROR_ID_PARSE_CODE'] | + MediaPlayerErrors['MANIFEST_ERROR_ID_NOSTREAMS_CODE'] | + MediaPlayerErrors['TIMED_TEXT_ERROR_ID_PARSE_CODE'] | + MediaPlayerErrors['MANIFEST_ERROR_ID_MULTIPLEXED_CODE'] | + MediaPlayerErrors['MEDIASOURCE_TYPE_UNSUPPORTED_CODE'] | + // Protection errors + MediaPlayerErrors['MEDIA_KEYERR_CODE'] | + MediaPlayerErrors['MEDIA_KEYERR_UNKNOWN_CODE'] | + MediaPlayerErrors['MEDIA_KEYERR_CLIENT_CODE'] | + MediaPlayerErrors['MEDIA_KEYERR_SERVICE_CODE'] | + MediaPlayerErrors['MEDIA_KEYERR_OUTPUT_CODE'] | + MediaPlayerErrors['MEDIA_KEYERR_HARDWARECHANGE_CODE'] | + MediaPlayerErrors['MEDIA_KEYERR_DOMAIN_CODE'] | + MediaPlayerErrors['MEDIA_KEY_MESSAGE_ERROR_CODE'] | + MediaPlayerErrors['MEDIA_KEY_MESSAGE_NO_CHALLENGE_ERROR_CODE'] | + MediaPlayerErrors['SERVER_CERTIFICATE_UPDATED_ERROR_CODE'] | + MediaPlayerErrors['KEY_STATUS_CHANGED_EXPIRED_ERROR_CODE'] | + MediaPlayerErrors['MEDIA_KEY_MESSAGE_NO_LICENSE_SERVER_URL_ERROR_CODE'] | + MediaPlayerErrors['KEY_SYSTEM_ACCESS_DENIED_ERROR_CODE'] | + MediaPlayerErrors['KEY_SESSION_CREATED_ERROR_CODE'] | + MediaPlayerErrors['MEDIA_KEY_MESSAGE_LICENSER_ERROR_CODE'] | + // Offline errors + MediaPlayerErrors['OFFLINE_ERROR'] | + MediaPlayerErrors['INDEXEDDB_QUOTA_EXCEED_ERROR'] | + MediaPlayerErrors['INDEXEDDB_INVALID_STATE_ERROR'] | + MediaPlayerErrors['INDEXEDDB_NOT_READABLE_ERROR'] | + MediaPlayerErrors['INDEXEDDB_NOT_FOUND_ERROR'] | + MediaPlayerErrors['INDEXEDDB_NETWORK_ERROR'] | + MediaPlayerErrors['INDEXEDDB_DATA_ERROR'] | + MediaPlayerErrors['INDEXEDDB_TRANSACTION_INACTIVE_ERROR'] | + MediaPlayerErrors['INDEXEDDB_NOT_ALLOWED_ERROR'] | + MediaPlayerErrors['INDEXEDDB_NOT_SUPPORTED_ERROR'] | + MediaPlayerErrors['INDEXEDDB_VERSION_ERROR'] | + MediaPlayerErrors['INDEXEDDB_TIMEOUT_ERROR'] | + 
MediaPlayerErrors['INDEXEDDB_ABORT_ERROR'] | + MediaPlayerErrors['INDEXEDDB_UNKNOWN_ERROR'] | + // MSS errors + MediaPlayerErrors['MSS_NO_TFRF_CODE'] | + MediaPlayerErrors['MSS_UNSUPPORTED_CODEC_CODE'], message: string, data: object, } } - export type ErrorEvent = GenericErrorEvent | DownloadErrorEvent | ManifestErrorEvent | TimedTextErrorEvent | MediaPlayerErrorEvent; + export type ErrorEvent = + GenericErrorEvent + | DownloadErrorEvent + | ManifestErrorEvent + | TimedTextErrorEvent + | MediaPlayerErrorEvent; export interface CaptionRenderedEvent extends Event { type: MediaPlayerEvents['CAPTION_RENDERED']; @@ -668,7 +843,8 @@ declare namespace dashjs { export interface DynamicToStaticEvent extends Event { type: MediaPlayerEvents['DYNAMIC_TO_STATIC']; - } + } + export interface FragmentLoadingCompletedEvent extends Event { type: MediaPlayerEvents['FRAGMENT_LOADING_COMPLETED']; request: FragmentRequest; @@ -685,6 +861,7 @@ declare namespace dashjs { export class KeyError { constructor(sessionToken: SessionToken, errorString: string); + sessionToken: SessionToken; error: string; } @@ -696,6 +873,7 @@ declare namespace dashjs { export class KeyMessage { constructor(sessionToken: SessionToken, message: ArrayBuffer, defaultURL: string, messageType?: string); + sessionToken: SessionToken; message: ArrayBuffer; defaultURL: string; @@ -822,7 +1000,6 @@ declare namespace dashjs { export interface ProtectionCreatedEvent extends Event { type: MediaPlayerEvents['PROTECTION_CREATED']; controller: object; - manifest: object; } export interface ProtectionDestroyedEvent extends Event { @@ -924,22 +1101,35 @@ declare namespace dashjs { export interface SessionToken { session: MediaKeySession; initData: any; + getSessionID(): string; + getExpirationTime(): number; + getKeyStatuses(): MediaKeyStatusMap; + getSessionType(): string; } export interface Stream { initialize(streamInfo: StreamInfo, protectionController: ProtectionController): void; + activate(MediaSource: MediaSource): void; + deactivate(): void; + getDuration(): number; + getStartTime(): number; + getId(): string; + getStreamInfo(): StreamInfo | null; + getBitrateListFor(type: MediaType): BitrateInfo[]; + updateData(updatedStreamInfo: StreamInfo): void; + reset(): void; } @@ -975,20 +1165,31 @@ declare namespace dashjs { export interface DashMetrics { getCurrentRepresentationSwitch(type: MediaType): ICurrentRepresentationSwitch; + getCurrentBufferState(type: MediaType): IBufferState; + getCurrentBufferLevel(type: MediaType): number; + getCurrentHttpRequest(type: MediaType): object; + getHttpRequests(type: MediaType): object[]; + getCurrentDroppedFrames(): IDroppedFrames; + getCurrentSchedulingInfo(type: MediaType): object; + getCurrentDVRInfo(type: MediaType): IDVRInfo[]; + getCurrentManifestUpdate(): any; + getLatestFragmentRequestHeaderValueByID(id: string): string; + getLatestMPDRequestHeaderValueByID(type: MediaType, id: string): string; } export interface DashAdapter { getBandwidthForRepresentation(representationId: string, periodIdx: number): number; + getIndexForRepresentation(representationId: string, periodIdx: number): number; /** @@ -1032,11 +1233,17 @@ declare namespace dashjs { systemString: string; uuid: string; schemeIdURI: string; + getInitData(cp: object): ArrayBuffer; + getRequestHeadersFromMessage(message: ArrayBuffer): object | null; + getLicenseRequestFromMessage(message: ArrayBuffer): Uint8Array; + getLicenseServerURLFromInitData(initData: ArrayBuffer): string | null; + getCDMData(): ArrayBuffer | null; + getSessionId(): 
string | null; } @@ -1159,6 +1366,7 @@ declare namespace dashjs { export type MetricType = 'ManifestUpdate' | 'RequestsQueue'; export type TrackSwitchMode = 'alwaysReplace' | 'neverReplace'; export type TrackSelectionMode = 'highestBitrate' | 'firstTrack' | 'highestEfficiency' | 'widestRange'; + export function supportsMediaSource(): boolean; } diff --git a/package-lock.json b/package-lock.json index 8365c9975c..202ad64d25 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "dashjs", - "version": "3.2.2", + "version": "4.0.0", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 0a4bafd236..0346dca7c3 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dashjs", - "version": "3.2.2", + "version": "4.0.0", "description": "A reference client implementation for the playback of MPEG DASH via Javascript and compliant browsers.", "author": "Dash Industry Forum", "license": "BSD-3-Clause", diff --git a/samples/advanced/settings.html b/samples/advanced/settings.html index 49b2fa0614..12a52c2df3 100644 --- a/samples/advanced/settings.html +++ b/samples/advanced/settings.html @@ -11,21 +11,22 @@ diff --git a/samples/captioning/caption_vtt.html b/samples/captioning/caption_vtt.html index a7539868d9..5744478985 100644 --- a/samples/captioning/caption_vtt.html +++ b/samples/captioning/caption_vtt.html @@ -19,7 +19,6 @@ player = dashjs.MediaPlayer({}).create(); player.initialize(video, url, true); - player.setTextDefaultEnabled(true); } diff --git a/samples/captioning/multi-track-captions.html b/samples/captioning/multi-track-captions.html index b5bbb9cdd0..b537ad0fca 100644 --- a/samples/captioning/multi-track-captions.html +++ b/samples/captioning/multi-track-captions.html @@ -51,7 +51,7 @@ function setTextDefaultEnabled() { var checkbox = document.getElementById("textDefaultEnabled"); - player.setTextDefaultEnabled(checkbox.checked); + player.updateSettings({streaming: {text: {defaultEnabled: checkbox.checked}}}); } function setLang() { diff --git a/samples/captioning/ttml-ebutt-sample.html b/samples/captioning/ttml-ebutt-sample.html index 663a3b4deb..7391ec0f23 100644 --- a/samples/captioning/ttml-ebutt-sample.html +++ b/samples/captioning/ttml-ebutt-sample.html @@ -32,7 +32,6 @@ player = dashjs.MediaPlayer().create(); player.initialize(videoElement, url, true); - player.setTextDefaultEnabled(true); player.attachTTMLRenderingDiv(TTMLRenderingDiv); controlbar = new ControlBar(player); // Checkout ControlBar.js for more info on how to target/add text tracks to UI controlbar.initialize(); diff --git a/samples/dash-if-reference-player/app/main.js b/samples/dash-if-reference-player/app/main.js index 12d4d3d4da..962bdd7e61 100644 --- a/samples/dash-if-reference-player/app/main.js +++ b/samples/dash-if-reference-player/app/main.js @@ -75,6 +75,8 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.contributors = data.items; }); + + /* ======= Chart related stuff ======= */ $scope.chartOptions = { legend: { labelBoxBorderColor: '#ffffff', @@ -126,13 +128,11 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' }, yaxes: [] }; - $scope.chartEnabled = true; $scope.maxPointsToChart = 30; $scope.maxChartableItems = 5; $scope.chartCount = 0; $scope.chartData = []; - $scope.chartState = { audio: { buffer: { data: [], selected: false, color: '#65080c', label: 'Audio Buffer Level' }, @@ -158,6 +158,7 @@ app.controller('DashController', ['$scope', 
'$window', 'sources', 'contributors' } }; + /* ======= General ======= */ $scope.abrEnabled = true; $scope.toggleCCBubble = false; $scope.debugEnabled = false; @@ -188,6 +189,7 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.videoBitrate = 0; $scope.videoIndex = 0; $scope.videoPendingIndex = 0; + $scope.videoPendingMaxIndex = 0; $scope.videoMaxIndex = 0; $scope.videoBufferLength = 0; $scope.videoDroppedFrames = 0; @@ -202,6 +204,7 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.audioBitrate = 0; $scope.audioIndex = 0; $scope.audioPendingIndex = ''; + $scope.audioPendingMaxIndex = ''; $scope.audioMaxIndex = 0; $scope.audioBufferLength = 0; $scope.audioDroppedFrames = 0; @@ -269,7 +272,9 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' 'logLevel': dashjs.Debug.LOG_LEVEL_INFO }, 'streaming': { - 'fastSwitchEnabled': $scope.fastSwitchSelected, + 'buffer': { + 'fastSwitchEnabled': $scope.fastSwitchSelected, + }, 'jumpGaps': true, 'abr': { 'autoSwitchBitrate': { @@ -283,8 +288,8 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' } }; - reqConfig.open("GET", "dashjs_config.json", true); - reqConfig.setRequestHeader("Content-type", "application/json"); + reqConfig.open('GET', 'dashjs_config.json', true); + reqConfig.setRequestHeader('Content-type', 'application/json'); reqConfig.send(); $scope.player.on(dashjs.MediaPlayer.events.ERROR, function (e) { /* jshint ignore:line */ @@ -326,17 +331,14 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' break; } }); - $("#errorModal").modal('show'); + $('#errorModal').modal('show'); } }, $scope); $scope.player.initialize($scope.video, null, $scope.autoPlaySelected); + $scope.player.attachTTMLRenderingDiv($('#video-caption')[0]); - // Add HTML-rendered TTML subtitles except for Firefox < v49 (issue #1164) - if (doesTimeMarchesOn()) { - $scope.player.attachTTMLRenderingDiv($('#video-caption')[0]); - } var currentConfig = $scope.player.getSettings(); @@ -365,23 +367,28 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.isDynamic = e.data.type === 'dynamic'; }, $scope); - $scope.player.on(dashjs.MediaPlayer.events.QUALITY_CHANGE_REQUESTED, function (e) { /* jshint ignore:line */ - $scope[e.mediaType + 'Index'] = e.oldQuality + 1; + $scope.player.on(dashjs.MediaPlayer.events.SETTING_PLAYBACK_QUALITY, function (e) { /* jshint ignore:line */ + var dashAdapter = $scope.player.getDashAdapter(); + var maxIndex = dashAdapter.getMaxIndexForBufferType(e.mediaType, e.streamInfo.index); + var bitrate = Math.round(e.bitrateInfo.bitrate / 1000); + $scope[e.mediaType + 'PendingIndex'] = e.newQuality + 1; + $scope[e.mediaType + 'PendingMaxIndex'] = maxIndex; + $scope[e.mediaType + 'Bitrate'] = bitrate; $scope.plotPoint('pendingIndex', e.mediaType, e.newQuality + 1, getTimeForPlot()); $scope.safeApply(); }, $scope); + $scope.player.on(dashjs.MediaPlayer.events.PERIOD_SWITCH_COMPLETED, function (e) { /* jshint ignore:line */ + $scope.currentStreamInfo = e.toStreamInfo; + }, $scope); + $scope.player.on(dashjs.MediaPlayer.events.QUALITY_CHANGE_RENDERED, function (e) { /* jshint ignore:line */ $scope[e.mediaType + 'Index'] = e.newQuality + 1; $scope.plotPoint('index', e.mediaType, e.newQuality + 1, getTimeForPlot()); $scope.safeApply(); }, $scope); - $scope.player.on(dashjs.MediaPlayer.events.PERIOD_SWITCH_COMPLETED, function (e) { /* jshint ignore:line */ - 
$scope.streamInfo = e.toStreamInfo; - }, $scope); - $scope.player.on(dashjs.MediaPlayer.events.STREAM_INITIALIZED, function (e) { /* jshint ignore:line */ stopMetricsInterval(); @@ -422,7 +429,7 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' return violation.event.key === e.event.key; }) - if(!existingViolation || existingViolation.length === 0) { + if (!existingViolation || existingViolation.length === 0) { $scope.conformanceViolations.push(e); } } @@ -467,7 +474,9 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.changeABRStrategy = function (strategy) { $scope.player.updateSettings({ streaming: { - stallThreshold: 0.5, + buffer: { + stallThreshold: 0.5 + }, abr: { ABRStrategy: strategy } @@ -477,7 +486,9 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' if (strategy === 'abrLoLP') { $scope.player.updateSettings({ streaming: { - stallThreshold: 0.05 + buffer: { + stallThreshold: 0.05 + } } }); $scope.changeFetchThroughputCalculation('abrFetchThroughputCalculationMoofParsing'); @@ -509,7 +520,9 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.toggleFastSwitch = function () { $scope.player.updateSettings({ 'streaming': { - 'fastSwitchEnabled': $scope.fastSwitchSelected + 'buffer': { + 'fastSwitchEnabled': $scope.fastSwitchSelected + } } }); }; @@ -529,7 +542,9 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.toggleScheduleWhilePaused = function () { $scope.player.updateSettings({ 'streaming': { - 'scheduleWhilePaused': $scope.scheduleWhilePausedSelected + 'scheduling': { + 'scheduleWhilePaused': $scope.scheduleWhilePausedSelected + } } }); }; @@ -544,8 +559,10 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.toggleReuseExistingSourceBuffers = function () { $scope.player.updateSettings({ - 'streaming': { - 'reuseExistingSourceBuffers': $scope.reuseExistingSourceBuffersSelected + streaming: { + buffer: { + reuseExistingSourceBuffers: $scope.reuseExistingSourceBuffersSelected + } } }); }; @@ -566,7 +583,9 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.toggleJumpGaps = function () { $scope.player.updateSettings({ 'streaming': { - 'jumpGaps': $scope.jumpGapsSelected + 'gaps': { + 'jumpGaps': $scope.jumpGapsSelected + } } }); }; @@ -639,10 +658,14 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' var config = { 'streaming': { - 'liveDelay': $scope.defaultLiveDelay, - 'stableBufferTime': $scope.defaultStableBufferDelay, - 'bufferTimeAtTopQuality': $scope.defaultBufferTimeAtTopQuality, - 'bufferTimeAtTopQualityLongForm': $scope.defaultBufferTimeAtTopQualityLongForm, + 'buffer': { + 'stableBufferTime': $scope.defaultStableBufferDelay, + 'bufferTimeAtTopQuality': $scope.defaultBufferTimeAtTopQuality, + 'bufferTimeAtTopQualityLongForm': $scope.defaultBufferTimeAtTopQualityLongForm, + }, + 'delay': { + 'liveDelay': $scope.defaultLiveDelay + }, 'lowLatencyEnabled': $scope.lowLatencyModeSelected, abr: {}, cmcd: {} @@ -653,19 +676,19 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' var selectedConfig = $scope.selectedItem.bufferConfig; if (selectedConfig.liveDelay) { - config.streaming.liveDelay = selectedConfig.liveDelay; + config.streaming.delay.liveDelay = selectedConfig.liveDelay; } if (selectedConfig.stableBufferTime) { - config.streaming.stableBufferTime 
= selectedConfig.stableBufferTime; + config.streaming.buffer.stableBufferTime = selectedConfig.stableBufferTime; } if (selectedConfig.bufferTimeAtTopQuality) { - config.streaming.bufferTimeAtTopQuality = selectedConfig.bufferTimeAtTopQuality; + config.streaming.buffer.bufferTimeAtTopQuality = selectedConfig.bufferTimeAtTopQuality; } if (selectedConfig.bufferTimeAtTopQualityLongForm) { - config.streaming.bufferTimeAtTopQualityLongForm = selectedConfig.bufferTimeAtTopQualityLongForm; + config.streaming.buffer.bufferTimeAtTopQualityLongForm = selectedConfig.bufferTimeAtTopQualityLongForm; } if (selectedConfig.lowLatencyMode !== undefined) { @@ -675,7 +698,7 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' const initialLiveDelay = parseFloat($scope.initialLiveDelay); if (!isNaN(initialLiveDelay)) { - config.streaming.liveDelay = initialLiveDelay; + config.streaming.delay.liveDelay = initialLiveDelay; } const initBitrate = parseInt($scope.initialVideoBitrate); @@ -730,7 +753,7 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' }); } } - $scope.player.setTextDefaultEnabled($scope.initialSettings.textEnabled); + $scope.player.updateSettings({ streaming: { text: { defaultEnabled: $scope.initialSettings.textEnabled } } }); $scope.player.enableForcedTextStreaming($scope.initialSettings.forceTextStreaming); $scope.controlbar.enable(); }; @@ -749,7 +772,7 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' }; $scope.setLogLevel = function () { - var level = $("input[name='log-level']:checked").val(); + var level = $('input[name=\'log-level\']:checked').val(); switch (level) { case 'none': $scope.player.updateSettings({ 'debug': { 'logLevel': dashjs.Debug.LOG_LEVEL_NONE } }); @@ -777,18 +800,18 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' }; $scope.setCmcdMode = function () { - var mode = $("input[name='cmcd-mode']:checked").val(); + var mode = $('input[name=\'cmcd-mode\']:checked').val(); switch (mode) { case 'query': - $scope.player.updateSettings({ streaming: { cmcd: { mode: 'query' }}}); + $scope.player.updateSettings({ streaming: { cmcd: { mode: 'query' } } }); break; case 'header': - $scope.player.updateSettings({ streaming: { cmcd: { mode: 'header' }}}); + $scope.player.updateSettings({ streaming: { cmcd: { mode: 'header' } } }); break; default: - $scope.player.updateSettings({ streaming: { cmcd: { mode: 'query' }}}); + $scope.player.updateSettings({ streaming: { cmcd: { mode: 'query' } } }); } }; @@ -965,9 +988,9 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' var dashMetrics = $scope.player.getDashMetrics(); var dashAdapter = $scope.player.getDashAdapter(); - if (dashMetrics && $scope.streamInfo) { - var period = dashAdapter.getPeriodById($scope.streamInfo.id); - var periodIdx = period ? period.index : $scope.streamInfo.index; + if (dashMetrics && $scope.currentStreamInfo) { + var period = dashAdapter.getPeriodById($scope.currentStreamInfo.id); + var periodIdx = period ? 
period.index : $scope.currentStreamInfo.index; var maxIndex = dashAdapter.getMaxIndexForBufferType(type, periodIdx); var repSwitch = dashMetrics.getCurrentRepresentationSwitch(type, true); @@ -984,7 +1007,6 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope[type + 'BufferLength'] = bufferLevel; $scope[type + 'MaxIndex'] = maxIndex; - $scope[type + 'Bitrate'] = bitrate; $scope[type + 'DroppedFrames'] = droppedFPS; $scope[type + 'LiveLatency'] = liveLatency; @@ -1037,30 +1059,13 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' // //////////////////////////////////////// - function doesTimeMarchesOn() { - var version; - var REQUIRED_VERSION = 49.0; - - if (typeof navigator !== 'undefined') { - if (!navigator.userAgent.match(/Firefox/)) { - return true; - } - - version = parseFloat(navigator.userAgent.match(/rv:([0-9.]+)/)[1]); - - if (!isNaN(version) && version >= REQUIRED_VERSION) { - return true; - } - } - } - function setLatencyAttributes() { // get buffer default value var currentConfig = $scope.player.getSettings(); - $scope.defaultLiveDelay = currentConfig.streaming.liveDelay; - $scope.defaultStableBufferDelay = currentConfig.streaming.stableBufferTime; - $scope.defaultBufferTimeAtTopQuality = currentConfig.streaming.bufferTimeAtTopQuality; - $scope.defaultBufferTimeAtTopQualityLongForm = currentConfig.streaming.bufferTimeAtTopQualityLongForm; + $scope.defaultLiveDelay = currentConfig.streaming.delay.liveDelay; + $scope.defaultStableBufferDelay = currentConfig.streaming.buffer.stableBufferTime; + $scope.defaultBufferTimeAtTopQuality = currentConfig.streaming.buffer.bufferTimeAtTopQuality; + $scope.defaultBufferTimeAtTopQualityLongForm = currentConfig.streaming.buffer.bufferTimeAtTopQualityLongForm; $scope.lowLatencyModeSelected = currentConfig.streaming.lowLatencyEnabled; $scope.liveCatchupEnabled = currentConfig.streaming.liveCatchup.enabled; } @@ -1142,18 +1147,18 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' let castPlayer; - $window['__onGCastApiAvailable'] = function(isAvailable) { + $window['__onGCastApiAvailable'] = function (isAvailable) { if (isAvailable) { castContext = cast.framework.CastContext.getInstance(); castContext.setOptions({ - receiverApplicationId: CAST_APP_ID, - autoJoinPolicy: chrome.cast.AutoJoinPolicy.ORIGIN_SCOPED + receiverApplicationId: CAST_APP_ID, + autoJoinPolicy: chrome.cast.AutoJoinPolicy.ORIGIN_SCOPED }); castContext.addEventListener(cast.framework.CastContextEventType.CAST_STATE_CHANGED, function (e) { console.log('[Cast]', e); if (e.castState === cast.framework.CastState.CONNECTED) { onCastReady(); - } else if (e.castState === cast.framework.CastState.NOT_CONNECTED) { + } else if (e.castState === cast.framework.CastState.NOT_CONNECTED) { onCastEnd(); } }); @@ -1195,13 +1200,15 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' if (castSession) { castPlayer.reset(); castSession.loadMedia(request).then( - function() { + function () { let media = castSession.getMediaSession(); if (media) { console.info('cast media: ', media); } }, - function(errorCode) { console.log('Error code: ' + errorCode); } + function (errorCode) { + console.log('Error code: ' + errorCode); + } ); } } diff --git a/samples/dash-if-reference-player/app/sources.json b/samples/dash-if-reference-player/app/sources.json index 5ebc242c3a..a80a03b74a 100644 --- a/samples/dash-if-reference-player/app/sources.json +++ 
b/samples/dash-if-reference-player/app/sources.json @@ -58,6 +58,26 @@ "acronym": "AWS", "name": "AWS", "url": "https://www.elemental.com/" + }, + "dvb": { + "acronym": "DVB", + "name": "DVB", + "url": "https://dvb-2017-dm.s3.eu-central-1.amazonaws.com/overview.html" + }, + "hbbtv": { + "acronym": "HbbTV", + "name": "HbbTV", + "url": "https://www.hbbtv.org/" + }, + "google": { + "acronym": "Google", + "name": "Google", + "url": "https://www.google.com" + }, + "vdms": { + "acronym": "VDMS", + "name": "Verizon Media", + "url": "https://www.verizonmedia.com/" } }, "items": [ @@ -149,6 +169,36 @@ "name": "Clear Static SegmentList", "url": "//wowzaec2demo.streamlock.net/vod/_definst_/ElephantsDream/smil:ElephantsDream.smil/manifest_mvlist.mpd", "provider": "wowza" + }, + { + "name": "Caminandes 01, Llama Drama (25fps, 75gop, 1080p) ", + "url": "http://refapp.hbbtv.org/videos/01_llama_drama_1080p_25f75g6sv3/manifest.mpd", + "provider": "hbbtv" + }, + { + "name": "Caminandes 02, Gran Dillama (25fps, 75gop, 1080p, KID=1236, subob,evtib) v5 ", + "url": "http://refapp.hbbtv.org/videos/02_gran_dillama_1080p_25f75g6sv5/manifest_subob_evtib.mpd", + "provider": "hbbtv" + }, + { + "name": "Tears of Steel (25fps, 75gop, 1080p, KID=1237) v3", + "url": "http://refapp.hbbtv.org/videos/tears_of_steel_1080p_25f75g6sv3/manifest.mpd", + "provider": "hbbtv" + }, + { + "name": "Caminandes 02, Gran Dillama (25fps, 75gop, 1080p, KID=1236), multiaudio v4", + "url": "http://refapp.hbbtv.org/videos/02_gran_dillama_1080p_ma_25f75g6sv4/manifest_subob_evtib.mpd", + "provider": "hbbtv" + }, + { + "name": "Caminandes 02, Gran Dillama (25fps, 75gop, 1080p, KID=1236), multiaudio v5", + "url": "http://refapp.hbbtv.org/videos/02_gran_dillama_1080p_ma_25f75g6sv5/manifest.mpd", + "provider": "hbbtv" + }, + { + "name": "Spring (25fps, 75gop, 1920x804(2.40) h264, KID=148D) v1", + "url": "http://refapp.hbbtv.org/videos/spring_804p_v1/manifest.mpd", + "provider": "hbbtv" } ] }, @@ -205,6 +255,41 @@ "name": "Clear Dynamic SegmentList", "url": "//wowzaec2demo.streamlock.net/live/bigbuckbunny/manifest_mvlist.mpd", "provider": "wowza" + }, + { + "name": "Multiperiod - Number + Timeline - Compact manifest - Thumbnails (1 track) - In-the-clear", + "url": "https://d24rwxnt7vw9qb.cloudfront.net/v1/dash/e6d234965645b411ad572802b6c9d5a10799c9c1/All_Reference_Streams/4577dca5f8a44756875ab5cc913cd1f1/index.mpd", + "provider": "aws" + }, + { + "name": "Multiperiod - Number + Timeline - Full manifest - Thumbnails (1 track) - In-the-clear", + "url": "https://d24rwxnt7vw9qb.cloudfront.net/v1/dash/e6d234965645b411ad572802b6c9d5a10799c9c1/All_Reference_Streams/ee565ea510cb4b4d8df5f48918c3d6dc/index.mpd", + "provider": "aws" + }, + { + "name": "Multiperiod - Time + Timeline - Compact manifest - Thumbnails (1 track) - In-the-clear", + "url": "https://d24rwxnt7vw9qb.cloudfront.net/v1/dash/e6d234965645b411ad572802b6c9d5a10799c9c1/All_Reference_Streams/91d37b0389de47e0b5266736d3633077/index.mpd", + "provider": "aws" + }, + { + "name": "Multiperiod - Time + Timeline - Full manifest - Thumbnails (1 track) - In-the-clear", + "url": "https://d24rwxnt7vw9qb.cloudfront.net/v1/dash/e6d234965645b411ad572802b6c9d5a10799c9c1/All_Reference_Streams/6ba06d17f65b4e1cbd1238eaa05c02c1/index.mpd", + "provider": "aws" + }, + { + "name": "Single period - Number + Duration - Full manifest - Thumbnails (2 tracks: 174p/1080p) - In-the-clear", + "url": "https://d10gktn8v7end7.cloudfront.net/out/v1/6ee19df3afa24fe190a8ae16c2c88560/index.mpd", + "provider": "aws" + }, + { + 
"name": " Multiperiod DVB 2017 0.mpd Live Clear AVC + AAC", + "url": "https://pl8q5ug7b6.execute-api.eu-central-1.amazonaws.com/0.mpd", + "provider": "dvb" + }, + { + "name": "LiveSIM Caminandes 02, Gran Dillama (25fps, 25gop, 2sec, multi MOOF/MDAT, 1080p, KID=1236) v2", + "url": "http://refapp.hbbtv.org/livesim/02_llamav2/manifest.mpd", + "provider": "hbbtv" } ] }, @@ -320,12 +405,66 @@ "url": "https://dash.akamaized.net/dash264/CTA/imsc1/IT1-20171027_dash.mpd", "name": "IMSC1 Text Subtitles via sidecar file", "provider": "cta" + }, + { + "url": "https://pl8q5ug7b6.execute-api.eu-central-1.amazonaws.com/2.mpd", + "name": "Subtitles in multi period live", + "provider": "unified" } ] }, { "name": "DRM (modern)", "submenu": [ + { + "name": "Multiperiod - Number + Timeline - Compact manifest - Thumbnails (1 track) - Encryption (1 key) PlayReady/Widevine (DRMtoday) - Key rotation (60s)", + "url": "https://d24rwxnt7vw9qb.cloudfront.net/v1/dash/e6d234965645b411ad572802b6c9d5a10799c9c1/All_Reference_Streams/2fc23947945841b9b1be9768f9c13e75/index.mpd", + "protData": { + "com.widevine.alpha": { + "serverURL": "https://lic.staging.drmtoday.com/license-proxy-widevine/cenc/?specConform=true", + "httpRequestHeaders": { + "x-dt-custom-data": "ewogICAgInVzZXJJZCI6ICJhd3MtZWxlbWVudGFsOjpzcGVrZS10ZXN0aW5nIiwKICAgICJzZXNzaW9uSWQiOiAiZWxlbWVudGFsLXJlZnN0cmVhbSIsCiAgICAibWVyY2hhbnQiOiAiYXdzLWVsZW1lbnRhbCIKfQo=" + }, + "com.microsoft.playready": { + "serverURL": "https://lic.staging.drmtoday.com/license-proxy-headerauth/drmtoday/RightsManager.asmx", + "httpRequestHeaders": { + "x-dt-custom-data": "ewogICAgInVzZXJJZCI6ICJhd3MtZWxlbWVudGFsOjpzcGVrZS10ZXN0aW5nIiwKICAgICJzZXNzaW9uSWQiOiAiZWxlbWVudGFsLXJlZnN0cmVhbSIsCiAgICAibWVyY2hhbnQiOiAiYXdzLWVsZW1lbnRhbCIKfQo=" + } + } + } + }, + "provider": "aws" + }, + { + "name": "Multiperiod - Number + Timeline - Compact manifest - Thumbnails (1 track) - Encryption (2 keys : audio + video) - No key rotation", + "url": "https://d24rwxnt7vw9qb.cloudfront.net/v1/dash/e6d234965645b411ad572802b6c9d5a10799c9c1/All_Reference_Streams//6e16c26536564c2f9dbc5f725a820cff/index.mpd", + "protData": { + "com.widevine.alpha": { + "serverURL": "https://lic.staging.drmtoday.com/license-proxy-widevine/cenc/?specConform=true", + "httpRequestHeaders": { + "x-dt-custom-data": "ewogICAgInVzZXJJZCI6ICJhd3MtZWxlbWVudGFsOjpzcGVrZS10ZXN0aW5nIiwKICAgICJzZXNzaW9uSWQiOiAiZWxlbWVudGFsLXJlZnN0cmVhbSIsCiAgICAibWVyY2hhbnQiOiAiYXdzLWVsZW1lbnRhbCIKfQo=" + }, + "com.microsoft.playready": { + "serverURL": "https://lic.staging.drmtoday.com/license-proxy-headerauth/drmtoday/RightsManager.asmx", + "httpRequestHeaders": { + "x-dt-custom-data": "ewogICAgInVzZXJJZCI6ICJhd3MtZWxlbWVudGFsOjpzcGVrZS10ZXN0aW5nIiwKICAgICJzZXNzaW9uSWQiOiAiZWxlbWVudGFsLXJlZnN0cmVhbSIsCiAgICAibWVyY2hhbnQiOiAiYXdzLWVsZW1lbnRhbCIKfQo=" + } + } + } + }, + "provider": "aws" + }, + { + "name": "Shaka Demo Assets: Angel-One Widevine", + "url": "https://storage.googleapis.com/shaka-demo-assets/angel-one-widevine/dash.mpd", + "protData": { + "com.widevine.alpha": { + "serverURL": "https://widevine-proxy.appspot.com/proxy?video_id=&provider=" + } + }, + "moreInfo": "https://github.com/Axinom/dash-test-vectors", + "provider": "google" + }, { "name": "1080p with Widevine DRM, license expired after 60s", "url": "https://media.axprod.net/TestVectors/v7-MultiDRM-SingleKey/Manifest_1080p.mpd", @@ -819,6 +958,36 @@ } }, "provider": "wowza" + }, + { + "name": "Live Dash WV and PR with unencrypted ad breaks -- Always starts in encrypted content - Keys never 
change", + "url": "https://content.uplynk.com/playlist/6c526d97954b41deb90fe64328647a71.mpd?ad=bbbads&delay=25", + "protData": { + "com.widevine.alpha": { + "serverURL": "https://content.uplynk.com/wv", + "httpTimeout": 5000 + }, + "com.microsoft.playready": { + "serverURL": "https://content.uplynk.com/pr", + "httpTimeout": 5000 + } + }, + "provider": "vdms" + }, + { + "name": "Live Dash WV and PR - Starting in unencrypted ad (preroll) - Moving into encrypted content - Keys never change", + "url": "https://content.uplynk.com/playlist/4f1a9815a1af43d5ba64465d85bf11cf.mpd?ad=sintelads", + "protData": { + "com.widevine.alpha": { + "serverURL": "https://content.uplynk.com/wv", + "httpTimeout": 5000 + }, + "com.microsoft.playready": { + "serverURL": "https://content.uplynk.com/pr", + "httpTimeout": 5000 + } + }, + "provider": "vdms" } ] }, diff --git a/samples/dash-if-reference-player/dashjs_config.json b/samples/dash-if-reference-player/dashjs_config.json index d4355a3c85..f1b3e72d65 100644 --- a/samples/dash-if-reference-player/dashjs_config.json +++ b/samples/dash-if-reference-player/dashjs_config.json @@ -1,72 +1,5 @@ { "debug": { "logLevel": 5 - }, - "streaming": { - "metricsMaxListDepth": 50, - "abandonLoadTimeout": 10000, - "liveDelayFragmentCount": null, - "liveDelay": null, - "scheduleWhilePaused": true, - "fastSwitchEnabled": true, - "flushBufferAtTrackSwitch": false, - "bufferPruningInterval": 10, - "bufferToKeep": 20, - "jumpGaps": true, - "smallGapLimit": 1.5, - "stableBufferTime": -1, - "bufferTimeAtTopQuality": 30, - "bufferTimeAtTopQualityLongForm": 60, - "longFormContentDurationThreshold": 600, - "wallclockTimeUpdateInterval": 50, - "lowLatencyEnabled": false, - "keepProtectionMediaKeys": false, - "useManifestDateHeaderTimeSource": true, - "useSuggestedPresentationDelay": true, - "manifestUpdateRetryInterval": 100, - "liveCatchup": { - "minDrift": 0.02, - "maxDrift": 0, - "playbackRate": 0.5, - "latencyThreshold": null, - "enabled": false - }, - "lastBitrateCachingInfo": { "enabled": true, "ttl": 360000}, - "lastMediaSettingsCachingInfo": { "enabled": true, "ttl": 360000}, - "cacheLoadThresholds": {"video": 50, "audio": 5}, - "retryIntervals": { - "MPD": 500, - "XLinkExpansion": 500, - "MediaSegment": 1000, - "InitializationSegment": 1000, - "BitstreamSwitchingSegment": 1000, - "IndexSegment": 1000, - "other": 1000 - }, - "retryAttempts": { - "MPD": 3, - "XLinkExpansion": 1, - "MediaSegment": 3, - "InitializationSegment": 3, - "BitstreamSwitchingSegment": 3, - "IndexSegment": 3, - "other": 3 - }, - "abr": { - "movingAverageMethod": "slidingWindow", - "ABRStrategy": "abrDynamic", - "bandwidthSafetyFactor": 0.9, - "useDefaultABRRules": true, - "useBufferOccupancyABR": false, - "useDeadTimeLatency": true, - "limitBitrateByPortal": false, - "usePixelRatioInLimitBitrateByPortal": false, - "maxBitrate": { "audio": -1, "video": -1 }, - "minBitrate": { "audio": -1, "video": -1 }, - "maxRepresentationRatio": { "audio": 1, "video": 1 }, - "initialBitrate": { "audio": -1, "video": -1 }, - "initialRepresentationRatio": { "audio": -1, "video": -1 }, - "autoSwitchBitrate": { "audio": true, "video": true } - } } } \ No newline at end of file diff --git a/samples/dash-if-reference-player/index.html b/samples/dash-if-reference-player/index.html index c623e409df..aba93a1942 100644 --- a/samples/dash-if-reference-player/index.html +++ b/samples/dash-if-reference-player/index.html @@ -580,14 +580,14 @@ {{videoPendingIndex}} + Downloading : {{videoPendingIndex}} / {{videoPendingMaxIndex}}
{{videoIndex}} / {{videoMaxIndex}}
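For reference, a minimal sketch of how an embedding page can move from the old flat settings keys to the nested layout used above; the element id, manifest URL and values are placeholders, and setTextDefaultEnabled() is only mentioned as the call this replaces:

var video = document.querySelector('#videoPlayer');
var player = dashjs.MediaPlayer().create();

player.updateSettings({
    streaming: {
        delay: {
            liveDelay: 8              /* was streaming.liveDelay */
        },
        buffer: {
            stableBufferTime: 12,     /* was streaming.stableBufferTime */
            fastSwitchEnabled: true   /* was streaming.fastSwitchEnabled */
        },
        text: {
            defaultEnabled: true      /* replaces player.setTextDefaultEnabled(true) */
        }
    }
});

player.initialize(video, 'https://example.com/manifest.mpd', true);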
diff --git a/samples/drm/license-wrapping.html b/samples/drm/license-wrapping.html index 5093278ad4..3909178df9 100644 --- a/samples/drm/license-wrapping.html +++ b/samples/drm/license-wrapping.html @@ -21,7 +21,7 @@ player, url = "https://media.axprod.net/TestVectors/v7-MultiDRM-SingleKey/Manifest_1080p.mpd"; - var filterLicensRequest = function (request) { + var filterLicenseRequest = function (request) { console.log('LICENSE REQUEST', request); /* Here you can modify/overwrite the licens request (url, headers, data...) */ request.headers = { @@ -39,7 +39,7 @@ video = document.querySelector("video"); player = dashjs.MediaPlayer().create(); - player.registerLicenseRequestFilter(filterLicensRequest); + player.registerLicenseRequestFilter(filterLicenseRequest); player.registerLicenseResponseFilter(filterLicenseResponse); player.initialize(video, url, true); diff --git a/samples/getting-started/manual-load-with-custom-settings.html b/samples/getting-started/manual-load-with-custom-settings.html index 89a37a8da7..a3ef047893 100644 --- a/samples/getting-started/manual-load-with-custom-settings.html +++ b/samples/getting-started/manual-load-with-custom-settings.html @@ -25,7 +25,9 @@ }, 'streaming': { 'scheduleWhilePaused': false, /* stops the player from loading segments while paused */ - 'fastSwitchEnabled': true /* enables buffer replacement when switching bitrates for faster switching */ + 'buffer': { + 'fastSwitchEnabled': true /* enables buffer replacement when switching bitrates for faster switching */ + } } }); player.setAutoPlay(false); /* remove this line if you want the player to start automatically on load */ diff --git a/samples/live-streaming/live-delay-comparison-custom-manifest.html b/samples/live-streaming/live-delay-comparison-custom-manifest.html index eb12da5f3c..16fd12adc1 100644 --- a/samples/live-streaming/live-delay-comparison-custom-manifest.html +++ b/samples/live-streaming/live-delay-comparison-custom-manifest.html @@ -10,68 +10,71 @@ diff --git a/samples/live-streaming/live-delay-comparison-using-fragmentCount.html b/samples/live-streaming/live-delay-comparison-using-fragmentCount.html index 2867dbe51c..662e1c2bb4 100644 --- a/samples/live-streaming/live-delay-comparison-using-fragmentCount.html +++ b/samples/live-streaming/live-delay-comparison-using-fragmentCount.html @@ -20,32 +20,32 @@ video = document.querySelector("#video1"); player1 = dashjs.MediaPlayer().create(); player1.initialize(video,MPD_2S_SEGMENTS ,true); - player1.updateSettings({'streaming': { 'liveDelayFragmentCount': 0 }}); + player1.updateSettings({'streaming': { 'delay': {'liveDelayFragmentCount': 0 }}}); video = document.querySelector("#video2"); player2 = dashjs.MediaPlayer().create(); player2.initialize(video,MPD_2S_SEGMENTS ,true); - player2.updateSettings({'streaming': { 'liveDelayFragmentCount': 2 }}); + player2.updateSettings({'streaming': { 'delay': {'liveDelayFragmentCount': 2 }}}); video = document.querySelector("#video3"); player3 = dashjs.MediaPlayer().create(); player3.initialize(video,MPD_2S_SEGMENTS ,true); - player3.updateSettings({'streaming': { 'liveDelayFragmentCount': 4 }}); + player3.updateSettings({'streaming': { 'delay': {'liveDelayFragmentCount': 4 }}}); video = document.querySelector("#video4"); player4 = dashjs.MediaPlayer().create(); player4.initialize(video,MPD_6S_SEGMENTS ,true); - player4.updateSettings({'streaming': { 'liveDelayFragmentCount': 0 }}); + player4.updateSettings({'streaming': { 'delay': {'liveDelayFragmentCount': 0 }}}); video = 
document.querySelector("#video5"); player5 = dashjs.MediaPlayer().create(); player5.initialize(video,MPD_6S_SEGMENTS ,true); - player5.updateSettings({'streaming': { 'liveDelayFragmentCount': 2 }}); + player5.updateSettings({'streaming': { 'delay': {'liveDelayFragmentCount': 2 }}}); video = document.querySelector("#video6"); player6 = dashjs.MediaPlayer().create(); player6.initialize(video,MPD_6S_SEGMENTS ,true); - player6.updateSettings({'streaming': { 'liveDelayFragmentCount': 4 }}); + player6.updateSettings({'streaming': { 'delay': {'liveDelayFragmentCount': 4 }}}); setInterval( function() { diff --git a/samples/live-streaming/live-delay-comparison-using-setLiveDelay.html b/samples/live-streaming/live-delay-comparison-using-setLiveDelay.html index a35efe6839..e337a2f8de 100644 --- a/samples/live-streaming/live-delay-comparison-using-setLiveDelay.html +++ b/samples/live-streaming/live-delay-comparison-using-setLiveDelay.html @@ -1,138 +1,159 @@ - - - Live delay comparison using setLiveDelay + + + Live delay comparison using setLiveDelay - - - - - - - - - - This sample illustrates the combined effects of segment duration and the "setLiveDelay" MediaPlayer method on the latency of live stream playback. - The upper layer of videos are all playing a live stream with 2s segment duration, with setLiveDelay values of 2s, 4s, and 8s. The lower layer use 6s segment duration, - with setLiveDelay values of 6s, 12s, and 24s. Lowest latency is achieved with shorter segments and with a lower live delay value. Higher stability/robustness is achieved with a higher live delay which allows a larger forward buffer. - - - - -
- 2s segment, 2s target latency
-
- Seconds behind live:
- Buffer length: -
- 2s segment, 4s target latency
-
- Seconds behind live:
- Buffer length: -
- 2s segment, 8s target latency
-
- Seconds behind live:
- Buffer length: -
Wall clock time -
- : -
-
- 6s segment, 6s target latency
-
- Seconds behind live:
- Buffer length: -
- 6s segment, 12s target latency
-
- Seconds behind live:
- Buffer length: -
- 6s segment, 24s target latency
-
- Seconds behind live:
- Buffer length: -
- - + + + + + + + +This sample illustrates the combined effects of segment duration and the "setLiveDelay" MediaPlayer +method on the latency of live stream playback. +The upper layer of videos are all playing a live stream with 2s segment duration, with setLiveDelay values of 2s, 4s, +and 8s. The lower layer uses 6s segment duration, +with setLiveDelay values of 6s, 12s, and 24s. Lowest latency is achieved with shorter segments and with a lower live +delay value. Higher stability/robustness is achieved with a higher live delay which allows a larger forward buffer. + + + + + + + + + + + + +
+ 2s segment, 2s target latency
+ +
+ Seconds behind live:
+ Buffer length: +
+ 2s segment, 4s target latency
+ +
+ Seconds behind live:
+ Buffer length: +
+ 2s segment, 8s target latency
+ +
+ Seconds behind live:
+ Buffer length: +
Wall clock time +
+ : +
+
+ 6s segment, 6s target latency
+ +
+ Seconds behind live:
+ Buffer length: +
+ 6s segment, 12s target latency
+ +
+ Seconds behind live:
+ Buffer length: +
+ 6s segment, 24s target latency
+ +
+ Seconds behind live:
+ Buffer length: +
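A hedged sketch of how the comparison players above can be set up once liveDelay lives under streaming.delay; the manifest URL matches the 2-second test vector used elsewhere in these samples, while the element ids and delay values are placeholders:

var MPD_2S_SEGMENTS = 'https://livesim.dashif.org/livesim/testpic_2s/Manifest.mpd';

function createPlayerWithDelay(selector, targetLiveDelay) {
    var player = dashjs.MediaPlayer().create();
    player.initialize(document.querySelector(selector), MPD_2S_SEGMENTS, true);
    /* was: player.updateSettings({ streaming: { liveDelay: targetLiveDelay } }); */
    player.updateSettings({ streaming: { delay: { liveDelay: targetLiveDelay } } });
    return player;
}

var players = [
    createPlayerWithDelay('#video1', 2),
    createPlayerWithDelay('#video2', 4),
    createPlayerWithDelay('#video3', 8)
];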
+ + diff --git a/samples/live-streaming/synchronized-live-playback.html b/samples/live-streaming/synchronized-live-playback.html index 0c3bc52195..da3f19d6f1 100644 --- a/samples/live-streaming/synchronized-live-playback.html +++ b/samples/live-streaming/synchronized-live-playback.html @@ -15,7 +15,9 @@ var MPD_2S_SEGMENTS = "https://livesim.dashif.org/livesim/testpic_2s/Manifest.mpd"; var settings = { streaming: { - liveDelay: 10, + delay: { + liveDelay: 10 + }, liveCatchup: { enabled: true } diff --git a/samples/low-latency/index.html b/samples/low-latency/index.html index 5563429ac4..f3cf5f2f2e 100644 --- a/samples/low-latency/index.html +++ b/samples/low-latency/index.html @@ -34,7 +34,9 @@ player.updateSettings({ streaming: { - liveDelay: targetLatency, + delay: { + liveDelay: targetLatency + }, liveCatchup: { minDrift: minDrift, playbackRate: catchupPlaybackRate, diff --git a/samples/low-latency/l2all_index.html b/samples/low-latency/l2all_index.html index 27657f9b04..b397468d37 100644 --- a/samples/low-latency/l2all_index.html +++ b/samples/low-latency/l2all_index.html @@ -42,7 +42,9 @@ player.updateSettings({ streaming: { - liveDelay: targetLatency, + delay: { + liveDelay: targetLatency + }, liveCatchup: { minDrift: minDrift, playbackRate: catchupPlaybackRate, diff --git a/samples/low-latency/lolp_index.html b/samples/low-latency/lolp_index.html index bb8c4acf41..471f7a4a79 100644 --- a/samples/low-latency/lolp_index.html +++ b/samples/low-latency/lolp_index.html @@ -22,7 +22,9 @@ logLevel: dashjs.Debug.LOG_LEVEL_WARNING }, streaming: { - stallThreshold: 0.05, + buffer: { + stallThreshold: 0.05, + }, lowLatencyEnabled: true, abr: { useDefaultABRRules: true, @@ -46,7 +48,9 @@ player.updateSettings({ streaming: { - liveDelay: targetLatency, + delay: { + liveDelay: targetLatency + }, liveCatchup: { minDrift: minDrift, playbackRate: catchupPlaybackRate diff --git a/samples/offline/app/main.js b/samples/offline/app/main.js index fca8dbae79..c0958f4f62 100644 --- a/samples/offline/app/main.js +++ b/samples/offline/app/main.js @@ -273,8 +273,12 @@ app.controller('DashController', function ($scope, $timeout, $q, sources, contri 'logLevel': dashjs.Debug.LOG_LEVEL_INFO }, 'streaming': { - 'fastSwitchEnabled': $scope.fastSwitchSelected, - 'jumpGaps': true, + 'buffer': { + 'fastSwitchEnabled': $scope.fastSwitchSelected + }, + 'gaps:': { + 'jumpGaps': true, + }, 'abr': { 'autoSwitchBitrate': { 'video': $scope.videoAutoSwitchSelected @@ -349,14 +353,14 @@ app.controller('DashController', function ($scope, $timeout, $q, sources, contri // get buffer default value var currentConfig = $scope.player.getSettings(); - $scope.defaultLiveDelay = currentConfig.streaming.liveDelay; - $scope.defaultStableBufferDelay = currentConfig.streaming.stableBufferTime; - $scope.defaultBufferTimeAtTopQuality = currentConfig.streaming.bufferTimeAtTopQuality; - $scope.defaultBufferTimeAtTopQualityLongForm = currentConfig.streaming.bufferTimeAtTopQualityLongForm; + $scope.defaultLiveDelay = currentConfig.streaming.delay.liveDelay; + $scope.defaultStableBufferDelay = currentConfig.streaming.buffer.stableBufferTime; + $scope.defaultBufferTimeAtTopQuality = currentConfig.streaming.buffer.bufferTimeAtTopQuality; + $scope.defaultBufferTimeAtTopQualityLongForm = currentConfig.streaming.buffer.bufferTimeAtTopQualityLongForm; $scope.lowLatencyModeSelected = currentConfig.streaming.lowLatencyEnabled; - var initVideoTrackSwitchMode = $scope.player.getTrackSwitchModeFor('video'); - var initAudioTrackSwitchMode = 
$scope.player.getTrackSwitchModeFor('audio'); + var initVideoTrackSwitchMode = currentConfig.streaming.trackSwitchMode.video; + var initAudioTrackSwitchMode = currentConfig.streaming.trackSwitchMode.audio; //get default track switch mode if (initVideoTrackSwitchMode === 'alwaysReplace') { @@ -496,7 +500,9 @@ app.controller('DashController', function ($scope, $timeout, $q, sources, contri $scope.toggleFastSwitch = function () { $scope.player.updateSettings({ 'streaming': { - 'fastSwitchEnabled': $scope.fastSwitchSelected + 'buffer': { + 'fastSwitchEnabled': $scope.fastSwitchSelected + }, } }); }; @@ -516,7 +522,9 @@ app.controller('DashController', function ($scope, $timeout, $q, sources, contri $scope.toggleScheduleWhilePaused = function () { $scope.player.updateSettings({ 'streaming': { - 'scheduleWhilePaused': $scope.scheduleWhilePausedSelected + 'scheduling': { + 'scheduleWhilePaused': $scope.scheduleWhilePausedSelected + } } }); }; @@ -537,7 +545,9 @@ app.controller('DashController', function ($scope, $timeout, $q, sources, contri $scope.toggleJumpGaps = function () { $scope.player.updateSettings({ 'streaming': { - 'jumpGaps': $scope.jumpGapsSelected + 'gaps': { + 'jumpGaps': $scope.jumpGapsSelected + } } }); }; @@ -589,12 +599,16 @@ app.controller('DashController', function ($scope, $timeout, $q, sources, contri } var config = { - 'streaming': { - 'liveDelay': $scope.defaultLiveDelay, - 'stableBufferTime': $scope.defaultStableBufferDelay, - 'bufferTimeAtTopQuality': $scope.defaultBufferTimeAtTopQuality, - 'bufferTimeAtTopQualityLongForm': $scope.defaultBufferTimeAtTopQualityLongForm, - 'lowLatencyEnabled': $scope.lowLatencyModeSelected + streaming: { + delay: { + liveDelay: $scope.defaultLiveDelay + }, + buffer: { + stableBufferTime: $scope.defaultStableBufferDelay, + bufferTimeAtTopQuality: $scope.defaultBufferTimeAtTopQuality, + bufferTimeAtTopQualityLongForm: $scope.defaultBufferTimeAtTopQualityLongForm, + }, + lowLatencyEnabled: $scope.lowLatencyModeSelected } }; @@ -602,19 +616,19 @@ app.controller('DashController', function ($scope, $timeout, $q, sources, contri var selectedConfig = $scope.selectedItem.bufferConfig; if (selectedConfig.liveDelay) { - config.streaming.liveDelay = selectedConfig.liveDelay; + config.streaming.delay.liveDelay = selectedConfig.liveDelay; } if (selectedConfig.stableBufferTime) { - config.streaming.stableBufferTime = selectedConfig.stableBufferTime; + config.streaming.buffer.stableBufferTime = selectedConfig.stableBufferTime; } if (selectedConfig.bufferTimeAtTopQuality) { - config.streaming.bufferTimeAtTopQuality = selectedConfig.bufferTimeAtTopQuality; + config.streaming.buffer.bufferTimeAtTopQuality = selectedConfig.bufferTimeAtTopQuality; } if (selectedConfig.bufferTimeAtTopQualityLongForm) { - config.streaming.bufferTimeAtTopQualityLongForm = selectedConfig.bufferTimeAtTopQualityLongForm; + config.streaming.buffer.bufferTimeAtTopQualityLongForm = selectedConfig.bufferTimeAtTopQualityLongForm; } if (selectedConfig.lowLatencyMode !== undefined) { @@ -990,7 +1004,8 @@ app.controller('DashController', function ($scope, $timeout, $q, sources, contri if (vars && vars.hasOwnProperty('stream')) { try { item = JSON.parse(atob(vars.stream)); - } catch (e) { } + } catch (e) { + } } diff --git a/src/core/Settings.js b/src/core/Settings.js index cc83ae1e86..81a7c8d5f1 100644 --- a/src/core/Settings.js +++ b/src/core/Settings.js @@ -83,6 +83,7 @@ import {HTTPRequest} from '../streaming/vo/metrics/HTTPRequest'; * stallThreshold: 0.5, * 
filterUnsupportedEssentialProperties: true, * eventControllerRefreshDelay: 100, + * cacheInitSegments: true, * utcSynchronization: { * backgroundAttempts: 2, * timeBetweenSyncAttempts: 30, @@ -310,7 +311,7 @@ import {HTTPRequest} from '../streaming/vo/metrics/HTTPRequest'; * Known issues: * 1. In IE11 with auto switching off, if a user switches to a quality they can not download in time the fragment may be appended in the same range as the playhead or even in the past, in IE11 it may cause a stutter or stall in playback. * @property {boolean} [flushBufferAtTrackSwitch=false] - * When enabled, after a track switch and in case buffer is being replaced (see MediaPlayer.setTrackSwitchModeFor(Constants.TRACK_SWITCH_MODE_ALWAYS_REPLACE)), the video element is flushed (seek at current playback time) once a segment of the new track is appended in buffer in order to force video decoder to play new track. + * When enabled, after a track switch and in case buffer is being replaced, the video element is flushed (seek at current playback time) once a segment of the new track is appended in buffer in order to force video decoder to play new track. * * This can be required on some devices like GoogleCast devices to make track switching functional. * @@ -368,6 +369,8 @@ import {HTTPRequest} from '../streaming/vo/metrics/HTTPRequest'; * Stall threshold used in BufferController.js to determine whether a track should still be changed and which buffer range to prune. * @property {boolean} [filterUnsupportedEssentialProperties=true] * Enable to filter all the AdaptationSets and Representations which contain an unsupported \ element. + * @property {boolean} [cacheInitSegments=true] + * Enables the caching of init segments to avoid requesting the init segments before each representation switch. * @property {number} [eventControllerRefreshDelay=100] * Defines the delay in milliseconds between two consecutive checks for events to be fired. 
* @property {module:Settings~UtcSynchronizationSettings} utcSynchronization Settings related to UTC clock synchronization @@ -615,34 +618,43 @@ function Settings() { dispatchEvent: false }, streaming: { - metricsMaxListDepth: 1000, + metricsMaxListDepth: 500, abandonLoadTimeout: 10000, - liveDelayFragmentCount: NaN, - liveDelay: null, - scheduleWhilePaused: true, - fastSwitchEnabled: false, - flushBufferAtTrackSwitch: false, calcSegmentAvailabilityRangeFromTimeline: false, - reuseExistingSourceBuffers: true, - bufferPruningInterval: 10, - bufferToKeep: 20, - jumpGaps: true, - jumpLargeGaps: true, - smallGapLimit: 1.5, - stableBufferTime: 12, - bufferTimeAtTopQuality: 30, - bufferTimeAtTopQualityLongForm: 60, - longFormContentDurationThreshold: 600, - wallclockTimeUpdateInterval: 50, + wallclockTimeUpdateInterval: 100, lowLatencyEnabled: false, - keepProtectionMediaKeys: false, useManifestDateHeaderTimeSource: true, - useSuggestedPresentationDelay: true, - useAppendWindow: true, manifestUpdateRetryInterval: 100, - stallThreshold: 0.5, filterUnsupportedEssentialProperties: true, + cacheInitSegments: true, eventControllerRefreshDelay: 100, + delay: { + liveDelayFragmentCount: NaN, + liveDelay: NaN, + useSuggestedPresentationDelay: true, + }, + protection: { + keepProtectionMediaKeys: false + }, + buffer: { + fastSwitchEnabled: true, + flushBufferAtTrackSwitch: false, + reuseExistingSourceBuffers: true, + bufferPruningInterval: 10, + bufferToKeep: 20, + bufferTimeAtTopQuality: 30, + bufferTimeAtTopQualityLongForm: 60, + initialBufferLevel: NaN, + stableBufferTime: 12, + longFormContentDurationThreshold: 600, + stallThreshold: 0.5, + useAppendWindow: true + }, + gaps: { + jumpGaps: true, + jumpLargeGaps: true, + smallGapLimit: 1.5, + }, utcSynchronization: { backgroundAttempts: 2, timeBetweenSyncAttempts: 30, @@ -656,6 +668,14 @@ function Settings() { value: 'http://time.akamai.com/?iso&ms' } }, + scheduling: { + defaultTimeout: 300, + lowLatencyTimeout: 100, + scheduleWhilePaused: true + }, + text: { + defaultEnabled: true + }, liveCatchup: { minDrift: 0.02, maxDrift: 0, diff --git a/src/core/events/CoreEvents.js b/src/core/events/CoreEvents.js index 2579f06f80..f5d1ac80ac 100644 --- a/src/core/events/CoreEvents.js +++ b/src/core/events/CoreEvents.js @@ -43,18 +43,17 @@ class CoreEvents extends EventsBase { this.ATTEMPT_BACKGROUND_SYNC = 'attemptBackgroundSync'; this.BUFFERING_COMPLETED = 'bufferingCompleted'; this.BUFFER_CLEARED = 'bufferCleared'; - this.BUFFER_LEVEL_UPDATED = 'bufferLevelUpdated'; - this.BYTES_APPENDED = 'bytesAppended'; + this.BYTES_APPENDED_IN_SINK = 'bytesAppendedInSink'; this.BYTES_APPENDED_END_FRAGMENT = 'bytesAppendedEndFragment'; this.CHECK_FOR_EXISTENCE_COMPLETED = 'checkForExistenceCompleted'; this.CURRENT_TRACK_CHANGED = 'currentTrackChanged'; this.DATA_UPDATE_COMPLETED = 'dataUpdateCompleted'; - this.DATA_UPDATE_STARTED = 'dataUpdateStarted'; this.INBAND_EVENTS = 'inbandEvents'; - this.INITIALIZATION_LOADED = 'initializationLoaded'; + this.INITIAL_STREAM_SWITCH = 'initialStreamSwitch'; this.INIT_FRAGMENT_LOADED = 'initFragmentLoaded'; this.INIT_FRAGMENT_NEEDED = 'initFragmentNeeded'; this.INTERNAL_MANIFEST_LOADED = 'internalManifestLoaded'; + this.INTERNAL_BUFFER_UPDATED = 'internalBufferUpdated'; this.ORIGINAL_MANIFEST_LOADED = 'originalManifestLoaded'; this.LIVE_EDGE_SEARCH_COMPLETED = 'liveEdgeSearchCompleted'; this.LOADING_COMPLETED = 'loadingCompleted'; @@ -65,24 +64,23 @@ class CoreEvents extends EventsBase { this.MEDIA_FRAGMENT_LOADED = 
'mediaFragmentLoaded'; this.MEDIA_FRAGMENT_NEEDED = 'mediaFragmentNeeded'; this.QUOTA_EXCEEDED = 'quotaExceeded'; - this.REPRESENTATION_UPDATE_STARTED = 'representationUpdateStarted'; - this.REPRESENTATION_UPDATE_COMPLETED = 'representationUpdateCompleted'; this.SEGMENTS_LOADED = 'segmentsLoaded'; this.SERVICE_LOCATION_BLACKLIST_ADD = 'serviceLocationBlacklistAdd'; this.SERVICE_LOCATION_BLACKLIST_CHANGED = 'serviceLocationBlacklistChanged'; - this.SOURCEBUFFER_REMOVE_COMPLETED = 'sourceBufferRemoveCompleted'; + this.SET_FRAGMENTED_TEXT_AFTER_DISABLED = 'setFragmentedTextAfterDisabled'; + this.SET_NON_FRAGMENTED_TEXT = 'setNonFragmentedText'; this.STREAMS_COMPOSED = 'streamsComposed'; this.STREAM_BUFFERING_COMPLETED = 'streamBufferingCompleted'; - this.STREAM_COMPLETED = 'streamCompleted'; + this.STREAM_REQUESTING_COMPLETED = 'streamRequestingCompleted'; this.TEXT_TRACKS_QUEUE_INITIALIZED = 'textTracksQueueInitialized'; this.TIME_SYNCHRONIZATION_COMPLETED = 'timeSynchronizationComplete'; + this.TRACK_REPLACEMENT_STARTED = 'trackReplacementStarted'; this.UPDATE_TIME_SYNC_OFFSET = 'updateTimeSyncOffset'; this.URL_RESOLUTION_FAILED = 'urlResolutionFailed'; this.VIDEO_CHUNK_RECEIVED = 'videoChunkReceived'; this.WALLCLOCK_TIME_UPDATED = 'wallclockTimeUpdated'; this.XLINK_ELEMENT_LOADED = 'xlinkElementLoaded'; this.XLINK_READY = 'xlinkReady'; - this.SEGMENTBASE_INIT_REQUEST_NEEDED = 'segmentBaseInitRequestNeeded'; this.SEGMENTBASE_SEGMENTSLIST_REQUEST_NEEDED = 'segmentBaseSegmentsListRequestNeeded'; this.SEEK_TARGET = 'seekTarget'; } diff --git a/src/dash/DashAdapter.js b/src/dash/DashAdapter.js index e06cbedf87..7987b8cf3d 100644 --- a/src/dash/DashAdapter.js +++ b/src/dash/DashAdapter.js @@ -48,7 +48,6 @@ function DashAdapter() { dashManifestModel, patchManifestModel, voPeriods, - voAdaptations, currentMediaInfo, constants, cea608parser; @@ -65,10 +64,6 @@ function DashAdapter() { // #region PUBLIC FUNCTIONS // -------------------------------------------------- - function getVoAdaptations() { - return voAdaptations; - } - function getVoPeriods() { return voPeriods; } @@ -110,7 +105,6 @@ function DashAdapter() { representationInfo.id = voRepresentation.id; representationInfo.quality = voRepresentation.index; representationInfo.bandwidth = dashManifestModel.getBandwidth(realRepresentation); - representationInfo.DVRWindow = voRepresentation.segmentAvailabilityRange; representationInfo.fragmentDuration = voRepresentation.segmentDuration || (voRepresentation.segments && voRepresentation.segments.length > 0 ? 
voRepresentation.segments[0].duration : NaN); representationInfo.MSETimeOffset = voRepresentation.MSETimeOffset; representationInfo.mediaInfo = convertAdaptationToMediaInfo(voRepresentation.adaptation); @@ -137,14 +131,13 @@ function DashAdapter() { let selectedVoPeriod = getPeriodForStreamInfo(streamInfo, voPeriods); if (!selectedVoPeriod) return null; - let periodId = selectedVoPeriod.id; - voAdaptations[periodId] = voAdaptations[periodId] || dashManifestModel.getAdaptationsForPeriod(selectedVoPeriod); + const voAdaptations = dashManifestModel.getAdaptationsForPeriod(selectedVoPeriod); let realAdaptation = getAdaptationForType(streamInfo.index, type, streamInfo); if (!realAdaptation) return null; let idx = dashManifestModel.getIndexForAdaptation(realAdaptation, voPeriods[0].mpd.manifest, streamInfo.index); - return convertAdaptationToMediaInfo(voAdaptations[periodId][idx]); + return convertAdaptationToMediaInfo(voAdaptations[idx]); } /** @@ -257,12 +250,12 @@ function DashAdapter() { if (!adaptationsForType || adaptationsForType.length === 0) return mediaArr; - voAdaptations[periodId] = voAdaptations[periodId] || dashManifestModel.getAdaptationsForPeriod(selectedVoPeriod); + const voAdaptations = dashManifestModel.getAdaptationsForPeriod(selectedVoPeriod); for (i = 0, ln = adaptationsForType.length; i < ln; i++) { data = adaptationsForType[i]; idx = dashManifestModel.getIndexForAdaptation(data, manifest, streamInfo.index); - media = convertAdaptationToMediaInfo(voAdaptations[periodId][idx]); + media = convertAdaptationToMediaInfo(voAdaptations[idx]); if (type === constants.EMBEDDED_TEXT) { let accessibilityLength = media.accessibility.length; @@ -277,7 +270,7 @@ function DashAdapter() { if (parts[0].substring(0, 2) === 'CC') { for (j = 0; j < parts.length; j++) { if (!media) { - media = convertAdaptationToMediaInfo.call(this, voAdaptations[periodId][idx]); + media = convertAdaptationToMediaInfo.call(this, voAdaptations[idx]); } convertVideoInfoToEmbeddedTextInfo(media, parts[j].substring(0, 3), parts[j].substring(4)); mediaArr.push(media); @@ -286,7 +279,7 @@ function DashAdapter() { } else { for (j = 0; j < parts.length; j++) { // Only languages for CC1, CC2, ... 
if (!media) { - media = convertAdaptationToMediaInfo.call(this, voAdaptations[periodId][idx]); + media = convertAdaptationToMediaInfo.call(this, voAdaptations[idx]); } convertVideoInfoToEmbeddedTextInfo(media, 'CC' + (j + 1), parts[j]); mediaArr.push(media); @@ -324,8 +317,6 @@ function DashAdapter() { checkConfig(); voPeriods = getRegularPeriods(newManifest); - - voAdaptations = {}; } /** @@ -806,7 +797,6 @@ function DashAdapter() { function reset() { voPeriods = []; - voAdaptations = {}; currentMediaInfo = {}; } @@ -849,7 +839,7 @@ function DashAdapter() { return; } - let {name, target, leaf} = result; + let { name, target, leaf } = result; // short circuit for attribute selectors if (operation.xpath.findsAttribute()) { @@ -945,8 +935,15 @@ function DashAdapter() { } function getAdaptationForMediaInfo(mediaInfo) { - if (!mediaInfo || !mediaInfo.streamInfo || mediaInfo.streamInfo.id === undefined || !voAdaptations[mediaInfo.streamInfo.id]) return null; - return voAdaptations[mediaInfo.streamInfo.id][mediaInfo.index]; + try { + const selectedVoPeriod = getPeriodForStreamInfo(mediaInfo.streamInfo, voPeriods); + const voAdaptations = dashManifestModel.getAdaptationsForPeriod(selectedVoPeriod); + + if (!mediaInfo || !mediaInfo.streamInfo || mediaInfo.streamInfo.id === undefined || !voAdaptations) return null; + return voAdaptations[mediaInfo.index]; + } catch (e) { + return null; + } } function getPeriodForStreamInfo(streamInfo, voPeriodsArray) { @@ -1017,7 +1014,7 @@ function DashAdapter() { } mediaInfo.isText = dashManifestModel.getIsTextTrack(mediaInfo.mimeType); - mediaInfo.supplementalProperties = dashManifestModel.getSupplementalPropperties(realAdaptation); + mediaInfo.supplementalProperties = dashManifestModel.getSupplementalProperties(realAdaptation); return mediaInfo; } @@ -1170,7 +1167,6 @@ function DashAdapter() { getBaseURLsFromElement: getBaseURLsFromElement, getRepresentationSortFunction: getRepresentationSortFunction, getCodec: getCodec, - getVoAdaptations: getVoAdaptations, getVoPeriods: getVoPeriods, getPeriodById, setCurrentMediaInfo: setCurrentMediaInfo, diff --git a/src/dash/DashHandler.js b/src/dash/DashHandler.js index 17750a2d53..9c4797c80e 100644 --- a/src/dash/DashHandler.js +++ b/src/dash/DashHandler.js @@ -31,30 +31,26 @@ import FragmentRequest from '../streaming/vo/FragmentRequest'; import {HTTPRequest} from '../streaming/vo/metrics/HTTPRequest'; import FactoryMaker from '../core/FactoryMaker'; +import MediaPlayerEvents from '../streaming/MediaPlayerEvents'; import { replaceIDForTemplate, - unescapeDollarsInTemplate, replaceTokenForTemplate, - getTimeBasedSegment + unescapeDollarsInTemplate } from './utils/SegmentsUtils'; -import SegmentsController from './controllers/SegmentsController'; function DashHandler(config) { config = config || {}; - const context = this.context; const eventBus = config.eventBus; - const events = config.events; const debug = config.debug; - const dashConstants = config.dashConstants; const urlUtils = config.urlUtils; const type = config.type; const streamInfo = config.streamInfo; + const segmentsController = config.segmentsController; const timelineConverter = config.timelineConverter; - const dashMetrics = config.dashMetrics; const baseURLController = config.baseURLController; let instance, @@ -63,20 +59,13 @@ function DashHandler(config) { lastSegment, requestedTime, isDynamicManifest, - dynamicStreamCompleted, - selectedMimeType, - segmentsController; + dynamicStreamCompleted; function setup() { logger = debug.getLogger(instance); 
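// Illustrative sketch, not part of the patch: DashHandler no longer creates its own
// SegmentsController and now expects one to be injected through config, alongside the other
// dependencies read above. The helper and every dependency name below are assumptions that
// only show the wiring a caller such as the stream processor would perform.
function createDashHandlerWithSharedSegmentsController(context, deps) {
    const handler = DashHandler(context).create({
        streamInfo: deps.streamInfo,
        type: deps.type,
        timelineConverter: deps.timelineConverter,
        segmentsController: deps.segmentsController, // created once by the caller and shared
        baseURLController: deps.baseURLController,
        debug: deps.debug,
        eventBus: deps.eventBus,
        urlUtils: deps.urlUtils
    });
    handler.initialize(deps.isDynamic);
    return handler;
}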
resetInitialSettings(); - segmentsController = SegmentsController(context).create(config); - - eventBus.on(events.INITIALIZATION_LOADED, onInitializationLoaded, instance); - eventBus.on(events.SEGMENTS_LOADED, onSegmentsLoaded, instance); - eventBus.on(events.REPRESENTATION_UPDATE_STARTED, onRepresentationUpdateStarted, instance); - eventBus.on(events.DYNAMIC_TO_STATIC, onDynamicToStatic, instance); + eventBus.on(MediaPlayerEvents.DYNAMIC_TO_STATIC, onDynamicToStatic, instance); } function initialize(isDynamic) { @@ -113,17 +102,11 @@ function DashHandler(config) { function resetInitialSettings() { resetIndex(); requestedTime = null; - segmentsController = null; - selectedMimeType = null; } function reset() { resetInitialSettings(); - - eventBus.off(events.INITIALIZATION_LOADED, onInitializationLoaded, instance); - eventBus.off(events.SEGMENTS_LOADED, onSegmentsLoaded, instance); - eventBus.off(events.REPRESENTATION_UPDATE_STARTED, onRepresentationUpdateStarted, instance); - eventBus.off(events.DYNAMIC_TO_STATIC, onDynamicToStatic, instance); + eventBus.off(MediaPlayerEvents.DYNAMIC_TO_STATIC, onDynamicToStatic, instance); } function setRequestUrl(request, destination, representation) { @@ -152,7 +135,12 @@ function DashHandler(config) { return true; } - function generateInitRequest(mediaInfo, representation, mediaType) { + function getInitRequest(mediaInfo, representation) { + if (!representation) return null; + return _generateInitRequest(mediaInfo, representation, getType()); + } + + function _generateInitRequest(mediaInfo, representation, mediaType) { const request = new FragmentRequest(); const period = representation.adaptation.period; const presentationStartTime = period.start; @@ -160,8 +148,8 @@ function DashHandler(config) { request.mediaType = mediaType; request.type = HTTPRequest.INIT_SEGMENT_TYPE; request.range = representation.range; - request.availabilityStartTime = timelineConverter.calcAvailabilityStartTimeFromPresentationTime(presentationStartTime, period.mpd, isDynamicManifest); - request.availabilityEndTime = timelineConverter.calcAvailabilityEndTimeFromPresentationTime(presentationStartTime + period.duration, period.mpd, isDynamicManifest); + request.availabilityStartTime = timelineConverter.calcAvailabilityStartTimeFromPresentationTime(presentationStartTime, representation, isDynamicManifest); + request.availabilityEndTime = timelineConverter.calcAvailabilityEndTimeFromPresentationTime(presentationStartTime + period.duration, representation, isDynamicManifest); request.quality = representation.index; request.mediaInfo = mediaInfo; request.representationId = representation.id; @@ -172,42 +160,7 @@ function DashHandler(config) { } } - function getInitRequest(mediaInfo, representation) { - if (!representation) return null; - const request = generateInitRequest(mediaInfo, representation, getType()); - return request; - } - - function setMimeType(newMimeType) { - selectedMimeType = newMimeType; - } - - function setExpectedLiveEdge(liveEdge) { - timelineConverter.setExpectedLiveEdge(liveEdge); - dashMetrics.updateManifestUpdateInfo({presentationStartTime: liveEdge}); - } - - function onRepresentationUpdateStarted(e) { - processRepresentation(e.representation); - } - - function processRepresentation(voRepresentation) { - const hasInitialization = voRepresentation.hasInitialization(); - const hasSegments = voRepresentation.hasSegments(); - - // If representation has initialization and segments information, REPRESENTATION_UPDATE_COMPLETED can be triggered immediately - // 
otherwise, it means that a request has to be made to get initialization and/or segments informations - if (hasInitialization && hasSegments) { - eventBus.trigger(events.REPRESENTATION_UPDATE_COMPLETED, - { representation: voRepresentation }, - { streamId: streamInfo.id, mediaType: type } - ); - } else { - segmentsController.update(voRepresentation, selectedMimeType, hasInitialization, hasSegments); - } - } - - function getRequestForSegment(mediaInfo, segment) { + function _getRequestForSegment(mediaInfo, segment) { if (segment === null || segment === undefined) { return null; } @@ -244,40 +197,39 @@ function DashHandler(config) { } } - function isMediaFinished(representation) { + function isMediaFinished(representation, bufferingTime) { let isFinished = false; - if (!representation) return isFinished; + if (!representation || !lastSegment) return isFinished; - if (!isDynamicManifest) { - if (segmentIndex >= representation.availableSegmentsNumber) { - isFinished = true; - } - } else { - if (dynamicStreamCompleted) { - isFinished = true; - } else if (lastSegment) { - const time = parseFloat((lastSegment.presentationStartTime - representation.adaptation.period.start).toFixed(5)); - const endTime = lastSegment.duration > 0 ? time + 1.5 * lastSegment.duration : time; - const duration = representation.adaptation.period.duration; - - isFinished = endTime >= duration; - } + // if the buffer is filled up we are done + + // we are replacing existing stuff. + if (lastSegment.presentationStartTime + lastSegment.duration > bufferingTime) { + return false; } + + + if (isDynamicManifest && dynamicStreamCompleted) { + isFinished = true; + } else if (lastSegment) { + const time = parseFloat((lastSegment.presentationStartTime - representation.adaptation.period.start).toFixed(5)); + const endTime = lastSegment.duration > 0 ? time + lastSegment.duration : time; + const duration = representation.adaptation.period.duration; + + return isFinite(duration) && endTime >= duration - 0.05; + } + return isFinished; } - function getSegmentRequestForTime(mediaInfo, representation, time, options) { + function getSegmentRequestForTime(mediaInfo, representation, time) { let request = null; if (!representation || !representation.segmentInfoType) { return request; } - const idx = segmentIndex; - const keepIdx = options ? options.keepIdx : false; - const ignoreIsFinished = (options && options.ignoreIsFinished) ? true : false; - if (requestedTime !== time) { // When playing at live edge with 0 delay we may loop back with same time and index until it is available. Reduces verboseness of logs. requestedTime = time; logger.debug('Getting the request for time : ' + time); @@ -288,21 +240,7 @@ function DashHandler(config) { segmentIndex = segment.availabilityIdx; lastSegment = segment; logger.debug('Index for time ' + time + ' is ' + segmentIndex); - request = getRequestForSegment(mediaInfo, segment); - } else { - const finished = !ignoreIsFinished ? isMediaFinished(representation) : false; - if (finished) { - request = new FragmentRequest(); - request.action = FragmentRequest.ACTION_COMPLETE; - request.index = segmentIndex - 1; - request.mediaType = type; - request.mediaInfo = mediaInfo; - logger.debug('Signal complete in getSegmentRequestForTime'); - } - } - - if (keepIdx && idx >= 0) { - segmentIndex = representation.segmentInfoType === dashConstants.SEGMENT_TIMELINE && isDynamicManifest ? 
segmentIndex : idx; + request = _getRequestForSegment(mediaInfo, segment); } return request; @@ -323,7 +261,7 @@ function DashHandler(config) { lastSegment ? lastSegment.mediaStartTime : -1 ); if (!segment) return null; - request = getRequestForSegment(mediaInfo, segment); + request = _getRequestForSegment(mediaInfo, segment); return request; } @@ -352,7 +290,7 @@ function DashHandler(config) { return null; } else { if (segment) { - request = getRequestForSegment(mediaInfo, segment); + request = _getRequestForSegment(mediaInfo, segment); segmentIndex = segment.availabilityIdx; } else { if (isDynamicManifest) { @@ -365,16 +303,6 @@ function DashHandler(config) { if (segment) { lastSegment = segment; - } else { - const finished = isMediaFinished(representation, segment); - if (finished) { - request = new FragmentRequest(); - request.action = FragmentRequest.ACTION_COMPLETE; - request.index = segmentIndex - 1; - request.mediaType = getType(); - request.mediaInfo = mediaInfo; - logger.debug('Signal complete'); - } } return request; @@ -384,96 +312,24 @@ function DashHandler(config) { return !isFinite(representation.adaptation.period.duration); } - function onInitializationLoaded(e) { - const representation = e.representation; - if (!representation.segments) return; - - eventBus.trigger(events.REPRESENTATION_UPDATE_COMPLETED, - { representation: representation }, - { streamId: streamInfo.id, mediaType: type } - ); - } - - function onSegmentsLoaded(e) { - if (e.error) return; - - const fragments = e.segments; - const representation = e.representation; - const segments = []; - let count = 0; - - let i, - len, - s, - seg; - - for (i = 0, len = fragments ? fragments.length : 0; i < len; i++) { - s = fragments[i]; - - seg = getTimeBasedSegment( - timelineConverter, - isDynamicManifest, - representation, - s.startTime, - s.duration, - s.timescale, - s.media, - s.mediaRange, - count); - - if (seg) { - segments.push(seg); - seg = null; - count++; - } - } - - if (segments.length > 0) { - representation.segmentAvailabilityRange = { - start: segments[0].presentationStartTime, - end: segments[segments.length - 1].presentationStartTime - }; - representation.availableSegmentsNumber = segments.length; - representation.segments = segments; - - if (isDynamicManifest) { - const lastSegment = segments[segments.length - 1]; - const liveEdge = lastSegment.presentationStartTime - 8; - // the last segment is the Expected, not calculated, live edge. 
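// A standalone restatement of the reworked isMediaFinished() check above, added only to make
// the new bufferingTime-based completion logic easier to follow; parameter names are
// assumptions and the 0.05 s tolerance mirrors the patched code.
function isMediaFinishedSketch(lastSegment, periodStart, periodDuration, bufferingTime, isDynamic, dynamicStreamCompleted) {
    if (!lastSegment) {
        return false;
    }
    // The last requested segment already reaches past the buffering target, e.g. because
    // existing buffer is being replaced, so further requests may still be needed.
    if (lastSegment.presentationStartTime + lastSegment.duration > bufferingTime) {
        return false;
    }
    // A dynamic stream is finished once it has been signalled as completed.
    if (isDynamic && dynamicStreamCompleted) {
        return true;
    }
    // Otherwise we are finished when the last segment ends at (or within 50 ms of) the period duration.
    const time = parseFloat((lastSegment.presentationStartTime - periodStart).toFixed(5));
    const endTime = lastSegment.duration > 0 ? time + lastSegment.duration : time;
    return isFinite(periodDuration) && endTime >= periodDuration - 0.05;
}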
- setExpectedLiveEdge(liveEdge); - } - } - - if (!representation.hasInitialization()) { - return; - } - - eventBus.trigger(events.REPRESENTATION_UPDATE_COMPLETED, - { representation: representation }, - { streamId: streamInfo.id, mediaType: type } - ); - } - function onDynamicToStatic() { logger.debug('Dynamic stream complete'); dynamicStreamCompleted = true; } instance = { - initialize: initialize, - getStreamId: getStreamId, - getType: getType, - getStreamInfo: getStreamInfo, - getInitRequest: getInitRequest, - getRequestForSegment: getRequestForSegment, - getSegmentRequestForTime: getSegmentRequestForTime, - getNextSegmentRequest: getNextSegmentRequest, - setCurrentIndex: setCurrentIndex, - getCurrentIndex: getCurrentIndex, - isMediaFinished: isMediaFinished, - reset: reset, - resetIndex: resetIndex, - setMimeType: setMimeType, + initialize, + getStreamId, + getType, + getStreamInfo, + getInitRequest, + getSegmentRequestForTime, + getNextSegmentRequest, + setCurrentIndex, + getCurrentIndex, + isMediaFinished, + reset, + resetIndex, getNextSegmentRequestIdempotent }; diff --git a/src/dash/SegmentBaseLoader.js b/src/dash/SegmentBaseLoader.js index 9668fff815..741410aa94 100644 --- a/src/dash/SegmentBaseLoader.js +++ b/src/dash/SegmentBaseLoader.js @@ -131,14 +131,13 @@ function SegmentBaseLoader() { } } - function checkConfig() { - if (!baseURLController || !baseURLController.hasOwnProperty('resolve')) { - throw new Error('setConfig function has to be called previously'); - } + function loadInitialization(representation, mediaType) { + return new Promise((resolve) => { + _loadInitializationRecursively(representation, mediaType, resolve); + }); } - function loadInitialization(streamId, mediaType, representation, loadingInfo) { - checkConfig(); + function _loadInitializationRecursively(representation, mediaType, resolve, loadingInfo) { let initRange = null; const baseUrl = representation ? baseURLController.resolve(representation.path) : null; const info = loadingInfo || { @@ -166,33 +165,32 @@ function SegmentBaseLoader() { representation.range = initRange; // note that we don't explicitly set rep.initialization as this // will be computed when all BaseURLs are resolved later - eventBus.trigger(events.INITIALIZATION_LOADED, - { representation: representation }, - { streamId: streamId, mediaType: mediaType } - ); + resolve(representation); } else { info.range.end = info.bytesLoaded + info.bytesToLoad; - loadInitialization(streamId, mediaType, representation, info); + return _loadInitializationRecursively(representation, mediaType, resolve, info); } }; const onerror = function () { - eventBus.trigger(events.INITIALIZATION_LOADED, - { representation: representation }, - { streamId: streamId, mediaType: mediaType } - ); + resolve(representation); }; - urlLoader.load({request: request, success: onload, error: onerror}); + urlLoader.load({ request: request, success: onload, error: onerror }); logger.debug('Perform init search: ' + info.url); } - function loadSegments(streamId, mediaType, representation, range, callback, loadingInfo) { - checkConfig(); + function loadSegments(representation, mediaType, range) { + return new Promise((resolve) => { + _loadSegmentsRecursively(representation, mediaType, range, resolve); + }); + } + + function _loadSegmentsRecursively(representation, mediaType, range, resolve, callback, loadingInfo) { if (range && (range.start === undefined || range.end === undefined)) { const parts = range ? range.toString().split('-') : null; - range = parts ? 
{start: parseFloat(parts[0]), end: parseFloat(parts[1])} : null; + range = parts ? { start: parseFloat(parts[0]), end: parseFloat(parts[1]) } : null; } callback = !callback ? onLoaded : callback; @@ -226,7 +224,7 @@ function SegmentBaseLoader() { info.range.end = info.range.start + (sidx.size || extraBytes); } else if (loadedLength < info.bytesLoaded) { // if we have reached a search limit or if we have reached the end of the file we have to stop trying to find sidx - callback(streamId, mediaType, null, representation); + callback(null, representation, resolve); return; } else { const lastBox = isoFile.getLastBox(); @@ -238,7 +236,7 @@ function SegmentBaseLoader() { info.range.end += extraBytes; } } - loadSegments(streamId, mediaType, representation, info.range, callback, info); + _loadSegmentsRecursively(representation, mediaType, info.range, resolve, null, info); } else { const ref = sidx.references; let loadMultiSidx, @@ -256,7 +254,7 @@ function SegmentBaseLoader() { let segs = []; let count = 0; let offset = (sidx.offset || info.range.start) + sidx.size; - const tmpCallback = function (streamId, mediaType, result) { + const tmpCallback = function (result) { if (result) { segs = segs.concat(result); count++; @@ -266,10 +264,10 @@ function SegmentBaseLoader() { segs.sort(function (a, b) { return a.startTime - b.startTime < 0 ? -1 : 0; }); - callback(streamId, mediaType, segs, representation); + callback(segs, representation, resolve); } } else { - callback(streamId, mediaType, null, representation); + callback(null, representation, resolve); } }; @@ -277,24 +275,32 @@ function SegmentBaseLoader() { ss = offset; se = offset + ref[j].referenced_size - 1; offset = offset + ref[j].referenced_size; - r = {start: ss, end: se}; - loadSegments(streamId, mediaType, representation, r, tmpCallback, info); + r = { start: ss, end: se }; + _loadSegmentsRecursively(representation, mediaType, r, resolve, tmpCallback, info); } } else { logger.debug('Parsing segments from SIDX. representation ' + mediaType + ' - id: ' + representation.id + ' for range : ' + info.range.start + ' - ' + info.range.end); segments = getSegmentsForSidx(sidx, info); - callback(streamId, mediaType, segments, representation); + callback(segments, representation, resolve); } } }; const onerror = function () { - callback(streamId, mediaType, null, representation); + callback(null, representation, resolve); }; - urlLoader.load({request: request, success: onload, error: onerror}); - logger.debug('Perform SIDX load: ' + info.url + ' with range : ' + info.range.start + ' - ' + info.range.end); + urlLoader.load({ request: request, success: onload, error: onerror }); + logger.debug(`Perform SIDX load for type ${mediaType} : ${info.url} with range ${info.range.start} - ${info.range.end}`); + } + + function onLoaded(segments, representation, resolve) { + resolve({ + segments: segments, + representation: representation, + error: segments ? undefined : new DashJSError(errors.SEGMENT_BASE_LOADER_ERROR_CODE, errors.SEGMENT_BASE_LOADER_ERROR_MESSAGE) + }); } function reset() { @@ -346,23 +352,12 @@ function SegmentBaseLoader() { return request; } - function onLoaded(streamId, mediaType, segments, representation) { - eventBus.trigger(events.SEGMENTS_LOADED, - { - segments: segments, - representation: representation, - error: segments ? 
undefined : new DashJSError(errors.SEGMENT_BASE_LOADER_ERROR_CODE, errors.SEGMENT_BASE_LOADER_ERROR_MESSAGE) - }, - { streamId: streamId, mediaType: mediaType } - ); - } - instance = { - setConfig: setConfig, - initialize: initialize, - loadInitialization: loadInitialization, - loadSegments: loadSegments, - reset: reset + setConfig, + initialize, + loadInitialization, + loadSegments, + reset }; setup(); diff --git a/src/dash/WebmSegmentBaseLoader.js b/src/dash/WebmSegmentBaseLoader.js index 888912bcd5..18ac29c2bc 100644 --- a/src/dash/WebmSegmentBaseLoader.js +++ b/src/dash/WebmSegmentBaseLoader.js @@ -128,14 +128,14 @@ function WebmSegmentBaseLoader() { ebmlParser.consumeTagAndSize(WebM.Segment.Cues); while (ebmlParser.moreData() && - ebmlParser.consumeTagAndSize(WebM.Segment.Cues.CuePoint, true)) { + ebmlParser.consumeTagAndSize(WebM.Segment.Cues.CuePoint, true)) { cue = {}; cue.CueTime = ebmlParser.parseTag(WebM.Segment.Cues.CuePoint.CueTime); cue.CueTracks = []; while (ebmlParser.moreData() && - ebmlParser.consumeTag(WebM.Segment.Cues.CuePoint.CueTrackPositions, true)) { + ebmlParser.consumeTag(WebM.Segment.Cues.CuePoint.CueTrackPositions, true)) { const cueTrackPositionSize = ebmlParser.getMatroskaCodedNum(); const startPos = ebmlParser.getPos(); cueTrack = {}; @@ -250,11 +250,11 @@ function WebmSegmentBaseLoader() { // skip over any top level elements to get to the segment info while (ebmlParser.moreData() && - !ebmlParser.consumeTagAndSize(WebM.Segment.Info, true)) { + !ebmlParser.consumeTagAndSize(WebM.Segment.Info, true)) { if (!(ebmlParser.skipOverElement(WebM.Segment.SeekHead, true) || - ebmlParser.skipOverElement(WebM.Segment.Tracks, true) || - ebmlParser.skipOverElement(WebM.Segment.Cues, true) || - ebmlParser.skipOverElement(WebM.Void, true))) { + ebmlParser.skipOverElement(WebM.Segment.Tracks, true) || + ebmlParser.skipOverElement(WebM.Segment.Cues, true) || + ebmlParser.skipOverElement(WebM.Void, true))) { throw new Error('no valid top level element found'); } } @@ -277,7 +277,7 @@ function WebmSegmentBaseLoader() { // once we have what we need from segment info, we jump right to the // cues - request = getFragmentRequest(info); + request = _getFragmentRequest(info); const onload = function (response) { segments = parseSegments(response, segmentStart, segmentEnd, duration); @@ -298,113 +298,100 @@ function WebmSegmentBaseLoader() { logger.debug('Perform cues load: ' + info.url + ' bytes=' + info.range.start + '-' + info.range.end); } - function checkConfig() { - if (!baseURLController || !baseURLController.hasOwnProperty('resolve')) { - throw new Error('setConfig function has to be called previously'); - } - } - - function loadInitialization(streamId, mediaType, representation, loadingInfo) { - checkConfig(); - let request = null; - let baseUrl = representation ? baseURLController.resolve(representation.path) : null; - let initRange = representation ? representation.range.split('-') : null; - let info = loadingInfo || { - range: { - start: initRange ? parseFloat(initRange[0]) : null, - end: initRange ? parseFloat(initRange[1]) : null - }, - request: request, - url: baseUrl ? 
baseUrl.url : undefined, - init: true, - mediaType: mediaType - }; - - logger.info('Start loading initialization.'); - - request = getFragmentRequest(info); - - const onload = function () { - // note that we don't explicitly set rep.initialization as this - // will be computed when all BaseURLs are resolved later - eventBus.trigger(events.INITIALIZATION_LOADED, - { representation: representation }, - { streamId: streamId, mediaType: mediaType } - ); - }; - - const onloadend = function () { - eventBus.trigger(events.INITIALIZATION_LOADED, - { representation: representation }, - { streamId: streamId, mediaType: mediaType } - ); - }; + function loadInitialization(representation, mediaType) { + return new Promise((resolve) => { + let request = null; + let baseUrl = representation ? baseURLController.resolve(representation.path) : null; + let initRange = representation ? representation.range.split('-') : null; + let info = { + range: { + start: initRange ? parseFloat(initRange[0]) : null, + end: initRange ? parseFloat(initRange[1]) : null + }, + request: request, + url: baseUrl ? baseUrl.url : undefined, + init: true, + mediaType: mediaType + }; + + logger.info('Start loading initialization.'); + + request = _getFragmentRequest(info); + + const onload = function () { + // note that we don't explicitly set rep.initialization as this + // will be computed when all BaseURLs are resolved later + resolve(representation); + }; + + const onloadend = function () { + resolve(representation); + }; + + urlLoader.load({ + request: request, + success: onload, + error: onloadend + }); - urlLoader.load({ - request: request, - success: onload, - error: onloadend + logger.debug('Perform init load: ' + info.url); }); - - logger.debug('Perform init load: ' + info.url); } - function loadSegments(streamId, mediaType, representation, theRange, callback) { - checkConfig(); - let request = null; - let baseUrl = representation ? baseURLController.resolve(representation.path) : null; - let media = baseUrl ? baseUrl.url : undefined; - let bytesToLoad = 8192; - let info = { - bytesLoaded: 0, - bytesToLoad: bytesToLoad, - range: { - start: 0, - end: bytesToLoad - }, - request: request, - url: media, - init: false, - mediaType: mediaType - }; - - callback = !callback ? onLoaded : callback; - request = getFragmentRequest(info); - - // first load the header, but preserve the manifest range so we can - // load the cues after parsing the header - // NOTE: we expect segment info to appear in the first 8192 bytes - logger.debug('Parsing ebml header'); - - const onload = function (response) { - parseEbmlHeader(response, media, theRange, function (segments) { - callback(streamId, mediaType, segments, representation); + function loadSegments(representation, mediaType, theRange) { + return new Promise((resolve) => { + let request = null; + let baseUrl = representation ? baseURLController.resolve(representation.path) : null; + let media = baseUrl ? 
baseUrl.url : undefined; + let bytesToLoad = 8192; + let info = { + bytesLoaded: 0, + bytesToLoad: bytesToLoad, + range: { + start: 0, + end: bytesToLoad + }, + request: request, + url: media, + init: false, + mediaType: mediaType + }; + + request = _getFragmentRequest(info); + + // first load the header, but preserve the manifest range so we can + // load the cues after parsing the header + // NOTE: we expect segment info to appear in the first 8192 bytes + logger.debug('Parsing ebml header'); + + const onload = function (response) { + parseEbmlHeader(response, media, theRange, function (segments) { + resolve({ + segments: segments, + representation: representation, + error: segments ? undefined : new DashJSError(errors.SEGMENT_BASE_LOADER_ERROR_CODE, errors.SEGMENT_BASE_LOADER_ERROR_MESSAGE) + }); + }); + }; + + const onloadend = function () { + resolve({ + representation: representation, + error: new DashJSError(errors.SEGMENT_BASE_LOADER_ERROR_CODE, errors.SEGMENT_BASE_LOADER_ERROR_MESSAGE) + }); + }; + + urlLoader.load({ + request: request, + success: onload, + error: onloadend }); - }; - - const onloadend = function () { - callback(streamId, mediaType, null, representation); - }; - - urlLoader.load({ - request: request, - success: onload, - error: onloadend }); - } - function onLoaded(streamId, mediaType, segments, representation) { - eventBus.trigger(events.SEGMENTS_LOADED, - { - segments: segments, - representation: representation, - error: segments ? undefined : new DashJSError(errors.SEGMENT_BASE_LOADER_ERROR_CODE, errors.SEGMENT_BASE_LOADER_ERROR_MESSAGE) - }, - { streamId: streamId, mediaType: mediaType } - ); } - function getFragmentRequest(info) { + + function _getFragmentRequest(info) { const request = new FragmentRequest(); request.setInfo(info); return request; @@ -416,11 +403,11 @@ function WebmSegmentBaseLoader() { } instance = { - setConfig: setConfig, - initialize: initialize, - loadInitialization: loadInitialization, - loadSegments: loadSegments, - reset: reset + setConfig, + initialize, + loadInitialization, + loadSegments, + reset }; setup(); diff --git a/src/dash/controllers/RepresentationController.js b/src/dash/controllers/RepresentationController.js index 8e4e9257c6..bd535041ea 100644 --- a/src/dash/controllers/RepresentationController.js +++ b/src/dash/controllers/RepresentationController.js @@ -29,15 +29,14 @@ * POSSIBILITY OF SUCH DAMAGE. 
*/ import Constants from '../../streaming/constants/Constants'; -import DashJSError from '../../streaming/vo/DashJSError'; import FactoryMaker from '../../core/FactoryMaker'; +import {getTimeBasedSegment} from '../utils/SegmentsUtils'; function RepresentationController(config) { config = config || {}; const eventBus = config.eventBus; const events = config.events; - const errors = config.errors; const abrController = config.abrController; const dashMetrics = config.dashMetrics; const playbackController = config.playbackController; @@ -45,6 +44,8 @@ function RepresentationController(config) { const type = config.type; const streamInfo = config.streamInfo; const dashConstants = config.dashConstants; + const segmentsController = config.segmentsController; + const isDynamic = config.isDynamic; let instance, realAdaptation, @@ -56,8 +57,6 @@ function RepresentationController(config) { resetInitialSettings(); eventBus.on(events.QUALITY_CHANGE_REQUESTED, onQualityChanged, instance); - eventBus.on(events.REPRESENTATION_UPDATE_COMPLETED, onRepresentationUpdated, instance); - eventBus.on(events.WALLCLOCK_TIME_UPDATED, onWallclockTimeUpdated, instance); eventBus.on(events.MANIFEST_VALIDITY_CHANGED, onManifestValidityChanged, instance); } @@ -95,8 +94,6 @@ function RepresentationController(config) { function reset() { eventBus.off(events.QUALITY_CHANGE_REQUESTED, onQualityChanged, instance); - eventBus.off(events.REPRESENTATION_UPDATE_COMPLETED, onRepresentationUpdated, instance); - eventBus.off(events.WALLCLOCK_TIME_UPDATED, onWallclockTimeUpdated, instance); eventBus.off(events.MANIFEST_VALIDITY_CHANGED, onManifestValidityChanged, instance); resetInitialSettings(); @@ -105,7 +102,7 @@ function RepresentationController(config) { function updateData(newRealAdaptation, availableRepresentations, type, quality) { checkConfig(); - startDataUpdate(); + updating = true; voAvailableRepresentations = availableRepresentations; @@ -114,10 +111,93 @@ function RepresentationController(config) { if (type !== Constants.VIDEO && type !== Constants.AUDIO && type !== Constants.FRAGMENTED_TEXT) { endDataUpdate(); - return; + return Promise.resolve(); + } + + const promises = []; + for (let i = 0, ln = voAvailableRepresentations.length; i < ln; i++) { + const currentRep = voAvailableRepresentations[i]; + promises.push(_updateRepresentation(currentRep)); } - updateAvailabilityWindow(playbackController.getIsDynamic(), true); + return Promise.all(promises); + } + + function _updateRepresentation(currentRep) { + return new Promise((resolve, reject) => { + const hasInitialization = currentRep.hasInitialization(); + const hasSegments = currentRep.hasSegments(); + + // If representation has initialization and segments information we are done + // otherwise, it means that a request has to be made to get initialization and/or segments information + const promises = []; + + promises.push(segmentsController.updateInitData(currentRep, hasInitialization)); + promises.push(segmentsController.updateSegmentData(currentRep, hasSegments)); + + Promise.all(promises) + .then((data) => { + if (data[0] && !data[0].error) { + currentRep = _onInitLoaded(currentRep, data[0]); + } + if (data[1] && !data[1].error) { + currentRep = _onSegmentsLoaded(currentRep, data[1]); + } + _onRepresentationUpdated(currentRep); + resolve(); + }) + .catch((e) => { + reject(e); + }); + }); + } + + function _onInitLoaded(representation, e) { + if (!e || e.error || !e.representation) { + return representation; + } + return e.representation; + } + + function 
_onSegmentsLoaded(representation, e) { + if (!e || e.error) return; + + const fragments = e.segments; + const segments = []; + let count = 0; + + let i, + len, + s, + seg; + + for (i = 0, len = fragments ? fragments.length : 0; i < len; i++) { + s = fragments[i]; + + seg = getTimeBasedSegment( + timelineConverter, + isDynamic, + representation, + s.startTime, + s.duration, + s.timescale, + s.media, + s.mediaRange, + count); + + if (seg) { + segments.push(seg); + seg = null; + count++; + } + } + + if (segments.length > 0) { + representation.availableSegmentsNumber = segments.length; + representation.segments = segments; + } + + return representation; } function addRepresentationSwitch() { @@ -141,7 +221,7 @@ function RepresentationController(config) { function isAllRepresentationsUpdated() { for (let i = 0, ln = voAvailableRepresentations.length; i < ln; i++) { let segmentInfoType = voAvailableRepresentations[i].segmentInfoType; - if (voAvailableRepresentations[i].segmentAvailabilityRange === null || !voAvailableRepresentations[i].hasInitialization() || + if (!voAvailableRepresentations[i].hasInitialization() || ((segmentInfoType === dashConstants.SEGMENT_BASE || segmentInfoType === dashConstants.BASE_URL) && !voAvailableRepresentations[i].segments) ) { return false; @@ -151,53 +231,6 @@ function RepresentationController(config) { return true; } - function setExpectedLiveEdge(liveEdge) { - timelineConverter.setExpectedLiveEdge(liveEdge); - dashMetrics.updateManifestUpdateInfo({presentationStartTime: liveEdge}); - } - - function updateRepresentation(representation, isDynamic) { - representation.segmentAvailabilityRange = timelineConverter.calcSegmentAvailabilityRange(representation, isDynamic); - - if (representation.segmentAvailabilityRange.end < representation.segmentAvailabilityRange.start) { - let error = new DashJSError(errors.SEGMENTS_UNAVAILABLE_ERROR_CODE, errors.SEGMENTS_UNAVAILABLE_ERROR_MESSAGE, {availabilityDelay: representation.segmentAvailabilityRange.start - representation.segmentAvailabilityRange.end}); - endDataUpdate(error); - return; - } - - if (isDynamic) { - setExpectedLiveEdge(representation.segmentAvailabilityRange.end); - } - } - - function updateAvailabilityWindow(isDynamic, notifyUpdate) { - checkConfig(); - - for (let i = 0, ln = voAvailableRepresentations.length; i < ln; i++) { - updateRepresentation(voAvailableRepresentations[i], isDynamic); - if (notifyUpdate) { - eventBus.trigger(events.REPRESENTATION_UPDATE_STARTED, - { representation: voAvailableRepresentations[i] }, - { streamId: streamInfo.id, mediaType: type } - ); - } - } - } - - function resetAvailabilityWindow() { - voAvailableRepresentations.forEach(rep => { - rep.segmentAvailabilityRange = null; - }); - } - - function startDataUpdate() { - updating = true; - eventBus.trigger(events.DATA_UPDATE_STARTED, - {}, - { streamId: streamInfo.id, mediaType: type } - ); - } - function endDataUpdate(error) { updating = false; eventBus.trigger(events.DATA_UPDATE_COMPLETED, @@ -210,50 +243,14 @@ function RepresentationController(config) { ); } - function postponeUpdate(postponeTimePeriod) { - let delay = postponeTimePeriod; - let update = function () { - if (isUpdating()) return; - - startDataUpdate(); - - // clear the segmentAvailabilityRange for all reps. 
- // this ensures all are updated before the live edge search starts - resetAvailabilityWindow(); - - updateAvailabilityWindow(playbackController.getIsDynamic(), true); - }; - eventBus.trigger(events.AST_IN_FUTURE, { delay: delay }); - setTimeout(update, delay); - } - - function onRepresentationUpdated(e) { + function _onRepresentationUpdated(r) { if (!isUpdating()) return; - if (e.error) { - endDataUpdate(e.error); - return; - } - - let r = e.representation; let manifestUpdateInfo = dashMetrics.getCurrentManifestUpdate(); let alreadyAdded = false; - let postponeTimePeriod = 0; let repInfo, - err, repSwitch; - if (r.adaptation.period.mpd.manifest.type === dashConstants.DYNAMIC && !r.adaptation.period.mpd.manifest.ignorePostponeTimePeriod && playbackController.getStreamController().getStreams().length <= 1) { - // We must put things to sleep unless till e.g. the startTime calculation in ScheduleController.onLiveEdgeSearchCompleted fall after the segmentAvailabilityRange.start - postponeTimePeriod = getRepresentationUpdatePostponeTimePeriod(r); - } - - if (postponeTimePeriod > 0) { - postponeUpdate(postponeTimePeriod); - err = new DashJSError(errors.SEGMENTS_UPDATE_FAILED_ERROR_CODE, errors.SEGMENTS_UPDATE_FAILED_ERROR_MESSAGE); - endDataUpdate(err); - return; - } if (manifestUpdateInfo) { for (let i = 0; i < manifestUpdateInfo.representationInfo.length; i++) { @@ -271,7 +268,10 @@ function RepresentationController(config) { if (isAllRepresentationsUpdated()) { abrController.setPlaybackQuality(getType(), streamInfo, getQualityForRepresentation(currentVoRepresentation)); - dashMetrics.updateManifestUpdateInfo({latency: currentVoRepresentation.segmentAvailabilityRange.end - playbackController.getTime()}); + const dvrInfo = dashMetrics.getCurrentDVRInfo(); + if (dvrInfo) { + dashMetrics.updateManifestUpdateInfo({ latency: dvrInfo.range.end - playbackController.getTime() }); + } repSwitch = dashMetrics.getCurrentRepresentationSwitch(getCurrentRepresentation().adaptation.type); @@ -282,32 +282,6 @@ function RepresentationController(config) { } } - function getRepresentationUpdatePostponeTimePeriod(representation) { - try { - const streamController = playbackController.getStreamController(); - const activeStreamInfo = streamController.getActiveStreamInfo(); - let startTimeAnchor = representation.segmentAvailabilityRange.start; - - if (activeStreamInfo && activeStreamInfo.id && activeStreamInfo.id !== streamInfo.id) { - // We need to consider the currently playing period if a period switch is performed. 
- startTimeAnchor = Math.min(playbackController.getTime(), startTimeAnchor); - } - - let segmentAvailabilityTimePeriod = representation.segmentAvailabilityRange.end - startTimeAnchor; - let liveDelay = playbackController.getLiveDelay(); - - return (liveDelay - segmentAvailabilityTimePeriod) * 1000; - } catch (e) { - return 0; - } - } - - function onWallclockTimeUpdated(e) { - if (e.isDynamic) { - updateAvailabilityWindow(e.isDynamic); - } - } - function onQualityChanged(e) { currentVoRepresentation = getRepresentationForQuality(e.newQuality); addRepresentationSwitch(); @@ -324,14 +298,13 @@ function RepresentationController(config) { } instance = { - getStreamId: getStreamId, - getType: getType, - getData: getData, - isUpdating: isUpdating, - updateData: updateData, - updateRepresentation: updateRepresentation, - getCurrentRepresentation: getCurrentRepresentation, - getRepresentationForQuality: getRepresentationForQuality, + getStreamId, + getType, + getData, + isUpdating, + updateData, + getCurrentRepresentation, + getRepresentationForQuality, reset: reset }; diff --git a/src/dash/controllers/SegmentBaseController.js b/src/dash/controllers/SegmentBaseController.js index 8feadcf039..91697d0b21 100644 --- a/src/dash/controllers/SegmentBaseController.js +++ b/src/dash/controllers/SegmentBaseController.js @@ -88,37 +88,37 @@ function SegmentBaseController(config) { } function initialize() { - eventBus.on(events.SEGMENTBASE_INIT_REQUEST_NEEDED, onInitSegmentBaseNeeded, instance); - eventBus.on(events.SEGMENTBASE_SEGMENTSLIST_REQUEST_NEEDED, onSegmentsListSegmentBaseNeeded, instance); - segmentBaseLoader.initialize(); webmSegmentBaseLoader.initialize(); } - function onInitSegmentBaseNeeded(e) { - if (isWebM(e.mimeType)) { - webmSegmentBaseLoader.loadInitialization(e.streamId, e.mediaType, e.representation); + function getSegmentBaseInitSegment(data) { + if (isWebM(data.representation.mimeType)) { + return webmSegmentBaseLoader.loadInitialization(data.representation, data.mediaType); } else { - segmentBaseLoader.loadInitialization(e.streamId, e.mediaType, e.representation); + return segmentBaseLoader.loadInitialization(data.representation, data.mediaType); } } - function onSegmentsListSegmentBaseNeeded(e) { + function getSegmentList(e) { if (isWebM(e.mimeType)) { - webmSegmentBaseLoader.loadSegments(e.streamId, e.mediaType, e.representation, e.representation ? e.representation.indexRange : null, e.callback); + return webmSegmentBaseLoader.loadSegments(e.representation, e.mediaType, e.representation ? e.representation.indexRange : null); } else { - segmentBaseLoader.loadSegments(e.streamId, e.mediaType, e.representation, e.representation ? e.representation.indexRange : null, e.callback); + return segmentBaseLoader.loadSegments(e.representation, e.mediaType, e.representation ? 
e.representation.indexRange : null); } } function reset() { - eventBus.off(events.SEGMENTBASE_INIT_REQUEST_NEEDED, onInitSegmentBaseNeeded, instance); - eventBus.off(events.SEGMENTBASE_SEGMENTSLIST_REQUEST_NEEDED, onSegmentsListSegmentBaseNeeded, instance); + segmentBaseLoader.reset(); + webmSegmentBaseLoader.reset(); } + instance = { - initialize: initialize, - reset: reset + initialize, + getSegmentBaseInitSegment, + getSegmentList, + reset }; setup(); diff --git a/src/dash/controllers/SegmentsController.js b/src/dash/controllers/SegmentsController.js index 00e0cdf800..c1944217e0 100644 --- a/src/dash/controllers/SegmentsController.js +++ b/src/dash/controllers/SegmentsController.js @@ -38,11 +38,9 @@ function SegmentsController(config) { config = config || {}; const context = this.context; - const events = config.events; - const eventBus = config.eventBus; const dashConstants = config.dashConstants; - const streamInfo = config.streamInfo; const type = config.type; + const segmentBaseController = config.segmentBaseController; let instance, getters; @@ -58,24 +56,25 @@ function SegmentsController(config) { getters[dashConstants.SEGMENT_BASE] = SegmentBaseGetter(context).create(config, isDynamic); } - function update(voRepresentation, mimeType, hasInitialization, hasSegments) { - if (!hasInitialization) { - eventBus.trigger(events.SEGMENTBASE_INIT_REQUEST_NEEDED, { - streamId: streamInfo.id, - mediaType: type, - mimeType: mimeType, - representation: voRepresentation - }); + function updateInitData(voRepresentation, hasInitialization) { + if (hasInitialization) { + return Promise.resolve(); } + return segmentBaseController.getSegmentBaseInitSegment({ + representation: voRepresentation, + mediaType: type + }); + } - if (!hasSegments) { - eventBus.trigger(events.SEGMENTBASE_SEGMENTSLIST_REQUEST_NEEDED, { - streamId: streamInfo.id, - mediaType: type, - mimeType: mimeType, - representation: voRepresentation - }); + function updateSegmentData(voRepresentation, hasSegments) { + if (hasSegments) { + return Promise.resolve(); } + return segmentBaseController.getSegmentList({ + mimeType: voRepresentation.mimeType, + representation: voRepresentation, + mediaType: type + }); } function getSegmentsGetter(representation) { @@ -93,10 +92,11 @@ function SegmentsController(config) { } instance = { - initialize: initialize, - update: update, - getSegmentByIndex: getSegmentByIndex, - getSegmentByTime: getSegmentByTime + initialize, + updateInitData, + updateSegmentData, + getSegmentByIndex, + getSegmentByTime }; setup(); diff --git a/src/dash/models/DashManifestModel.js b/src/dash/models/DashManifestModel.js index f345c82e54..891cb04cd3 100644 --- a/src/dash/models/DashManifestModel.js +++ b/src/dash/models/DashManifestModel.js @@ -459,6 +459,9 @@ function DashManifestModel() { if (realRepresentation.hasOwnProperty(DashConstants.CODECS)) { voRepresentation.codecs = realRepresentation.codecs; } + if (realRepresentation.hasOwnProperty(DashConstants.MIME_TYPE)) { + voRepresentation.mimeType = realRepresentation[DashConstants.MIME_TYPE]; + } if (realRepresentation.hasOwnProperty(DashConstants.CODEC_PRIVATE_DATA)) { voRepresentation.codecPrivateData = realRepresentation.codecPrivateData; } @@ -1113,7 +1116,7 @@ function DashManifestModel() { return serviceDescriptions; } - function getSupplementalPropperties(adaptation) { + function getSupplementalProperties(adaptation) { const supplementalProperties = {}; if (adaptation && adaptation.hasOwnProperty(DashConstants.SUPPLEMENTAL_PROPERTY)) { @@ -1139,53 +1142,53 @@ 
function DashManifestModel() { } instance = { - getIsTypeOf: getIsTypeOf, - getIsTextTrack: getIsTextTrack, - getLanguageForAdaptation: getLanguageForAdaptation, - getViewpointForAdaptation: getViewpointForAdaptation, - getRolesForAdaptation: getRolesForAdaptation, - getAccessibilityForAdaptation: getAccessibilityForAdaptation, - getAudioChannelConfigurationForAdaptation: getAudioChannelConfigurationForAdaptation, - getAudioChannelConfigurationForRepresentation: getAudioChannelConfigurationForRepresentation, - getAdaptationForIndex: getAdaptationForIndex, - getIndexForAdaptation: getIndexForAdaptation, - getAdaptationForId: getAdaptationForId, - getAdaptationsForType: getAdaptationsForType, + getIsTypeOf, + getIsTextTrack, + getLanguageForAdaptation, + getViewpointForAdaptation, + getRolesForAdaptation, + getAccessibilityForAdaptation, + getAudioChannelConfigurationForAdaptation, + getAudioChannelConfigurationForRepresentation, + getAdaptationForIndex, + getIndexForAdaptation, + getAdaptationForId, + getAdaptationsForType, getRealPeriods, getRealPeriodForIndex, - getCodec: getCodec, - getMimeType: getMimeType, - getKID: getKID, - getLabelsForAdaptation: getLabelsForAdaptation, - getContentProtectionData: getContentProtectionData, - getIsDynamic: getIsDynamic, - getId: getId, - hasProfile: hasProfile, - getDuration: getDuration, - getBandwidth: getBandwidth, - getManifestUpdatePeriod: getManifestUpdatePeriod, - getPublishTime: getPublishTime, - getRepresentationCount: getRepresentationCount, - getBitrateListForAdaptation: getBitrateListForAdaptation, - getRepresentationFor: getRepresentationFor, - getRepresentationsForAdaptation: getRepresentationsForAdaptation, - getAdaptationsForPeriod: getAdaptationsForPeriod, - getRegularPeriods: getRegularPeriods, - getMpd: getMpd, - getEventsForPeriod: getEventsForPeriod, + getCodec, + getMimeType, + getKID, + getLabelsForAdaptation, + getContentProtectionData, + getIsDynamic, + getId, + hasProfile, + getDuration, + getBandwidth, + getManifestUpdatePeriod, + getPublishTime, + getRepresentationCount, + getBitrateListForAdaptation, + getRepresentationFor, + getRepresentationsForAdaptation, + getAdaptationsForPeriod, + getRegularPeriods, + getMpd, + getEventsForPeriod, getEssentialPropertiesForRepresentation, - getEventStreamForAdaptationSet: getEventStreamForAdaptationSet, - getEventStreamForRepresentation: getEventStreamForRepresentation, - getUTCTimingSources: getUTCTimingSources, - getBaseURLsFromElement: getBaseURLsFromElement, - getRepresentationSortFunction: getRepresentationSortFunction, - getLocation: getLocation, - getPatchLocation: getPatchLocation, - getSuggestedPresentationDelay: getSuggestedPresentationDelay, - getAvailabilityStartTime: getAvailabilityStartTime, - getServiceDescriptions: getServiceDescriptions, - getSupplementalPropperties: getSupplementalPropperties, - setConfig: setConfig + getEventStreamForAdaptationSet, + getEventStreamForRepresentation, + getUTCTimingSources, + getBaseURLsFromElement, + getRepresentationSortFunction, + getLocation, + getPatchLocation, + getSuggestedPresentationDelay, + getAvailabilityStartTime, + getServiceDescriptions, + getSupplementalProperties, + setConfig }; setup(); diff --git a/src/dash/utils/SegmentsUtils.js b/src/dash/utils/SegmentsUtils.js index f5785332e7..ac8ad4c017 100644 --- a/src/dash/utils/SegmentsUtils.js +++ b/src/dash/utils/SegmentsUtils.js @@ -31,6 +31,7 @@ import Segment from './../vo/Segment'; + function zeroPadToLength(numStr, minStrLength) { while (numStr.length < minStrLength) 
{ numStr = '0' + numStr; @@ -137,7 +138,7 @@ function getSegment(representation, duration, presentationStartTime, mediaStartT seg.presentationStartTime = presentationStartTime; seg.mediaStartTime = mediaStartTime; seg.availabilityStartTime = availabilityStartTime; - seg.availabilityEndTime = timelineConverter.calcAvailabilityEndTimeFromPresentationTime(presentationEndTime, representation.adaptation.period.mpd, isDynamic); + seg.availabilityEndTime = timelineConverter.calcAvailabilityEndTimeFromPresentationTime(presentationEndTime + duration, representation, isDynamic); seg.wallStartTime = timelineConverter.calcWallTimeForSegment(seg, isDynamic); seg.replacementNumber = getNumberForSegment(seg, index); seg.availabilityIdx = index; @@ -146,20 +147,27 @@ function getSegment(representation, duration, presentationStartTime, mediaStartT } function isSegmentAvailable(timelineConverter, representation, segment, isDynamic) { - const periodEnd = timelineConverter.getPeriodEnd(representation, isDynamic); - const periodRelativeEnd = timelineConverter.calcPeriodRelativeTimeFromMpdRelativeTime(representation, periodEnd); - - const segmentTime = timelineConverter.calcPeriodRelativeTimeFromMpdRelativeTime(representation, segment.presentationStartTime); - if (segmentTime >= periodRelativeEnd) { - if (isDynamic) { - // segment is not available in current period, but it may be segment available in another period that current one (in DVR window) - // if not (time > segmentAvailabilityRange.end), then return false - if (representation.segmentAvailabilityRange && segment.presentationStartTime >= representation.segmentAvailabilityRange.end) { - return false; - } - } else { - return false; + const voPeriod = representation.adaptation.period; + + // Avoid requesting segments that overlap the period boundary + if (isFinite(voPeriod.duration) && voPeriod.start + voPeriod.duration <= segment.presentationStartTime) { + return false; + } + + if (isDynamic) { + + if (representation.availabilityTimeOffset === 'INF') { + return true; } + + // For dynamic manifests we check if the presentation start time + duration is included in the availability window + // SAST = Period@start + seg@presentationStartTime + seg@duration + // ASAST = SAST - ATO + // SAET = SAST + TSBD + seg@duration + + const refTime = timelineConverter.getAvailabilityWindowAnchorTime(); + return segment.availabilityStartTime.getTime() <= refTime && segment.availabilityEndTime.getTime() >= refTime; + } return true; @@ -170,6 +178,7 @@ export function getIndexBasedSegment(timelineConverter, isDynamic, representatio presentationStartTime, presentationEndTime; + duration = representation.segmentDuration; /* @@ -184,9 +193,10 @@ export function getIndexBasedSegment(timelineConverter, isDynamic, representatio presentationStartTime = parseFloat((representation.adaptation.period.start + (index * duration)).toFixed(5)); presentationEndTime = parseFloat((presentationStartTime + duration).toFixed(5)); - const segment = getSegment(representation, duration, presentationStartTime, - timelineConverter.calcMediaTimeFromPresentationTime(presentationStartTime, representation), - timelineConverter.calcAvailabilityStartTimeFromPresentationTime(presentationStartTime, representation.adaptation.period.mpd, isDynamic), + const mediaTime = timelineConverter.calcMediaTimeFromPresentationTime(presentationStartTime, representation); + const availabilityStartTime = timelineConverter.calcAvailabilityStartTimeFromPresentationTime(presentationEndTime, representation, isDynamic); + + 
const segment = getSegment(representation, duration, presentationStartTime, mediaTime, availabilityStartTime, timelineConverter, presentationEndTime, isDynamic, index); if (!isSegmentAvailable(timelineConverter, representation, segment, isDynamic)) { @@ -207,9 +217,11 @@ export function getTimeBasedSegment(timelineConverter, isDynamic, representation presentationStartTime = timelineConverter.calcPresentationTimeFromMediaTime(scaledTime, representation); presentationEndTime = presentationStartTime + scaledDuration; + const availabilityStartTime = timelineConverter.calcAvailabilityStartTimeFromPresentationTime(presentationEndTime, representation, isDynamic); + seg = getSegment(representation, scaledDuration, presentationStartTime, scaledTime, - representation.adaptation.period.mpd.manifest.loadedTime, + availabilityStartTime, timelineConverter, presentationEndTime, isDynamic, index); if (!isSegmentAvailable(timelineConverter, representation, seg, isDynamic)) { diff --git a/src/dash/utils/TemplateSegmentsGetter.js b/src/dash/utils/TemplateSegmentsGetter.js index 30d742d60c..b0a77371fa 100644 --- a/src/dash/utils/TemplateSegmentsGetter.js +++ b/src/dash/utils/TemplateSegmentsGetter.js @@ -32,7 +32,7 @@ import FactoryMaker from '../../core/FactoryMaker'; import Constants from '../../streaming/constants/Constants'; -import { replaceTokenForTemplate, getIndexBasedSegment } from './SegmentsUtils'; +import {replaceTokenForTemplate, getIndexBasedSegment} from './SegmentsUtils'; function TemplateSegmentsGetter(config, isDynamic) { config = config || {}; @@ -53,8 +53,7 @@ function TemplateSegmentsGetter(config, isDynamic) { return null; } - const template = representation.adaptation.period.mpd.manifest.Period_asArray[representation.adaptation.period.index]. - AdaptationSet_asArray[representation.adaptation.index].Representation_asArray[representation.index].SegmentTemplate; + const template = representation.adaptation.period.mpd.manifest.Period_asArray[representation.adaptation.period.index].AdaptationSet_asArray[representation.adaptation.index].Representation_asArray[representation.index].SegmentTemplate; index = Math.max(index, 0); @@ -69,12 +68,11 @@ function TemplateSegmentsGetter(config, isDynamic) { } const duration = representation.segmentDuration; - const availabilityWindow = representation.segmentAvailabilityRange; + if (isNaN(duration)) { representation.availableSegmentsNumber = 1; - } - else { - representation.availableSegmentsNumber = Math.ceil((availabilityWindow.end - availabilityWindow.start) / duration); + } else { + representation.availableSegmentsNumber = Math.ceil(representation.adaptation.period.duration / duration); } return seg; @@ -93,15 +91,15 @@ function TemplateSegmentsGetter(config, isDynamic) { return null; } - const periodTime = timelineConverter.calcPeriodRelativeTimeFromMpdRelativeTime(representation, requestedTime); + let periodTime = timelineConverter.calcPeriodRelativeTimeFromMpdRelativeTime(representation, requestedTime); const index = Math.floor(periodTime / duration); return getSegmentByIndex(representation, index); } instance = { - getSegmentByIndex: getSegmentByIndex, - getSegmentByTime: getSegmentByTime + getSegmentByIndex, + getSegmentByTime }; return instance; diff --git a/src/dash/utils/TimelineConverter.js b/src/dash/utils/TimelineConverter.js index 34c8d6652d..18dde8dcd1 100644 --- a/src/dash/utils/TimelineConverter.js +++ b/src/dash/utils/TimelineConverter.js @@ -34,6 +34,7 @@ import FactoryMaker from '../../core/FactoryMaker'; import DashConstants from 
'../constants/DashConstants'; import DashManifestModel from '../models/DashManifestModel'; import Settings from '../../core/Settings'; +import Constants from '../../streaming/constants/Constants'; function TimelineConverter() { @@ -45,6 +46,7 @@ function TimelineConverter() { dashManifestModel, clientServerTimeShift, isClientServerTimeSyncCompleted, + availabilityWindowAnchorOffset, expectedLiveEdge; function setup() { @@ -65,48 +67,47 @@ function TimelineConverter() { clientServerTimeShift = value; } - function getExpectedLiveEdge() { - return expectedLiveEdge; - } - - function setExpectedLiveEdge(value) { - expectedLiveEdge = value; - } - - function calcAvailabilityTimeFromPresentationTime(presentationTime, mpd, isDynamic, calculateEnd) { - let availabilityTime = NaN; + function calcAvailabilityTimeFromPresentationTime(presentationEndTime, representation, isDynamic, calculateAvailabilityEndTime) { + let availabilityTime; + let mpd = representation.adaptation.period.mpd; + const availabilityStartTime = mpd.availabilityStartTime; - if (calculateEnd) { + if (calculateAvailabilityEndTime) { //@timeShiftBufferDepth specifies the duration of the time shifting buffer that is guaranteed // to be available for a Media Presentation with type 'dynamic'. // When not present, the value is infinite. - if (isDynamic && (mpd.timeShiftBufferDepth != Number.POSITIVE_INFINITY)) { - availabilityTime = new Date(mpd.availabilityStartTime.getTime() + ((presentationTime + mpd.timeShiftBufferDepth) * 1000)); + if (isDynamic && mpd.timeShiftBufferDepth !== Number.POSITIVE_INFINITY) { + // SAET = SAST + TSBD + seg@duration + availabilityTime = new Date(availabilityStartTime.getTime() + ((presentationEndTime - clientServerTimeShift + mpd.timeShiftBufferDepth) * 1000)); } else { availabilityTime = mpd.availabilityEndTime; } } else { - if (isDynamic) { - availabilityTime = new Date(mpd.availabilityStartTime.getTime() + (presentationTime - clientServerTimeShift) * 1000); + if (isDynamic && mpd.timeShiftBufferDepth !== Number.POSITIVE_INFINITY) { + // SAST = Period@start + seg@presentationStartTime + seg@duration + // ASAST = SAST - ATO + const availabilityTimeOffset = representation.availabilityTimeOffset; + // presentationEndTime = Period@start + seg@presentationStartTime + Segment@duration + availabilityTime = new Date(availabilityStartTime.getTime() + (presentationEndTime - clientServerTimeShift - availabilityTimeOffset) * 1000); } else { // in static mpd, all segments are available at the same time - availabilityTime = mpd.availabilityStartTime; + availabilityTime = availabilityStartTime; } } return availabilityTime; } - function calcAvailabilityStartTimeFromPresentationTime(presentationTime, mpd, isDynamic) { - return calcAvailabilityTimeFromPresentationTime.call(this, presentationTime, mpd, isDynamic); + function calcAvailabilityStartTimeFromPresentationTime(presentationEndTime, representation, isDynamic) { + return calcAvailabilityTimeFromPresentationTime.call(this, presentationEndTime, representation, isDynamic); } - function calcAvailabilityEndTimeFromPresentationTime(presentationTime, mpd, isDynamic) { - return calcAvailabilityTimeFromPresentationTime.call(this, presentationTime, mpd, isDynamic, true); + function calcAvailabilityEndTimeFromPresentationTime(presentationEndTime, representation, isDynamic) { + return calcAvailabilityTimeFromPresentationTime.call(this, presentationEndTime, representation, isDynamic, true); } function calcPresentationTimeFromWallTime(wallTime, period) { - return 
((wallTime.getTime() - period.mpd.availabilityStartTime.getTime() - clientServerTimeShift * 1000) / 1000); } function calcPresentationTimeFromMediaTime(mediaTime, representation) { @@ -137,37 +138,154 @@ function TimelineConverter() { return wallTime; } - function calcSegmentAvailabilityRange(voRepresentation, isDynamic) { - // Static Range Finder - const voPeriod = voRepresentation.adaptation.period; - const range = {start: voPeriod.start, end: voPeriod.start + voPeriod.duration}; - if (!isDynamic) return range; + function getAvailabilityWindowAnchorTime() { + return Date.now() - clientServerTimeShift * 1000; + } - if (!isClientServerTimeSyncCompleted && voRepresentation.segmentAvailabilityRange) { - return voRepresentation.segmentAvailabilityRange; + /** + * Calculates the time shift buffer range. This range might overlap multiple periods and is not limited to period boundaries. However, we make sure that the range is potentially covered by a period. + * @param {Array} streams + * @param {boolean} isDynamic + * @return {{start: number, end: number}} + */ + function calcTimeShiftBufferWindow(streams, isDynamic) { + // Static manifests. The availability window is equal to the DVR window + if (!isDynamic) { + return _calcTimeshiftBufferForStaticManifest(streams); } - // Dynamic Range Finder - const d = voRepresentation.segmentDuration || (voRepresentation.segments && voRepresentation.segments.length ? voRepresentation.segments[voRepresentation.segments.length - 1].duration : 0); + // Specific use case of SegmentTimeline + if (settings.get().streaming.calcSegmentAvailabilityRangeFromTimeline) { + return _calcTimeShiftBufferWindowForDynamicTimelineManifest(streams); + } + + return _calcTimeShiftBufferWindowForDynamicManifest(streams); + } + + function _calcTimeshiftBufferForStaticManifest(streams) { + // Static Range Finder. We iterate over all periods and return the total duration + const range = {start: NaN, end: NaN}; + let duration = 0; + let start = NaN; + streams.forEach((stream) => { + const streamInfo = stream.getStreamInfo(); + duration += streamInfo.duration; + + if (isNaN(start) || streamInfo.start < start) { + start = streamInfo.start; + } + }); + + range.start = start; + range.end = start + duration; + + return range; + } - // Specific use case of SegmentTimeline without timeShiftBufferDepth - if (voRepresentation.segmentInfoType === DashConstants.SEGMENT_TIMELINE && settings.get().streaming.calcSegmentAvailabilityRangeFromTimeline) { - return calcSegmentAvailabilityRangeFromTimeline(voRepresentation); + function _calcTimeShiftBufferWindowForDynamicManifest(streams) { + const range = {start: NaN, end: NaN}; + + if (!streams || streams.length === 0) { + return range; } + const voPeriod = streams[0].getAdapter().getRegularPeriods()[0]; const now = calcPresentationTimeFromWallTime(new Date(), voPeriod); - const periodEnd = voPeriod.start + voPeriod.duration; - range.start = Math.max((now - voPeriod.mpd.timeShiftBufferDepth), voPeriod.start); + const timeShiftBufferDepth = voPeriod.mpd.timeShiftBufferDepth; + const start = !isNaN(timeShiftBufferDepth) ? now - timeShiftBufferDepth : 0; + // Check if we find a suitable period for that start time. Otherwise we use the time closest to it + range.start = _adjustTimeBasedOnPeriodRanges(streams, start); + range.end = !isNaN(range.start) && now < range.start ? 
now : _adjustTimeBasedOnPeriodRanges(streams, now, true); + + if (!isNaN(timeShiftBufferDepth) && range.end < now - timeShiftBufferDepth) { + range.end = NaN; + } - const endOffset = voRepresentation.availabilityTimeOffset !== undefined && - voRepresentation.availabilityTimeOffset < d ? d - voRepresentation.availabilityTimeOffset : d; + return range; + } - range.end = now >= periodEnd && now - endOffset < periodEnd ? periodEnd : now - endOffset; + function _calcTimeShiftBufferWindowForDynamicTimelineManifest(streams) { + const range = {start: NaN, end: NaN}; + const voPeriod = streams[0].getAdapter().getRegularPeriods()[0]; + const now = calcPresentationTimeFromWallTime(new Date(), voPeriod); + + if (!streams || streams.length === 0) { + return range; + } + + streams.forEach((stream) => { + const adapter = stream.getAdapter(); + const mediaInfo = adapter.getMediaInfoForType(stream.getStreamInfo(), Constants.VIDEO) || adapter.getMediaInfoForType(stream.getStreamInfo(), Constants.AUDIO); + const voRepresentations = adapter.getVoRepresentations(mediaInfo); + const voRepresentation = voRepresentations[0]; + let periodRange = {start: NaN, end: NaN}; + + if (voRepresentation) { + if (voRepresentation.segmentInfoType === DashConstants.SEGMENT_TIMELINE) { + periodRange = _calcRangeForTimeline(voRepresentation); + } else { + const currentVoPeriod = voRepresentation.adaptation.period; + periodRange.start = currentVoPeriod.start; + periodRange.end = Math.max(now, currentVoPeriod.start + currentVoPeriod.duration); + } + } + + if (!isNaN(periodRange.start) && (isNaN(range.start) || range.start > periodRange.start)) { + range.start = periodRange.start; + } + if (!isNaN(periodRange.end) && (isNaN(range.end) || range.end < periodRange.end)) { + range.end = periodRange.end; + } + }); + + + range.end = range.end > now ? now : range.end; + const adjustedEndTime = _adjustTimeBasedOnPeriodRanges(streams, range.end, true); + + // If range is NaN all periods are in the future. We should return range.start > range.end in this case + range.end = isNaN(adjustedEndTime) ? range.end : adjustedEndTime; + + range.start = voPeriod && voPeriod.mpd && voPeriod.mpd.timeShiftBufferDepth && !isNaN(voPeriod.mpd.timeShiftBufferDepth) && !isNaN(range.end) ? Math.max(range.end - voPeriod.mpd.timeShiftBufferDepth, range.start) : range.start; + range.start = _adjustTimeBasedOnPeriodRanges(streams, range.start); return range; } - function calcSegmentAvailabilityRangeFromTimeline(voRepresentation) { + function _adjustTimeBasedOnPeriodRanges(streams, time, isEndOfDvrWindow = false) { + try { + let i = 0; + let found = false; + let adjustedTime = NaN; + + while (!found && i < streams.length) { + const streamInfo = streams[i].getStreamInfo(); + + // We found a period which contains the target time. + if (streamInfo.start <= time && (!isFinite(streamInfo.duration) || (streamInfo.start + streamInfo.duration >= time))) { + adjustedTime = time; + found = true; + } + + // Adjust the time for the start of the DVR window. The current period starts after the target time. We use the start time of this period as the adjusted time + else if (!isEndOfDvrWindow && (streamInfo.start > time && (isNaN(adjustedTime) || streamInfo.start < adjustedTime))) { + adjustedTime = streamInfo.start; + } + + // Adjust the time for the end of the DVR window. The current period ends before the target time. 
We use the end time of this period as the adjusted time + else if (isEndOfDvrWindow && ((streamInfo.start + streamInfo.duration) < time && (isNaN(adjustedTime) || (streamInfo.start + streamInfo.duration > adjustedTime)))) { + adjustedTime = streamInfo.start + streamInfo.duration; + } + + i += 1; + } + + return adjustedTime; + } catch (e) { + return time; + } + } + + function _calcRangeForTimeline(voRepresentation) { const adaptation = voRepresentation.adaptation.period.mpd.manifest.Period_asArray[voRepresentation.adaptation.period.index].AdaptationSet_asArray[voRepresentation.adaptation.index]; const representation = dashManifestModel.getRepresentationFor(voRepresentation.index, adaptation); const timeline = representation.SegmentTemplate.SegmentTimeline; @@ -196,38 +314,11 @@ function TimelineConverter() { return range; } - function getPeriodEnd(voRepresentation, isDynamic) { - // Static Range Finder - const voPeriod = voRepresentation.adaptation.period; - if (!isDynamic) { - return voPeriod.start + voPeriod.duration; - } - - if (!isClientServerTimeSyncCompleted && voRepresentation.segmentAvailabilityRange) { - return voRepresentation.segmentAvailabilityRange; - } - - // Dynamic Range Finder - const d = voRepresentation.segmentDuration || (voRepresentation.segments && voRepresentation.segments.length ? voRepresentation.segments[voRepresentation.segments.length - 1].duration : 0); - const now = calcPresentationTimeFromWallTime(new Date(), voPeriod); - const periodEnd = voPeriod.start + voPeriod.duration; - - const endOffset = voRepresentation.availabilityTimeOffset !== undefined && - voRepresentation.availabilityTimeOffset < d ? d - voRepresentation.availabilityTimeOffset : d; - - return Math.min(now - endOffset, periodEnd); - } - function calcPeriodRelativeTimeFromMpdRelativeTime(representation, mpdRelativeTime) { const periodStartTime = representation.adaptation.period.start; return mpdRelativeTime - periodStartTime; } - /* - * We need to figure out if we want to timesync for segmentTimeine where useCalculatedLiveEdge = true - * seems we figure out client offset based on logic in liveEdgeFinder getLiveEdge timelineConverter.setClientTimeOffset(liveEdge - representationInfo.DVRWindow.end); - * FYI StreamController's onManifestUpdated entry point to timeSync - * */ function _onUpdateTimeSyncOffset(e) { if (e.offset !== undefined) { setClientTimeOffset(e.offset / 1000); @@ -237,6 +328,7 @@ function TimelineConverter() { function resetInitialSettings() { clientServerTimeShift = 0; + availabilityWindowAnchorOffset = 0; isClientServerTimeSyncCompleted = false; expectedLiveEdge = NaN; } @@ -247,21 +339,19 @@ function TimelineConverter() { } instance = { - initialize: initialize, - getClientTimeOffset: getClientTimeOffset, - setClientTimeOffset: setClientTimeOffset, - getExpectedLiveEdge: getExpectedLiveEdge, - setExpectedLiveEdge: setExpectedLiveEdge, - calcAvailabilityStartTimeFromPresentationTime: calcAvailabilityStartTimeFromPresentationTime, - calcAvailabilityEndTimeFromPresentationTime: calcAvailabilityEndTimeFromPresentationTime, - calcPresentationTimeFromWallTime: calcPresentationTimeFromWallTime, - calcPresentationTimeFromMediaTime: calcPresentationTimeFromMediaTime, - calcPeriodRelativeTimeFromMpdRelativeTime: calcPeriodRelativeTimeFromMpdRelativeTime, - calcMediaTimeFromPresentationTime: calcMediaTimeFromPresentationTime, - calcSegmentAvailabilityRange: calcSegmentAvailabilityRange, - getPeriodEnd: getPeriodEnd, - calcWallTimeForSegment: calcWallTimeForSegment, - reset: reset + 
initialize, + getClientTimeOffset, + setClientTimeOffset, + calcAvailabilityStartTimeFromPresentationTime, + calcAvailabilityEndTimeFromPresentationTime, + calcPresentationTimeFromWallTime, + calcPresentationTimeFromMediaTime, + calcPeriodRelativeTimeFromMpdRelativeTime, + calcMediaTimeFromPresentationTime, + calcWallTimeForSegment, + calcTimeShiftBufferWindow, + getAvailabilityWindowAnchorTime, + reset }; setup(); diff --git a/src/dash/utils/TimelineSegmentsGetter.js b/src/dash/utils/TimelineSegmentsGetter.js index 6df815066e..920b720b81 100644 --- a/src/dash/utils/TimelineSegmentsGetter.js +++ b/src/dash/utils/TimelineSegmentsGetter.js @@ -32,27 +32,25 @@ import FactoryMaker from '../../core/FactoryMaker'; import Constants from '../../streaming/constants/Constants'; -import { getTimeBasedSegment } from './SegmentsUtils'; +import {getTimeBasedSegment} from './SegmentsUtils'; function TimelineSegmentsGetter(config, isDynamic) { config = config || {}; const timelineConverter = config.timelineConverter; + const dashMetrics = config.dashMetrics; let instance; function checkConfig() { - if (!timelineConverter || !timelineConverter.hasOwnProperty('calcMediaTimeFromPresentationTime') || - !timelineConverter.hasOwnProperty('calcSegmentAvailabilityRange')) { + if (!timelineConverter) { throw new Error(Constants.MISSING_CONFIG_ERROR); } } function iterateSegments(representation, iterFunc) { - const base = representation.adaptation.period.mpd.manifest.Period_asArray[representation.adaptation.period.index]. - AdaptationSet_asArray[representation.adaptation.index].Representation_asArray[representation.index].SegmentTemplate || - representation.adaptation.period.mpd.manifest.Period_asArray[representation.adaptation.period.index]. - AdaptationSet_asArray[representation.adaptation.index].Representation_asArray[representation.index].SegmentList; + const base = representation.adaptation.period.mpd.manifest.Period_asArray[representation.adaptation.period.index].AdaptationSet_asArray[representation.adaptation.index].Representation_asArray[representation.index].SegmentTemplate || + representation.adaptation.period.mpd.manifest.Period_asArray[representation.adaptation.period.index].AdaptationSet_asArray[representation.adaptation.index].Representation_asArray[representation.index].SegmentList; const timeline = base.SegmentTimeline; const list = base.SegmentURL_asArray; @@ -96,12 +94,24 @@ function TimelineSegmentsGetter(config, isDynamic) { if (nextFrag && nextFrag.hasOwnProperty('t')) { repeatEndTime = nextFrag.t / fTimescale; } else { - const availabilityEnd = representation.segmentAvailabilityRange ? representation.segmentAvailabilityRange.end : (timelineConverter.calcSegmentAvailabilityRange(representation, isDynamic).end); - repeatEndTime = timelineConverter.calcMediaTimeFromPresentationTime(availabilityEnd, representation); - representation.segmentDuration = frag.d / fTimescale; + try { + let availabilityEnd = 0; + if (!isNaN(representation.adaptation.period.start) && !isNaN(representation.adaptation.period.duration) && isFinite(representation.adaptation.period.duration)) { + // use end of the Period + availabilityEnd = representation.adaptation.period.start + representation.adaptation.period.duration; + } else { + // use DVR window + const dvrWindow = dashMetrics.getCurrentDVRInfo(); + availabilityEnd = !isNaN(dvrWindow.end) ? 
dvrWindow.end : 0; + } + repeatEndTime = timelineConverter.calcMediaTimeFromPresentationTime(availabilityEnd, representation); + representation.segmentDuration = frag.d / fTimescale; + } catch (e) { + repeatEndTime = 0; + } } - repeat = Math.ceil((repeatEndTime - scaledTime) / (frag.d / fTimescale)) - 1; + repeat = Math.max(Math.ceil((repeatEndTime - scaledTime) / (frag.d / fTimescale)) - 1, 0); } for (j = 0; j <= repeat && !breakIterator; j++) { diff --git a/src/dash/vo/Representation.js b/src/dash/vo/Representation.js index d56210ad4c..7b5ccaaac0 100644 --- a/src/dash/vo/Representation.js +++ b/src/dash/vo/Representation.js @@ -43,6 +43,7 @@ class Representation { this.segmentInfoType = null; this.initialization = null; this.codecs = null; + this.mimeType = null; this.codecPrivateData = null; this.segmentDuration = NaN; this.timescale = 1; @@ -52,7 +53,6 @@ class Representation { this.presentationTimeOffset = 0; // Set the source buffer timeOffset to this this.MSETimeOffset = NaN; - this.segmentAvailabilityRange = null; this.availableSegmentsNumber = 0; this.bandwidth = NaN; this.width = NaN; diff --git a/src/dash/vo/RepresentationInfo.js b/src/dash/vo/RepresentationInfo.js index 0711217c04..9d55e8c460 100644 --- a/src/dash/vo/RepresentationInfo.js +++ b/src/dash/vo/RepresentationInfo.js @@ -36,7 +36,6 @@ class RepresentationInfo { constructor() { this.id = null; this.quality = null; - this.DVRWindow = null; this.fragmentDuration = null; this.mediaInfo = null; this.MSETimeOffset = null; diff --git a/src/mss/MssFragmentMoofProcessor.js b/src/mss/MssFragmentMoofProcessor.js index 20c83d1e8f..61dfd20942 100644 --- a/src/mss/MssFragmentMoofProcessor.js +++ b/src/mss/MssFragmentMoofProcessor.js @@ -186,7 +186,6 @@ function MssFragmentMoofProcessor(config) { updateDVR(type, range, streamProcessor.getStreamInfo().manifestInfo); } - representationController.updateRepresentation(representation, true); } function updateDVR(type, range, manifestInfo) { diff --git a/src/mss/parser/MssParser.js b/src/mss/parser/MssParser.js index 72a5be2ef5..c72911d868 100644 --- a/src/mss/parser/MssParser.js +++ b/src/mss/parser/MssParser.js @@ -363,7 +363,7 @@ function MssParser(config) { let segment, prevSegment, tManifest, - i,j,r; + i, j, r; let duration = 0; for (i = 0; i < chunks.length; i++) { @@ -426,7 +426,7 @@ function MssParser(config) { segment.t = prevSegment.t + prevSegment.d; segment.d = prevSegment.d; if (prevSegment.tManifest) { - segment.tManifest = BigInt(prevSegment.tManifest).add(BigInt(prevSegment.d)).toString(); + segment.tManifest = BigInt(prevSegment.tManifest).add(BigInt(prevSegment.d)).toString(); } duration += segment.d; segments.push(segment); @@ -571,7 +571,7 @@ function MssParser(config) { i += 8; // Set SystemID ('edef8ba9-79d6-4ace-a3c8-27dcd51d21ed') - pssh.set([0xed, 0xef, 0x8b, 0xa9, 0x79, 0xd6, 0x4a, 0xce, 0xa3, 0xc8, 0x27, 0xdc, 0xd5, 0x1d, 0x21, 0xed], i); + pssh.set([0xed, 0xef, 0x8b, 0xa9, 0x79, 0xd6, 0x4a, 0xce, 0xa3, 0xc8, 0x27, 0xdc, 0xd5, 0x1d, 0x21, 0xed], i); i += 16; // Set data length value @@ -613,7 +613,7 @@ function MssParser(config) { manifest.protocol = 'MSS'; manifest.profiles = 'urn:mpeg:dash:profile:isoff-live:2011'; manifest.type = getAttributeAsBoolean(smoothStreamingMedia, 'IsLive') ? 'dynamic' : 'static'; - timescale = smoothStreamingMedia.getAttribute('TimeScale'); + timescale = smoothStreamingMedia.getAttribute('TimeScale'); manifest.timescale = timescale ? 
parseFloat(timescale) : DEFAULT_TIME_SCALE; let dvrWindowLength = parseFloat(smoothStreamingMedia.getAttribute('DVRWindowLength')); // If the DVRWindowLength field is omitted for a live presentation or set to 0, the DVR window is effectively infinite @@ -705,7 +705,7 @@ function MssParser(config) { // Set minBufferTime to one segment duration manifest.minBufferTime = segmentDuration; - if (manifest.type === 'dynamic' ) { + if (manifest.type === 'dynamic') { // Match timeShiftBufferDepth to video segment timeline duration if (manifest.timeShiftBufferDepth > 0 && manifest.timeShiftBufferDepth !== Infinity && @@ -726,7 +726,7 @@ function MssParser(config) { if (manifest.type === 'dynamic') { let targetLiveDelay = mediaPlayerModel.getLiveDelay(); if (!targetLiveDelay) { - const liveDelayFragmentCount = settings.get().streaming.liveDelayFragmentCount !== null && !isNaN(settings.get().streaming.liveDelayFragmentCount) ? settings.get().streaming.liveDelayFragmentCount : 4; + const liveDelayFragmentCount = settings.get().streaming.delay.liveDelayFragmentCount !== null && !isNaN(settings.get().streaming.delay.liveDelayFragmentCount) ? settings.get().streaming.delay.liveDelayFragmentCount : 4; targetLiveDelay = segmentDuration * liveDelayFragmentCount; } let targetDelayCapping = Math.max(manifest.timeShiftBufferDepth - 10/*END_OF_PLAYLIST_PADDING*/, manifest.timeShiftBufferDepth / 2); @@ -737,21 +737,29 @@ function MssParser(config) { // Store initial buffer settings initialBufferSettings = { 'streaming': { + 'buffer': { + 'stableBufferTime': settings.get().streaming.buffer.stableBufferTime, + 'bufferTimeAtTopQuality': settings.get().streaming.buffer.bufferTimeAtTopQuality, + 'bufferTimeAtTopQualityLongForm': settings.get().streaming.buffer.bufferTimeAtTopQualityLongForm + }, 'calcSegmentAvailabilityRangeFromTimeline': settings.get().streaming.calcSegmentAvailabilityRangeFromTimeline, - 'liveDelay': settings.get().streaming.liveDelay, - 'stableBufferTime': settings.get().streaming.stableBufferTime, - 'bufferTimeAtTopQuality': settings.get().streaming.bufferTimeAtTopQuality, - 'bufferTimeAtTopQualityLongForm': settings.get().streaming.bufferTimeAtTopQualityLongForm + 'delay': { + 'liveDelay': settings.get().streaming.delay.liveDelay + } } }; settings.update({ 'streaming': { + 'buffer': { + 'stableBufferTime': bufferTime, + 'bufferTimeAtTopQuality': bufferTime, + 'bufferTimeAtTopQualityLongForm': bufferTime + }, 'calcSegmentAvailabilityRangeFromTimeline': true, - 'liveDelay': liveDelay, - 'stableBufferTime': bufferTime, - 'bufferTimeAtTopQuality': bufferTime, - 'bufferTimeAtTopQualityLongForm': bufferTime + 'delay': { + 'liveDelay': liveDelay + } } }); } diff --git a/src/offline/OfflineStreamProcessor.js b/src/offline/OfflineStreamProcessor.js index b70187c006..0beeb0395d 100644 --- a/src/offline/OfflineStreamProcessor.js +++ b/src/offline/OfflineStreamProcessor.js @@ -131,7 +131,7 @@ function OfflineStreamProcessor(config) { events: events }); - eventBus.on(events.STREAM_COMPLETED, onStreamCompleted, instance); + eventBus.on(events.STREAM_REQUESTING_COMPLETED, onStreamRequestingCompleted, instance); eventBus.on(events.FRAGMENT_LOADING_COMPLETED, onFragmentLoadingCompleted, instance); } @@ -174,7 +174,7 @@ function OfflineStreamProcessor(config) { } } - function onStreamCompleted(e) { + function onStreamRequestingCompleted(e) { if (e.fragmentModel !== fragmentModel) { return; } @@ -343,7 +343,7 @@ function OfflineStreamProcessor(config) { resetInitialSettings(); indexHandler.reset(); - 
eventBus.off(events.STREAM_COMPLETED, onStreamCompleted, instance); + eventBus.off(events.STREAM_REQUESTING_COMPLETED, onStreamRequestingCompleted, instance); eventBus.off(events.FRAGMENT_LOADING_COMPLETED, onFragmentLoadingCompleted, instance); } diff --git a/src/streaming/FragmentLoader.js b/src/streaming/FragmentLoader.js index 9f82097231..ed00441ad6 100644 --- a/src/streaming/FragmentLoader.js +++ b/src/streaming/FragmentLoader.js @@ -42,6 +42,7 @@ function FragmentLoader(config) { const events = config.events; const urlUtils = config.urlUtils; const errors = config.errors; + const streamId = config.streamId; let instance, urlLoader; @@ -103,7 +104,8 @@ function FragmentLoader(config) { progress: function (event) { eventBus.trigger(events.LOADING_PROGRESS, { request: request, - stream: event.stream + stream: event.stream, + streamId }); if (event.data) { eventBus.trigger(events.LOADING_DATA_PROGRESS, { diff --git a/src/streaming/ManifestUpdater.js b/src/streaming/ManifestUpdater.js index 2afb5aee9a..a5c4bdb9c7 100644 --- a/src/streaming/ManifestUpdater.js +++ b/src/streaming/ManifestUpdater.js @@ -30,6 +30,7 @@ */ import EventBus from '../core/EventBus'; import Events from '../core/events/Events'; +import MediaPlayerEvents from '../streaming/MediaPlayerEvents'; import FactoryMaker from '../core/FactoryMaker'; import Debug from '../core/Debug'; import Errors from '../core/errors/Errors'; @@ -83,8 +84,8 @@ function ManifestUpdater() { resetInitialSettings(); eventBus.on(Events.STREAMS_COMPOSED, onStreamsComposed, this); - eventBus.on(Events.PLAYBACK_STARTED, onPlaybackStarted, this); - eventBus.on(Events.PLAYBACK_PAUSED, onPlaybackPaused, this); + eventBus.on(MediaPlayerEvents.PLAYBACK_STARTED, onPlaybackStarted, this); + eventBus.on(MediaPlayerEvents.PLAYBACK_PAUSED, onPlaybackPaused, this); eventBus.on(Events.INTERNAL_MANIFEST_LOADED, onManifestLoaded, this); } @@ -102,8 +103,8 @@ function ManifestUpdater() { function reset() { - eventBus.off(Events.PLAYBACK_STARTED, onPlaybackStarted, this); - eventBus.off(Events.PLAYBACK_PAUSED, onPlaybackPaused, this); + eventBus.off(MediaPlayerEvents.PLAYBACK_STARTED, onPlaybackStarted, this); + eventBus.off(MediaPlayerEvents.PLAYBACK_PAUSED, onPlaybackPaused, this); eventBus.off(Events.STREAMS_COMPOSED, onStreamsComposed, this); eventBus.off(Events.INTERNAL_MANIFEST_LOADED, onManifestLoaded, this); @@ -251,7 +252,7 @@ function ManifestUpdater() { } function onPlaybackPaused(/*e*/) { - isPaused = !settings.get().streaming.scheduleWhilePaused; + isPaused = !settings.get().streaming.scheduling.scheduleWhilePaused; if (isPaused) { stopManifestRefreshTimer(); diff --git a/src/streaming/MediaPlayer.js b/src/streaming/MediaPlayer.js index 093285f5a9..17af9cdce4 100644 --- a/src/streaming/MediaPlayer.js +++ b/src/streaming/MediaPlayer.js @@ -41,9 +41,7 @@ import ManifestLoader from './ManifestLoader'; import ErrorHandler from './utils/ErrorHandler'; import Capabilities from './utils/Capabilities'; import CapabilitiesFilter from './utils/CapabilitiesFilter'; -import TextTracks from './text/TextTracks'; import RequestModifier from './utils/RequestModifier'; -import TextController from './text/TextController'; import URIFragmentModel from './models/URIFragmentModel'; import ManifestModel from './models/ManifestModel'; import MediaPlayerModel from './models/MediaPlayerModel'; @@ -62,7 +60,7 @@ import Settings from '../core/Settings'; import { getVersionString } - from './../core/Version'; + from '../core/Version'; //Dash import SegmentBaseController from 
'../dash/controllers/SegmentBaseController'; @@ -79,6 +77,7 @@ import {checkParameterType} from './utils/SupervisorTools'; import ManifestUpdater from './ManifestUpdater'; import URLUtils from '../streaming/utils/URLUtils'; import BoxParser from './utils/BoxParser'; +import TextController from './text/TextController'; /* jscs:disable */ /** @@ -149,13 +148,13 @@ function MediaPlayer() { capabilities, capabilitiesFilter, streamController, + textController, gapController, playbackController, dashMetrics, manifestModel, cmcdModel, videoModel, - textController, uriFragmentModel, domStorage, segmentBaseController, @@ -210,6 +209,9 @@ function MediaPlayer() { if (config.streamController) { streamController = config.streamController; } + if (config.textController) { + textController = config.textController; + } if (config.gapController) { gapController = config.gapController; } @@ -307,7 +309,7 @@ function MediaPlayer() { dashMetrics = DashMetrics(context).getInstance({ settings: settings }); - textController = TextController(context).getInstance(); + domStorage = DOMStorage(context).getInstance({ settings: settings }); @@ -353,7 +355,7 @@ function MediaPlayer() { setAutoPlay(AutoPlay !== undefined ? AutoPlay : true); // Detect and initialize offline module to support offline contents playback - detectOffline(); + _detectOffline(); if (view) { attachView(view); @@ -482,28 +484,6 @@ function MediaPlayer() { --------------------------------------------------------------------------- */ - /** - * Causes the player to begin streaming the media as set by the {@link module:MediaPlayer#attachSource attachSource()} - * method in preparation for playing. It specifically does not require a view to be attached with {@link module:MediaPlayer#attachSource attachView()} to begin preloading. - * When a view is attached after preloading, the buffered data is transferred to the attached mediaSource buffers. - * - * @see {@link module:MediaPlayer#attachSource attachSource()} - * @see {@link module:MediaPlayer#attachView attachView()} - * @memberof module:MediaPlayer - * @throws {@link module:MediaPlayer~SOURCE_NOT_ATTACHED_ERROR SOURCE_NOT_ATTACHED_ERROR} if called before attachSource function - * @instance - */ - function preload() { - if (videoModel.getElement() || streamingInitialized) { - return false; - } - if (source) { - initializePlayback(); - } else { - throw SOURCE_NOT_ATTACHED_ERROR; - } - } - /** * The play method initiates playback of the media defined by the {@link module:MediaPlayer#attachSource attachSource()} method. * This method will call play on the native Video Element. @@ -708,7 +688,7 @@ function MediaPlayer() { /** * The timeShiftBufferLength (DVR Window), in seconds. * - * @returns {number} The window of allowable play time behind the live point of a live stream. + * @returns {number} The window of allowable play time behind the live point of a live stream as defined in the manifest. * @memberof module:MediaPlayer * @instance */ @@ -779,7 +759,7 @@ function MediaPlayer() { /** * Duration of the media's playback, in seconds. * - * @returns {number} The current duration of the media. + * @returns {number} The current duration of the media. 
For a dynamic stream this will return DVRWindow.end - DVRWindow.start * @memberof module:MediaPlayer * @throws {@link module:MediaPlayer~PLAYBACK_NOT_INITIALIZED_ERROR PLAYBACK_NOT_INITIALIZED_ERROR} if called before initializePlayback function * @instance @@ -813,7 +793,7 @@ function MediaPlayer() { if (time() < 0) { return NaN; } - return getAsUTC(time()); + return _getAsUTC(time()); } /** @@ -829,7 +809,7 @@ function MediaPlayer() { if (!playbackInitialized) { throw PLAYBACK_NOT_INITIALIZED_ERROR; } - return getAsUTC(duration()); + return _getAsUTC(duration()); } /* @@ -963,7 +943,7 @@ function MediaPlayer() { /** * @memberof module:MediaPlayer * @instance - * @returns {number|NaN} Current live stream latency in seconds. It is the difference between current time and time position at the playback head. + * @returns {number|NaN} Current live stream latency in seconds. It is the difference between now time and time position at the playback head. * @throws {@link module:MediaPlayer~MEDIA_PLAYER_NOT_INITIALIZED_ERROR MEDIA_PLAYER_NOT_INITIALIZED_ERROR} if called before initialize function */ function getCurrentLiveLatency() { @@ -1146,7 +1126,7 @@ function MediaPlayer() { * @instance */ function getOfflineController() { - return detectOffline(); + return _detectOffline(); } /* @@ -1176,70 +1156,6 @@ function MediaPlayer() { --------------------------------------------------------------------------- */ - /** - * Set default language for text. If default language is not one of text tracks, dash will choose the first one. - * - * @param {string} lang - default language - * @memberof module:MediaPlayer - * @instance - * @deprecated will be removed in version 3.2.0. Please use setInitialMediaSettingsFor("fragmentedText", { lang: lang }) instead - */ - function setTextDefaultLanguage(lang) { - logger.warn('setTextDefaultLanguage is deprecated and will be removed in version 3.2.0. Please use setInitialMediaSettingsFor("fragmentedText", { lang: lang }) instead'); - if (textController === undefined) { - textController = TextController(context).getInstance(); - } - textController.setTextDefaultLanguage(lang); - } - - /** - * Get default language for text. - * - * @return {string} the default language if it has been set using setTextDefaultLanguage - * @memberof module:MediaPlayer - * @instance - * @deprecated will be removed in version 3.2.0. Please use getInitialMediaSettingsFor("fragmentedText").lang instead - */ - function getTextDefaultLanguage() { - logger.warn('getTextDefaultLanguage is deprecated and will be removed in version 3.2.0. Please use getInitialMediaSettingsFor("fragmentedText").lang instead'); - if (textController === undefined) { - textController = TextController(context).getInstance(); - } - - return textController.getTextDefaultLanguage(); - } - - /** - * Set enabled default state. - * This is used to enable/disable text when a file is loaded. - * During playback, use enableText to enable text for the file - * - * @param {boolean} enable - true to enable text, false otherwise - * @memberof module:MediaPlayer - * @instance - */ - function setTextDefaultEnabled(enable) { - if (textController === undefined) { - textController = TextController(context).getInstance(); - } - - textController.setTextDefaultEnabled(enable); - } - - /** - * Get enabled default state. 
- * - * @return {boolean} default enable state - * @memberof module:MediaPlayer - * @instance - */ - function getTextDefaultEnabled() { - if (textController === undefined) { - textController = TextController(context).getInstance(); - } - - return textController.getTextDefaultEnabled(); - } /** * Enable/disable text @@ -1250,11 +1166,13 @@ function MediaPlayer() { * @instance */ function enableText(enable) { - if (textController === undefined) { - textController = TextController(context).getInstance(); + const activeStreamInfo = streamController.getActiveStreamInfo(); + + if (!activeStreamInfo || !textController) { + return false; } - textController.enableText(enable); + return textController.enableText(activeStreamInfo.id, enable); } /** @@ -1266,11 +1184,13 @@ function MediaPlayer() { * @instance */ function enableForcedTextStreaming(enable) { - if (textController === undefined) { - textController = TextController(context).getInstance(); + const activeStreamInfo = streamController.getActiveStreamInfo(); + + if (!activeStreamInfo || !textController) { + return false; } - textController.enableForcedTextStreaming(enable); + return textController.enableForcedTextStreaming(activeStreamInfo.id, enable); } /** @@ -1281,11 +1201,13 @@ function MediaPlayer() { * @instance */ function isTextEnabled() { - if (textController === undefined) { - textController = TextController(context).getInstance(); + const activeStreamInfo = streamController.getActiveStreamInfo(); + + if (!activeStreamInfo || !textController) { + return false; } - return textController.isTextEnabled(); + return textController.isTextEnabled(activeStreamInfo); } /** @@ -1302,35 +1224,27 @@ function MediaPlayer() { throw PLAYBACK_NOT_INITIALIZED_ERROR; } - if (textController === undefined) { - textController = TextController(context).getInstance(); + const activeStreamInfo = streamController.getActiveStreamInfo(); + + if (!activeStreamInfo || !textController) { + return; } - textController.setTextTrack(idx); + textController.setTextTrack(activeStreamInfo.id, idx); } function getCurrentTextTrackIndex() { let idx = NaN; - if (textController) { - idx = textController.getCurrentTrackIdx(); + + const activeStreamInfo = streamController.getActiveStreamInfo(); + + if (!activeStreamInfo || !textController) { + return; } - return idx; - } - /** - * This method serves to control captions z-index value. If 'true' is passed, the captions will have the highest z-index and be - * displayed on top of other html elements. Default value is 'false' (z-index is not set). - * @param {boolean} value - * @memberof module:MediaPlayer - * @instance - */ - function displayCaptionsOnTop(value) { - let textTracks = TextTracks(context).getInstance(); - textTracks.setConfig({ - videoModel: videoModel - }); - textTracks.initialize(); - textTracks.setDisplayCConTop(value); + idx = textController.getCurrentTrackIdx(activeStreamInfo.id); + + return idx; } /* @@ -1371,9 +1285,9 @@ function MediaPlayer() { videoModel.setElement(element); if (element) { - detectProtection(); - detectMetricsReporting(); - detectMss(); + _detectProtection(); + _detectMetricsReporting(); + _detectMss(); if (streamController) { streamController.switchToVideoElement(); @@ -1381,10 +1295,10 @@ function MediaPlayer() { } if (playbackInitialized) { //Reset if we have been playing before, so this is a new element. 
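// Illustrative sketch only: the public text API keeps its signatures, but the functions above now
// resolve the active stream first and return false while no stream is active. Assumes `player` is
// a dashjs.MediaPlayer instance with a source attached and playback started.
player.enableText(true);                         // enable captions/subtitles for the active stream
player.setTextTrack(0);                          // select the first text track of the active stream
const textEnabled = player.isTextEnabled();      // false until a stream is active
const currentTextTrack = player.getCurrentTextTrackIndex();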
- resetPlaybackControllers(); + _resetPlaybackControllers(); } - initializePlayback(); + _initializePlayback(); } /** @@ -1462,7 +1376,7 @@ function MediaPlayer() { throw STREAMING_NOT_INITIALIZED_ERROR; } let streamInfo = streamController.getActiveStreamInfo(); - return mediaController.getTracksFor(type, streamInfo); + return mediaController.getTracksFor(type, streamInfo.id); } /** @@ -1498,7 +1412,7 @@ function MediaPlayer() { throw STREAMING_NOT_INITIALIZED_ERROR; } let streamInfo = streamController.getActiveStreamInfo(); - return mediaController.getCurrentTrackFor(type, streamInfo); + return mediaController.getCurrentTrackFor(type, streamInfo.id); } /** @@ -1522,9 +1436,6 @@ function MediaPlayer() { throw MEDIA_PLAYER_NOT_INITIALIZED_ERROR; } mediaController.setInitialSettings(type, value); - if (type === Constants.FRAGMENTED_TEXT) { - textController.setInitialSettings(value); - } } /** @@ -1562,88 +1473,6 @@ function MediaPlayer() { mediaController.setTrack(track); } - /** - * This method returns the current track switch mode. - * - * @param {MediaType} type - * @returns {string} mode - * @memberof module:MediaPlayer - * @throws {@link module:MediaPlayer~MEDIA_PLAYER_NOT_INITIALIZED_ERROR MEDIA_PLAYER_NOT_INITIALIZED_ERROR} if called before initialize function - * @instance - */ - function getTrackSwitchModeFor(type) { - if (!mediaPlayerInitialized) { - throw MEDIA_PLAYER_NOT_INITIALIZED_ERROR; - } - return mediaController.getSwitchMode(type); - } - - /** - * This method sets the current track switch mode. Available options are: - * - * Constants.TRACK_SWITCH_MODE_NEVER_REPLACE - * (used to forbid clearing the buffered data (prior to current playback position) after track switch. - * Defers to fastSwitchEnabled for placement of new data. Default for video) - * - * Constants.TRACK_SWITCH_MODE_ALWAYS_REPLACE - * (used to clear the buffered data (prior to current playback position) after track switch. Default for audio) - * - * @param {MediaType} type - * @param {string} mode - * @memberof module:MediaPlayer - * @throws {@link module:MediaPlayer~MEDIA_PLAYER_NOT_INITIALIZED_ERROR MEDIA_PLAYER_NOT_INITIALIZED_ERROR} if called before initialize function - * @instance - */ - function setTrackSwitchModeFor(type, mode) { - if (!mediaPlayerInitialized) { - throw MEDIA_PLAYER_NOT_INITIALIZED_ERROR; - } - mediaController.setSwitchMode(type, mode); - } - - /** - * This method sets the selection mode for the initial track. This mode defines how the initial track will be selected - * if no initial media settings are set. If initial media settings are set this parameter will be ignored. Available options are: - * - * Constants.TRACK_SELECTION_MODE_HIGHEST_BITRATE - * This mode makes the player select the track with a highest bitrate. This mode is a default mode. - * - * Constants.TRACK_SELECTION_MODE_FIRST_TRACK - * This mode makes the player select the select the first track found in the manifest. - * - * Constants.TRACK_SELECTION_MODE_HIGHEST_EFFICIENCY - * This mode makes the player select the track with the lowest bitrate per pixel average. - * - * Constants.TRACK_SELECTION_MODE_WIDEST_RANGE - * This mode makes the player select the track with a widest range of bitrates. 
- * - * @param {string} mode - * @memberof module:MediaPlayer - * @throws {@link module:MediaPlayer~MEDIA_PLAYER_NOT_INITIALIZED_ERROR MEDIA_PLAYER_NOT_INITIALIZED_ERROR} if called before initialize function - * @instance - */ - function setSelectionModeForInitialTrack(mode) { - if (!mediaPlayerInitialized) { - throw MEDIA_PLAYER_NOT_INITIALIZED_ERROR; - } - mediaController.setSelectionModeForInitialTrack(mode); - } - - /** - * This method returns the track selection mode. - * - * @returns {string} mode - * @memberof module:MediaPlayer - * @throws {@link module:MediaPlayer~MEDIA_PLAYER_NOT_INITIALIZED_ERROR MEDIA_PLAYER_NOT_INITIALIZED_ERROR} if called before initialize function - * @instance - */ - function getSelectionModeForInitialTrack() { - if (!mediaPlayerInitialized) { - throw MEDIA_PLAYER_NOT_INITIALIZED_ERROR; - } - return mediaController.getSelectionModeForInitialTrack(); - } - /* --------------------------------------------------------------------------- @@ -1658,7 +1487,7 @@ function MediaPlayer() { * @instance */ function getProtectionController() { - return detectProtection(); + return _detectProtection(); } /** @@ -1846,7 +1675,7 @@ function MediaPlayer() { * @instance */ function retrieveManifest(url, callback) { - let manifestLoader = createManifestLoader(); + let manifestLoader = _createManifestLoader(); let self = this; const handler = function (e) { @@ -1905,11 +1734,11 @@ function MediaPlayer() { source = urlOrManifest; if (streamingInitialized || playbackInitialized) { - resetPlaybackControllers(); + _resetPlaybackControllers(); } if (isReady()) { - initializePlayback(); + _initializePlayback(); } } @@ -1934,7 +1763,7 @@ function MediaPlayer() { * @example * player.updateSettings({ * streaming: { - * liveDelayFragmentCount: 8 + * lowLatencyEnabled: false, * abr: { * maxBitrate: { audio: 100, video: 1000 } * } @@ -2038,11 +1867,23 @@ function MediaPlayer() { return streamInfo ? streamController.getStreamById(streamInfo.id) : null; } + /** + * Returns the DashAdapter.js Module. 
+ * + * @see {@link module:DashAdapter} + * @returns {Object} + * @memberof module:MediaPlayer + * @instance + */ + function getDashAdapter() { + return adapter; + } + //*********************************** // PRIVATE METHODS //*********************************** - function resetPlaybackControllers() { + function _resetPlaybackControllers() { playbackInitialized = false; streamingInitialized = false; adapter.reset(); @@ -2051,52 +1892,67 @@ function MediaPlayer() { playbackController.reset(); abrController.reset(); mediaController.reset(); - textController.reset(); if (protectionController) { - if (settings.get().streaming.keepProtectionMediaKeys) { + if (settings.get().streaming.protection.keepProtectionMediaKeys) { protectionController.stop(); } else { protectionController.reset(); protectionController = null; - detectProtection(); + _detectProtection(); } } + textController.reset(); cmcdModel.reset(); } - function createPlaybackControllers() { + function _createPlaybackControllers() { // creates or get objects instances - const manifestLoader = createManifestLoader(); + const manifestLoader = _createManifestLoader(); if (!streamController) { streamController = StreamController(context).getInstance(); } + if (!textController) { + textController = TextController(context).create({ + errHandler, + manifestModel, + adapter, + mediaController, + videoModel, + settings + }); + } + capabilitiesFilter.setConfig({ capabilities, adapter, - settings + settings, + manifestModel, + errHandler }); capabilitiesFilter.setCustomCapabilitiesFilters(customCapabilitiesFilters); streamController.setConfig({ - capabilities: capabilities, + capabilities, capabilitiesFilter, - manifestLoader: manifestLoader, - manifestModel: manifestModel, - mediaPlayerModel: mediaPlayerModel, - protectionController: protectionController, - adapter: adapter, - dashMetrics: dashMetrics, - errHandler: errHandler, - timelineConverter: timelineConverter, - videoModel: videoModel, - playbackController: playbackController, - abrController: abrController, - mediaController: mediaController, - textController: textController, - settings: settings, - baseURLController: baseURLController + manifestLoader, + manifestModel, + mediaPlayerModel, + protectionController, + textController, + adapter, + dashMetrics, + errHandler, + timelineConverter, + videoModel, + playbackController, + abrController, + mediaController, + settings, + baseURLController, + uriFragmentModel, + segmentBaseController }); gapController.setConfig({ @@ -2109,33 +1965,23 @@ function MediaPlayer() { }); playbackController.setConfig({ - streamController: streamController, - dashMetrics: dashMetrics, - mediaPlayerModel: mediaPlayerModel, - adapter: adapter, - videoModel: videoModel, - timelineConverter: timelineConverter, - uriFragmentModel: uriFragmentModel, - settings: settings + streamController, + dashMetrics, + mediaPlayerModel, + adapter, + videoModel, + timelineConverter, + settings }); abrController.setConfig({ - streamController: streamController, - domStorage: domStorage, - mediaPlayerModel: mediaPlayerModel, - dashMetrics: dashMetrics, - adapter: adapter, - videoModel: videoModel, - settings: settings - }); - - textController.setConfig({ - errHandler: errHandler, - manifestModel: manifestModel, - adapter: adapter, - mediaController: mediaController, - streamController: streamController, - videoModel: videoModel + streamController, + domStorage, + mediaPlayerModel, + dashMetrics, + adapter, + videoModel, + settings }); cmcdModel.setConfig({ @@ -2145,12 +1991,14 @@ 
function MediaPlayer() { }); // initialises controller + abrController.initialize(); streamController.initialize(autoPlay, protectionData); + textController.initialize(); gapController.initialize(); cmcdModel.initialize(); } - function createManifestLoader() { + function _createManifestLoader() { return ManifestLoader(context).create({ debug: debug, errHandler: errHandler, @@ -2162,7 +2010,7 @@ function MediaPlayer() { }); } - function detectProtection() { + function _detectProtection() { if (protectionController) { return protectionController; } @@ -2200,7 +2048,7 @@ function MediaPlayer() { return null; } - function detectMetricsReporting() { + function _detectMetricsReporting() { if (metricsReportingController) { return; } @@ -2222,7 +2070,7 @@ function MediaPlayer() { } } - function detectMss() { + function _detectMss() { if (mssHandler) { return; } @@ -2251,7 +2099,7 @@ function MediaPlayer() { } } - function detectOffline() { + function _detectOffline() { if (!mediaPlayerInitialized) { throw MEDIA_PLAYER_NOT_INITIALIZED_ERROR; } @@ -2270,7 +2118,7 @@ function MediaPlayer() { }); Errors.extend(OfflineController.errors); - const manifestLoader = createManifestLoader(); + const manifestLoader = _createManifestLoader(); const manifestUpdater = ManifestUpdater(context).create(); manifestUpdater.setConfig({ @@ -2308,7 +2156,7 @@ function MediaPlayer() { return null; } - function getAsUTC(valToConvert) { + function _getAsUTC(valToConvert) { let metric = dashMetrics.getCurrentDVRInfo(); let availableFrom, utcValue; @@ -2321,7 +2169,7 @@ function MediaPlayer() { return utcValue; } - function initializePlayback() { + function _initializePlayback() { if (offlineController) { offlineController.resetRecords(); @@ -2330,7 +2178,7 @@ function MediaPlayer() { if (!streamingInitialized && source) { streamingInitialized = true; logger.info('Streaming Initialized'); - createPlaybackControllers(); + _createPlaybackControllers(); if (typeof source === 'string') { streamController.load(source); @@ -2345,114 +2193,92 @@ function MediaPlayer() { } } - /** - * Returns the DashAdapter.js Module. 
- * - * @see {@link module:DashAdapter} - * @returns {Object} - * @memberof module:MediaPlayer - * @instance - */ - function getDashAdapter() { - return adapter; - } - instance = { - initialize: initialize, - setConfig: setConfig, - on: on, - off: off, - extend: extend, - attachView: attachView, - attachSource: attachSource, - isReady: isReady, - preload: preload, - play: play, - isPaused: isPaused, - pause: pause, - isSeeking: isSeeking, - isDynamic: isDynamic, - seek: seek, - setPlaybackRate: setPlaybackRate, - getPlaybackRate: getPlaybackRate, - setMute: setMute, - isMuted: isMuted, - setVolume: setVolume, - getVolume: getVolume, - time: time, - duration: duration, - timeAsUTC: timeAsUTC, - durationAsUTC: durationAsUTC, - getActiveStream: getActiveStream, - getDVRWindowSize: getDVRWindowSize, - getDVRSeekOffset: getDVRSeekOffset, - convertToTimeCode: convertToTimeCode, - formatUTC: formatUTC, - getVersion: getVersion, - getDebug: getDebug, - getBufferLength: getBufferLength, - getTTMLRenderingDiv: getTTMLRenderingDiv, - getVideoElement: getVideoElement, - getSource: getSource, - getCurrentLiveLatency: getCurrentLiveLatency, - getTopBitrateInfoFor: getTopBitrateInfoFor, - setAutoPlay: setAutoPlay, - getAutoPlay: getAutoPlay, - getDashMetrics: getDashMetrics, - getQualityFor: getQualityFor, - setQualityFor: setQualityFor, - updatePortalSize: updatePortalSize, - setTextDefaultLanguage: setTextDefaultLanguage, - getTextDefaultLanguage: getTextDefaultLanguage, - setTextDefaultEnabled: setTextDefaultEnabled, - getTextDefaultEnabled: getTextDefaultEnabled, - enableText: enableText, - enableForcedTextStreaming: enableForcedTextStreaming, - isTextEnabled: isTextEnabled, - setTextTrack: setTextTrack, - getBitrateInfoListFor: getBitrateInfoListFor, - getStreamsFromManifest: getStreamsFromManifest, - getTracksFor: getTracksFor, - getTracksForTypeFromManifest: getTracksForTypeFromManifest, - getCurrentTrackFor: getCurrentTrackFor, - setInitialMediaSettingsFor: setInitialMediaSettingsFor, - getInitialMediaSettingsFor: getInitialMediaSettingsFor, - setCurrentTrack: setCurrentTrack, - getTrackSwitchModeFor: getTrackSwitchModeFor, - setTrackSwitchModeFor: setTrackSwitchModeFor, - setSelectionModeForInitialTrack: setSelectionModeForInitialTrack, - getSelectionModeForInitialTrack: getSelectionModeForInitialTrack, - addABRCustomRule: addABRCustomRule, - removeABRCustomRule: removeABRCustomRule, - removeAllABRCustomRule: removeAllABRCustomRule, - getAverageThroughput: getAverageThroughput, - retrieveManifest: retrieveManifest, - addUTCTimingSource: addUTCTimingSource, - removeUTCTimingSource: removeUTCTimingSource, - clearDefaultUTCTimingSources: clearDefaultUTCTimingSources, - restoreDefaultUTCTimingSources: restoreDefaultUTCTimingSources, - setXHRWithCredentialsForType: setXHRWithCredentialsForType, - getXHRWithCredentialsForType: getXHRWithCredentialsForType, - getProtectionController: getProtectionController, - attachProtectionController: attachProtectionController, - setProtectionData: setProtectionData, - registerLicenseRequestFilter: registerLicenseRequestFilter, - registerLicenseResponseFilter: registerLicenseResponseFilter, - unregisterLicenseRequestFilter: unregisterLicenseRequestFilter, - unregisterLicenseResponseFilter: unregisterLicenseResponseFilter, + initialize, + setConfig, + on, + off, + extend, + attachView, + attachSource, + isReady, + play, + isPaused, + pause, + isSeeking, + isDynamic, + seek, + setPlaybackRate, + getPlaybackRate, + setMute, + isMuted, + setVolume, + getVolume, + time, 
+ duration, + timeAsUTC, + durationAsUTC, + getActiveStream, + getDVRWindowSize, + getDVRSeekOffset, + convertToTimeCode, + formatUTC, + getVersion, + getDebug, + getBufferLength, + getTTMLRenderingDiv, + getVideoElement, + getSource, + getCurrentLiveLatency, + getTopBitrateInfoFor, + setAutoPlay, + getAutoPlay, + getDashMetrics, + getQualityFor, + setQualityFor, + updatePortalSize, + enableText, + enableForcedTextStreaming, + isTextEnabled, + setTextTrack, + getBitrateInfoListFor, + getStreamsFromManifest, + getTracksFor, + getTracksForTypeFromManifest, + getCurrentTrackFor, + setInitialMediaSettingsFor, + getInitialMediaSettingsFor, + setCurrentTrack, + addABRCustomRule, + removeABRCustomRule, + removeAllABRCustomRule, + getAverageThroughput, + retrieveManifest, + addUTCTimingSource, + removeUTCTimingSource, + clearDefaultUTCTimingSources, + restoreDefaultUTCTimingSources, + setXHRWithCredentialsForType, + getXHRWithCredentialsForType, + getProtectionController, + attachProtectionController, + setProtectionData, + registerLicenseRequestFilter, + registerLicenseResponseFilter, + unregisterLicenseRequestFilter, + unregisterLicenseResponseFilter, registerCustomCapabilitiesFilter, unregisterCustomCapabilitiesFilter, - displayCaptionsOnTop: displayCaptionsOnTop, - attachTTMLRenderingDiv: attachTTMLRenderingDiv, - getCurrentTextTrackIndex: getCurrentTextTrackIndex, - provideThumbnail: provideThumbnail, - getDashAdapter: getDashAdapter, - getOfflineController: getOfflineController, - getSettings: getSettings, - updateSettings: updateSettings, - resetSettings: resetSettings, - reset: reset, - destroy: destroy + attachTTMLRenderingDiv, + getCurrentTextTrackIndex, + provideThumbnail, + getDashAdapter, + getOfflineController, + getSettings, + updateSettings, + resetSettings, + reset, + destroy }; setup(); diff --git a/src/streaming/MediaPlayerEvents.js b/src/streaming/MediaPlayerEvents.js index 74867f6d0b..418b2fd344 100644 --- a/src/streaming/MediaPlayerEvents.js +++ b/src/streaming/MediaPlayerEvents.js @@ -69,6 +69,12 @@ class MediaPlayerEvents extends EventsBase { */ this.BUFFER_LEVEL_STATE_CHANGED = 'bufferStateChanged'; + /** + * Triggered when the buffer level of a media type has been updated + * @event MediaPlayerEvents#BUFFER_LEVEL_UPDATED + */ + this.BUFFER_LEVEL_UPDATED = 'bufferLevelUpdated'; + /** * Triggered when a dynamic stream changed to static (transition phase between Live and On-Demand). * @event MediaPlayerEvents#DYNAMIC_TO_STATIC @@ -147,10 +153,10 @@ class MediaPlayerEvents extends EventsBase { this.PERIOD_SWITCH_COMPLETED = 'periodSwitchCompleted'; /** - * Triggered when a new period starts. - * @event MediaPlayerEvents#PERIOD_SWITCH_STARTED + * Triggered when a new stream (period) starts. + * @event MediaPlayerEvents#STREAM_SWITCH_STARTED */ - this.PERIOD_SWITCH_STARTED = 'periodSwitchStarted'; + this.STREAM_SWITCH_STARTED = 'streamSwitchStarted'; /** * Triggered when an ABR up /down switch is initiated; either by user in manual mode or auto mode via ABR rules. 
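// Illustrative sketch only: subscribing from an application to the renamed and newly added events
// registered above. Assumes `player` is an initialized dashjs.MediaPlayer instance.
player.on(dashjs.MediaPlayer.events.STREAM_SWITCH_STARTED, function (e) {   // formerly PERIOD_SWITCH_STARTED
    console.log('stream (period) switch started', e);
});
player.on(dashjs.MediaPlayer.events.BUFFER_LEVEL_UPDATED, function (e) {
    console.log('buffer level updated', e);
});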
@@ -189,7 +195,19 @@ class MediaPlayerEvents extends EventsBase { this.STREAM_UPDATED = 'streamUpdated'; /** - * Triggered when a stream (period) is updated + * Triggered when a stream (period) is activated + * @event MediaPlayerEvents#STREAM_ACTIVATED + */ + this.STREAM_ACTIVATED = 'streamActivated'; + + /** + * Triggered when a stream (period) is deactivated + * @event MediaPlayerEvents#STREAM_DEACTIVATED + */ + this.STREAM_DEACTIVATED = 'streamDeactivated'; + + /** + * Triggered when a stream (period) is initialized * @event MediaPlayerEvents#STREAM_INITIALIZED */ this.STREAM_INITIALIZED = 'streamInitialized'; @@ -244,6 +262,12 @@ class MediaPlayerEvents extends EventsBase { */ this.CAN_PLAY = 'canPlay'; + /** + * Sent when playback should be able to play through to the end without stalling (canplaythrough). + * @event MediaPlayerEvents#CAN_PLAY_THROUGH + */ + this.CAN_PLAY_THROUGH = 'canPlayThrough'; + /** * Sent when playback completes. * @event MediaPlayerEvents#PLAYBACK_ENDED @@ -270,6 +294,13 @@ class MediaPlayerEvents extends EventsBase { */ this.PLAYBACK_METADATA_LOADED = 'playbackMetaDataLoaded'; + /** + * Sent when the frame at the current playback position of the media + * has finished loading; often the first frame. + * @event MediaPlayerEvents#PLAYBACK_LOADED_DATA + */ + this.PLAYBACK_LOADED_DATA = 'playbackLoadedData'; + /** * Sent when playback is paused. * @event MediaPlayerEvents#PLAYBACK_PAUSED @@ -349,18 +380,6 @@ class MediaPlayerEvents extends EventsBase { */ this.MANIFEST_VALIDITY_CHANGED = 'manifestValidityChanged'; - /** - * A gap occured in the timeline which requires a seek to the next period - * @event MediaPlayerEvents#GAP_CAUSED_SEEK_TO_PERIOD_END - */ - this.GAP_CAUSED_SEEK_TO_PERIOD_END = 'gapCausedSeekToPeriodEnd'; - - /** - * A gap occured in the timeline which requires an internal seek - * @event MediaPlayerEvents#GAP_CAUSED_INTERNAL_SEEK - */ - this.GAP_CAUSED_INTERNAL_SEEK = 'gapCausedInternalSeek'; - /** * Dash events are triggered at their respective start points on the timeline. * @event MediaPlayerEvents#EVENT_MODE_ON_START @@ -378,6 +397,12 @@ class MediaPlayerEvents extends EventsBase { * @event MediaPlayerEvents#CONFORMANCE_VIOLATION */ this.CONFORMANCE_VIOLATION = 'conformanceViolation'; + + /** + * Event that is dispatched whenever the player is attempting to switch the quality + * @event MediaPlayerEvents#SETTING_PLAYBACK_QUALITY + */ + this.SETTING_PLAYBACK_QUALITY = 'settingPlaybackQuality'; } } diff --git a/src/streaming/PreBufferSink.js b/src/streaming/PreBufferSink.js index a6960223c6..57ca04af14 100644 --- a/src/streaming/PreBufferSink.js +++ b/src/streaming/PreBufferSink.js @@ -61,7 +61,9 @@ function PreBufferSink(onAppendedCallback) { function append(chunk) { if (chunk.segmentType !== 'InitializationSegment') { //Init segments are stored in the initCache. chunks.push(chunk); - chunks.sort(function (a, b) { return a.start - b.start; }); + chunks.sort(function (a, b) { + return a.start - b.start; + }); outstandingInit = null; } else {//We need to hold an init chunk for when a corresponding media segment is being downloaded when the discharge happens. outstandingInit = chunk; @@ -76,7 +78,7 @@ function PreBufferSink(onAppendedCallback) { } function remove(start, end) { - chunks = chunks.filter( a => !((isNaN(end) || a.start < end) && (isNaN(start) || a.end > start))); //The opposite of the getChunks predicate. + chunks = chunks.filter(a => !((isNaN(end) || a.start < end) && (isNaN(start) || a.end > start))); //The opposite of the getChunks predicate.
} //Nothing async, nothing to abort. @@ -89,7 +91,7 @@ function PreBufferSink(onAppendedCallback) { for (let i = 0; i < chunks.length; i++) { let chunk = chunks[i]; if (ranges.length === 0 || chunk.start > ranges[ranges.length - 1].end) { - ranges.push({ start: chunk.start, end: chunk.end }); + ranges.push({start: chunk.start, end: chunk.end}); } else { ranges[ranges.length - 1].end = chunk.end; } @@ -114,10 +116,6 @@ function PreBufferSink(onAppendedCallback) { return timeranges; } - function hasDiscontinuitiesAfter() { - return false; - } - function updateTimestampOffset() { // Nothing to do } @@ -148,7 +146,7 @@ function PreBufferSink(onAppendedCallback) { } function getChunksAt(start, end) { - return chunks.filter( a => ((isNaN(end) || a.start < end) && (isNaN(start) || a.end > start)) ); + return chunks.filter(a => ((isNaN(end) || a.start < end) && (isNaN(start) || a.end > start))); } function waitForUpdateEnd(callback) { @@ -163,7 +161,6 @@ function PreBufferSink(onAppendedCallback) { discharge: discharge, reset: reset, updateTimestampOffset: updateTimestampOffset, - hasDiscontinuitiesAfter: hasDiscontinuitiesAfter, waitForUpdateEnd: waitForUpdateEnd, getBuffer: getBuffer }; diff --git a/src/streaming/SourceBufferSink.js b/src/streaming/SourceBufferSink.js index 2732d7dad9..6c442e0aae 100644 --- a/src/streaming/SourceBufferSink.js +++ b/src/streaming/SourceBufferSink.js @@ -30,38 +30,80 @@ */ import Debug from '../core/Debug'; import DashJSError from './vo/DashJSError'; -import EventBus from '../core/EventBus'; -import Events from '../core/events/Events'; import FactoryMaker from '../core/FactoryMaker'; -import TextController from './text/TextController'; import Errors from '../core/errors/Errors'; +import Settings from '../core/Settings'; -const MAX_ALLOWED_DISCONTINUITY = 0.1; // 100 milliseconds +const APPEND_WINDOW_START_OFFSET = 0.1; +const APPEND_WINDOW_END_OFFSET = 0.01; /** * @class SourceBufferSink * @ignore * @implements FragmentSink */ -function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, oldBuffer) { + +const CHECK_INTERVAL = 50; + +function SourceBufferSink(config) { const context = this.context; - const eventBus = EventBus(context).getInstance(); + const settings = Settings(context).getInstance(); + const textController = config.textController; let instance, type, logger, buffer, - isAppendingInProgress, + mediaInfo, intervalId; let callbacks = []; let appendQueue = []; - let onAppended = onAppendedCallback; + let isAppendingInProgress = false; + let mediaSource = config.mediaSource; function setup() { logger = Debug(context).getInstance().getLogger(instance); - isAppendingInProgress = false; + } + + function initializeForStreamSwitch(mInfo, selectedRepresentation, oldSourceBufferSink) { + mediaInfo = mInfo; + type = mediaInfo.type; + const codec = mediaInfo.codec; + + _copyPreviousSinkData(oldSourceBufferSink); + _addEventListeners(); + + const promises = []; + + promises.push(_abortBeforeAppend()); + promises.push(updateAppendWindow(mediaInfo.streamInfo)); + promises.push(changeType(codec)); + + if (selectedRepresentation && selectedRepresentation.MSETimeOffset !== undefined) { + promises.push(updateTimestampOffset(selectedRepresentation.MSETimeOffset)); + } + return Promise.all(promises); + } + + function changeType(codec) { + return new Promise((resolve) => { + waitForUpdateEnd(() => { + if (buffer.changeType) { + buffer.changeType(codec); + } + resolve(); + }); + }); + } + + function _copyPreviousSinkData(oldSourceBufferSink) { + buffer =
oldSourceBufferSink.getBuffer(); + } + + function initializeForFirstUse(streamInfo, mInfo, selectedRepresentation) { + mediaInfo = mInfo; type = mediaInfo.type; const codec = mediaInfo.codec; try { @@ -70,40 +112,52 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, oldBuffer) // - currently no browser does, so check for it and use our own // implementation. The same is true for codecs="wvtt". if (codec.match(/application\/mp4;\s*codecs="(stpp|wvtt).*"/i)) { - throw new Error('not really supported'); - } - buffer = oldBuffer ? oldBuffer : mediaSource.addSourceBuffer(codec); - if (buffer.changeType && oldBuffer) { - logger.debug('Doing period transition with changeType'); - buffer.changeType(codec); + return _initializeForText(streamInfo); } - updateAppendWindow(); + buffer = mediaSource.addSourceBuffer(codec); - const CHECK_INTERVAL = 50; - // use updateend event if possible - if (typeof buffer.addEventListener === 'function') { - try { - buffer.addEventListener('updateend', updateEndHandler, false); - buffer.addEventListener('error', errHandler, false); - buffer.addEventListener('abort', errHandler, false); + _addEventListeners(); - } catch (err) { - // use setInterval to periodically check if updating has been completed - intervalId = setInterval(checkIsUpdateEnded, CHECK_INTERVAL); - } - } else { - // use setInterval to periodically check if updating has been completed - intervalId = setInterval(checkIsUpdateEnded, CHECK_INTERVAL); + const promises = []; + + promises.push(updateAppendWindow(mediaInfo.streamInfo)); + + if (selectedRepresentation && selectedRepresentation.MSETimeOffset !== undefined) { + promises.push(updateTimestampOffset(selectedRepresentation.MSETimeOffset)); } - } catch (ex) { + + return Promise.all(promises); + + } catch (e) { // Note that in the following, the quotes are open to allow for extra text after stpp and wvtt if ((mediaInfo.isText) || (codec.indexOf('codecs="stpp') !== -1) || (codec.indexOf('codecs="wvtt') !== -1)) { - const textController = TextController(context).getInstance(); - buffer = textController.getTextSourceBuffer(); - } else { - throw ex; + return _initializeForText(streamInfo); } + return Promise.reject(e); + } + } + + function _initializeForText(streamInfo) { + buffer = textController.getTextSourceBuffer(streamInfo); + return Promise.resolve(); + } + + function _addEventListeners() { + // use updateend event if possible + if (typeof buffer.addEventListener === 'function') { + try { + buffer.addEventListener('updateend', updateEndHandler, false); + buffer.addEventListener('error', errHandler, false); + buffer.addEventListener('abort', errHandler, false); + + } catch (err) { + // use setInterval to periodically check if updating has been completed + intervalId = setInterval(updateEndHandler, CHECK_INTERVAL); + } + } else { + // use setInterval to periodically check if updating has been completed + intervalId = setInterval(updateEndHandler, CHECK_INTERVAL); } } @@ -111,30 +165,97 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, oldBuffer) return type; } - function reset(keepBuffer) { - if (buffer) { + function _removeEventListeners() { + try { if (typeof buffer.removeEventListener === 'function') { buffer.removeEventListener('updateend', updateEndHandler, false); buffer.removeEventListener('error', errHandler, false); buffer.removeEventListener('abort', errHandler, false); } clearInterval(intervalId); - callbacks = []; - if (!keepBuffer) { + } catch (e) { + logger.error(e); + } + } + + function 
updateAppendWindow(sInfo) { + return new Promise((resolve) => { + + if (!buffer || !settings.get().streaming.buffer.useAppendWindow) { + resolve(); + return; + } + + waitForUpdateEnd(() => { + try { + if (!buffer) { + resolve(); + return; + } + + let appendWindowEnd = mediaSource.duration; + let appendWindowStart = 0; + if (sInfo && !isNaN(sInfo.start) && !isNaN(sInfo.duration) && isFinite(sInfo.duration)) { + appendWindowEnd = sInfo.start + sInfo.duration; + } + if (sInfo && !isNaN(sInfo.start)) { + appendWindowStart = sInfo.start; + } + if (buffer.appendWindowEnd !== appendWindowEnd || buffer.appendWindowStart !== appendWindowStart) { + buffer.appendWindowStart = 0; + buffer.appendWindowEnd = appendWindowEnd + APPEND_WINDOW_END_OFFSET; + buffer.appendWindowStart = Math.max(appendWindowStart - APPEND_WINDOW_START_OFFSET, 0); + logger.debug(`Updated append window for ${mediaInfo.type}. Set start to ${buffer.appendWindowStart} and end to ${buffer.appendWindowEnd}`); + } + + resolve(); + } catch (e) { + logger.warn(`Failed to set append window`); + resolve(); + } + }); + }); + } + + function updateTimestampOffset(MSETimeOffset) { + return new Promise((resolve) => { + + if (!buffer) { + resolve(); + return; + } + + waitForUpdateEnd(() => { try { - if (!buffer.getClassName || buffer.getClassName() !== 'TextSourceBuffer') { - logger.debug(`Removing sourcebuffer from media source`); - mediaSource.removeSourceBuffer(buffer); + if (buffer.timestampOffset !== MSETimeOffset && !isNaN(MSETimeOffset)) { + buffer.timestampOffset = MSETimeOffset; + logger.debug(`Set MSE timestamp offset to ${MSETimeOffset}`); } + resolve(); } catch (e) { - logger.error('Failed to remove source buffer from media source.'); + resolve(); } - buffer = null; + }); + }); + } + + + function reset() { + if (buffer) { + try { + callbacks = []; + _removeEventListeners(); + isAppendingInProgress = false; + appendQueue = []; + if (!buffer.getClassName || buffer.getClassName() !== 'TextSourceBuffer') { + logger.debug(`Removing sourcebuffer from media source`); + mediaSource.removeSourceBuffer(buffer); + } + } catch (e) { + } - isAppendingInProgress = false; + buffer = null; } - appendQueue = []; - onAppended = null; } function getBuffer() { @@ -150,135 +271,101 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, oldBuffer) } } - function hasDiscontinuitiesAfter(time) { - try { - const ranges = getAllBufferRanges(); - if (ranges && ranges.length > 1) { - for (let i = 0, len = ranges.length; i < len; i++) { - if (i > 0) { - if (time < ranges.start(i) && ranges.start(i) > ranges.end(i - 1) + MAX_ALLOWED_DISCONTINUITY) { - return true; - } - } - } - } - } catch (e) { - logger.error('hasDiscontinuities exception: ' + e.message); - } - return false; - } - function append(chunk) { - if (!chunk) { - onAppended({ - chunk: chunk, - error: new DashJSError(Errors.APPEND_ERROR_CODE, Errors.APPEND_ERROR_MESSAGE) - }); - return; - } - appendQueue.push(chunk); - if (!isAppendingInProgress) { + return new Promise((resolve, reject) => { + if (!chunk) { + reject({ + chunk: chunk, + error: new DashJSError(Errors.APPEND_ERROR_CODE, Errors.APPEND_ERROR_MESSAGE) + }); + return; + } + appendQueue.push({ data: chunk, promise: { resolve, reject } }); waitForUpdateEnd(appendNextInQueue.bind(this)); - } + }); } - function updateTimestampOffset(MSETimeOffset) { - if (buffer.timestampOffset !== MSETimeOffset && !isNaN(MSETimeOffset)) { + function _abortBeforeAppend() { + return new Promise((resolve) => { waitForUpdateEnd(() => { - if 
(MSETimeOffset < 0) { - MSETimeOffset += 0.001; - } - buffer.timestampOffset = MSETimeOffset; - }); - } - } + // Save the append window, which is reset on abort(). + const appendWindowStart = buffer.appendWindowStart; + const appendWindowEnd = buffer.appendWindowEnd; - function updateAppendWindow(sInfo) { - if (!buffer) { - return; - } - waitForUpdateEnd(() => { - try { - let appendWindowEnd = mediaSource.duration; - let appendWindowStart = 0; - if (sInfo && !isNaN(sInfo.start) && !isNaN(sInfo.duration) && isFinite(sInfo.duration)) { - appendWindowEnd = sInfo.start + sInfo.duration; - } - if (sInfo && !isNaN(sInfo.start)) { - appendWindowStart = sInfo.start; - } - buffer.appendWindowStart = 0; - buffer.appendWindowEnd = appendWindowEnd; + buffer.abort(); buffer.appendWindowStart = appendWindowStart; - logger.debug(`Updated append window. Set start to ${buffer.appendWindowStart} and end to ${buffer.appendWindowEnd}`); - } catch (e) { - logger.warn(`Failed to set append window`); - } + buffer.appendWindowEnd = appendWindowEnd; + resolve(); + }); }); } - function remove(start, end, forceRemoval) { - const sourceBufferSink = this; - // make sure that the given time range is correct. Otherwise we will get InvalidAccessError - waitForUpdateEnd(function () { - try { - if ((start >= 0) && (end > start) && (forceRemoval || mediaSource.readyState !== 'ended')) { + function remove(range) { + return new Promise((resolve, reject) => { + const start = range.start; + const end = range.end; + + // make sure that the given time range is correct. Otherwise we will get InvalidAccessError + if (!((start >= 0) && (end > start))) { + resolve(); + return; + } + + waitForUpdateEnd(function () { + try { buffer.remove(start, end); - } - // updating is in progress, we should wait for it to complete before signaling that this operation is done - waitForUpdateEnd(function () { - eventBus.trigger(Events.SOURCEBUFFER_REMOVE_COMPLETED, { - buffer: sourceBufferSink, + // updating is in progress, we should wait for it to complete before signaling that this operation is done + waitForUpdateEnd(function () { + resolve({ + from: start, + to: end, + unintended: false + }); + if (range.resolve) { + range.resolve(); + } + }); + } catch (err) { + reject({ from: start, to: end, - unintended: false + unintended: false, + error: new DashJSError(err.code, err.message) }); - }); - } catch (err) { - eventBus.trigger(Events.SOURCEBUFFER_REMOVE_COMPLETED, { - buffer: sourceBufferSink, - from: start, - to: end, - unintended: false, - error: new DashJSError(err.code, err.message) - }); - } + if (range.reject) { + range.reject(err); + } + } + }); }); } function appendNextInQueue() { - const sourceBufferSink = this; + if (isAppendingInProgress) { + return; + } if (appendQueue.length > 0) { isAppendingInProgress = true; const nextChunk = appendQueue[0]; appendQueue.splice(0, 1); - let oldRanges = []; + const afterSuccess = function () { - // Safari sometimes drops a portion of a buffer after appending. 
Handle these situations here - const newRanges = getAllBufferRanges(); - checkBufferGapsAfterAppend(sourceBufferSink, oldRanges, newRanges, nextChunk); + isAppendingInProgress = false; if (appendQueue.length > 0) { appendNextInQueue.call(this); - } else { - isAppendingInProgress = false; - if (onAppended) { - onAppended({ - chunk: nextChunk - }); - } } + nextChunk.promise.resolve({ chunk: nextChunk.data }); }; try { - if (nextChunk.bytes.length === 0) { + if (nextChunk.data.bytes.byteLength === 0) { afterSuccess.call(this); } else { - oldRanges = getAllBufferRanges(); if (buffer.appendBuffer) { - buffer.appendBuffer(nextChunk.bytes); + buffer.appendBuffer(nextChunk.data.bytes); } else { - buffer.append(nextChunk.bytes, nextChunk); + buffer.append(nextChunk.data.bytes, nextChunk.data); } // updating is in progress, we should wait for it to complete before signaling that this operation is done waitForUpdateEnd(afterSuccess.bind(this)); @@ -290,60 +377,37 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, oldBuffer) } else { isAppendingInProgress = false; } - - if (onAppended) { - onAppended({ - chunk: nextChunk, - error: new DashJSError(err.code, err.message) - }); - } - } - } - } - - function checkBufferGapsAfterAppend(buffer, oldRanges, newRanges, chunk) { - if (oldRanges && oldRanges.length > 0 && oldRanges.length < newRanges.length && - isChunkAlignedWithRange(oldRanges, chunk)) { - // A split in the range was created while appending - eventBus.trigger(Events.SOURCEBUFFER_REMOVE_COMPLETED, { - buffer: buffer, - from: newRanges.end(newRanges.length - 2), - to: newRanges.start(newRanges.length - 1), - unintended: true - }); - } - } - - function isChunkAlignedWithRange(oldRanges, chunk) { - for (let i = 0; i < oldRanges.length; i++) { - const start = Math.round(oldRanges.start(i)); - const end = Math.round(oldRanges.end(i)); - if (end === chunk.start || start === chunk.end || (chunk.start >= start && chunk.end <= end)) { - return true; + nextChunk.promise.reject({ chunk: nextChunk.data, error: new DashJSError(err.code, err.message) }); } } - return false; } function abort() { - try { - if (mediaSource.readyState === 'open') { - buffer.abort(); - } else if (buffer.setTextTrack && mediaSource.readyState === 'ended') { - buffer.abort(); //The cues need to be removed from the TextSourceBuffer via a call to abort() + return new Promise((resolve) => { + try { + appendQueue = []; + if (mediaSource.readyState === 'open') { + waitForUpdateEnd(() => { + buffer.abort(); + resolve(); + }); + } else if (buffer && buffer.setTextTrack && mediaSource.readyState === 'ended') { + buffer.abort(); //The cues need to be removed from the TextSourceBuffer via a call to abort() + resolve(); + } else { + resolve(); + } + } catch (e) { + resolve(); } - } catch (ex) { - logger.error('SourceBuffer append abort failed: "' + ex + '"'); - } - appendQueue = []; + }); + } function executeCallback() { if (callbacks.length > 0) { - const cb = callbacks.shift(); - if (buffer.updating) { - waitForUpdateEnd(cb); - } else { + if (!buffer.updating) { + const cb = callbacks.shift(); cb(); // Try to execute next callback if still not updating executeCallback(); @@ -351,16 +415,13 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, oldBuffer) } } - function checkIsUpdateEnded() { - // if updating is still in progress do nothing and wait for the next check again. 
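// Illustrative sketch only: with the rewrite above, append(), remove() and abort() resolve Promises
// instead of invoking callbacks or firing bus events, so callers can sequence buffer operations.
// Result shapes are the ones resolved/rejected in this file; `sink` and `chunk` are assumed to exist.
sink.append(chunk)
    .then(function (e) { /* e.chunk is the appended chunk */ })
    .catch(function (e) { /* e.error is a DashJSError */ });
sink.remove({ start: 0, end: 10 })
    .then(function (e) { /* e.from and e.to describe the removed range */ });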
- if (buffer.updating) return; - // updating is completed, now we can stop checking and resolve the promise - executeCallback(); - } - function updateEndHandler() { - if (buffer.updating) return; + // if updating is still in progress do nothing and wait for the next check again. + if (buffer.updating) { + return; + } + // updating is completed, now we can stop checking and resolve the promise executeCallback(); } @@ -377,17 +438,19 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, oldBuffer) } instance = { - getType: getType, - getAllBufferRanges: getAllBufferRanges, - getBuffer: getBuffer, - append: append, - remove: remove, - abort: abort, - reset: reset, - updateTimestampOffset: updateTimestampOffset, - hasDiscontinuitiesAfter: hasDiscontinuitiesAfter, - waitForUpdateEnd: waitForUpdateEnd, - updateAppendWindow + getType, + getAllBufferRanges, + getBuffer, + append, + remove, + abort, + reset, + updateTimestampOffset, + waitForUpdateEnd, + initializeForStreamSwitch, + initializeForFirstUse, + updateAppendWindow, + changeType }; setup(); diff --git a/src/streaming/Stream.js b/src/streaming/Stream.js index c07e21de03..a88b63b3cb 100644 --- a/src/streaming/Stream.js +++ b/src/streaming/Stream.js @@ -42,6 +42,10 @@ import DashJSError from './vo/DashJSError'; import BoxParser from './utils/BoxParser'; import URLUtils from './utils/URLUtils'; + +const MEDIA_TYPES = [Constants.VIDEO, Constants.AUDIO, Constants.TEXT, Constants.FRAGMENTED_TEXT, Constants.EMBEDDED_TEXT, Constants.MUXED, Constants.IMAGE]; + + function Stream(config) { config = config || {}; @@ -51,29 +55,29 @@ function Stream(config) { const manifestModel = config.manifestModel; const mediaPlayerModel = config.mediaPlayerModel; + const dashMetrics = config.dashMetrics; const manifestUpdater = config.manifestUpdater; const adapter = config.adapter; + const timelineConverter = config.timelineConverter; const capabilities = config.capabilities; - const capabilitiesFilter = config.capabilitiesFilter; const errHandler = config.errHandler; - const timelineConverter = config.timelineConverter; - const dashMetrics = config.dashMetrics; const abrController = config.abrController; const playbackController = config.playbackController; const eventController = config.eventController; const mediaController = config.mediaController; - const textController = config.textController; const protectionController = config.protectionController; + const textController = config.textController; const videoModel = config.videoModel; - const settings = config.settings; let streamInfo = config.streamInfo; + const settings = config.settings; + let instance, logger, streamProcessors, - isStreamInitialized, - isStreamActivated, - isMediaInitialized, + isInitialized, + isActive, + hasFinishedBuffering, hasVideoTrack, hasAudioTrack, updateError, @@ -82,48 +86,68 @@ function Stream(config) { thumbnailController, preloaded, boxParser, - preloadingScheduled, debug, isEndedEventSignaled, trackChangedEvent; + /** + * Setup the stream + */ function setup() { - debug = Debug(context).getInstance(); - logger = debug.getLogger(instance); - resetInitialSettings(); + try { + debug = Debug(context).getInstance(); + logger = debug.getLogger(instance); + resetInitialSettings(); - boxParser = BoxParser(context).getInstance(); + boxParser = BoxParser(context).getInstance(); - fragmentController = FragmentController(context).create({ - streamInfo: streamInfo, - mediaPlayerModel: mediaPlayerModel, - dashMetrics: dashMetrics, - errHandler: errHandler, - 
settings: settings, - boxParser: boxParser, - dashConstants: DashConstants, - urlUtils: urlUtils - }); + fragmentController = FragmentController(context).create({ + streamInfo: streamInfo, + mediaPlayerModel: mediaPlayerModel, + dashMetrics: dashMetrics, + errHandler: errHandler, + settings: settings, + boxParser: boxParser, + dashConstants: DashConstants, + urlUtils: urlUtils + }); + + } catch (e) { + throw e; + } } + /** + * Initialize the events + */ function initialize() { registerEvents(); registerProtectionEvents(); + textController.initializeForStream(streamInfo); eventBus.trigger(Events.STREAM_UPDATED, { streamInfo: streamInfo }); } + /** + * Register the streaming events + */ function registerEvents() { eventBus.on(Events.BUFFERING_COMPLETED, onBufferingCompleted, instance); eventBus.on(Events.DATA_UPDATE_COMPLETED, onDataUpdateCompleted, instance); eventBus.on(Events.INBAND_EVENTS, onInbandEvents, instance); } + /** + * Unregister the streaming events + */ function unRegisterEvents() { eventBus.off(Events.DATA_UPDATE_COMPLETED, onDataUpdateCompleted, instance); eventBus.off(Events.BUFFERING_COMPLETED, onBufferingCompleted, instance); eventBus.off(Events.INBAND_EVENTS, onInbandEvents, instance); } + /** + * Register the protection events + */ function registerProtectionEvents() { if (protectionController) { eventBus.on(Events.KEY_ERROR, onProtectionError, instance); @@ -135,6 +159,9 @@ function Stream(config) { } } + /** + * Unregister the protection events + */ function unRegisterProtectionEvents() { if (protectionController) { eventBus.off(Events.KEY_ERROR, onProtectionError, instance); @@ -146,6 +173,10 @@ function Stream(config) { } } + /** + * Returns the stream id + * @return {*|null} + */ function getStreamId() { return streamInfo ? 
streamInfo.id : null; } @@ -153,27 +184,303 @@ function Stream(config) { /** * Activates Stream by re-initializing some of its components * @param {MediaSource} mediaSource + * @param {array} previousBufferSinks * @memberof Stream# - * @param {SourceBuffer} previousBuffers */ - function activate(mediaSource, previousBuffers) { - if (!isStreamActivated) { - let result; - eventBus.on(Events.CURRENT_TRACK_CHANGED, onCurrentTrackChanged, instance); - if (!getPreloaded()) { - result = initializeMedia(mediaSource, previousBuffers); + function activate(mediaSource, previousBufferSinks) { + return new Promise((resolve, reject) => { + if (isActive) { + resolve(previousBufferSinks); + return; + } + + if (getPreloaded()) { + isActive = true; + eventBus.trigger(Events.STREAM_ACTIVATED, { + streamInfo + }); + resolve(previousBufferSinks); + return; + } + + + _initializeMedia(mediaSource, previousBufferSinks) + .then((bufferSinks) => { + isActive = true; + eventBus.trigger(Events.STREAM_ACTIVATED, { + streamInfo + }); + resolve(bufferSinks); + }) + .catch((e) => { + reject(e); + }); + }); + } + + /** + * + * @param {object} mediaSource + * @param {array} previousBufferSinks + * @return {Promise} + * @private + */ + function _initializeMedia(mediaSource, previousBufferSinks) { + return _commonMediaInitialization(mediaSource, previousBufferSinks); + } + + function startPreloading(mediaSource, previousBuffers) { + return new Promise((resolve, reject) => { + + if (getPreloaded()) { + reject(); + return; + } + + logger.info(`[startPreloading] Preloading next stream with id ${getId()}`); + setPreloaded(true); + + _commonMediaInitialization(mediaSource, previousBuffers) + .then(() => { + for (let i = 0; i < streamProcessors.length && streamProcessors[i]; i++) { + streamProcessors[i].setExplicitBufferingTime(getStartTime()); + streamProcessors[i].getScheduleController().startScheduleTimer(); + } + resolve(); + }) + .catch(() => { + setPreloaded(false); + reject(); + }); + }); + } + + /** + * + * @param {object} mediaSource + * @param {array} previousBufferSinks + * @return {Promise} + * @private + */ + function _commonMediaInitialization(mediaSource, previousBufferSinks) { + return new Promise((resolve, reject) => { + checkConfig(); + + isUpdating = true; + addInlineEvents(); + + + let element = videoModel.getElement(); + + MEDIA_TYPES.forEach((mediaType) => { + if (mediaType !== Constants.VIDEO || (!element || (element && (/^VIDEO$/i).test(element.nodeName)))) { + _initializeMediaForType(mediaType, mediaSource); + } + }); + + _createBufferSinks(previousBufferSinks) + .then((bufferSinks) => { + isUpdating = false; + + if (streamProcessors.length === 0) { + const msg = 'No streams to play.'; + errHandler.error(new DashJSError(Errors.MANIFEST_ERROR_ID_NOSTREAMS_CODE, msg, manifestModel.getValue())); + logger.fatal(msg); + } else { + _checkIfInitializationCompleted(); + } + + // All mediaInfos for texttracks are added to the TextSourceBuffer by now. We can start creating the tracks + textController.createTracks(streamInfo); + + resolve(bufferSinks); + }) + .catch((e) => { + reject(e); + }); + }); + + } + + + /** + * Initialize for a given media type. 
Creates a corresponding StreamProcessor + * @param {string} type + * @param {object} mediaSource + * @private + */ + function _initializeMediaForType(type, mediaSource) { + const allMediaForType = adapter.getAllMediaInfoForType(streamInfo, type); + + let mediaInfo = null; + let initialMediaInfo; + + if (!allMediaForType || allMediaForType.length === 0) { + logger.info('No ' + type + ' data.'); + return; + } + + if (type === Constants.VIDEO) { + hasVideoTrack = true; + } + + if (type === Constants.AUDIO) { + hasAudioTrack = true; + } + + for (let i = 0, ln = allMediaForType.length; i < ln; i++) { + mediaInfo = allMediaForType[i]; + + if (type === Constants.EMBEDDED_TEXT) { + textController.addEmbeddedTrack(streamInfo, mediaInfo); } else { - initializeAfterPreload(); - result = previousBuffers; + if (_isMediaSupported(mediaInfo)) { + mediaController.addTrack(mediaInfo); + } } - isStreamActivated = true; - return result; } - return previousBuffers; + + if (type === Constants.EMBEDDED_TEXT) { + textController.addMediaInfosToBuffer(streamInfo, allMediaForType); + } + + if (type === Constants.EMBEDDED_TEXT || mediaController.getTracksFor(type, streamInfo.id).length === 0) { + return; + } + + if (type === Constants.IMAGE) { + thumbnailController = ThumbnailController(context).create({ + streamInfo: streamInfo, + adapter: adapter, + baseURLController: config.baseURLController, + timelineConverter: config.timelineConverter, + debug: debug, + eventBus: eventBus, + events: Events, + dashConstants: DashConstants, + dashMetrics: config.dashMetrics, + segmentBaseController: config.segmentBaseController + }); + thumbnailController.initialize(); + return; + } + + + mediaController.setInitialMediaSettingsForType(type, streamInfo); + initialMediaInfo = mediaController.getCurrentTrackFor(type, streamInfo.id); + + eventBus.trigger(Events.STREAM_INITIALIZING, { + streamInfo: streamInfo, + mediaInfo: mediaInfo + }); + + _createStreamProcessor(initialMediaInfo, allMediaForType, mediaSource); + } + + function _isMediaSupported(mediaInfo) { + const type = mediaInfo ? mediaInfo.type : null; + let msg; + + if (type === Constants.MUXED) { + msg = 'Multiplexed representations are intentionally not supported, as they are not compliant with the DASH-AVC/264 guidelines'; + logger.fatal(msg); + errHandler.error(new DashJSError(Errors.MANIFEST_ERROR_ID_MULTIPLEXED_CODE, msg, manifestModel.getValue())); + return false; + } + + if (type === Constants.TEXT || type === Constants.FRAGMENTED_TEXT || type === Constants.EMBEDDED_TEXT || type === Constants.IMAGE) { + return true; + } + + if (!!mediaInfo.contentProtection && !capabilities.supportsEncryptedMedia()) { + errHandler.error(new DashJSError(Errors.CAPABILITY_MEDIAKEYS_ERROR_CODE, Errors.CAPABILITY_MEDIAKEYS_ERROR_MESSAGE)); + return false; + } + + return true; + } + + /** + * Creates the StreamProcessor for a given media type. + * @param {object} initialMediaInfo + * @param {array} allMediaForType + * @param {object} mediaSource + * @private + */ + function _createStreamProcessor(initialMediaInfo, allMediaForType, mediaSource) { + + let fragmentModel = fragmentController.getModel(initialMediaInfo ? initialMediaInfo.type : null); + const type = initialMediaInfo ? initialMediaInfo.type : null; + const mimeType = initialMediaInfo ? 
initialMediaInfo.mimeType : null; + + let streamProcessor = StreamProcessor(context).create({ + streamInfo: streamInfo, + type: type, + mimeType: mimeType, + timelineConverter: timelineConverter, + adapter: adapter, + manifestModel: manifestModel, + mediaPlayerModel: mediaPlayerModel, + fragmentModel: fragmentModel, + dashMetrics: config.dashMetrics, + baseURLController: config.baseURLController, + segmentBaseController: config.segmentBaseController, + abrController: abrController, + playbackController: playbackController, + mediaController: mediaController, + textController: textController, + errHandler: errHandler, + settings: settings, + boxParser: boxParser + }); + + streamProcessor.initialize(mediaSource, hasVideoTrack); + abrController.updateTopQualityIndex(initialMediaInfo); + streamProcessors.push(streamProcessor); + + for (let i = 0; i < allMediaForType.length; i++) { + streamProcessor.addMediaInfo(allMediaForType[i]); + } + + if (type === Constants.TEXT || type === Constants.FRAGMENTED_TEXT) { + textController.addMediaInfosToBuffer(streamInfo, allMediaForType, mimeType, fragmentModel); + } + + if (initialMediaInfo) { + streamProcessor.selectMediaInfo(initialMediaInfo); + } + } + + /** + * Creates the SourceBufferSink objects for all StreamProcessors + * @param {array} previousBuffers + * @return {Promise} + * @private + */ + function _createBufferSinks(previousBuffers) { + return new Promise((resolve) => { + const buffers = {}; + const promises = streamProcessors.map((sp) => { + return sp.createBufferSinks(previousBuffers); + }); + + Promise.all(promises) + .then((bufferSinks) => { + bufferSinks.forEach((sink) => { + if (sink) { + buffers[sink.getType()] = sink; + } + }); + resolve(buffers); + }) + .catch(() => { + resolve(buffers); + }); + }); } /** - * Partially resets some of the Stream elements + * Partially resets some of the Stream elements. This function is called when preloading of streams is canceled or a stream switch occurs. * @memberof Stream# * @param {boolean} keepBuffers */ @@ -182,23 +489,27 @@ function Stream(config) { const errored = false; for (let i = 0; i < ln; i++) { let fragmentModel = streamProcessors[i].getFragmentModel(); - fragmentModel.removeExecutedRequestsBeforeTime(getStartTime() + getDuration()); + fragmentModel.resetInitialSettings(); streamProcessors[i].reset(errored, keepBuffers); } + if (textController) { + textController.deactivateStream(streamInfo); + } streamProcessors = []; - isStreamActivated = false; - isMediaInitialized = false; + isActive = false; + hasFinishedBuffering = false; setPreloaded(false); - eventBus.off(Events.CURRENT_TRACK_CHANGED, onCurrentTrackChanged, instance); + setIsEndedEventSignaled(false); + eventBus.trigger(Events.STREAM_DEACTIVATED, { streamInfo }); } - function isActive() { - return isStreamActivated; + function getIsActive() { + return isActive; } function setMediaSource(mediaSource) { for (let i = 0; i < streamProcessors.length;) { - if (isMediaSupported(streamProcessors[i].getMediaInfo())) { + if (_isMediaSupported(streamProcessors[i].getMediaInfo())) { streamProcessors[i].setMediaSource(mediaSource); i++; } else { @@ -207,12 +518,6 @@ function Stream(config) { } } - for (let i = 0; i < streamProcessors.length; i++) { - //Adding of new tracks to a stream processor isn't guaranteed by the spec after the METADATA_LOADED state - //so do this after the buffers are created above. 
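// Illustrative sketch (not part of the patch): activate() is now asynchronous and
// resolves with the SourceBufferSinks keyed by media type, while activation state is
// broadcast through the new STREAM_ACTIVATED / STREAM_DEACTIVATED events. The calling
// side shown here (stream, mediaSource, previousBufferSinks) is assumed for illustration.
eventBus.on(Events.STREAM_ACTIVATED, (e) => {
    console.log(`Stream ${e.streamInfo.id} is now the active stream`);
}, this);
eventBus.on(Events.STREAM_DEACTIVATED, (e) => {
    console.log(`Stream ${e.streamInfo.id} has been deactivated`);
}, this);

stream.activate(mediaSource, previousBufferSinks)
    .then((bufferSinks) => {
        // bufferSinks is an object keyed by type, e.g. bufferSinks[Constants.VIDEO]
        previousBufferSinks = bufferSinks;
    })
    .catch((e) => {
        // activation failed, e.g. no supported media could be initialized
        console.error(e);
    });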
- streamProcessors[i].dischargePreBuffer(); - } - if (streamProcessors.length === 0) { const msg = 'No streams to play.'; errHandler.error(new DashJSError(Errors.MANIFEST_ERROR_ID_NOSTREAMS_CODE, msg + 'nostreams', manifestModel.getValue())); @@ -220,37 +525,35 @@ function Stream(config) { } } - function resetInitialSettings() { - deactivate(); - isStreamInitialized = false; + function resetInitialSettings(keepBuffers) { + deactivate(keepBuffers); + isInitialized = false; hasVideoTrack = false; hasAudioTrack = false; updateError = {}; isUpdating = false; - preloadingScheduled = false; isEndedEventSignaled = false; } - function reset() { - - if (playbackController) { - playbackController.pause(); - } + function reset(keepBuffers) { if (fragmentController) { fragmentController.reset(); fragmentController = null; } - streamInfo = null; + if (abrController && streamInfo) { + abrController.clearDataForStream(streamInfo.id); + } - resetInitialSettings(); + resetInitialSettings(keepBuffers); + + streamInfo = null; unRegisterEvents(); unRegisterProtectionEvents(); - setPreloaded(false); } function getDuration() { @@ -269,27 +572,6 @@ function Stream(config) { return streamInfo ? streamInfo.start : NaN; } - function getPreloadingScheduled() { - return preloadingScheduled; - } - - function setPreloadingScheduled(value) { - preloadingScheduled = value; - } - - function getLiveStartTime() { - if (!streamInfo.manifestInfo.isDynamic) return NaN; - // Get live start time of the video stream (1st in array of streams) - // or audio if no video stream - for (let i = 0; i < streamProcessors.length; i++) { - if (streamProcessors[i].getType() === Constants.AUDIO || - streamProcessors[i].getType() === Constants.VIDEO) { - return streamProcessors[i].getLiveStartTime(); - } - } - return NaN; - } - function getId() { return streamInfo ? streamInfo.id : null; } @@ -311,7 +593,7 @@ function Stream(config) { } function checkConfig() { - if (!videoModel || !abrController || !abrController.hasOwnProperty('getBitrateList') || !adapter || !adapter.hasOwnProperty('getAllMediaInfoForType') || !adapter.hasOwnProperty('getEventsFor')) { + if (!videoModel || !abrController) { throw new Error(Constants.MISSING_CONFIG_ERROR); } } @@ -341,37 +623,13 @@ function Stream(config) { } } - function isMediaSupported(mediaInfo) { - const type = mediaInfo ? 
mediaInfo.type : null; - let codec, - msg; - - if (type === Constants.MUXED) { - msg = 'Multiplexed representations are intentionally not supported, as they are not compliant with the DASH-AVC/264 guidelines'; - logger.fatal(msg); - errHandler.error(new DashJSError(Errors.MANIFEST_ERROR_ID_MULTIPLEXED_CODE, msg, manifestModel.getValue())); - return false; - } - - if (type === Constants.TEXT || type === Constants.FRAGMENTED_TEXT || type === Constants.EMBEDDED_TEXT || type === Constants.IMAGE) { - return true; - } - codec = mediaInfo.codec; - logger.debug(type + ' codec: ' + codec); - - if (!!mediaInfo.contentProtection && !capabilities.supportsEncryptedMedia()) { - errHandler.error(new DashJSError(Errors.CAPABILITY_MEDIAKEYS_ERROR_CODE, Errors.CAPABILITY_MEDIAKEYS_ERROR_MESSAGE)); - } else if (!capabilities.supportsCodec(codec)) { - msg = type + 'Codec (' + codec + ') is not supported.'; - logger.error(msg); - return false; + function prepareTrackChange(e) { + if (!isActive || !streamInfo) { + return; } - return true; - } + hasFinishedBuffering = false; - function onCurrentTrackChanged(e) { - if (!streamInfo || e.newMediaInfo.streamInfo.id !== streamInfo.id) return; let mediaInfo = e.newMediaInfo; let manifest = manifestModel.getValue(); @@ -383,139 +641,20 @@ function Stream(config) { let currentTime = playbackController.getTime(); logger.info('Stream - Process track changed at current time ' + currentTime); - logger.debug('Stream - Update stream controller'); - if (manifest.refreshManifestOnSwitchTrack) { // Applies only for MSS streams + // Applies only for MSS streams + if (manifest.refreshManifestOnSwitchTrack) { logger.debug('Stream - Refreshing manifest for switch track'); trackChangedEvent = e; manifestUpdater.refreshManifest(); } else { - processor.selectMediaInfo(mediaInfo); - if (mediaInfo.type !== Constants.FRAGMENTED_TEXT) { - abrController.updateTopQualityIndex(mediaInfo); - processor.switchTrackAsked(); - processor.getFragmentModel().abortRequests(); - } else { - processor.getScheduleController().setSeekTarget(currentTime); - processor.setBufferingTime(currentTime); - processor.resetIndexHandler(); - } - } - } - - function createStreamProcessor(mediaInfo, allMediaForType, mediaSource, optionalSettings) { - - let fragmentModel = fragmentController.getModel(mediaInfo ? mediaInfo.type : null); - - let streamProcessor = StreamProcessor(context).create({ - streamInfo: streamInfo, - type: mediaInfo ? mediaInfo.type : null, - mimeType: mediaInfo ? 
mediaInfo.mimeType : null, - timelineConverter: timelineConverter, - adapter: adapter, - manifestModel: manifestModel, - mediaPlayerModel: mediaPlayerModel, - fragmentModel: fragmentModel, - dashMetrics: config.dashMetrics, - baseURLController: config.baseURLController, - abrController: abrController, - playbackController: playbackController, - mediaController: mediaController, - textController: textController, - errHandler: errHandler, - settings: settings, - boxParser: boxParser - }); - - streamProcessor.initialize(mediaSource, hasVideoTrack); - abrController.updateTopQualityIndex(mediaInfo); - - if (optionalSettings) { - streamProcessor.setBuffer(optionalSettings.buffer); - streamProcessor.setBufferingTime(optionalSettings.currentTime); - streamProcessors[optionalSettings.replaceIdx] = streamProcessor; - } else { - streamProcessors.push(streamProcessor); - } - - if (optionalSettings && optionalSettings.ignoreMediaInfo) { - return; - } - - if (mediaInfo && (mediaInfo.type === Constants.TEXT || mediaInfo.type === Constants.FRAGMENTED_TEXT)) { - let idx; - for (let i = 0; i < allMediaForType.length; i++) { - if (allMediaForType[i].index === mediaInfo.index) { - idx = i; - } - streamProcessor.addMediaInfo(allMediaForType[i]); //creates text tracks for all adaptations in one stream processor - } - streamProcessor.selectMediaInfo(allMediaForType[idx]); //sets the initial media info - } else { - streamProcessor.addMediaInfo(mediaInfo, true); - } - } - - function initializeMediaForType(type, mediaSource) { - const allMediaForType = adapter.getAllMediaInfoForType(streamInfo, type); - - let mediaInfo = null; - let initialMediaInfo; - - if (!allMediaForType || allMediaForType.length === 0) { - logger.info('No ' + type + ' data.'); - return; - } - - if (type === Constants.VIDEO) { - hasVideoTrack = true; - } - - if (type === Constants.AUDIO) { - hasAudioTrack = true; - } - - for (let i = 0, ln = allMediaForType.length; i < ln; i++) { - mediaInfo = allMediaForType[i]; - - if (type === Constants.EMBEDDED_TEXT) { - textController.addEmbeddedTrack(mediaInfo); - } else { - if (!isMediaSupported(mediaInfo)) continue; - mediaController.addTrack(mediaInfo); - } - } - - if (type === Constants.EMBEDDED_TEXT || mediaController.getTracksFor(type, streamInfo).length === 0) { - return; - } - - if (type === Constants.IMAGE) { - thumbnailController = ThumbnailController(context).create({ - streamInfo: streamInfo, - adapter: adapter, - baseURLController: config.baseURLController, - timelineConverter: config.timelineConverter, - debug: debug, - eventBus: eventBus, - events: Events, - dashConstants: DashConstants - }); - return; + processor.selectMediaInfo(mediaInfo) + .then(() => { + if (mediaInfo.type !== Constants.FRAGMENTED_TEXT) { + abrController.updateTopQualityIndex(mediaInfo); + } + processor.prepareTrackSwitch(); + }); } - - - mediaController.checkInitialMediaSettingsForType(type, streamInfo); - initialMediaInfo = mediaController.getCurrentTrackFor(type, streamInfo); - - eventBus.trigger(Events.STREAM_INITIALIZING, { - streamInfo: streamInfo, - mediaInfo: mediaInfo - }); - - // TODO : How to tell index handler live/duration? - // TODO : Pass to controller and then pass to each method on handler? 
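// Illustrative sketch (not part of the patch): Stream no longer subscribes to
// CURRENT_TRACK_CHANGED itself, so the event is presumably forwarded by the owning
// controller to the active stream. The wiring and the getActiveStream() helper below
// are assumptions, shown only to clarify how prepareTrackChange() is reached; the
// payload shape (e.newMediaInfo.streamInfo.id) matches the code removed above.
eventBus.on(Events.CURRENT_TRACK_CHANGED, (e) => {
    const activeStream = getActiveStream(); // hypothetical helper of the owning controller
    if (activeStream && e.newMediaInfo.streamInfo.id === activeStream.getId()) {
        activeStream.prepareTrackChange(e);
    }
}, this);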
- - createStreamProcessor(initialMediaInfo, allMediaForType, mediaSource); } function addInlineEvents() { @@ -525,66 +664,7 @@ function Stream(config) { } } - function addInbandEvents(events) { - if (eventController) { - eventController.addInbandEvents(events); - } - } - - function initializeMedia(mediaSource, previousBuffers) { - checkConfig(); - let element = videoModel.getElement(); - - addInlineEvents(); - - isUpdating = true; - - capabilitiesFilter.filterUnsupportedFeaturesOfPeriod( streamInfo); - - if (!element || (element && (/^VIDEO$/i).test(element.nodeName))) { - initializeMediaForType(Constants.VIDEO, mediaSource); - } - initializeMediaForType(Constants.AUDIO, mediaSource); - initializeMediaForType(Constants.TEXT, mediaSource); - initializeMediaForType(Constants.FRAGMENTED_TEXT, mediaSource); - initializeMediaForType(Constants.EMBEDDED_TEXT, mediaSource); - initializeMediaForType(Constants.MUXED, mediaSource); - initializeMediaForType(Constants.IMAGE, mediaSource); - - //TODO. Consider initialization of TextSourceBuffer here if embeddedText, but no sideloadedText. - const buffers = createBuffers(previousBuffers); - - isMediaInitialized = true; - isUpdating = false; - - if (streamProcessors.length === 0) { - const msg = 'No streams to play.'; - errHandler.error(new DashJSError(Errors.MANIFEST_ERROR_ID_NOSTREAMS_CODE, msg, manifestModel.getValue())); - logger.fatal(msg); - } else { - checkIfInitializationCompleted(); - } - - return buffers; - } - - function initializeAfterPreload() { - isUpdating = true; - checkConfig(); - capabilitiesFilter.filterUnsupportedFeaturesOfPeriod(streamInfo); - - isMediaInitialized = true; - isUpdating = false; - if (streamProcessors.length === 0) { - const msg = 'No streams to play.'; - errHandler.error(new DashJSError(Errors.MANIFEST_ERROR_ID_NOSTREAMS_CODE, msg, manifestModel.getValue())); - logger.debug(msg); - } else { - checkIfInitializationCompleted(); - } - } - - function checkIfInitializationCompleted() { + function _checkIfInitializationCompleted() { const ln = streamProcessors.length; const hasError = !!updateError.audio || !!updateError.video; let error = hasError ? new DashJSError(Errors.DATA_UPDATE_FAILED_ERROR_CODE, Errors.DATA_UPDATE_FAILED_ERROR_MESSAGE) : null; @@ -595,10 +675,6 @@ function Stream(config) { } } - if (!isMediaInitialized) { - return; - } - if (protectionController) { // Need to check if streamProcessors exists because streamProcessors // could be cleared in case an error is detected while initializing DRM keysystem @@ -618,21 +694,13 @@ function Stream(config) { if (error) { errHandler.error(error); - } else if (!isStreamInitialized) { - isStreamInitialized = true; - + } else if (!isInitialized) { + isInitialized = true; eventBus.trigger(Events.STREAM_INITIALIZED, { - streamInfo: streamInfo, - liveStartTime: !preloaded ? 
getLiveStartTime() : NaN + streamInfo: streamInfo }); } - // (Re)start ScheduleController: - // - in case stream initialization has been completed after 'play' event (case for SegmentBase streams) - // - in case stream is complete but a track switch has been requested - for (let i = 0; i < ln && streamProcessors[i]; i++) { - streamProcessors[i].getScheduleController().start(); - } } function getMediaInfo(type) { @@ -649,17 +717,6 @@ function Stream(config) { return null; } - function createBuffers(previousBuffers) { - const buffers = {}; - for (let i = 0, ln = streamProcessors.length; i < ln; i++) { - const buffer = streamProcessors[i].createBuffer(previousBuffers); - if (buffer) { - buffers[streamProcessors[i].getType()] = buffer.getBuffer(); - } - } - return buffers; - } - function onBufferingCompleted() { let processors = getProcessors(); const ln = processors.length; @@ -673,22 +730,26 @@ function Stream(config) { for (let i = 0; i < ln; i++) { //if audio or video buffer is not buffering completed state, do not send STREAM_BUFFERING_COMPLETED if (!processors[i].isBufferingCompleted() && (processors[i].getType() === Constants.AUDIO || processors[i].getType() === Constants.VIDEO)) { - logger.warn('onBufferingCompleted - One streamProcessor has finished but', processors[i].getType(), 'one is not buffering completed'); + logger.debug('onBufferingCompleted - One streamProcessor has finished but', processors[i].getType(), 'one is not buffering completed'); return; } } logger.debug('onBufferingCompleted - trigger STREAM_BUFFERING_COMPLETED'); - eventBus.trigger(Events.STREAM_BUFFERING_COMPLETED, { streamInfo: streamInfo }); + console.debug(`onBufferingCompleted - trigger STREAM_BUFFERING_COMPLETED for stream id ${streamInfo.id}`); + hasFinishedBuffering = true; + eventBus.trigger(Events.STREAM_BUFFERING_COMPLETED, { streamInfo: streamInfo }, { streamInfo }); } function onDataUpdateCompleted(e) { updateError[e.mediaType] = e.error; - checkIfInitializationCompleted(); + _checkIfInitializationCompleted(); } function onInbandEvents(e) { - addInbandEvents(e.events); + if (eventController) { + eventController.addInbandEvents(e.events); + } } function getProcessorForMediaInfo(mediaInfo) { @@ -721,10 +782,16 @@ function Stream(config) { return arr; } + function startScheduleControllers() { + const ln = streamProcessors.length; + for (let i = 0; i < ln && streamProcessors[i]; i++) { + streamProcessors[i].getScheduleController().startScheduleTimer(); + } + } + function updateData(updatedStreamInfo) { logger.info('Manifest updated... 
updating data system wide.'); - isStreamActivated = false; isUpdating = true; streamInfo = updatedStreamInfo; @@ -734,8 +801,6 @@ function Stream(config) { addInlineEvents(); } - capabilitiesFilter.filterUnsupportedFeaturesOfPeriod(streamInfo); - for (let i = 0, ln = streamProcessors.length; i < ln; i++) { let streamProcessor = streamProcessors[i]; streamProcessor.updateStreamInfo(streamInfo); @@ -752,32 +817,33 @@ function Stream(config) { if (mediaInfo.type !== Constants.FRAGMENTED_TEXT) { let processor = getProcessorForMediaInfo(trackChangedEvent.oldMediaInfo); if (!processor) return; - processor.switchTrackAsked(); + processor.prepareTrackSwitch(); trackChangedEvent = undefined; } } isUpdating = false; - checkIfInitializationCompleted(); + _checkIfInitializationCompleted(); + } function isMediaCodecCompatible(newStream, previousStream = null) { return compareCodecs(newStream, Constants.VIDEO, previousStream) && compareCodecs(newStream, Constants.AUDIO, previousStream); } - function isProtectionCompatible(stream, previousStream = null) { - return compareProtectionConfig(stream, Constants.VIDEO, previousStream) && compareProtectionConfig(stream, Constants.AUDIO, previousStream); + function isProtectionCompatible(newStream) { + if (!newStream) { + return true; + } + return _compareProtectionConfig(Constants.VIDEO, newStream) && _compareProtectionConfig(Constants.AUDIO, newStream); } - function compareProtectionConfig(stream, type, previousStream = null) { - if (!stream) { - return false; - } - const newStreamInfo = stream.getStreamInfo(); - const currentStreamInfo = previousStream ? previousStream.getStreamInfo() : getStreamInfo(); + function _compareProtectionConfig(type, newStream) { + const currentStreamInfo = getStreamInfo(); + const newStreamInfo = newStream.getStreamInfo(); if (!newStreamInfo || !currentStreamInfo) { - return false; + return true; } const newAdaptation = adapter.getAdaptationForType(newStreamInfo.index, type, newStreamInfo); @@ -789,10 +855,10 @@ function Stream(config) { } // If the current period is unencrypted and the upcoming one is encrypted we need to reset sourcebuffers. 
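// Illustrative sketch (not part of the patch): how a caller might combine the
// compatibility checks above to decide whether buffers can be kept across a period
// switch. previousStream/newStream are assumed names; only when codecs are compatible
// and the new period does not newly introduce DRM can the existing SourceBuffers be reused.
const keepBuffers = previousStream.isMediaCodecCompatible(newStream) &&
    previousStream.isProtectionCompatible(newStream);
// Deactivate the old period; with keepBuffers === true its SourceBuffers stay alive
// and can be handed to the next Stream's activate()/startPreloading() call.
previousStream.deactivate(keepBuffers);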
- return !(!isAdaptationDrmProtected(currentAdaptation) && isAdaptationDrmProtected(newAdaptation)); + return !(!_isAdaptationDrmProtected(currentAdaptation) && _isAdaptationDrmProtected(newAdaptation)); } - function isAdaptationDrmProtected(adaptation) { + function _isAdaptationDrmProtected(adaptation) { if (!adaptation) { // If there is no adaptation for neither the old or the new stream they're compatible @@ -847,56 +913,43 @@ function Stream(config) { return preloaded; } - function preload(mediaSource, previousBuffers) { - if (!getPreloaded()) { - addInlineEvents(); - - initializeMediaForType(Constants.VIDEO, mediaSource); - initializeMediaForType(Constants.AUDIO, mediaSource); - initializeMediaForType(Constants.TEXT, mediaSource); - initializeMediaForType(Constants.FRAGMENTED_TEXT, mediaSource); - initializeMediaForType(Constants.EMBEDDED_TEXT, mediaSource); - initializeMediaForType(Constants.MUXED, mediaSource); - initializeMediaForType(Constants.IMAGE, mediaSource); - - createBuffers(previousBuffers); - - eventBus.on(Events.CURRENT_TRACK_CHANGED, onCurrentTrackChanged, instance); - for (let i = 0; i < streamProcessors.length && streamProcessors[i]; i++) { - streamProcessors[i].getScheduleController().start(); - } - - setPreloaded(true); - } + function getHasFinishedBuffering() { + return hasFinishedBuffering; } + function getAdapter() { + return adapter; + } instance = { - initialize: initialize, - getStreamId: getStreamId, - activate: activate, - deactivate: deactivate, - isActive: isActive, - getDuration: getDuration, - getStartTime: getStartTime, - getId: getId, - getStreamInfo: getStreamInfo, - getHasAudioTrack: getHasAudioTrack, - getHasVideoTrack: getHasVideoTrack, - preload: preload, - getThumbnailController: getThumbnailController, - getBitrateListFor: getBitrateListFor, - updateData: updateData, - reset: reset, - getProcessors: getProcessors, - setMediaSource: setMediaSource, - isMediaCodecCompatible: isMediaCodecCompatible, - isProtectionCompatible: isProtectionCompatible, - getPreloaded: getPreloaded, - getPreloadingScheduled, - setPreloadingScheduled, + initialize, + getStreamId, + activate, + deactivate, + getIsActive, + getDuration, + getStartTime, + getId, + getStreamInfo, + getHasAudioTrack, + getHasVideoTrack, + startPreloading, + getThumbnailController, + getBitrateListFor, + updateData, + reset, + getProcessors, + setMediaSource, + isMediaCodecCompatible, + isProtectionCompatible, + getPreloaded, getIsEndedEventSignaled, - setIsEndedEventSignaled + setIsEndedEventSignaled, + getAdapter, + getHasFinishedBuffering, + setPreloaded, + startScheduleControllers, + prepareTrackChange }; setup(); diff --git a/src/streaming/StreamProcessor.js b/src/streaming/StreamProcessor.js index 35bf7796b2..4b8d316cd9 100644 --- a/src/streaming/StreamProcessor.js +++ b/src/streaming/StreamProcessor.js @@ -33,10 +33,9 @@ import DashConstants from '../dash/constants/DashConstants'; import MetricsConstants from './constants/MetricsConstants'; import FragmentModel from './models/FragmentModel'; import BufferController from './controllers/BufferController'; -import TextBufferController from './text/TextBufferController'; +import NotFragmentedTextBufferController from './text/NotFragmentedTextBufferController'; import ScheduleController from './controllers/ScheduleController'; import RepresentationController from '../dash/controllers/RepresentationController'; -import LiveEdgeFinder from './utils/LiveEdgeFinder'; import FactoryMaker from '../core/FactoryMaker'; import {checkInteger} from 
'./utils/SupervisorTools'; import EventBus from '../core/EventBus'; @@ -48,8 +47,8 @@ import Debug from '../core/Debug'; import RequestModifier from './utils/RequestModifier'; import URLUtils from '../streaming/utils/URLUtils'; import BoxParser from './utils/BoxParser'; -import FragmentRequest from './vo/FragmentRequest'; import {PlayListTrace} from './vo/metrics/PlayList'; +import SegmentsController from '../dash/controllers/SegmentsController'; function StreamProcessor(config) { @@ -82,40 +81,55 @@ function StreamProcessor(config) { bufferController, scheduleController, representationController, - liveEdgeFinder, - indexHandler, - bufferingTime, - bufferPruned; + shouldUseExplicitTimeForRequest, + dashHandler, + segmentsController, + bufferingTime; function setup() { logger = Debug(context).getInstance().getLogger(instance); resetInitialSettings(); - eventBus.on(Events.DATA_UPDATE_COMPLETED, onDataUpdateCompleted, instance, { priority: EventBus.EVENT_PRIORITY_HIGH }); // High priority to be notified before Stream - eventBus.on(Events.QUALITY_CHANGE_REQUESTED, onQualityChanged, instance); - eventBus.on(Events.INIT_FRAGMENT_NEEDED, onInitFragmentNeeded, instance); - eventBus.on(Events.MEDIA_FRAGMENT_NEEDED, onMediaFragmentNeeded, instance); - eventBus.on(Events.MEDIA_FRAGMENT_LOADED, onMediaFragmentLoaded, instance); - eventBus.on(Events.BUFFER_LEVEL_UPDATED, onBufferLevelUpdated, instance); - eventBus.on(Events.BUFFER_LEVEL_STATE_CHANGED, onBufferLevelStateChanged, instance); - eventBus.on(Events.BUFFER_CLEARED, onBufferCleared, instance); - eventBus.on(Events.QUOTA_EXCEEDED, onQuotaExceeded, instance); - eventBus.on(Events.SEEK_TARGET, onSeekTarget, instance); + eventBus.on(Events.DATA_UPDATE_COMPLETED, _onDataUpdateCompleted, instance, { priority: EventBus.EVENT_PRIORITY_HIGH }); // High priority to be notified before Stream + eventBus.on(Events.INIT_FRAGMENT_NEEDED, _onInitFragmentNeeded, instance); + eventBus.on(Events.MEDIA_FRAGMENT_NEEDED, _onMediaFragmentNeeded, instance); + eventBus.on(Events.MEDIA_FRAGMENT_LOADED, _onMediaFragmentLoaded, instance); + eventBus.on(Events.BUFFER_LEVEL_STATE_CHANGED, _onBufferLevelStateChanged, instance); + eventBus.on(Events.BUFFER_CLEARED, _onBufferCleared, instance); + eventBus.on(Events.SEEK_TARGET, _onSeekTarget, instance); + eventBus.on(Events.QUALITY_CHANGE_REQUESTED, _onQualityChanged, instance); + eventBus.on(Events.FRAGMENT_LOADING_ABANDONED, _onFragmentLoadingAbandoned, instance); + eventBus.on(Events.FRAGMENT_LOADING_COMPLETED, _onFragmentLoadingCompleted, instance); + eventBus.on(Events.QUOTA_EXCEEDED, _onQuotaExceeded, instance); + eventBus.on(Events.SET_FRAGMENTED_TEXT_AFTER_DISABLED, _onSetFragmentedTextAfterDisabled, instance); + eventBus.on(Events.SET_NON_FRAGMENTED_TEXT, _onSetNonFragmentedText, instance); } function initialize(mediaSource, hasVideoTrack) { - indexHandler = DashHandler(context).create({ - streamInfo: streamInfo, - type: type, - timelineConverter: timelineConverter, - dashMetrics: dashMetrics, - mediaPlayerModel: mediaPlayerModel, + + segmentsController = SegmentsController(context).create({ + events: Events, + eventBus, + streamInfo, + timelineConverter, + dashConstants: DashConstants, + segmentBaseController: config.segmentBaseController, + type + }); + + dashHandler = DashHandler(context).create({ + streamInfo, + type, + timelineConverter, + dashMetrics, + mediaPlayerModel, baseURLController: config.baseURLController, - errHandler: errHandler, - settings: settings, - boxParser: boxParser, + errHandler, + 
segmentsController, + settings, + boxParser, events: Events, - eventBus: eventBus, + eventBus, errors: Errors, debug: Debug(context).getInstance(), requestModifier: RequestModifier(context).getInstance(), @@ -124,56 +138,52 @@ function StreamProcessor(config) { urlUtils: URLUtils(context).getInstance() }); - // Create live edge finder for dynamic streams isDynamic = streamInfo.manifestInfo.isDynamic; - if (isDynamic) { - liveEdgeFinder = LiveEdgeFinder(context).create({ - timelineConverter: timelineConverter - }); - } // Create/initialize controllers - indexHandler.initialize(isDynamic); + dashHandler.initialize(isDynamic); abrController.registerStreamType(type, instance); representationController = RepresentationController(context).create({ - streamInfo: streamInfo, - type: type, - abrController: abrController, - dashMetrics: dashMetrics, - playbackController: playbackController, - timelineConverter: timelineConverter, + streamInfo, + type, + abrController, + dashMetrics, + playbackController, + timelineConverter, dashConstants: DashConstants, events: Events, - eventBus: eventBus, - errors: Errors + eventBus, + errors: Errors, + isDynamic, + segmentsController }); - bufferController = createBufferControllerForType(type); + bufferController = _createBufferControllerForType(type); if (bufferController) { bufferController.initialize(mediaSource); } scheduleController = ScheduleController(context).create({ - streamInfo: streamInfo, - type: type, - mimeType: mimeType, - adapter: adapter, - dashMetrics: dashMetrics, - mediaPlayerModel: mediaPlayerModel, - fragmentModel: fragmentModel, - abrController: abrController, - playbackController: playbackController, - textController: textController, - mediaController: mediaController, - bufferController: bufferController, - settings: settings + streamInfo, + type, + mimeType, + adapter, + dashMetrics, + mediaPlayerModel, + fragmentModel, + abrController, + playbackController, + textController, + mediaController, + bufferController, + settings }); scheduleController.initialize(hasVideoTrack); bufferingTime = 0; - bufferPruned = false; + shouldUseExplicitTimeForRequest = false; } function getStreamId() { @@ -188,11 +198,12 @@ function StreamProcessor(config) { mediaInfoArr = []; mediaInfo = null; bufferingTime = 0; + shouldUseExplicitTimeForRequest = false; } function reset(errored, keepBuffers) { - if (indexHandler) { - indexHandler.reset(); + if (dashHandler) { + dashHandler.reset(); } if (bufferController) { @@ -210,24 +221,27 @@ function StreamProcessor(config) { representationController = null; } - if (liveEdgeFinder) { - liveEdgeFinder.reset(); - liveEdgeFinder = null; + if (segmentsController) { + segmentsController = null; } - if (abrController && !keepBuffers) { - abrController.unRegisterStreamType(type); + if (abrController) { + abrController.unRegisterStreamType(getStreamId(), type); } - eventBus.off(Events.DATA_UPDATE_COMPLETED, onDataUpdateCompleted, instance); - eventBus.off(Events.QUALITY_CHANGE_REQUESTED, onQualityChanged, instance); - eventBus.off(Events.INIT_FRAGMENT_NEEDED, onInitFragmentNeeded, instance); - eventBus.off(Events.MEDIA_FRAGMENT_NEEDED, onMediaFragmentNeeded, instance); - eventBus.off(Events.MEDIA_FRAGMENT_LOADED, onMediaFragmentLoaded, instance); - eventBus.off(Events.BUFFER_LEVEL_UPDATED, onBufferLevelUpdated, instance); - eventBus.off(Events.BUFFER_LEVEL_STATE_CHANGED, onBufferLevelStateChanged, instance); - eventBus.off(Events.BUFFER_CLEARED, onBufferCleared, instance); - eventBus.off(Events.SEEK_TARGET, onSeekTarget, 
instance); + eventBus.off(Events.DATA_UPDATE_COMPLETED, _onDataUpdateCompleted, instance); + eventBus.off(Events.INIT_FRAGMENT_NEEDED, _onInitFragmentNeeded, instance); + eventBus.off(Events.MEDIA_FRAGMENT_NEEDED, _onMediaFragmentNeeded, instance); + eventBus.off(Events.MEDIA_FRAGMENT_LOADED, _onMediaFragmentLoaded, instance); + eventBus.off(Events.BUFFER_LEVEL_STATE_CHANGED, _onBufferLevelStateChanged, instance); + eventBus.off(Events.BUFFER_CLEARED, _onBufferCleared, instance); + eventBus.off(Events.SEEK_TARGET, _onSeekTarget, instance); + eventBus.off(Events.QUALITY_CHANGE_REQUESTED, _onQualityChanged, instance); + eventBus.off(Events.FRAGMENT_LOADING_ABANDONED, _onFragmentLoadingAbandoned, instance); + eventBus.off(Events.FRAGMENT_LOADING_COMPLETED, _onFragmentLoadingCompleted, instance); + eventBus.off(Events.SET_FRAGMENTED_TEXT_AFTER_DISABLED, _onSetFragmentedTextAfterDisabled, instance); + eventBus.off(Events.SET_NON_FRAGMENTED_TEXT, _onSetNonFragmentedText, instance); + eventBus.off(Events.QUOTA_EXCEEDED, _onQuotaExceeded, instance); resetInitialSettings(); type = null; @@ -238,67 +252,355 @@ function StreamProcessor(config) { return representationController ? representationController.isUpdating() : false; } + /** + * When a seek within the corresponding period occurs this function initiates the clearing of the buffer and sets the correct buffering time. + * @param {object} e + * @private + */ + function prepareInnerPeriodPlaybackSeeking(e) { + return new Promise((resolve) => { + // Stop segment requests until we have figured out for which time we need to request a segment. We don't want to replace existing segments. + scheduleController.clearScheduleTimer(); + fragmentModel.abortRequests(); + + // Abort operations to the SourceBuffer Sink and reset the BufferControllers isBufferingCompleted state. + bufferController.prepareForPlaybackSeek() + .then(() => { + // Clear the buffer. We need to prune everything which is not in the target interval. + const clearRanges = bufferController.getAllRangesWithSafetyFactor(e.seekTime); + // When everything has been pruned go on + return bufferController.clearBuffers(clearRanges); + }) + .then(() => { + // Figure out the correct segment request time. + const targetTime = bufferController.getContinuousBufferTimeForTargetTime(e.seekTime); + + // If the buffer is continuous and exceeds the duration of the period we are still done buffering. We need to trigger the buffering completed event in order to start prebuffering upcoming periods again + if (!isNaN(streamInfo.duration) && isFinite(streamInfo.duration) && targetTime >= streamInfo.start + streamInfo.duration) { + bufferController.setIsBufferingCompleted(true); + resolve(); + } else { + setExplicitBufferingTime(targetTime); + bufferController.setSeekTarget(targetTime); + + const promises = []; + + // append window has been reset by abort() operation. Set the correct values again + promises.push(bufferController.updateAppendWindow()); + + // Timestamp offset couldve been changed by preloading period + const representationInfo = getRepresentationInfo(); + promises.push(bufferController.updateBufferTimestampOffset(representationInfo)); + + Promise.all(promises) + .then(() => { + // We might have aborted the append operation of an init segment. Append init segment again. 
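// Illustrative sketch (not part of the patch): tying the Stream API from the first half
// of this diff together. Once the active period reports STREAM_BUFFERING_COMPLETED, an
// upcoming Stream can be preloaded with the new startPreloading() API. nextStream,
// mediaSource and previousBufferSinks are assumed names, not identifiers from this diff.
eventBus.on(Events.STREAM_BUFFERING_COMPLETED, (e) => {
    if (nextStream && e.streamInfo.id !== nextStream.getId() && !nextStream.getPreloaded()) {
        nextStream.startPreloading(mediaSource, previousBufferSinks)
            .catch(() => {
                // rejected, e.g. because the stream had already been preloaded
            });
    }
}, this);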
+ scheduleController.setInitSegmentRequired(true); + + // Right after a seek we should not immediately check the playback quality + scheduleController.setCheckPlaybackQuality(false); + scheduleController.startScheduleTimer(); + resolve(); + }); + } + }) + .catch((e) => { + logger.error(e); + }); + }); - function onDataUpdateCompleted(e) { - if (!e.error) { - // Update representation if no error - scheduleController.setCurrentRepresentation(adapter.convertDataToRepresentationInfo(e.currentRepresentation)); + } + + /** + * Seek outside of the current period. + * @return {Promise} + */ + function prepareOuterPeriodPlaybackSeeking() { + return new Promise((resolve, reject) => { + try { + // Stop scheduling + scheduleController.clearScheduleTimer(); + + // Abort all ongoing requests + fragmentModel.abortRequests(); + + // buffering not complete anymore and abort current append operation to SourceBuffer + bufferController.prepareForPlaybackSeek() + .then(() => { + // Clear the buffers completely. + return bufferController.pruneAllSafely(); + }) + .then(() => { + resolve(); + }); + + } catch (e) { + reject(e); + } + }); + } + + /** + * ScheduleController indicates that an init segment needs to be fetched. + * @param {object} e + * @private + */ + function _onInitFragmentNeeded(e) { + // Event propagation may have been stopped (see MssHandler) + if (!e.sender) return; + + if (adapter.getIsTextTrack(mimeType) && !textController.isTextEnabled()) return; + + if (bufferController && e.representationId) { + if (!bufferController.appendInitSegmentFromCache(e.representationId)) { + // Init segment not in cache, send new request + const request = dashHandler ? dashHandler.getInitRequest(getMediaInfo(), representationController.getCurrentRepresentation()) : null; + if (request) { + fragmentModel.executeRequest(request); + } else { + _noValidRequest(); + scheduleController.setInitSegmentRequired(true); + } + } } - if (!e.error || e.error.code === Errors.SEGMENTS_UPDATE_FAILED_ERROR_CODE) { - // Update has been postponed, update nevertheless DVR info - const activeStreamId = playbackController.getStreamController().getActiveStreamInfo().id; - if (activeStreamId === streamInfo.id) { - addDVRMetric(); + } + + /** + * ScheduleController indicates that a media segment is needed + * @private + */ + function _onMediaFragmentNeeded() { + let request = null; + + const representation = representationController.getCurrentRepresentation(); + const isMediaFinished = dashHandler.isMediaFinished(representation, bufferingTime); + + // Check if the media is finished. 
If so, no need to schedule another request + if (isMediaFinished) { + const segmentIndex = dashHandler.getCurrentIndex(); + logger.debug(`Segment requesting for stream ${streamInfo.id} has finished`); + eventBus.trigger(Events.STREAM_REQUESTING_COMPLETED, { segmentIndex }, { + streamId: streamInfo.id, + mediaType: type + }); + scheduleController.clearScheduleTimer(); + return; + } + + // Don't schedule next fragments while pruning to avoid buffer inconsistencies + if (!bufferController.getIsPruningInProgress()) { + request = _getFragmentRequest(); + if (request) { + shouldUseExplicitTimeForRequest = false; + if (!isNaN(request.startTime + request.duration)) { + bufferingTime = request.startTime + request.duration; + } + request.delayLoadingTime = new Date().getTime() + scheduleController.getTimeToLoadDelay(); + scheduleController.setTimeToLoadDelay(0); } } + + if (request) { + logger.debug(`Next fragment request url for stream id ${streamInfo.id} and media type ${type} is ${request.url}`); + fragmentModel.executeRequest(request); + } else { + // Use case - Playing at the bleeding live edge and frag is not available yet. Cycle back around. + _noValidRequest(); + } } - function onQualityChanged(e) { - let representationInfo = getRepresentationInfo(e.newQuality); - scheduleController.setCurrentRepresentation(representationInfo); - dashMetrics.pushPlayListTraceMetrics(new Date(), PlayListTrace.REPRESENTATION_SWITCH_STOP_REASON); - dashMetrics.createPlaylistTraceMetrics(representationInfo.id, playbackController.getTime() * 1000, playbackController.getPlaybackRate()); + /** + * Get the init or media segment request using the DashHandler. + * @return {null|FragmentRequest|null} + * @private + */ + function _getFragmentRequest() { + const representationInfo = getRepresentationInfo(); + let request; + + if (isNaN(bufferingTime) || (getType() === Constants.FRAGMENTED_TEXT && !textController.isTextEnabled())) { + return null; + } + + // Use time just whenever is strictly needed + const useTime = shouldUseExplicitTimeForRequest; + + if (dashHandler) { + const representation = representationController && representationInfo ? representationController.getRepresentationForQuality(representationInfo.quality) : null; + + if (useTime) { + request = dashHandler.getSegmentRequestForTime(getMediaInfo(), representation, bufferingTime); + } else { + request = dashHandler.getNextSegmentRequest(getMediaInfo(), representation); + } + } + + return request; + } + + /** + * Whenever we can not generate a valid request we restart scheduling according to the timeouts defined in the settings. + * @private + */ + function _noValidRequest() { + logger.debug(`No valid request found for ${type}`); + scheduleController.startScheduleTimer(settings.get().streaming.lowLatencyEnabled ? 
settings.get().streaming.scheduling.lowLatencyTimeout : settings.get().streaming.scheduling.defaultTimeout); } - function onBufferLevelUpdated(e) { - dashMetrics.addBufferLevel(type, new Date(), e.bufferLevel * 1000); - const activeStreamId = playbackController.getStreamController().getActiveStreamInfo().id; - if (!manifestModel.getValue().doNotUpdateDVRWindowOnBufferUpdated && streamInfo.id === activeStreamId) { - addDVRMetric(); + function _onDataUpdateCompleted(e) { + if (!e.error) { + // Update representation if no error + scheduleController.setCurrentRepresentation(adapter.convertDataToRepresentationInfo(e.currentRepresentation)); + if (!bufferController.getIsBufferingCompleted()) { + bufferController.updateBufferTimestampOffset(e.currentRepresentation); + } } } - function onBufferLevelStateChanged(e) { + function _onBufferLevelStateChanged(e) { dashMetrics.addBufferState(type, e.state, scheduleController.getBufferTarget()); if (e.state === MetricsConstants.BUFFER_EMPTY && !playbackController.isSeeking()) { - // logger.info('Buffer is empty! Stalling!'); + logger.info('Buffer is empty! Stalling!'); dashMetrics.pushPlayListTraceMetrics(new Date(), PlayListTrace.REBUFFERING_REASON); } } - function onBufferCleared(e) { + function _onBufferCleared(e) { // Remove executed requests not buffered anymore fragmentModel.syncExecutedRequestsWithBufferedRange( bufferController.getBuffer().getAllBufferRanges(), streamInfo.duration); // If buffer removed ahead current time (QuotaExceededError or automatic buffer pruning) then adjust current index handler time - if (e.from > playbackController.getTime()) { - bufferingTime = e.from; - bufferPruned = true; + if (e.quotaExceeded && e.from > playbackController.getTime()) { + setExplicitBufferingTime(e.from); + } + + // (Re)start schedule once buffer has been pruned after a QuotaExceededError + if (e.hasEnoughSpaceToAppend && e.quotaExceeded) { + scheduleController.startScheduleTimer(); } + + } + + /** + * The quality has changed which means we have switched to a different representation. + * If we want to aggressively replace existing parts in the buffer we need to make sure that the new quality is higher than the already buffered one. 
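// Illustrative sketch (not part of the patch): how a seek is expected to be routed to the
// two new StreamProcessor entry points. A seek that stays inside the active period prunes
// the buffer around the target (inner), anything else clears it completely (outer). The
// dispatching function below is an assumption; only the two called methods come from this diff.
function handlePlaybackSeeking(seekTime) {
    const staysInPeriod = seekTime >= streamInfo.start &&
        seekTime < streamInfo.start + streamInfo.duration;
    const promises = getProcessors().map((processor) => {
        return staysInPeriod
            ? processor.prepareInnerPeriodPlaybackSeeking({ seekTime })
            : processor.prepareOuterPeriodPlaybackSeeking();
    });
    return Promise.all(promises);
}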
+ * @param {object} e + * @private + */ + function _onQualityChanged(e) { + const representationInfo = getRepresentationInfo(e.newQuality); + scheduleController.setCurrentRepresentation(representationInfo); + + // if we switch up in quality and need to replace existing parts in the buffer we need to adjust the buffer target + if (settings.get().streaming.buffer.fastSwitchEnabled) { + const time = playbackController.getTime(); + let safeBufferLevel = 1.5; + const request = fragmentModel.getRequests({ + state: FragmentModel.FRAGMENT_MODEL_EXECUTED, + time: time + safeBufferLevel, + threshold: 0 + })[0]; + + if (request && !adapter.getIsTextTrack(mimeType)) { + const bufferLevel = bufferController.getBufferLevel(); + const abandonmentState = abrController.getAbandonmentStateFor(streamInfo.id, type); + + if (request.quality < representationInfo.quality && bufferLevel >= safeBufferLevel && abandonmentState !== MetricsConstants.ABANDON_LOAD) { + setExplicitBufferingTime(time + safeBufferLevel); + scheduleController.setCheckPlaybackQuality(false); + } + } + } + + dashMetrics.pushPlayListTraceMetrics(new Date(), PlayListTrace.REPRESENTATION_SWITCH_STOP_REASON); + dashMetrics.createPlaylistTraceMetrics(representationInfo.id, playbackController.getTime() * 1000, playbackController.getPlaybackRate()); + } + + /** + * We have canceled the download of a fragment and need to adjust the buffer time or reload an init segment + * @param {object} e + */ + function _onFragmentLoadingAbandoned(e) { + logger.info('onFragmentLoadingAbandoned request: ' + e.request.url + ' has been aborted'); + + // we only need to handle this if we are not seeking or switching the tracks + if (!playbackController.isSeeking() && !scheduleController.getSwitchStrack()) { + logger.info('onFragmentLoadingAbandoned request: ' + e.request.url + ' has to be downloaded again, origin is not seeking process or switch track call'); + + // in case of an init segment we force the download of an init segment + if (e.request && e.request.isInitializationRequest()) { + scheduleController.setInitSegmentRequired(true); + } + + // in case of a media segment we reset the buffering time + else { + setExplicitBufferingTime(e.request.startTime + (e.request.duration / 2)); + } + + // In case of a seek the schedule controller was stopped and will be started once the buffer has been pruned. + scheduleController.startScheduleTimer(0); + } + } + + /** + * When a fragment has been loaded we need to start the schedule timer again in case of an error. + * @param {object} e + */ + function _onFragmentLoadingCompleted(e) { + logger.info('OnFragmentLoadingCompleted for stream id ' + streamInfo.id + ' and media type ' + type + ' - Url:', e.request ? e.request.url : 'undefined', e.request.range ? ', Range:' + e.request.range : ''); + + if (adapter.getIsTextTrack(mimeType)) { + scheduleController.startScheduleTimer(0); + } + + if (e.error && e.request.serviceLocation) { + setExplicitBufferingTime(e.request.startTime + (e.request.duration / 2)); + scheduleController.startScheduleTimer(0); + } + } + + /** + * Callback function triggered by the TextController whenever a track is changed for fragmented text. Will only be triggered if textracks have previously been disabled. 
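// Illustrative sketch (not part of the patch): why an abandoned or failed request is
// re-scheduled from its midpoint. For a segment starting at t = 20 s with a 4 s duration
// the explicit buffering time becomes 22 s, which still falls inside that segment, so the
// DashHandler resolves the same segment again on the next _onMediaFragmentNeeded() pass
// (the numbers are examples only).
const abortedRequest = { startTime: 20, duration: 4 };
setExplicitBufferingTime(abortedRequest.startTime + (abortedRequest.duration / 2)); // 22
scheduleController.startScheduleTimer(0);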
+ * @private + */ + function _onSetFragmentedTextAfterDisabled() { + setExplicitBufferingTime(playbackController.getTime()); + getScheduleController().startScheduleTimer(); } - function onQuotaExceeded(e) { - bufferingTime = e.quotaExceededTime; - bufferPruned = true; + /** + * Callback function triggered by the TextController whenever a track is changed for non fragmented text + * @param {object} e + * @private + */ + function _onSetNonFragmentedText(e) { + const currentTrackInfo = e.currentTrackInfo; + + if (!currentTrackInfo) { + return; + } + + const mInfo = mediaInfoArr.find((info) => { + return info.index === currentTrackInfo.index && info.lang === currentTrackInfo.lang; + }); + + if (mInfo) { + selectMediaInfo(mInfo) + .then(() => { + bufferController.setIsBufferingCompleted(false); + setExplicitBufferingTime(playbackController.getTime()); + scheduleController.setInitSegmentRequired(true); + scheduleController.startScheduleTimer(); + }); + } } - function addDVRMetric() { - const manifestInfo = streamInfo.manifestInfo; - const isDynamic = manifestInfo.isDynamic; - const range = timelineConverter.calcSegmentAvailabilityRange(representationController.getCurrentRepresentation(), isDynamic); - dashMetrics.addDVRInfo(getType(), playbackController.getTime(), manifestInfo, range); + function _onQuotaExceeded(e) { + // Stop scheduler (will be restarted once buffer is pruned) + setExplicitBufferingTime(e.quotaExceededTime); + scheduleController.clearScheduleTimer(); } function getRepresentationController() { @@ -309,10 +611,6 @@ function StreamProcessor(config) { return bufferController ? bufferController.getBuffer() : null; } - function setBuffer(buffer) { - bufferController.setBuffer(buffer); - } - function getBufferController() { return bufferController; } @@ -323,7 +621,7 @@ function StreamProcessor(config) { function updateStreamInfo(newStreamInfo) { streamInfo = newStreamInfo; - if (settings.get().streaming.useAppendWindow) { + if (!isBufferingCompleted()) { bufferController.updateAppendWindow(); } } @@ -332,6 +630,10 @@ function StreamProcessor(config) { return streamInfo; } + /** + * Called once the StreamProcessor is initialized and when the track is switched. We only have one StreamProcessor per media type. So we need to adjust the mediaInfo once we switch/select a track. 
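// Illustrative sketch (not part of the patch): the two text events handled above are
// expected to be fired by the TextController when the user toggles text tracks. The
// trigger below is an assumption about that side and only mirrors the payload fields
// actually read by _onSetNonFragmentedText (currentTrackInfo.index, currentTrackInfo.lang).
eventBus.trigger(Events.SET_NON_FRAGMENTED_TEXT, {
    currentTrackInfo: { index: 1, lang: 'en' }
});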
+ * @param {object} newMediaInfo + */ function selectMediaInfo(newMediaInfo) { if (newMediaInfo !== mediaInfo && (!newMediaInfo || !mediaInfo || (newMediaInfo.type === mediaInfo.type))) { mediaInfo = newMediaInfo; @@ -343,18 +645,18 @@ function StreamProcessor(config) { if (representationController) { const realAdaptation = representationController.getData(); const maxQuality = abrController.getTopQualityIndexFor(type, streamInfo.id); - const minIdx = abrController.getMinAllowedIndexFor(type); + const minIdx = abrController.getMinAllowedIndexFor(type, streamInfo.id); let quality, averageThroughput; let bitrate = null; - if ((realAdaptation === null || (realAdaptation.id != newRealAdaptation.id)) && type !== Constants.FRAGMENTED_TEXT) { + if ((realAdaptation === null || (realAdaptation.id !== newRealAdaptation.id)) && type !== Constants.FRAGMENTED_TEXT) { averageThroughput = abrController.getThroughputHistory().getAverageThroughput(type); bitrate = averageThroughput || abrController.getInitialBitrateFor(type); - quality = abrController.getQualityForBitrate(mediaInfo, bitrate); + quality = abrController.getQualityForBitrate(mediaInfo, bitrate, streamInfo.id); } else { - quality = abrController.getQualityFor(type); + quality = abrController.getQualityFor(type, streamInfo.id); } if (minIdx !== undefined && quality < minIdx) { @@ -363,8 +665,9 @@ function StreamProcessor(config) { if (quality > maxQuality) { quality = maxQuality; } - indexHandler.setMimeType(mediaInfo ? mediaInfo.mimeType : null); - representationController.updateData(newRealAdaptation, voRepresentations, type, quality); + return representationController.updateData(newRealAdaptation, voRepresentations, type, quality); + } else { + return Promise.resolve(); } } @@ -374,7 +677,7 @@ function StreamProcessor(config) { } if (selectNewMediaInfo) { - this.selectMediaInfo(newMediaInfo); + selectMediaInfo(newMediaInfo); } } @@ -391,11 +694,7 @@ function StreamProcessor(config) { } function setMediaSource(mediaSource) { - bufferController.setMediaSource(mediaSource, getMediaInfoArr()); - } - - function dischargePreBuffer() { - bufferController.dischargePreBuffer(); + bufferController.setMediaSource(mediaSource); } function getScheduleController() { @@ -428,42 +727,6 @@ function StreamProcessor(config) { return bufferController ? bufferController.getBufferLevel() : 0; } - function onInitFragmentNeeded(e) { - // Event propagation may have been stopped (see MssHandler) - if (!e.sender) return; - - if (adapter.getIsTextTrack(mimeType) && !textController.isTextEnabled()) return; - - if (bufferController && e.representationId) { - if (!bufferController.appendInitSegment(e.representationId)) { - // Init segment not in cache, send new request - const request = indexHandler ? 
indexHandler.getInitRequest(getMediaInfo(), representationController.getCurrentRepresentation()) : null; - scheduleController.processInitRequest(request); - } - } - } - - function onMediaFragmentNeeded(e) { - let request; - - // Don't schedule next fragments while pruning to avoid buffer inconsistencies - if (!bufferController.getIsPruningInProgress()) { - request = findNextRequest(e.seekTarget, e.replacement); - if (request) { - scheduleController.setSeekTarget(NaN); - if (!e.replacement) { - if (!isNaN(request.startTime + request.duration)) { - bufferingTime = request.startTime + request.duration; - } - request.delayLoadingTime = new Date().getTime() + scheduleController.getTimeToLoadDelay(); - scheduleController.setTimeToLoadDelay(0); - } - } - } - - scheduleController.processMediaRequest(request); - } - /** * Probe the next request. This is used in the CMCD model to get information about the upcoming request. Note: No actual request is performed here. * @return {FragmentRequest|null} @@ -474,7 +737,7 @@ function StreamProcessor(config) { const representation = representationController && representationInfo ? representationController.getRepresentationForQuality(representationInfo.quality) : null; - let request = indexHandler.getNextSegmentRequestIdempotent( + let request = dashHandler.getNextSegmentRequestIdempotent( getMediaInfo(), representation ); @@ -482,82 +745,33 @@ function StreamProcessor(config) { return request; } - function findNextRequest(seekTarget, requestToReplace) { - const representationInfo = getRepresentationInfo(); - const hasSeekTarget = !isNaN(seekTarget); - const currentTime = playbackController.getNormalizedTime(); - let time = hasSeekTarget ? seekTarget : bufferingTime; - let bufferIsDivided = false; - let request; - - if (isNaN(time) || (getType() === Constants.FRAGMENTED_TEXT && !textController.isTextEnabled())) { - return null; - } - /** - * This is critical for IE/Safari/EDGE - * */ - if (bufferController) { - let range = bufferController.getRangeAt(time); - const playingRange = bufferController.getRangeAt(currentTime); - if ((range !== null || playingRange !== null) && !hasSeekTarget) { - if (!range || (playingRange && playingRange.start != range.start && playingRange.end != range.end)) { - const hasDiscontinuities = bufferController.getBuffer().hasDiscontinuitiesAfter(currentTime); - if (hasDiscontinuities && getType() !== Constants.FRAGMENTED_TEXT) { - fragmentModel.removeExecutedRequestsAfterTime(playingRange.end); - bufferIsDivided = true; - } - } - } - } - - if (requestToReplace) { - time = requestToReplace.startTime + (requestToReplace.duration / 2); - request = getFragmentRequest(representationInfo, time, { - timeThreshold: 0, - ignoreIsFinished: true - }); - } else { - // Use time just whenever is strictly needed - const useTime = hasSeekTarget || bufferPruned || bufferIsDivided; - request = getFragmentRequest(representationInfo, - useTime ? 
time : undefined, { - keepIdx: !useTime - }); - bufferPruned = false; - - // Then, check if this request was downloaded or not - while (request && request.action !== FragmentRequest.ACTION_COMPLETE && fragmentModel.isFragmentLoaded(request)) { - // loop until we found not loaded fragment, or no fragment - request = getFragmentRequest(representationInfo); - } - } - - return request; - } - - function onMediaFragmentLoaded(e) { + function _onMediaFragmentLoaded(e) { const chunk = e.chunk; const bytes = chunk.bytes; const quality = chunk.quality; const currentRepresentation = getRepresentationInfo(quality); - const voRepresentation = representationController && currentRepresentation ? representationController.getRepresentationForQuality(currentRepresentation.quality) : null; - const eventStreamMedia = adapter.getEventsFor(currentRepresentation.mediaInfo); - const eventStreamTrack = adapter.getEventsFor(currentRepresentation, voRepresentation); - if (eventStreamMedia && eventStreamMedia.length > 0 || eventStreamTrack && eventStreamTrack.length > 0) { - const request = fragmentModel.getRequests({ - state: FragmentModel.FRAGMENT_MODEL_EXECUTED, - quality: quality, - index: chunk.index - })[0]; - - const events = handleInbandEvents(bytes, request, eventStreamMedia, eventStreamTrack); - eventBus.trigger(Events.INBAND_EVENTS, - { events: events }, - { streamId: streamInfo.id } - ); + // If we switch tracks this event might be fired after the representations in the RepresentationController have been updated according to the new MediaInfo. + // In this case there will be no currentRepresentation and voRepresentation matching the "old" quality + if (currentRepresentation && voRepresentation) { + const eventStreamMedia = adapter.getEventsFor(currentRepresentation.mediaInfo); + const eventStreamTrack = adapter.getEventsFor(currentRepresentation, voRepresentation); + + if (eventStreamMedia && eventStreamMedia.length > 0 || eventStreamTrack && eventStreamTrack.length > 0) { + const request = fragmentModel.getRequests({ + state: FragmentModel.FRAGMENT_MODEL_EXECUTED, + quality: quality, + index: chunk.index + })[0]; + + const events = handleInbandEvents(bytes, request, eventStreamMedia, eventStreamTrack); + eventBus.trigger(Events.INBAND_EVENTS, + { events: events }, + { streamId: streamInfo.id } + ); + } } } @@ -598,160 +812,137 @@ function StreamProcessor(config) { } } - function createBuffer(previousBuffers) { - return (getBuffer() || bufferController ? bufferController.createBuffer(mediaInfoArr, previousBuffers) : null); - } - - function switchTrackAsked() { - scheduleController.switchTrackAsked(); + function createBufferSinks(previousBuffers) { + return (getBuffer() || bufferController ? bufferController.createBufferSink(mediaInfo, previousBuffers) : Promise.resolve(null)); } - function createBufferControllerForType(type) { - let controller = null; + function prepareTrackSwitch() { + logger.debug(`Preparing track switch for type ${type}`); + const shouldReplace = type === Constants.FRAGMENTED_TEXT || (settings.get().streaming.trackSwitchMode[type] === Constants.TRACK_SWITCH_MODE_ALWAYS_REPLACE && playbackController.getTimeToStreamEnd(streamInfo) > settings.get().streaming.buffer.stallThreshold); - if (!type) { - errHandler.error(new DashJSError(Errors.MEDIASOURCE_TYPE_UNSUPPORTED_CODE, Errors.MEDIASOURCE_TYPE_UNSUPPORTED_MESSAGE + 'not properly defined')); - return null; + // when buffering is completed and we are not supposed to replace anything do nothing. 
Still we need to trigger preloading again + if (bufferController.getIsBufferingCompleted() && !shouldReplace) { + eventBus.trigger(Events.BUFFERING_COMPLETED, {}, { streamId: streamInfo.id, mediaType: type }) + return; } - if (type === Constants.VIDEO || type === Constants.AUDIO) { - controller = BufferController(context).create({ - streamInfo: streamInfo, - type: type, - mediaPlayerModel: mediaPlayerModel, - manifestModel: manifestModel, - fragmentModel: fragmentModel, - errHandler: errHandler, - mediaController: mediaController, - representationController: representationController, - adapter: adapter, - textController: textController, - abrController: abrController, - playbackController: playbackController, - settings: settings - }); - } else { - controller = TextBufferController(context).create({ - streamInfo: streamInfo, - type: type, - mimeType: mimeType, - mediaPlayerModel: mediaPlayerModel, - manifestModel: manifestModel, - fragmentModel: fragmentModel, - errHandler: errHandler, - mediaController: mediaController, - representationController: representationController, - adapter: adapter, - textController: textController, - abrController: abrController, - playbackController: playbackController, - settings: settings - }); - } + // We stop the schedule controller and signal a track switch. That way we request a new init segment next + scheduleController.clearScheduleTimer(); + scheduleController.setSwitchTrack(true); - return controller; - } - - - function getLiveStartTime() { - if (!isDynamic) return NaN; - if (!liveEdgeFinder) return NaN; + // when we are supposed to replace it does not matter if buffering is already completed + if (shouldReplace) { + // Inform other classes like the GapController that we are replacing existing stuff + eventBus.trigger(Events.TRACK_REPLACEMENT_STARTED, { + mediaType: type, + streamId: streamInfo.id + }, { mediaType: type, streamId: streamInfo.id }); - let liveStartTime = NaN; - const currentRepresentationInfo = getRepresentationInfo(); - const liveEdge = liveEdgeFinder.getLiveEdge(currentRepresentationInfo); + // Abort the current request it will be removed from the buffer anyways + fragmentModel.abortRequests(); - if (isNaN(liveEdge)) { - return NaN; + // Abort appending segments to the buffer. Also adjust the appendWindow as we might have been in the progress of prebuffering stuff. + bufferController.prepareForReplacementTrackSwitch(mediaInfo.codec) + .then(() => { + // Timestamp offset couldve been changed by preloading period + const representationInfo = getRepresentationInfo(); + return bufferController.updateBufferTimestampOffset(representationInfo); + }) + .then(() => { + _bufferClearedForReplacementTrackSwitch(); + }) + .catch(() => { + _bufferClearedForReplacementTrackSwitch(); + }); + } else { + // We do not replace anything that is already in the buffer. Still we need to prepare the buffer for the track switch + bufferController.prepareForNonReplacementTrackSwitch(mediaInfo.codec) + .then(() => { + _bufferClearedForNonReplacementTrackSwitch(); + }) + .catch + (() => { + _bufferClearedForNonReplacementTrackSwitch(); + }); } + } - const request = findRequestForLiveEdge(liveEdge, currentRepresentationInfo); + /** + * For an instant track switch we need to adjust the buffering time after the buffer has been pruned. 
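// Illustrative sketch (not part of the patch): AbrController lookups used in this file are
// now scoped to a stream id instead of being global. The helper below is hypothetical and
// only collects the signatures as they are called throughout this diff.
function _dumpAbrStateForStream() {
    const streamId = streamInfo.id;
    logger.debug('top quality index: ' + abrController.getTopQualityIndexFor(type, streamId));
    logger.debug('min allowed index: ' + abrController.getMinAllowedIndexFor(type, streamId));
    logger.debug('current quality:   ' + abrController.getQualityFor(type, streamId));
    logger.debug('abandonment state: ' + abrController.getAbandonmentStateFor(streamId, type));
    // Cleanup is per stream as well: abrController.clearDataForStream(streamId) on Stream.reset()
    // and abrController.unRegisterStreamType(streamId, type) on StreamProcessor.reset().
}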
+    /**
+     * For an instant track switch we need to adjust the buffering time after the buffer has been pruned.
+     * @private
+     */
+    function _bufferClearedForReplacementTrackSwitch() {
+        const targetTime = playbackController.getTime();

-        if (request) {
-            // When low latency mode is selected but browser doesn't support fetch
-            // start at the beginning of the segment to avoid consuming the whole buffer
-            if (settings.get().streaming.lowLatencyEnabled) {
-                liveStartTime = request.duration < mediaPlayerModel.getLiveDelay() ? request.startTime : request.startTime + request.duration - mediaPlayerModel.getLiveDelay();
-            } else {
-                liveStartTime = request.startTime;
-            }
+        if (settings.get().streaming.buffer.flushBufferAtTrackSwitch) {
+            // For some devices (like chromecast) it is necessary to seek the video element to reset the internal decoding buffer,
+            // otherwise audio track switch will be effective only once after previous buffered track is consumed
+            playbackController.seek(targetTime + 0.001, false, true);
         }
-        return liveStartTime;
+        setExplicitBufferingTime(targetTime);
+        bufferController.setSeekTarget(targetTime);
+        scheduleController.startScheduleTimer();
     }

-    function findRequestForLiveEdge(liveEdge, currentRepresentationInfo) {
-        try {
-            let request = null;
-            let liveDelay = playbackController.getLiveDelay();
-            const dvrWindowSize = !isNaN(streamInfo.manifestInfo.DVRWindowSize) ? streamInfo.manifestInfo.DVRWindowSize : liveDelay;
-            const dvrWindowSafetyMargin = 0.1 * dvrWindowSize;
-            let startTime;
-
-            // Make sure that we have at least a valid request for the end of the DVR window, otherwise we might try forever
-            if (!isFinite(dvrWindowSize) || getFragmentRequest(currentRepresentationInfo, liveEdge - dvrWindowSize + dvrWindowSafetyMargin, {
-                ignoreIsFinished: true
-            })) {
-
-                // Try to find a request as close as possible to the targeted live edge
-                while (!request && liveDelay <= dvrWindowSize) {
-                    startTime = liveEdge - liveDelay;
-                    request = getFragmentRequest(currentRepresentationInfo, startTime, {
-                        ignoreIsFinished: true
-                    });
-                    if (!request) {
-                        liveDelay += 1; // Increase by one second for each iteration
-                    }
-                }
-            }
+    function _bufferClearedForNonReplacementTrackSwitch() {
+        const time = playbackController.getTime();
+        const targetTime = bufferController.getContinuousBufferTimeForTargetTime(time);

-            if (request) {
-                playbackController.setLiveDelay(liveDelay, true);
-            }
-            logger.debug('live edge: ' + liveEdge + ', live delay: ' + liveDelay + ', live target: ' + startTime);
-            return request;
-        } catch (e) {
-            return null;
-        }
+        setExplicitBufferingTime(targetTime);
+        scheduleController.startScheduleTimer();
     }

-    function onSeekTarget(e) {
-        bufferingTime = e.time;
-        scheduleController.setSeekTarget(e.time);
-    }

-    function setBufferingTime(value) {
-        bufferingTime = value;
-    }
+    function _createBufferControllerForType(type) {
+        let controller = null;
+
+        if (!type) {
+            errHandler.error(new DashJSError(Errors.MEDIASOURCE_TYPE_UNSUPPORTED_CODE, Errors.MEDIASOURCE_TYPE_UNSUPPORTED_MESSAGE + 'not properly defined'));
+            return null;
+        }

-    function resetIndexHandler() {
-        if (indexHandler) {
-            indexHandler.resetIndex();
+        if (type === Constants.TEXT) {
+            controller = NotFragmentedTextBufferController(context).create({
+                streamInfo,
+                type,
+                mimeType,
+                fragmentModel,
+                textController,
+                errHandler,
+                settings
+            });
+        } else {
+            controller = BufferController(context).create({
+                streamInfo,
+                type,
+                mediaPlayerModel,
+                manifestModel,
+                fragmentModel,
+                errHandler,
+                mediaController,
+                representationController,
+                adapter,
+                textController,
+                abrController,
+                playbackController,
+                settings
+            });
} - } - function getInitRequest(quality) { - checkInteger(quality); - const representation = representationController ? representationController.getRepresentationForQuality(quality) : null; - return indexHandler ? indexHandler.getInitRequest(getMediaInfo(), representation) : null; + return controller; } - function getFragmentRequest(representationInfo, time, options) { - let fragRequest = null; - - if (indexHandler) { - const representation = representationController && representationInfo ? representationController.getRepresentationForQuality(representationInfo.quality) : null; - - // if time and options are undefined, it means the next segment is requested - // otherwise, the segment at this specific time is requested. - if (time !== undefined && options !== undefined) { - fragRequest = indexHandler.getSegmentRequestForTime(getMediaInfo(), representation, time, options); - } else { - fragRequest = indexHandler.getNextSegmentRequest(getMediaInfo(), representation); - } + function _onSeekTarget(e) { + if (e && e.time) { + setExplicitBufferingTime(e.time); + bufferController.setSeekTarget(e.time); } + } - return fragRequest; + function setExplicitBufferingTime(value) { + bufferingTime = value; + shouldUseExplicitTimeForRequest = true; } function finalisePlayList(time, reason) { @@ -759,38 +950,34 @@ function StreamProcessor(config) { } instance = { - initialize: initialize, - getStreamId: getStreamId, - getType: getType, - isUpdating: isUpdating, - getBufferController: getBufferController, - getFragmentModel: getFragmentModel, - getScheduleController: getScheduleController, - getRepresentationController: getRepresentationController, - getRepresentationInfo: getRepresentationInfo, - getBufferLevel: getBufferLevel, - isBufferingCompleted: isBufferingCompleted, - createBuffer: createBuffer, - updateStreamInfo: updateStreamInfo, - getStreamInfo: getStreamInfo, - selectMediaInfo: selectMediaInfo, - addMediaInfo: addMediaInfo, - getLiveStartTime: getLiveStartTime, - switchTrackAsked: switchTrackAsked, - getMediaInfoArr: getMediaInfoArr, - getMediaInfo: getMediaInfo, - getMediaSource: getMediaSource, - setMediaSource: setMediaSource, - dischargePreBuffer: dischargePreBuffer, - getBuffer: getBuffer, - setBuffer: setBuffer, - setBufferingTime: setBufferingTime, - resetIndexHandler: resetIndexHandler, - getInitRequest: getInitRequest, - getFragmentRequest: getFragmentRequest, - finalisePlayList: finalisePlayList, - probeNextRequest: probeNextRequest, - reset: reset + initialize, + getStreamId, + getType, + isUpdating, + getBufferController, + getFragmentModel, + getScheduleController, + getRepresentationController, + getRepresentationInfo, + getBufferLevel, + isBufferingCompleted, + createBufferSinks, + updateStreamInfo, + getStreamInfo, + selectMediaInfo, + addMediaInfo, + prepareTrackSwitch, + getMediaInfoArr, + getMediaInfo, + getMediaSource, + setMediaSource, + getBuffer, + setExplicitBufferingTime, + finalisePlayList, + probeNextRequest, + prepareInnerPeriodPlaybackSeeking, + prepareOuterPeriodPlaybackSeeking, + reset }; setup(); diff --git a/src/streaming/constants/Constants.js b/src/streaming/constants/Constants.js index 3d346ce91d..89d81a0910 100644 --- a/src/streaming/constants/Constants.js +++ b/src/streaming/constants/Constants.js @@ -285,6 +285,13 @@ class Constants { this.ARRAY_BUFFER = 'ArrayBuffer'; this.DVB_REPORTING_URL = 'dvb:reportingUrl'; this.DVB_PROBABILITY = 'dvb:probability'; + this.VIDEO_ELEMENT_READY_STATES = { + HAVE_NOTHING: 0, + HAVE_METADATA: 1, + HAVE_CURRENT_DATA: 2, + 
HAVE_FUTURE_DATA: 3, + HAVE_ENOUGH_DATA: 4 + }; } constructor() { diff --git a/src/streaming/controllers/AbrController.js b/src/streaming/controllers/AbrController.js index 71ab8a9f6d..c160f5472d 100644 --- a/src/streaming/controllers/AbrController.js +++ b/src/streaming/controllers/AbrController.js @@ -43,8 +43,8 @@ import SwitchRequestHistory from '../rules/SwitchRequestHistory'; import DroppedFramesHistory from '../rules/DroppedFramesHistory'; import ThroughputHistory from '../rules/ThroughputHistory'; import Debug from '../../core/Debug'; -import { HTTPRequest } from '../vo/metrics/HTTPRequest'; -import { checkInteger } from '../utils/SupervisorTools'; +import {HTTPRequest} from '../vo/metrics/HTTPRequest'; +import {checkInteger} from '../utils/SupervisorTools'; const DEFAULT_VIDEO_BITRATE = 1000; const DEFAULT_AUDIO_BITRATE = 100; @@ -87,33 +87,15 @@ function AbrController() { resetInitialSettings(); } - function registerStreamType(type, streamProcessor) { - switchHistoryDict[type] = switchHistoryDict[type] || SwitchRequestHistory(context).create(); - streamProcessorDict[type] = streamProcessor; - abandonmentStateDict[type] = abandonmentStateDict[type] || {}; - abandonmentStateDict[type].state = MetricsConstants.ALLOW_LOAD; - isUsingBufferOccupancyABRDict[type] = false; - isUsingL2AABRDict[type] = false; - isUsingLoLPBRDict[type] = false; - eventBus.on(Events.LOADING_PROGRESS, onFragmentLoadProgress, instance); - if (type === Constants.VIDEO) { - eventBus.on(Events.QUALITY_CHANGE_RENDERED, onQualityChangeRendered, instance); - droppedFramesHistory = droppedFramesHistory || DroppedFramesHistory(context).create(); - setElementSize(); - } - eventBus.on(Events.METRIC_ADDED, onMetricAdded, instance); - eventBus.on(Events.PERIOD_SWITCH_COMPLETED, createAbrRulesCollection, instance); - - throughputHistory = throughputHistory || ThroughputHistory(context).create({ + /** + * Initialize everything that is not Stream specific. We only have one instance of the ABR Controller for all periods. 
+ */ + function initialize() { + droppedFramesHistory = DroppedFramesHistory(context).create(); + throughputHistory = ThroughputHistory(context).create({ settings: settings }); - } - - function unRegisterStreamType(type) { - delete streamProcessorDict[type]; - } - function createAbrRulesCollection() { abrRulesCollection = ABRRulesCollection(context).create({ dashMetrics: dashMetrics, mediaPlayerModel: mediaPlayerModel, @@ -121,6 +103,65 @@ function AbrController() { }); abrRulesCollection.initialize(); + + eventBus.on(Events.QUALITY_CHANGE_RENDERED, _onQualityChangeRendered, instance); + eventBus.on(Events.LOADING_PROGRESS, onFragmentLoadProgress, instance); + eventBus.on(Events.METRIC_ADDED, _onMetricAdded, instance); + } + + /** + * Whenever a StreamProcessor is created it is added to the list of streamProcessorDict + * In addition, the corresponding objects for this object and its stream id are created + * @param {object} type + * @param {object} streamProcessor + */ + function registerStreamType(type, streamProcessor) { + const streamId = streamProcessor.getStreamInfo().id; + + if (!streamProcessorDict[streamId]) { + streamProcessorDict[streamId] = {}; + } + + if (!switchHistoryDict[streamId]) { + switchHistoryDict[streamId] = {}; + } + + if (!abandonmentStateDict[streamId]) { + abandonmentStateDict[streamId] = {}; + } + + switchHistoryDict[streamId][type] = SwitchRequestHistory(context).create(); + streamProcessorDict[streamId][type] = streamProcessor; + + abandonmentStateDict[streamId][type] = {}; + abandonmentStateDict[streamId][type].state = MetricsConstants.ALLOW_LOAD; + + isUsingBufferOccupancyABRDict[type] = isUsingBufferOccupancyABRDict[type] || false; + isUsingL2AABRDict[type] = isUsingL2AABRDict[type] || false; + isUsingLoLPBRDict[type] = isUsingLoLPBRDict[type] || false; + + if (type === Constants.VIDEO) { + setElementSize(); + } + } + + function unRegisterStreamType(streamId, type) { + try { + if (streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { + delete streamProcessorDict[streamId][type]; + } + + if (switchHistoryDict[streamId] && switchHistoryDict[streamId][type]) { + delete switchHistoryDict[streamId][type]; + } + + if (abandonmentStateDict[streamId] && abandonmentStateDict[streamId][type]) { + delete abandonmentStateDict[streamId][type]; + } + + } catch (e) { + + } } function resetInitialSettings() { @@ -132,9 +173,14 @@ function AbrController() { isUsingBufferOccupancyABRDict = {}; isUsingL2AABRDict = {}; isUsingLoLPBRDict = {}; + if (windowResizeEventCalled === undefined) { windowResizeEventCalled = false; } + if (droppedFramesHistory) { + droppedFramesHistory.reset(); + } + playbackIndex = undefined; droppedFramesHistory = undefined; throughputHistory = undefined; @@ -147,9 +193,8 @@ function AbrController() { resetInitialSettings(); eventBus.off(Events.LOADING_PROGRESS, onFragmentLoadProgress, instance); - eventBus.off(Events.QUALITY_CHANGE_RENDERED, onQualityChangeRendered, instance); - eventBus.off(Events.METRIC_ADDED, onMetricAdded, instance); - eventBus.off(Events.PERIOD_SWITCH_COMPLETED, createAbrRulesCollection, instance); + eventBus.off(Events.QUALITY_CHANGE_RENDERED, _onQualityChangeRendered, instance); + eventBus.off(Events.METRIC_ADDED, _onMetricAdded, instance); if (abrRulesCollection) { abrRulesCollection.reset(); @@ -188,16 +233,16 @@ function AbrController() { } } - function onQualityChangeRendered(e) { + function _onQualityChangeRendered(e) { if (e.mediaType === Constants.VIDEO) { if (playbackIndex !== undefined) { - 
droppedFramesHistory.push(playbackIndex, videoModel.getPlaybackQuality()); + droppedFramesHistory.push(e.streamId, playbackIndex, videoModel.getPlaybackQuality()); } playbackIndex = e.newQuality; } } - function onMetricAdded(e) { + function _onMetricAdded(e) { if (e.metric === MetricsConstants.HTTP_REQUEST && e.value && e.value.type === HTTPRequest.MEDIA_SEGMENT_TYPE && (e.mediaType === Constants.AUDIO || e.mediaType === Constants.VIDEO)) { throughputHistory.push(e.mediaType, e.value, settings.get().streaming.abr.useDeadTimeLatency); } @@ -207,33 +252,40 @@ function AbrController() { } } - function getTopQualityIndexFor(type, id) { + /** + * Returns the highest possible index taking limitations like maxBitrate and portal size into account. + * @param {string} type + * @param {string} streamId + * @return {number} + */ + function getTopQualityIndexFor(type, streamId) { let idx; - topQualities[id] = topQualities[id] || {}; + topQualities[streamId] = topQualities[streamId] || {}; - if (!topQualities[id].hasOwnProperty(type)) { - topQualities[id][type] = 0; + if (!topQualities[streamId].hasOwnProperty(type)) { + topQualities[streamId][type] = 0; } - idx = checkMaxBitrate(topQualities[id][type], type); - idx = checkMaxRepresentationRatio(idx, type, topQualities[id][type]); - idx = checkPortalSize(idx, type); + idx = checkMaxBitrate(topQualities[streamId][type], type, streamId); + idx = checkMaxRepresentationRatio(idx, type, topQualities[streamId][type]); + idx = checkPortalSize(idx, type, streamId); return idx; } /** * Gets top BitrateInfo for the player * @param {string} type - 'video' or 'audio' are the type options. + * @param {string} streamId - Id of the stream * @returns {BitrateInfo | null} */ - function getTopBitrateInfoFor(type) { - if (type && streamProcessorDict && streamProcessorDict[type]) { - const streamInfo = streamProcessorDict[type].getStreamInfo(); - if (streamInfo && streamInfo.id) { - const idx = getTopQualityIndexFor(type, streamInfo.id); - const bitrates = getBitrateList(streamProcessorDict[type].getMediaInfo()); - return bitrates[idx] ? bitrates[idx] : null; - } + function getTopBitrateInfoFor(type, streamId = null) { + if (!streamId) { + streamId = streamController.getActiveStreamInfo().id; + } + if (type && streamProcessorDict && streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { + const idx = getTopQualityIndexFor(type, streamId); + const bitrates = getBitrateList(streamProcessorDict[streamId][type].getMediaInfo()); + return bitrates[idx] ? 
bitrates[idx] : null; } return null; } @@ -245,9 +297,11 @@ function AbrController() { */ function getInitialBitrateFor(type) { checkConfig(); + if (type === Constants.TEXT || type === Constants.FRAGMENTED_TEXT) { return NaN; } + const savedBitrate = domStorage.getSavedBitrateSettings(type); let configBitrate = settings.get().streaming.abr.initialBitrate[type]; let configRatio = settings.get().streaming.abr.initialRepresentationRatio[type]; @@ -279,23 +333,23 @@ function AbrController() { return settings.get().streaming.abr.minBitrate[type]; } - function getMaxAllowedIndexFor(type) { + function getMaxAllowedIndexFor(type, streamId) { const maxBitrate = getMaxAllowedBitrateFor(type); if (maxBitrate > -1) { - return getQualityForBitrate(streamProcessorDict[type].getMediaInfo(), maxBitrate); + return getQualityForBitrate(streamProcessorDict[streamId][type].getMediaInfo(), maxBitrate, streamId); } else { return undefined; } } - function getMinAllowedIndexFor(type) { + function getMinAllowedIndexFor(type, streamId) { const minBitrate = getMinAllowedBitrateFor(type); if (minBitrate > -1) { - const mediaInfo = streamProcessorDict[type].getMediaInfo(); + const mediaInfo = streamProcessorDict[streamId][type].getMediaInfo(); const bitrateList = getBitrateList(mediaInfo); // This returns the quality index <= for the given bitrate - let minIdx = getQualityForBitrate(mediaInfo, minBitrate); + let minIdx = getQualityForBitrate(mediaInfo, minBitrate, streamId); if (bitrateList[minIdx] && minIdx < bitrateList.length - 1 && bitrateList[minIdx].bitrate < minBitrate * 1000) { minIdx++; // Go to the next bitrate } @@ -305,16 +359,14 @@ function AbrController() { } } - function checkPlaybackQuality(type) { - if (type && streamProcessorDict && streamProcessorDict[type]) { - const streamInfo = streamProcessorDict[type].getStreamInfo(); - const streamId = streamInfo ? streamInfo.id : null; - const oldQuality = getQualityFor(type); + function checkPlaybackQuality(type, streamId) { + if (type && streamProcessorDict && streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { + const oldQuality = getQualityFor(type, streamId); const rulesContext = RulesContext(context).create({ abrController: instance, - streamProcessor: streamProcessorDict[type], + streamProcessor: streamProcessorDict[streamId][type], currentValue: oldQuality, - switchHistory: switchHistoryDict[type], + switchHistory: switchHistoryDict[streamId][type], droppedFramesHistory: droppedFramesHistory, useBufferOccupancyABR: useBufferOccupancyABR(type), useL2AABR: useL2AABR(type), @@ -325,14 +377,16 @@ function AbrController() { if (droppedFramesHistory) { const playbackQuality = videoModel.getPlaybackQuality(); if (playbackQuality) { - droppedFramesHistory.push(playbackIndex, playbackQuality); + droppedFramesHistory.push(streamId, playbackIndex, playbackQuality); } } + if (!!settings.get().streaming.abr.autoSwitchBitrate[type]) { - const minIdx = getMinAllowedIndexFor(type); + const minIdx = getMinAllowedIndexFor(type, streamId); const topQualityIdx = getTopQualityIndexFor(type, streamId); const switchRequest = abrRulesCollection.getMaxQuality(rulesContext); let newQuality = switchRequest.quality; + if (minIdx !== undefined && ((newQuality > SwitchRequest.NO_CHANGE) ? 
newQuality : oldQuality) < minIdx) { newQuality = minIdx; } @@ -340,11 +394,11 @@ function AbrController() { newQuality = topQualityIdx; } - switchHistoryDict[type].push({ oldValue: oldQuality, newValue: newQuality }); + switchHistoryDict[streamId][type].push({ oldValue: oldQuality, newValue: newQuality }); if (newQuality > SwitchRequest.NO_CHANGE && newQuality != oldQuality) { - if (abandonmentStateDict[type].state === MetricsConstants.ALLOW_LOAD || newQuality > oldQuality) { - changeQuality(type, oldQuality, newQuality, topQualityIdx, switchRequest.reason); + if (abandonmentStateDict[streamId][type].state === MetricsConstants.ALLOW_LOAD || newQuality > oldQuality) { + changeQuality(type, oldQuality, newQuality, topQualityIdx, switchRequest.reason, streamId); } } else if (settings.get().debug.logLevel === Debug.LOG_LEVEL_DEBUG) { const bufferLevel = dashMetrics.getCurrentBufferLevel(type, true); @@ -354,32 +408,40 @@ function AbrController() { } } - function setPlaybackQuality(type, streamInfo, newQuality, reason) { - const id = streamInfo.id; - const oldQuality = getQualityFor(type); + function setPlaybackQuality(type, streamInfo, newQuality, reason = null) { + const streamId = streamInfo.id; + const oldQuality = getQualityFor(type, streamId); checkInteger(newQuality); - const topQualityIdx = getTopQualityIndexFor(type, id); + const topQualityIdx = getTopQualityIndexFor(type, streamId); + const bitrateInfo = _getBitrateForQuality(type, streamId, newQuality); + + eventBus.trigger(Events.SETTING_PLAYBACK_QUALITY, { + newQuality, + streamInfo, + mediaType: type, + bitrateInfo + }) + if (newQuality !== oldQuality && newQuality >= 0 && newQuality <= topQualityIdx) { - changeQuality(type, oldQuality, newQuality, topQualityIdx, reason); + changeQuality(type, oldQuality, newQuality, topQualityIdx, reason, streamId); } } - function changeQuality(type, oldQuality, newQuality, topQualityIdx, reason) { - if (type && streamProcessorDict[type]) { - const streamInfo = streamProcessorDict[type].getStreamInfo(); - const id = streamInfo ? streamInfo.id : null; - if (settings.get().debug.logLevel === Debug.LOG_LEVEL_DEBUG) { - const bufferLevel = dashMetrics.getCurrentBufferLevel(type); - logger.info('[' + type + '] switch from ' + oldQuality + ' to ' + newQuality + '/' + topQualityIdx + ' (buffer: ' + bufferLevel + ') ' + (reason ? JSON.stringify(reason) : '.')); - } - setQualityFor(type, id, newQuality); + function changeQuality(type, oldQuality, newQuality, topQualityIdx, reason, streamId) { + if (type && streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { + const streamInfo = streamProcessorDict[streamId][type].getStreamInfo(); + const bufferLevel = dashMetrics.getCurrentBufferLevel(type); + logger.info('[' + type + '] switch from ' + oldQuality + ' to ' + newQuality + '/' + topQualityIdx + ' (buffer: ' + bufferLevel + ') ' + (reason ? 
JSON.stringify(reason) : '.')); + + setQualityFor(type, newQuality, streamId); eventBus.trigger(Events.QUALITY_CHANGE_REQUESTED, { - oldQuality: oldQuality, - newQuality: newQuality, - reason: reason + oldQuality, + newQuality, + reason, + streamInfo }, { streamId: streamInfo.id, mediaType: type } ); @@ -390,23 +452,36 @@ function AbrController() { } } - function setAbandonmentStateFor(type, state) { - abandonmentStateDict[type].state = state; + function _getBitrateForQuality(type, streamId, quality) { + try { + const streamInfo = streamController.getStreamById(streamId).getStreamInfo(); + const mediaInfo = adapter.getMediaInfoForType(streamInfo, type) + const bitrateList = getBitrateList(mediaInfo); + + return bitrateList[quality]; + } catch (e) { + return 0; + } + } + + function setAbandonmentStateFor(streamId, type, state) { + abandonmentStateDict[streamId][type].state = state; } - function getAbandonmentStateFor(type) { - return abandonmentStateDict[type] ? abandonmentStateDict[type].state : null; + function getAbandonmentStateFor(streamId, type) { + return abandonmentStateDict[streamId] && abandonmentStateDict[streamId][type] ? abandonmentStateDict[streamId][type].state : null; } /** * @param {MediaInfo} mediaInfo * @param {number} bitrate A bitrate value, kbps - * @param {number} latency Expected latency of connection, ms + * @param {String} streamId Period ID + * @param {number|null} latency Expected latency of connection, ms * @returns {number} A quality index <= for the given bitrate * @memberof AbrController# */ - function getQualityForBitrate(mediaInfo, bitrate, latency) { - const voRepresentation = mediaInfo && mediaInfo.type ? streamProcessorDict[mediaInfo.type].getRepresentationInfo() : null; + function getQualityForBitrate(mediaInfo, bitrate, streamId, latency = null) { + const voRepresentation = mediaInfo && mediaInfo.type ? streamProcessorDict[streamId][mediaInfo.type].getRepresentationInfo() : null; if (settings.get().streaming.abr.useDeadTimeLatency && latency && voRepresentation && voRepresentation.fragmentDuration) { latency = latency / 1000; @@ -526,15 +601,16 @@ function AbrController() { const streamId = mediaInfo.streamInfo.id; const max = mediaInfo.representationCount - 1; - setTopQualityIndex(type, streamId, max); + topQualities[streamId] = topQualities[streamId] || {}; + topQualities[streamId][type] = max; return max; } function isPlayingAtTopQuality(streamInfo) { const streamId = streamInfo ? streamInfo.id : null; - const audioQuality = getQualityFor(Constants.AUDIO); - const videoQuality = getQualityFor(Constants.VIDEO); + const audioQuality = getQualityFor(Constants.AUDIO, streamId); + const videoQuality = getQualityFor(Constants.VIDEO, streamId); const isAtTop = (audioQuality === getTopQualityIndexFor(Constants.AUDIO, streamId)) && (videoQuality === getTopQualityIndexFor(Constants.VIDEO, streamId)); @@ -542,49 +618,49 @@ function AbrController() { return isAtTop; } - function getQualityFor(type) { - if (type && streamProcessorDict[type]) { - const streamInfo = streamProcessorDict[type].getStreamInfo(); - const id = streamInfo ? 
streamInfo.id : null; - let quality; + function getQualityFor(type, streamId = null) { + try { + if (!streamId) { + streamId = streamController.getActiveStreamInfo().id; + } + if (type && streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { + let quality; + + if (streamId) { + qualityDict[streamId] = qualityDict[streamId] || {}; - if (id) { - qualityDict[id] = qualityDict[id] || {}; + if (!qualityDict[streamId].hasOwnProperty(type)) { + qualityDict[streamId][type] = QUALITY_DEFAULT; + } - if (!qualityDict[id].hasOwnProperty(type)) { - qualityDict[id][type] = QUALITY_DEFAULT; + quality = qualityDict[streamId][type]; + return quality; } - - quality = qualityDict[id][type]; - return quality; } + return QUALITY_DEFAULT; + } catch (e) { + return QUALITY_DEFAULT; } - return QUALITY_DEFAULT; } - function setQualityFor(type, id, value) { - qualityDict[id] = qualityDict[id] || {}; - qualityDict[id][type] = value; + function setQualityFor(type, value, streamId) { + qualityDict[streamId] = qualityDict[streamId] || {}; + qualityDict[streamId][type] = value; } - function setTopQualityIndex(type, id, value) { - topQualities[id] = topQualities[id] || {}; - topQualities[id][type] = value; - } - - function checkMaxBitrate(idx, type) { + function checkMaxBitrate(idx, type, streamId) { let newIdx = idx; - if (!streamProcessorDict[type]) { + if (!streamProcessorDict[streamId] || !streamProcessorDict[streamId][type]) { return newIdx; } - const minIdx = getMinAllowedIndexFor(type); + const minIdx = getMinAllowedIndexFor(type, streamId); if (minIdx !== undefined) { newIdx = Math.max(idx, minIdx); } - const maxIdx = getMaxAllowedIndexFor(type); + const maxIdx = getMaxAllowedIndexFor(type, streamId); if (maxIdx !== undefined) { newIdx = Math.min(newIdx, maxIdx); } @@ -613,8 +689,8 @@ function AbrController() { } } - function checkPortalSize(idx, type) { - if (type !== Constants.VIDEO || !settings.get().streaming.abr.limitBitrateByPortal || !streamProcessorDict[type]) { + function checkPortalSize(idx, type, streamId) { + if (type !== Constants.VIDEO || !settings.get().streaming.abr.limitBitrateByPortal || !streamProcessorDict[streamId] || !streamProcessorDict[streamId][type]) { return idx; } @@ -646,8 +722,10 @@ function AbrController() { function onFragmentLoadProgress(e) { const type = e.request.mediaType; + const streamId = e.streamId; + if (!!settings.get().streaming.abr.autoSwitchBitrate[type]) { - const streamProcessor = streamProcessorDict[type]; + const streamProcessor = streamProcessorDict[streamId][type]; if (!streamProcessor) return; // There may be a fragment load in progress when we switch periods and recreated some controllers. const rulesContext = RulesContext(context).create({ @@ -659,7 +737,7 @@ function AbrController() { useLoLPABR: useLoLPABR(type), videoModel }); - const switchRequest = abrRulesCollection.shouldAbandonFragment(rulesContext); + const switchRequest = abrRulesCollection.shouldAbandonFragment(rulesContext, streamId); if (switchRequest.quality > SwitchRequest.NO_CHANGE) { const fragmentModel = streamProcessor.getFragmentModel(); @@ -670,10 +748,10 @@ function AbrController() { if (request) { //TODO Check if we should abort or if better to finish download. 
check bytesLoaded/Total fragmentModel.abortRequests(); - setAbandonmentStateFor(type, MetricsConstants.ABANDON_LOAD); - switchHistoryDict[type].reset(); - switchHistoryDict[type].push({ - oldValue: getQualityFor(type), + setAbandonmentStateFor(streamId, type, MetricsConstants.ABANDON_LOAD); + switchHistoryDict[streamId][type].reset(); + switchHistoryDict[streamId][type].push({ + oldValue: getQualityFor(type, streamId), newValue: switchRequest.quality, confidence: 1, reason: switchRequest.reason @@ -683,7 +761,7 @@ function AbrController() { clearTimeout(abandonmentTimeout); abandonmentTimeout = setTimeout( () => { - setAbandonmentStateFor(type, MetricsConstants.ALLOW_LOAD); + setAbandonmentStateFor(streamId, type, MetricsConstants.ALLOW_LOAD); abandonmentTimeout = null; }, settings.get().streaming.abandonLoadTimeout @@ -693,28 +771,45 @@ function AbrController() { } } + function clearDataForStream(streamId) { + if (droppedFramesHistory) { + droppedFramesHistory.clearForStream(streamId); + } + if (streamProcessorDict[streamId]) { + delete streamProcessorDict[streamId]; + } + if (switchHistoryDict[streamId]) { + delete switchHistoryDict[streamId]; + } + + if (abandonmentStateDict[streamId]) { + delete abandonmentStateDict[streamId]; + } + } + instance = { - isPlayingAtTopQuality: isPlayingAtTopQuality, - updateTopQualityIndex: updateTopQualityIndex, - getThroughputHistory: getThroughputHistory, - getBitrateList: getBitrateList, - getQualityForBitrate: getQualityForBitrate, - getTopBitrateInfoFor: getTopBitrateInfoFor, - getMaxAllowedIndexFor: getMaxAllowedIndexFor, - getMinAllowedIndexFor: getMinAllowedIndexFor, - getInitialBitrateFor: getInitialBitrateFor, - getQualityFor: getQualityFor, - getAbandonmentStateFor: getAbandonmentStateFor, - setPlaybackQuality: setPlaybackQuality, - checkPlaybackQuality: checkPlaybackQuality, - getTopQualityIndexFor: getTopQualityIndexFor, - setElementSize: setElementSize, - setWindowResizeEventCalled: setWindowResizeEventCalled, - createAbrRulesCollection: createAbrRulesCollection, - registerStreamType: registerStreamType, - unRegisterStreamType: unRegisterStreamType, - setConfig: setConfig, - reset: reset + initialize, + isPlayingAtTopQuality, + updateTopQualityIndex, + clearDataForStream, + getThroughputHistory, + getBitrateList, + getQualityForBitrate, + getTopBitrateInfoFor, + getMaxAllowedIndexFor, + getMinAllowedIndexFor, + getInitialBitrateFor, + getQualityFor, + getAbandonmentStateFor, + setPlaybackQuality, + checkPlaybackQuality, + getTopQualityIndexFor, + setElementSize, + setWindowResizeEventCalled, + registerStreamType, + unRegisterStreamType, + setConfig, + reset }; setup(); diff --git a/src/streaming/controllers/BufferController.js b/src/streaming/controllers/BufferController.js index d3458c9a77..a74f354227 100644 --- a/src/streaming/controllers/BufferController.js +++ b/src/streaming/controllers/BufferController.js @@ -32,7 +32,6 @@ import Constants from '../constants/Constants'; import MetricsConstants from '../constants/MetricsConstants'; import FragmentModel from '../models/FragmentModel'; import SourceBufferSink from '../SourceBufferSink'; -import PreBufferSink from '../PreBufferSink'; import AbrController from './AbrController'; import EventBus from '../../core/EventBus'; import Events from '../../core/events/Events'; @@ -42,8 +41,8 @@ import InitCache from '../utils/InitCache'; import DashJSError from '../vo/DashJSError'; import Errors from '../../core/errors/Errors'; import {HTTPRequest} from '../vo/metrics/HTTPRequest'; +import 
MediaPlayerEvents from '../../streaming/MediaPlayerEvents'; -const BUFFERING_COMPLETED_THRESHOLD = 0.1; const BUFFER_END_THRESHOLD = 0.5; const BUFFER_RANGE_CALCULATION_THRESHOLD = 0.01; const QUOTA_EXCEEDED_ERROR_CODE = 22; @@ -58,7 +57,6 @@ function BufferController(config) { const errHandler = config.errHandler; const fragmentModel = config.fragmentModel; const representationController = config.representationController; - const mediaController = config.mediaController; const adapter = config.adapter; const textController = config.textController; const abrController = config.abrController; @@ -75,21 +73,17 @@ function BufferController(config) { criticalBufferLevel, mediaSource, maxAppendedIndex, - lastIndex, - buffer, - dischargeBuffer, - dischargeFragments, + maximumIndex, + sourceBufferSink, bufferState, appendedBytesInfo, wallclockTicked, isPruningInProgress, isQuotaExceeded, initCache, - seekTarget, - seekClearedBufferingCompleted, pendingPruningRanges, replacingBuffer, - mediaChunk; + seekTarget; function setup() { @@ -99,119 +93,130 @@ function BufferController(config) { resetInitialSettings(); } - function getBufferControllerType() { - return BUFFER_CONTROLLER_TYPE; - } - - function initialize(Source) { - setMediaSource(Source); - - requiredQuality = abrController.getQualityFor(type); - - eventBus.on(Events.DATA_UPDATE_COMPLETED, onDataUpdateCompleted, this); - eventBus.on(Events.INIT_FRAGMENT_LOADED, onInitFragmentLoaded, this); - eventBus.on(Events.MEDIA_FRAGMENT_LOADED, onMediaFragmentLoaded, this); - eventBus.on(Events.QUALITY_CHANGE_REQUESTED, onQualityChanged, this); - eventBus.on(Events.STREAM_COMPLETED, onStreamCompleted, this); - eventBus.on(Events.PLAYBACK_PLAYING, onPlaybackPlaying, this); - eventBus.on(Events.PLAYBACK_PROGRESS, onPlaybackProgression, this); - eventBus.on(Events.PLAYBACK_TIME_UPDATED, onPlaybackProgression, this); - eventBus.on(Events.PLAYBACK_RATE_CHANGED, onPlaybackRateChanged, this); - eventBus.on(Events.PLAYBACK_SEEKING, onPlaybackSeeking, this); - eventBus.on(Events.PLAYBACK_SEEKED, onPlaybackSeeked, this); - eventBus.on(Events.PLAYBACK_STALLED, onPlaybackStalled, this); - eventBus.on(Events.WALLCLOCK_TIME_UPDATED, onWallclockTimeUpdated, this); - eventBus.on(Events.CURRENT_TRACK_CHANGED, onCurrentTrackChanged, this, { priority: EventBus.EVENT_PRIORITY_HIGH }); - eventBus.on(Events.SOURCEBUFFER_REMOVE_COMPLETED, onRemoved, this); + /** + * Initialize the BufferController. Sets the media source and registers the event handlers. 
+ * @param {object} mediaSource + */ + function initialize(mediaSource) { + setMediaSource(mediaSource); + + requiredQuality = abrController.getQualityFor(type, streamInfo.id); + + eventBus.on(Events.INIT_FRAGMENT_LOADED, _onInitFragmentLoaded, instance); + eventBus.on(Events.MEDIA_FRAGMENT_LOADED, _onMediaFragmentLoaded, instance); + eventBus.on(Events.STREAM_REQUESTING_COMPLETED, _onStreamRequestingCompleted, instance); + eventBus.on(Events.WALLCLOCK_TIME_UPDATED, _onWallclockTimeUpdated, instance); + + eventBus.on(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, _onQualityChanged, instance); + eventBus.on(MediaPlayerEvents.PLAYBACK_PLAYING, _onPlaybackPlaying, instance); + eventBus.on(MediaPlayerEvents.PLAYBACK_PROGRESS, _onPlaybackProgression, instance); + eventBus.on(MediaPlayerEvents.PLAYBACK_TIME_UPDATED, _onPlaybackProgression, instance); + eventBus.on(MediaPlayerEvents.PLAYBACK_RATE_CHANGED, _onPlaybackRateChanged, instance); + eventBus.on(MediaPlayerEvents.PLAYBACK_STALLED, _onPlaybackStalled, instance); } + /** + * Returns the stream id + * @return {string} + */ function getStreamId() { return streamInfo.id; } + /** + * Returns the media type + * @return {type} + */ function getType() { return type; } - - function getRepresentationInfo(quality) { - return adapter.convertDataToRepresentationInfo(representationController.getRepresentationForQuality(quality)); + /** + * Returns the type of the BufferController. We distinguish between standard buffer controllers and buffer controllers related to texttracks. + * @return {string} + */ + function getBufferControllerType() { + return BUFFER_CONTROLLER_TYPE; } - function createBuffer(mediaInfoArr, oldBuffers) { - if (!initCache || !mediaInfoArr) return null; - const mediaInfo = mediaInfoArr[0]; - if (mediaSource) { - try { - if (oldBuffers && oldBuffers[type]) { - buffer = SourceBufferSink(context).create(mediaSource, mediaInfo, onAppended.bind(this), oldBuffers[type]); - } else { - buffer = SourceBufferSink(context).create(mediaSource, mediaInfo, onAppended.bind(this), null); - } - if (settings.get().streaming.useAppendWindow) { - buffer.updateAppendWindow(streamInfo); - } - if (typeof buffer.getBuffer().initialize === 'function') { - buffer.getBuffer().initialize(type, streamInfo, mediaInfoArr, fragmentModel); - } - } catch (e) { - logger.fatal('Caught error on create SourceBuffer: ' + e); - errHandler.error(new DashJSError(Errors.MEDIASOURCE_TYPE_UNSUPPORTED_CODE, Errors.MEDIASOURCE_TYPE_UNSUPPORTED_MESSAGE + type)); - } - } else { - buffer = PreBufferSink(context).create(onAppended.bind(this)); - } - updateBufferTimestampOffset(getRepresentationInfo(requiredQuality)); - return buffer; + /** + * Sets the mediasource. + * @param {object} value + */ + function setMediaSource(value) { + mediaSource = value; } - function dischargePreBuffer() { - if (buffer && dischargeBuffer && typeof dischargeBuffer.discharge === 'function') { - const ranges = dischargeBuffer.getAllBufferRanges(); + /** + * Get the RepresentationInfo for a certain quality. 
+ * @param {number} quality + * @return {object} + * @private + */ + function _getRepresentationInfo(quality) { + return adapter.convertDataToRepresentationInfo(representationController.getRepresentationForQuality(quality)); + } - if (ranges.length > 0) { - let rangeStr = 'Beginning ' + type + 'PreBuffer discharge, adding buffer for:'; - for (let i = 0; i < ranges.length; i++) { - rangeStr += ' start: ' + ranges.start(i) + ', end: ' + ranges.end(i) + ';'; - } - logger.debug(rangeStr); - } else { - logger.debug('PreBuffer discharge requested, but there were no media segments in the PreBuffer.'); + /** + * Creates a SourceBufferSink object + * @param {object} mediaInfo + * @param {array} oldBufferSinks + * @return {object|null} SourceBufferSink + */ + function createBufferSink(mediaInfo, oldBufferSinks = []) { + return new Promise((resolve, reject) => { + if (!initCache || !mediaInfo || !mediaSource) { + resolve(null); + return; } - //A list of fragments to supress bytesAppended events for. This makes transferring from a prebuffer to a sourcebuffer silent. - dischargeFragments = []; - let chunks = dischargeBuffer.discharge(); - let lastInit = null; - for (let j = 0; j < chunks.length; j++) { - const chunk = chunks[j]; - if (chunk.segmentType !== 'InitializationSegment') { - const initChunk = initCache.extract(chunk.streamId, chunk.representationId); - if (initChunk) { - if (lastInit !== initChunk) { - dischargeFragments.push(initChunk); - buffer.append(initChunk); - lastInit = initChunk; - } - } - } - dischargeFragments.push(chunk); - buffer.append(chunk); - } + sourceBufferSink = SourceBufferSink(context).create({ mediaSource, textController }); + _initializeSink(mediaInfo, oldBufferSinks) + .then(() => { + return updateBufferTimestampOffset(_getRepresentationInfo(requiredQuality)); + }) + .then(() => { + resolve(sourceBufferSink); + }) + .catch((e) => { + logger.fatal('Caught error on create SourceBuffer: ' + e); + errHandler.error(new DashJSError(Errors.MEDIASOURCE_TYPE_UNSUPPORTED_CODE, Errors.MEDIASOURCE_TYPE_UNSUPPORTED_MESSAGE + type)); + reject(e); + }); + }); + } + + function _initializeSink(mediaInfo, oldBufferSinks) { + const selectedRepresentation = _getRepresentationInfo(requiredQuality); - dischargeBuffer.reset(); - dischargeBuffer = null; + if (oldBufferSinks && oldBufferSinks[type] && (type === Constants.VIDEO || type === Constants.AUDIO)) { + return sourceBufferSink.initializeForStreamSwitch(mediaInfo, selectedRepresentation, oldBufferSinks[type]); + } else { + return sourceBufferSink.initializeForFirstUse(streamInfo, mediaInfo, selectedRepresentation); } } - function onInitFragmentLoaded(e) { - logger.info('Init fragment finished loading saving to', type + '\'s init cache'); - initCache.save(e.chunk); + + /** + * Callback handler when init segment has been loaded. Based on settings, the init segment is saved to the cache, and appended to the buffer. + * @param {object} e + * @private + */ + function _onInitFragmentLoaded(e) { + if (settings.get().streaming.cacheInitSegments) { + logger.info('Init fragment finished loading saving to', type + '\'s init cache'); + initCache.save(e.chunk); + } logger.debug('Append Init fragment', type, ' with representationId:', e.chunk.representationId, ' and quality:', e.chunk.quality, ', data size:', e.chunk.bytes.byteLength); - appendToBuffer(e.chunk); + _appendToBuffer(e.chunk); } - function appendInitSegment(representationId) { + /** + * Append the init segment for a certain representation to the buffer. 
If the init segment is cached we take the one from the cache. Otherwise the function returns false and the segment has to be requested again. + * @param {string} representationId + * @return {boolean} + */ + function appendInitSegmentFromCache(representationId) { // Get init segment from cache const chunk = initCache.extract(streamInfo.id, representationId); @@ -222,38 +227,39 @@ function BufferController(config) { // Append init segment into buffer logger.info('Append Init fragment', type, ' with representationId:', chunk.representationId, ' and quality:', chunk.quality, ', data size:', chunk.bytes.byteLength); - appendToBuffer(chunk); + _appendToBuffer(chunk); + return true; } - function onMediaFragmentLoaded(e) { - const chunk = e.chunk; - - if (replacingBuffer) { - mediaChunk = chunk; - const ranges = buffer && buffer.getAllBufferRanges(); - if (ranges && ranges.length > 0 && playbackController.getTimeToStreamEnd() > settings.get().streaming.stallThreshold) { - logger.debug('Clearing buffer because track changed - ' + (ranges.end(ranges.length - 1) + BUFFER_END_THRESHOLD)); - clearBuffers([{ - start: 0, - end: ranges.end(ranges.length - 1) + BUFFER_END_THRESHOLD, - force: true // Force buffer removal even when buffering is completed and MediaSource is ended - }]); - } - } else { - appendToBuffer(chunk); - } + /** + * Calls the _appendToBuffer function to append the segment to the buffer. In case of a track switch the buffer might be cleared. + * @param {object} e + */ + function _onMediaFragmentLoaded(e) { + _appendToBuffer(e.chunk); } - function appendToBuffer(chunk) { - buffer.append(chunk); + /** + * Append data to the MSE buffer using the SourceBufferSink + * @param {object} chunk + * @private + */ + function _appendToBuffer(chunk) { + sourceBufferSink.append(chunk) + .then((e) => { + _onAppended(e); + }) + .catch((e) => { + _onAppended({ error: e }); + }); if (chunk.mediaInfo.type === Constants.VIDEO) { - triggerEvent(Events.VIDEO_CHUNK_RECEIVED, {chunk: chunk}); + triggerEvent(Events.VIDEO_CHUNK_RECEIVED, { chunk: chunk }); } } - function showBufferRanges(ranges) { + function _showBufferRanges(ranges) { if (ranges && ranges.length > 0) { for (let i = 0, len = ranges.length; i < len; i++) { logger.debug('Buffered range: ' + ranges.start(i) + ' - ' + ranges.end(i) + ', currentTime = ', playbackController.getTime()); @@ -261,32 +267,15 @@ function BufferController(config) { } } - function onAppended(e) { + function _onAppended(e) { if (e.error) { + // If we receive a QUOTA_EXCEEDED_ERROR_CODE we should adjust the target buffer times to avoid this error in the future. 
if (e.error.code === QUOTA_EXCEEDED_ERROR_CODE) { - isQuotaExceeded = true; - criticalBufferLevel = getTotalBufferedTime() * 0.8; - logger.warn('Quota exceeded, Critical Buffer: ' + criticalBufferLevel); - - if (criticalBufferLevel > 0) { - // recalculate buffer lengths according to criticalBufferLevel - const bufferToKeep = Math.max(0.2 * criticalBufferLevel, 1); - const bufferAhead = criticalBufferLevel - bufferToKeep; - const bufferTimeAtTopQuality = Math.min(settings.get().streaming.bufferTimeAtTopQuality, bufferAhead * 0.9); - const bufferTimeAtTopQualityLongForm = Math.min(settings.get().streaming.bufferTimeAtTopQualityLongForm, bufferAhead * 0.9); - const s = { - streaming: { - bufferToKeep: parseFloat(bufferToKeep.toFixed(5)), - bufferTimeAtTopQuality: parseFloat(bufferTimeAtTopQuality.toFixed(5)), - bufferTimeAtTopQualityLongForm: parseFloat(bufferTimeAtTopQualityLongForm.toFixed(5)) - } - }; - settings.update(s); - } + _handleQuotaExceededError(); } if (e.error.code === QUOTA_EXCEEDED_ERROR_CODE || !hasEnoughSpaceToAppend()) { logger.warn('Clearing playback buffer to overcome quota exceed situation'); - // Notify Schedulecontroller to stop scheduling until buffer has been pruned + // Notify ScheduleController to stop scheduling until buffer has been pruned triggerEvent(Events.QUOTA_EXCEEDED, { criticalBufferLevel: criticalBufferLevel, quotaExceededTime: e.chunk.start @@ -295,207 +284,286 @@ function BufferController(config) { } return; } - isQuotaExceeded = false; + _updateBufferLevel(); + + isQuotaExceeded = false; appendedBytesInfo = e.chunk; + + if (!appendedBytesInfo.endFragment) { + return; + } + if (appendedBytesInfo && !isNaN(appendedBytesInfo.index)) { maxAppendedIndex = Math.max(appendedBytesInfo.index, maxAppendedIndex); - checkIfBufferingCompleted(); + _checkIfBufferingCompleted(); } - const ranges = buffer.getAllBufferRanges(); + const ranges = sourceBufferSink.getAllBufferRanges(); if (appendedBytesInfo.segmentType === HTTPRequest.MEDIA_SEGMENT_TYPE) { - showBufferRanges(ranges); - onPlaybackProgression(); - adjustSeekTarget(); - } else if (replacingBuffer) { - // When replacing buffer due to switch track, and once new initialization segment has been appended - // (and previous buffered data removed) then seek stream to current time - const currentTime = playbackController.getTime(); - logger.debug('AppendToBuffer seek target should be ' + currentTime); - triggerEvent(Events.SEEK_TARGET, {time: currentTime}); + _showBufferRanges(ranges); + _onPlaybackProgression(); + _adjustSeekTarget(); } - let suppressAppendedEvent = false; - if (dischargeFragments) { - if (dischargeFragments.indexOf(appendedBytesInfo) > 0) { - suppressAppendedEvent = true; - } - dischargeFragments = null; - } - if (appendedBytesInfo && !suppressAppendedEvent) { - triggerEvent(appendedBytesInfo.endFragment ? Events.BYTES_APPENDED_END_FRAGMENT : Events.BYTES_APPENDED, { + if (appendedBytesInfo) { + triggerEvent(Events.BYTES_APPENDED_END_FRAGMENT, { quality: appendedBytesInfo.quality, startTime: appendedBytesInfo.start, index: appendedBytesInfo.index, bufferedRanges: ranges, + segmentType: appendedBytesInfo.segmentType, mediaType: type }); } } - function adjustSeekTarget() { - // Check buffered data only for audio and video - if (type !== Constants.AUDIO && type !== Constants.VIDEO) return; + /** + * In some cases the segment we requested might start at a different time than we initially aimed for. segments timeline/template tolerance. + * We might need to do an internal seek if there is drift. 
+ * @private + */ + function _adjustSeekTarget() { if (isNaN(seekTarget)) return; + // Check buffered data only for audio and video + if (type !== Constants.AUDIO && type !== Constants.VIDEO) { + seekTarget = NaN; + return; + } // Check if current buffered range already contains seek target (and current video element time) const currentTime = playbackController.getTime(); let range = getRangeAt(seekTarget, 0); - if (currentTime === seekTarget && range) return; + if (currentTime === seekTarget && range) { + seekTarget = NaN; + return; + } // Get buffered range corresponding to the seek target const segmentDuration = representationController.getCurrentRepresentation().segmentDuration; range = getRangeAt(seekTarget, segmentDuration); if (!range) return; - if (Math.abs(currentTime - seekTarget) > segmentDuration) { - // If current video model time is decorrelated from seek target (and appended buffer) then seek video element - // (in case of live streams on some browsers/devices for which we can't set video element time at unavalaible range) - - // Check if appended segment is not anterior from seek target (segments timeline/template tolerance) - if (seekTarget <= range.end) { - // Seek video element to seek target or range start if appended buffer starts after seek target (segments timeline/template tolerance) - playbackController.seek(Math.max(seekTarget, range.start), false, true); - seekTarget = NaN; - } - } else if (currentTime < range.start) { + if (currentTime < range.start) { // If appended buffer starts after seek target (segments timeline/template tolerance) then seek to range start playbackController.seek(range.start, false, true); seekTarget = NaN; } } - function onQualityChanged(e) { - if (requiredQuality === e.newQuality) return; + function _handleQuotaExceededError() { + isQuotaExceeded = true; + criticalBufferLevel = getTotalBufferedTime() * 0.8; + logger.warn('Quota exceeded, Critical Buffer: ' + criticalBufferLevel); + + if (criticalBufferLevel > 0) { + // recalculate buffer lengths according to criticalBufferLevel + const bufferToKeep = Math.max(0.2 * criticalBufferLevel, 1); + const bufferAhead = criticalBufferLevel - bufferToKeep; + const bufferTimeAtTopQuality = Math.min(settings.get().streaming.buffer.bufferTimeAtTopQuality, bufferAhead * 0.9); + const bufferTimeAtTopQualityLongForm = Math.min(settings.get().streaming.buffer.bufferTimeAtTopQualityLongForm, bufferAhead * 0.9); + const s = { + streaming: { + buffer: { + bufferToKeep: parseFloat(bufferToKeep.toFixed(5)), + bufferTimeAtTopQuality: parseFloat(bufferTimeAtTopQuality.toFixed(5)), + bufferTimeAtTopQualityLongForm: parseFloat(bufferTimeAtTopQualityLongForm.toFixed(5)) + } + } + }; + settings.update(s); + } + } + + function _onQualityChanged(e) { + if (requiredQuality === e.newQuality || isBufferingCompleted) return; - updateBufferTimestampOffset(this.getRepresentationInfo(e.newQuality)); + const representationInfo = _getRepresentationInfo(e.newQuality); requiredQuality = e.newQuality; + updateBufferTimestampOffset(representationInfo); } //********************************************************************** // START Buffer Level, State & Sufficiency Handling. 
//********************************************************************** - function onPlaybackSeeking(e) { - if (!buffer) return; - seekTarget = e.seekTime; + function prepareForPlaybackSeek() { if (isBufferingCompleted) { - seekClearedBufferingCompleted = true; - isBufferingCompleted = false; - //a seek command has occured, reset lastIndex value, it will be set next time that onStreamCompleted will be called. - lastIndex = Number.POSITIVE_INFINITY; - } - if (type !== Constants.FRAGMENTED_TEXT) { - // remove buffer after seeking operations - pruneAllSafely(); - } else { - onPlaybackProgression(); + setIsBufferingCompleted(false); } + + // Abort the current request and empty all possible segments to be appended + return sourceBufferSink.abort(); } - function onPlaybackSeeked() { - seekTarget = NaN; + function prepareForReplacementTrackSwitch(codec) { + return new Promise((resolve, reject) => { + sourceBufferSink.abort() + .then(() => { + return updateAppendWindow(); + }) + .then(() => { + return sourceBufferSink.changeType(codec); + }) + .then(() => { + return pruneAllSafely(); + }) + .then(() => { + setIsBufferingCompleted(false); + resolve(); + }) + .catch((e) => { + reject(e); + }); + }); + } + + function prepareForNonReplacementTrackSwitch(codec) { + return new Promise((resolve, reject) => { + updateAppendWindow() + .then(() => { + return sourceBufferSink.changeType(codec); + }) + .then(() => { + resolve(); + }) + .catch((e) => { + reject(e); + }); + }); } - // Prune full buffer but what is around current time position function pruneAllSafely() { - if (!buffer) return; - buffer.waitForUpdateEnd(() => { - const ranges = getAllRangesWithSafetyFactor(); + return new Promise((resolve, reject) => { + let ranges = getAllRangesWithSafetyFactor(); + if (!ranges || ranges.length === 0) { - onPlaybackProgression(); + _onPlaybackProgression(); + resolve(); + return; } - clearBuffers(ranges); + + clearBuffers(ranges) + .then(() => { + resolve(); + }) + .catch((e) => { + reject(e); + }); }); } - // Get all buffer ranges but a range around current time position - function getAllRangesWithSafetyFactor() { - if (!buffer) return; + function getAllRangesWithSafetyFactor(seekTime) { const clearRanges = []; - const ranges = buffer.getAllBufferRanges(); + const ranges = sourceBufferSink.getAllBufferRanges(); + + // no valid ranges if (!ranges || ranges.length === 0) { return clearRanges; } - const currentTime = playbackController.getTime(); - const endOfBuffer = ranges.end(ranges.length - 1) + BUFFER_END_THRESHOLD; - - const currentTimeRequest = fragmentModel.getRequests({ - state: FragmentModel.FRAGMENT_MODEL_EXECUTED, - time: currentTime, - threshold: BUFFER_RANGE_CALCULATION_THRESHOLD - })[0]; - - // There is no request in current time position yet. Let's remove everything - if (!currentTimeRequest) { - logger.debug('getAllRangesWithSafetyFactor - No request found in current time position, removing full buffer 0 -', endOfBuffer); + // if no target time is provided we clear everyhing + if (!seekTime || isNaN(seekTime)) { clearRanges.push({ - start: 0, - end: endOfBuffer + start: ranges.start(0), + end: ranges.end(ranges.length - 1) + BUFFER_END_THRESHOLD }); - } else { - // Build buffer behind range. 
To avoid pruning time around current time position, - // we include fragment right behind the one in current time position - const behindRange = { - start: 0, - end: currentTimeRequest.startTime - settings.get().streaming.stallThreshold - }; - const prevReq = fragmentModel.getRequests({ + } + + // otherwise we need to calculate the correct pruning range + else { + + const behindPruningRange = _getRangeBehindForPruning(seekTime, ranges); + const aheadPruningRange = _getRangeAheadForPruning(seekTime, ranges); + + if (behindPruningRange) { + clearRanges.push(behindPruningRange); + } + + if (aheadPruningRange) { + clearRanges.push(aheadPruningRange); + } + } + + return clearRanges; + } + + function _getRangeBehindForPruning(targetTime, ranges) { + const bufferToKeepBehind = settings.get().streaming.buffer.bufferToKeep; + const startOfBuffer = ranges.start(0); + + // if we do a seek ahead of the current play position we do need to prune behind the new play position + const behindDiff = targetTime - startOfBuffer; + if (behindDiff > bufferToKeepBehind) { + + let rangeEnd = Math.max(0, targetTime - bufferToKeepBehind); + // Ensure we keep full range of current fragment + const currentTimeRequest = fragmentModel.getRequests({ state: FragmentModel.FRAGMENT_MODEL_EXECUTED, - time: currentTimeRequest.startTime - (currentTimeRequest.duration / 2), + time: targetTime, threshold: BUFFER_RANGE_CALCULATION_THRESHOLD })[0]; - if (prevReq && prevReq.startTime != currentTimeRequest.startTime) { - behindRange.end = prevReq.startTime; + + if (currentTimeRequest) { + rangeEnd = Math.min(currentTimeRequest.startTime, rangeEnd); } - if (behindRange.start < behindRange.end && behindRange.end > ranges.start(0)) { - clearRanges.push(behindRange); + if (rangeEnd > 0) { + return { + start: startOfBuffer, + end: rangeEnd + }; } + } - // Build buffer ahead range. 
To avoid pruning time around current time position, - // we include fragment right after the one in current time position - const aheadRange = { - start: currentTimeRequest.startTime + currentTimeRequest.duration + settings.get().streaming.stallThreshold, - end: endOfBuffer - }; - const nextReq = fragmentModel.getRequests({ + return null; + } + + function _getRangeAheadForPruning(targetTime, ranges) { + // if we do a seek behind the current play position we do need to prune ahead of the new play position + const endOfBuffer = ranges.end(ranges.length - 1) + BUFFER_END_THRESHOLD; + const bufferToKeepAhead = settings.get().streaming.buffer.bufferTimeAtTopQuality; + const aheadDiff = endOfBuffer - targetTime; + + if (aheadDiff > bufferToKeepAhead) { + + let rangeStart = targetTime + bufferToKeepAhead; + // Ensure we keep full range of current fragment + const currentTimeRequest = fragmentModel.getRequests({ state: FragmentModel.FRAGMENT_MODEL_EXECUTED, - time: currentTimeRequest.startTime + currentTimeRequest.duration + settings.get().streaming.stallThreshold, + time: targetTime, threshold: BUFFER_RANGE_CALCULATION_THRESHOLD })[0]; - if (nextReq && nextReq.startTime !== currentTimeRequest.startTime) { - aheadRange.start = nextReq.startTime + nextReq.duration + settings.get().streaming.stallThreshold; + + if (currentTimeRequest) { + rangeStart = Math.max(currentTimeRequest.startTime + currentTimeRequest.duration, rangeStart); } - if (aheadRange.start < aheadRange.end && aheadRange.start < endOfBuffer) { - clearRanges.push(aheadRange); + if (rangeStart < endOfBuffer) { + return { + start: rangeStart, + end: endOfBuffer + }; } } - return clearRanges; - } - - function getWorkingTime() { - return isNaN(seekTarget) ? playbackController.getTime() : seekTarget; + return null; } - function onPlaybackProgression() { + function _onPlaybackProgression() { if (!replacingBuffer || (type === Constants.FRAGMENTED_TEXT && textController.isTextEnabled())) { - updateBufferLevel(); + _updateBufferLevel(); } } - function onPlaybackStalled() { + function _onPlaybackStalled() { checkIfSufficientBuffer(); } - function onPlaybackPlaying() { - seekTarget = NaN; + function _onPlaybackPlaying() { checkIfSufficientBuffer(); + seekTarget = NaN; } function getRangeAt(time, tolerance) { - const ranges = buffer.getAllBufferRanges(); + const ranges = sourceBufferSink.getAllBufferRanges(); let start = 0; let end = 0; let firstStart = null; @@ -548,8 +616,8 @@ function BufferController(config) { length; // Consider gap/discontinuity limit as tolerance - if (settings.get().streaming.jumpGaps) { - tolerance = settings.get().streaming.smallGapLimit; + if (settings.get().streaming.gaps.jumpGaps) { + tolerance = settings.get().streaming.gaps.smallGapLimit; } range = getRangeAt(time, tolerance); @@ -563,20 +631,22 @@ function BufferController(config) { return length; } - function updateBufferLevel() { + function _updateBufferLevel() { if (playbackController) { - bufferLevel = getBufferLength(getWorkingTime() || 0); - triggerEvent(Events.BUFFER_LEVEL_UPDATED, {bufferLevel: bufferLevel}); + const tolerance = settings.get().streaming.gaps.jumpGaps && !isNaN(settings.get().streaming.gaps.smallGapLimit) ? 
settings.get().streaming.gaps.smallGapLimit : NaN; + bufferLevel = getBufferLength(playbackController.getTime() || 0, tolerance); + triggerEvent(Events.BUFFER_LEVEL_UPDATED, { mediaType: type, bufferLevel: bufferLevel }); checkIfSufficientBuffer(); } } - function checkIfBufferingCompleted() { - const isLastIdxAppended = maxAppendedIndex >= lastIndex - 1; // Handles 0 and non 0 based request index - if (isLastIdxAppended && !isBufferingCompleted && buffer.discharge === undefined) { - isBufferingCompleted = true; - logger.debug('checkIfBufferingCompleted trigger BUFFERING_COMPLETED for ' + type); - triggerEvent(Events.BUFFERING_COMPLETED); + function _checkIfBufferingCompleted() { + const isLastIdxAppended = maxAppendedIndex >= maximumIndex - 1; // Handles 0 and non 0 based request index + const periodBuffered = playbackController.getTimeToStreamEnd(streamInfo) - bufferLevel <= 0; + + if ((isLastIdxAppended || periodBuffered) && !isBufferingCompleted) { + setIsBufferingCompleted(true); + logger.debug(`checkIfBufferingCompleted trigger BUFFERING_COMPLETED for stream id ${streamInfo.id} and type ${type}`); } } @@ -584,17 +654,10 @@ function BufferController(config) { // No need to check buffer if type is not audio or video (for example if several errors occur during text parsing, so that the buffer cannot be filled, no error must occur on video playback) if (type !== Constants.AUDIO && type !== Constants.VIDEO) return; - if (seekClearedBufferingCompleted && !isBufferingCompleted && bufferLevel > 0 && playbackController && playbackController.getTimeToStreamEnd() - bufferLevel < BUFFERING_COMPLETED_THRESHOLD) { - seekClearedBufferingCompleted = false; - isBufferingCompleted = true; - logger.debug('checkIfSufficientBuffer trigger BUFFERING_COMPLETED for type ' + type); - triggerEvent(Events.BUFFERING_COMPLETED); - } - - // When the player is working in low latency mode, the buffer is often below settings.get().streaming.stallThreshold. + // When the player is working in low latency mode, the buffer is often below STALL_THRESHOLD. // So, when in low latency mode, change dash.js behavior so it notifies a stall just when // buffer reach 0 seconds - if (((!settings.get().streaming.lowLatencyEnabled && bufferLevel < settings.get().streaming.stallThreshold) || bufferLevel === 0) && !isBufferingCompleted) { + if (((!settings.get().streaming.lowLatencyEnabled && bufferLevel < settings.get().streaming.buffer.stallThreshold) || bufferLevel === 0) && !isBufferingCompleted) { notifyBufferStateChanged(MetricsConstants.BUFFER_EMPTY); } else { if (isBufferingCompleted || bufferLevel >= streamInfo.manifestInfo.minBufferTime) { @@ -612,14 +675,14 @@ function BufferController(config) { bufferState = state; - triggerEvent(Events.BUFFER_LEVEL_STATE_CHANGED, {state: state}); + triggerEvent(Events.BUFFER_LEVEL_STATE_CHANGED, { state: state }); triggerEvent(state === MetricsConstants.BUFFER_LOADED ? Events.BUFFER_LOADED : Events.BUFFER_EMPTY); logger.debug(state === MetricsConstants.BUFFER_LOADED ? 
'Got enough buffer to start' : 'Waiting for more buffer before starting playback'); } /* prune buffer on our own in background to avoid browsers pruning buffer silently */ function pruneBuffer() { - if (!buffer || type === Constants.FRAGMENTED_TEXT) { + if (!sourceBufferSink || type === Constants.FRAGMENTED_TEXT) { return; } @@ -630,13 +693,13 @@ function BufferController(config) { function getClearRanges() { const clearRanges = []; - const ranges = buffer.getAllBufferRanges(); + const ranges = sourceBufferSink.getAllBufferRanges(); if (!ranges || ranges.length === 0) { return clearRanges; } const currentTime = playbackController.getTime(); - let startRangeToKeep = Math.max(0, currentTime - settings.get().streaming.bufferToKeep); + let startRangeToKeep = Math.max(0, currentTime - settings.get().streaming.buffer.bufferToKeep); const currentTimeRequest = fragmentModel.getRequests({ state: FragmentModel.FRAGMENT_MODEL_EXECUTED, @@ -669,75 +732,104 @@ function BufferController(config) { } function clearBuffers(ranges) { - if (!ranges || !buffer || ranges.length === 0) return; + return new Promise((resolve, reject) => { + if (!ranges || !sourceBufferSink || ranges.length === 0) { + resolve(); + return; + } - pendingPruningRanges.push.apply(pendingPruningRanges, ranges); - if (isPruningInProgress) { - return; - } + const promises = []; + ranges.forEach((range) => { + promises.push(_addClearRangeWithPromise(range)); + }); + + + if (!isPruningInProgress) { + clearNextRange(); + } - clearNextRange(); + Promise.all(promises) + .then(() => { + resolve(); + }) + .catch((e) => { + reject(e); + }); + }); + } + + function _addClearRangeWithPromise(range) { + return new Promise((resolve, reject) => { + range.resolve = resolve; + range.reject = reject; + pendingPruningRanges.push(range); + }); } function clearNextRange() { - // If there's nothing to prune reset state - if (pendingPruningRanges.length === 0 || !buffer) { - logger.debug('Nothing to prune, halt pruning'); - pendingPruningRanges = []; - isPruningInProgress = false; - return; - } + try { + // If there's nothing to prune reset state + if (pendingPruningRanges.length === 0 || !sourceBufferSink) { + logger.debug('Nothing to prune, halt pruning'); + pendingPruningRanges = []; + isPruningInProgress = false; + return; + } - const sourceBuffer = buffer.getBuffer(); - // If there's nothing buffered any pruning is invalid, so reset our state - if (!sourceBuffer || !sourceBuffer.buffered || sourceBuffer.buffered.length === 0) { - logger.debug('SourceBuffer is empty (or does not exist), halt pruning'); - pendingPruningRanges = []; - isPruningInProgress = false; - return; - } + const sourceBuffer = sourceBufferSink.getBuffer(); + // If there's nothing buffered any pruning is invalid, so reset our state + if (!sourceBuffer || !sourceBuffer.buffered || sourceBuffer.buffered.length === 0) { + logger.debug('SourceBuffer is empty (or does not exist), halt pruning'); + pendingPruningRanges = []; + isPruningInProgress = false; + return; + } - const range = pendingPruningRanges.shift(); - logger.debug('Removing buffer from:', range.start, 'to', range.end); - isPruningInProgress = true; + const range = pendingPruningRanges.shift(); + logger.debug(`${type}: Removing buffer from: ${range.start} to ${range.end}`); + isPruningInProgress = true; - // If removing buffer ahead current playback position, update maxAppendedIndex - const currentTime = playbackController.getTime(); - if (currentTime < range.end) { - isBufferingCompleted = false; - maxAppendedIndex = 0; - } + 
// If removing buffer ahead current playback position, update maxAppendedIndex + const currentTime = playbackController.getTime(); + if (currentTime < range.end) { + setIsBufferingCompleted(false); + } - buffer.remove(range.start, range.end, range.force); + sourceBufferSink.remove(range) + .then((e) => { + _onRemoved(e); + }) + .catch((e) => { + _onRemoved(e); + }); + } catch (e) { + isPruningInProgress = false; + } } - function onRemoved(e) { - if (buffer !== e.buffer) return; - + function _onRemoved(e) { logger.debug('onRemoved buffer from:', e.from, 'to', e.to); - const ranges = buffer.getAllBufferRanges(); - showBufferRanges(ranges); + const ranges = sourceBufferSink.getAllBufferRanges(); + _showBufferRanges(ranges); if (pendingPruningRanges.length === 0) { isPruningInProgress = false; + _updateBufferLevel(); } if (e.unintended) { - logger.warn('Detected unintended removal from:', e.from, 'to', e.to, 'setting index handler time to', e.from); - triggerEvent(Events.SEEK_TARGET, {time: e.from, mediaType: type, streamId: streamInfo.id}); + logger.warn('Detected unintended removal from:', e.from, 'to', e.to, 'setting streamprocessor time to', e.from); + triggerEvent(Events.SEEK_TARGET, { time: e.from }); } if (isPruningInProgress) { clearNextRange(); } else { if (!replacingBuffer) { - updateBufferLevel(); + _updateBufferLevel(); } else { replacingBuffer = false; - if (mediaChunk) { - appendToBuffer(mediaChunk); - } } triggerEvent(Events.BUFFER_CLEARED, { from: e.from, @@ -750,98 +842,87 @@ function BufferController(config) { } function updateBufferTimestampOffset(representationInfo) { - if (!representationInfo || representationInfo.MSETimeOffset === undefined) return; - // Each track can have its own @presentationTimeOffset, so we should set the offset - // if it has changed after switching the quality or updating an mpd - if (buffer && buffer.updateTimestampOffset) { - buffer.updateTimestampOffset(representationInfo.MSETimeOffset); - } + return new Promise((resolve) => { + if (!representationInfo || representationInfo.MSETimeOffset === undefined || !sourceBufferSink || !sourceBufferSink.updateTimestampOffset) { + resolve(); + return; + } + // Each track can have its own @presentationTimeOffset, so we should set the offset + // if it has changed after switching the quality or updating an mpd + sourceBufferSink.updateTimestampOffset(representationInfo.MSETimeOffset) + .then(() => { + resolve(); + }) + .catch(() => { + resolve(); + }); + }); + } function updateAppendWindow() { - if (buffer && !isBufferingCompleted) { - buffer.updateAppendWindow(streamInfo); + if (sourceBufferSink && !isBufferingCompleted) { + return sourceBufferSink.updateAppendWindow(streamInfo); } + return Promise.resolve(); } - function onDataUpdateCompleted(e) { - if (e.error || isBufferingCompleted) return; - updateBufferTimestampOffset(e.currentRepresentation); - } - - function onStreamCompleted(e) { - lastIndex = e.request.index; - checkIfBufferingCompleted(); - } - - function onCurrentTrackChanged(e) { - if (e.newMediaInfo.streamInfo.id !== streamInfo.id || e.newMediaInfo.type !== type) return; - - const ranges = buffer && buffer.getAllBufferRanges(); - if (!ranges) return; - - logger.info('Track change asked'); - if (mediaController.getSwitchMode(type) === Constants.TRACK_SWITCH_MODE_ALWAYS_REPLACE) { - if (ranges && ranges.length > 0 && playbackController.getTimeToStreamEnd() > settings.get().streaming.stallThreshold) { - isBufferingCompleted = false; - lastIndex = Number.POSITIVE_INFINITY; - } + function 
_onStreamRequestingCompleted(e) { + if (!isNaN(e.segmentIndex)) { + maximumIndex = e.segmentIndex; + _checkIfBufferingCompleted(); } } - function onWallclockTimeUpdated() { + function _onWallclockTimeUpdated() { wallclockTicked++; const secondsElapsed = (wallclockTicked * (settings.get().streaming.wallclockTimeUpdateInterval / 1000)); - if ((secondsElapsed >= settings.get().streaming.bufferPruningInterval)) { + if ((secondsElapsed >= settings.get().streaming.buffer.bufferPruningInterval)) { wallclockTicked = 0; pruneBuffer(); } } - function onPlaybackRateChanged() { + function _onPlaybackRateChanged() { checkIfSufficientBuffer(); } function getBuffer() { - return buffer; - } - - function setBuffer(newBuffer) { - buffer = newBuffer; + return sourceBufferSink; } function getBufferLevel() { return bufferLevel; } - function setMediaSource(value, mediaInfo) { - mediaSource = value; - if (buffer && mediaInfo) { //if we have a prebuffer, we should prepare to discharge it, and make a new sourceBuffer ready - if (typeof buffer.discharge === 'function') { - dischargeBuffer = buffer; - createBuffer(mediaInfo); - } - } - } - function getMediaSource() { return mediaSource; } - function replaceBuffer() { - replacingBuffer = true; - } - function getIsBufferingCompleted() { return isBufferingCompleted; } + function setIsBufferingCompleted(value) { + if (isBufferingCompleted === value) { + return; + } + + isBufferingCompleted = value; + + if (isBufferingCompleted) { + triggerEvent(Events.BUFFERING_COMPLETED); + } else { + maximumIndex = Number.POSITIVE_INFINITY; + } + } + function getIsPruningInProgress() { return isPruningInProgress; } function getTotalBufferedTime() { - const ranges = buffer.getAllBufferRanges(); + const ranges = sourceBufferSink.getAllBufferRanges(); let totalBufferedTime = 0; let ln, i; @@ -855,11 +936,49 @@ function BufferController(config) { return totalBufferedTime; } + /** + * This function returns the maximum time for which the buffer is continuous starting from a target time. 
+     * As soon as there is a gap, we return the time at which the gap starts.
+     * @param {number} targetTime
+     */
+    function getContinuousBufferTimeForTargetTime(targetTime) {
+        try {
+            let adjustedTime = targetTime;
+            const ranges = sourceBufferSink.getAllBufferRanges();
+
+            if (!ranges || ranges.length === 0) {
+                return targetTime;
+            }
+
+            let i = 0;
+
+            while (adjustedTime === targetTime && i < ranges.length) {
+                const start = ranges.start(i);
+                const end = ranges.end(i);
+
+                if (adjustedTime >= start && adjustedTime <= end) {
+                    adjustedTime = end;
+                }
+
+                i += 1;
+            }
+
+            return adjustedTime;
+
+        } catch (e) {
+            return targetTime; // fall back to the unadjusted target time if the buffered ranges can not be read
+        }
+    }
+
     function hasEnoughSpaceToAppend() {
         const totalBufferedTime = getTotalBufferedTime();
         return (totalBufferedTime < criticalBufferLevel);
     }
+    function setSeekTarget(value) {
+        seekTarget = value;
+    }
+
     function triggerEvent(eventType, data) {
         let payload = data || {};
         eventBus.trigger(eventType, payload, { streamId: streamInfo.id, mediaType: type });
@@ -869,45 +988,45 @@ function BufferController(config) {
         criticalBufferLevel = Number.POSITIVE_INFINITY;
         bufferState = undefined;
         requiredQuality = AbrController.QUALITY_DEFAULT;
-        lastIndex = Number.POSITIVE_INFINITY;
+        maximumIndex = Number.POSITIVE_INFINITY;
         maxAppendedIndex = 0;
         appendedBytesInfo = null;
         isBufferingCompleted = false;
         isPruningInProgress = false;
         isQuotaExceeded = false;
-        seekClearedBufferingCompleted = false;
         bufferLevel = 0;
         wallclockTicked = 0;
         pendingPruningRanges = [];
         seekTarget = NaN;
-        if (buffer) {
-            if (!errored) {
-                buffer.abort();
+        if (sourceBufferSink) {
+            if (!errored && !keepBuffers) {
+                sourceBufferSink.abort()
+                    .then(() => {
+                        sourceBufferSink.reset(keepBuffers);
+                        sourceBufferSink = null;
+                    });
+            } else {
+                sourceBufferSink = null;
             }
-            buffer.reset(keepBuffers);
-            buffer = null;
         }
         replacingBuffer = false;
     }

     function reset(errored, keepBuffers) {
-        eventBus.off(Events.DATA_UPDATE_COMPLETED, onDataUpdateCompleted, this);
-        eventBus.off(Events.INIT_FRAGMENT_LOADED, onInitFragmentLoaded, this);
-        eventBus.off(Events.MEDIA_FRAGMENT_LOADED, onMediaFragmentLoaded, this);
-        eventBus.off(Events.QUALITY_CHANGE_REQUESTED, onQualityChanged, this);
-        eventBus.off(Events.STREAM_COMPLETED, onStreamCompleted, this);
-        eventBus.off(Events.PLAYBACK_PLAYING, onPlaybackPlaying, this);
-        eventBus.off(Events.PLAYBACK_PROGRESS, onPlaybackProgression, this);
-        eventBus.off(Events.PLAYBACK_TIME_UPDATED, onPlaybackProgression, this);
-        eventBus.off(Events.PLAYBACK_RATE_CHANGED, onPlaybackRateChanged, this);
-        eventBus.off(Events.PLAYBACK_SEEKING, onPlaybackSeeking, this);
-        eventBus.off(Events.PLAYBACK_SEEKED, onPlaybackSeeked, this);
-        eventBus.off(Events.PLAYBACK_STALLED, onPlaybackStalled, this);
-        eventBus.off(Events.WALLCLOCK_TIME_UPDATED, onWallclockTimeUpdated, this);
-        eventBus.off(Events.CURRENT_TRACK_CHANGED, onCurrentTrackChanged, this);
-        eventBus.off(Events.SOURCEBUFFER_REMOVE_COMPLETED, onRemoved, this);
+        eventBus.off(Events.INIT_FRAGMENT_LOADED, _onInitFragmentLoaded, this);
+        eventBus.off(Events.MEDIA_FRAGMENT_LOADED, _onMediaFragmentLoaded, this);
+        eventBus.off(Events.WALLCLOCK_TIME_UPDATED, _onWallclockTimeUpdated, this);
+        eventBus.off(Events.STREAM_REQUESTING_COMPLETED, _onStreamRequestingCompleted, this);
+
+        eventBus.off(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, _onQualityChanged, this);
+        eventBus.off(MediaPlayerEvents.PLAYBACK_PLAYING, _onPlaybackPlaying, this);
+        eventBus.off(MediaPlayerEvents.PLAYBACK_PROGRESS, _onPlaybackProgression, this);
+
eventBus.off(MediaPlayerEvents.PLAYBACK_TIME_UPDATED, _onPlaybackProgression, this); + eventBus.off(MediaPlayerEvents.PLAYBACK_RATE_CHANGED, _onPlaybackRateChanged, this); + eventBus.off(MediaPlayerEvents.PLAYBACK_STALLED, _onPlaybackStalled, this); + resetInitialSettings(errored, keepBuffers); } @@ -917,21 +1036,27 @@ function BufferController(config) { getStreamId, getType, getBufferControllerType, - getRepresentationInfo, - createBuffer, - dischargePreBuffer, + createBufferSink, getBuffer, - setBuffer, getBufferLevel, getRangeAt, setMediaSource, getMediaSource, - appendInitSegment, - replaceBuffer, + appendInitSegmentFromCache, getIsBufferingCompleted, + setIsBufferingCompleted, getIsPruningInProgress, reset, - updateAppendWindow + prepareForPlaybackSeek, + prepareForReplacementTrackSwitch, + prepareForNonReplacementTrackSwitch, + updateAppendWindow, + getAllRangesWithSafetyFactor, + getContinuousBufferTimeForTargetTime, + clearBuffers, + pruneAllSafely, + updateBufferTimestampOffset, + setSeekTarget }; setup(); diff --git a/src/streaming/controllers/FragmentController.js b/src/streaming/controllers/FragmentController.js index e7baba01a5..7a4f9c45c6 100644 --- a/src/streaming/controllers/FragmentController.js +++ b/src/streaming/controllers/FragmentController.js @@ -35,11 +35,12 @@ import FragmentLoader from '../FragmentLoader'; import RequestModifier from '../utils/RequestModifier'; import EventBus from '../../core/EventBus'; import Events from '../../core/events/Events'; +import MediaPlayerEvents from '../MediaPlayerEvents'; import Errors from '../../core/errors/Errors'; import FactoryMaker from '../../core/FactoryMaker'; import Debug from '../../core/Debug'; -function FragmentController( config ) { +function FragmentController(config) { config = config || {}; const context = this.context; @@ -58,8 +59,8 @@ function FragmentController( config ) { function setup() { logger = debug.getLogger(instance); resetInitialSettings(); - eventBus.on(Events.FRAGMENT_LOADING_COMPLETED, onFragmentLoadingCompleted, instance); - eventBus.on(Events.FRAGMENT_LOADING_PROGRESS, onFragmentLoadingCompleted, instance); + eventBus.on(MediaPlayerEvents.FRAGMENT_LOADING_COMPLETED, onFragmentLoadingCompleted, instance); + eventBus.on(MediaPlayerEvents.FRAGMENT_LOADING_PROGRESS, onFragmentLoadingCompleted, instance); } function getStreamId() { @@ -84,7 +85,8 @@ function FragmentController( config ) { events: Events, errors: Errors, dashConstants: config.dashConstants, - urlUtils: config.urlUtils + urlUtils: config.urlUtils, + streamId: getStreamId() }), debug: debug, eventBus: eventBus, @@ -105,8 +107,8 @@ function FragmentController( config ) { } function reset() { - eventBus.off(Events.FRAGMENT_LOADING_COMPLETED, onFragmentLoadingCompleted, this); - eventBus.off(Events.FRAGMENT_LOADING_PROGRESS, onFragmentLoadingCompleted, this); + eventBus.off(MediaPlayerEvents.FRAGMENT_LOADING_COMPLETED, onFragmentLoadingCompleted, this); + eventBus.off(MediaPlayerEvents.FRAGMENT_LOADING_PROGRESS, onFragmentLoadingCompleted, this); resetInitialSettings(); } diff --git a/src/streaming/controllers/GapController.js b/src/streaming/controllers/GapController.js index ee05f0fd4e..b616287b38 100644 --- a/src/streaming/controllers/GapController.js +++ b/src/streaming/controllers/GapController.js @@ -36,6 +36,7 @@ import EventBus from '../../core/EventBus'; const GAP_HANDLER_INTERVAL = 100; const THRESHOLD_TO_STALLS = 30; const GAP_THRESHOLD = 0.1; +const GAP_JUMP_WAITING_TIME_OFFSET = 0.1; function GapController() { const context = 
this.context;
@@ -53,6 +54,7 @@ function GapController() {
         timelineConverter,
         adapter,
         jumpTimeoutHandler,
+        trackSwitchByMediaType,
         logger;

     function initialize() {
@@ -76,6 +78,7 @@ function GapController() {
         lastGapJumpPosition = NaN;
         wallclockTicked = 0;
         jumpTimeoutHandler = null;
+        trackSwitchByMediaType = {};
     }

     function setConfig(config) {
@@ -104,20 +107,18 @@ function GapController() {

     function registerEvents() {
         eventBus.on(Events.WALLCLOCK_TIME_UPDATED, _onWallclockTimeUpdated, this);
+        eventBus.on(Events.INITIAL_STREAM_SWITCH, _onInitialStreamSwitch, this);
         eventBus.on(Events.PLAYBACK_SEEKING, _onPlaybackSeeking, this);
-        eventBus.on(Events.BYTES_APPENDED_END_FRAGMENT, onBytesAppended, instance);
+        eventBus.on(Events.TRACK_REPLACEMENT_STARTED, _onTrackReplacementStarted, instance);
+        eventBus.on(Events.TRACK_CHANGE_RENDERED, _onTrackChangeRendered, instance);
     }

     function unregisterEvents() {
         eventBus.off(Events.WALLCLOCK_TIME_UPDATED, _onWallclockTimeUpdated, this);
+        eventBus.off(Events.INITIAL_STREAM_SWITCH, _onInitialStreamSwitch, this);
         eventBus.off(Events.PLAYBACK_SEEKING, _onPlaybackSeeking, this);
-        eventBus.off(Events.BYTES_APPENDED_END_FRAGMENT, onBytesAppended, instance);
-    }
-
-    function onBytesAppended() {
-        if (!gapHandlerInterval) {
-            startGapHandler();
-        }
+        eventBus.off(Events.TRACK_REPLACEMENT_STARTED, _onTrackReplacementStarted, instance);
+        eventBus.off(Events.TRACK_CHANGE_RENDERED, _onTrackChangeRendered, instance);
     }

     function _onPlaybackSeeking() {
@@ -127,6 +128,40 @@ function GapController() {
         }
     }

+    /**
+     * If the track was changed in the currently active period and the player might aggressively replace segments, the buffer will be empty for a short period of time. Avoid gap jumping during that time.
+     * We wait until the next media fragment of the target type has been appended before activating gap jumping again.
+     * @param {object} e
+     * @private
+     */
+    function _onTrackReplacementStarted(e) {
+        try {
+            if (e.streamId !== streamController.getActiveStreamInfo().id || !e.mediaType) {
+                return;
+            }
+
+            if (e.streamId === streamController.getActiveStreamInfo().id) {
+                trackSwitchByMediaType[e.mediaType] = true;
+            }
+        } catch (e) {
+            logger.error(e);
+        }
+    }
+
+    function _onTrackChangeRendered(e) {
+        if (!e || !e.mediaType) {
+            return;
+        }
+
+        trackSwitchByMediaType[e.mediaType] = false;
+    }
+
+    function _onInitialStreamSwitch() {
+        if (!gapHandlerInterval) {
+            startGapHandler();
+        }
+    }
+
     function _onWallclockTimeUpdated(/*e*/) {
         if (!_shouldCheckForGaps()) {
             return;
@@ -146,9 +181,12 @@ function GapController() {
     }

     function _shouldCheckForGaps() {
-        return settings.get().streaming.jumpGaps && streamController.getActiveStreamProcessors().length > 0 &&
-            (!playbackController.isSeeking() || streamController.hasStreamFinishedBuffering(streamController.getActiveStream())) && !playbackController.isPaused() && !streamController.getIsStreamSwitchInProgress() &&
-            !streamController.getHasMediaOrIntialisationError();
+        const trackSwitchInProgress = Object.keys(trackSwitchByMediaType).some((key) => {
+            return trackSwitchByMediaType[key];
+        });
+
+        return !trackSwitchInProgress && settings.get().streaming.gaps.jumpGaps && streamController.getActiveStreamProcessors().length > 0 && !playbackController.isSeeking() && !playbackController.isPaused() && !streamController.getIsStreamSwitchInProgress() &&
+            !streamController.getHasMediaOrInitialisationError();
     }

     function getNextRangeIndex(ranges, currentTime) {
@@ -201,8 +239,8 @@ function GapController() {
     }

     function jumpGap(currentTime, playbackStalled = false) {
-        const smallGapLimit = settings.get().streaming.smallGapLimit;
-        const jumpLargeGaps = settings.get().streaming.jumpLargeGaps;
+        const smallGapLimit = settings.get().streaming.gaps.smallGapLimit;
+        const jumpLargeGaps = settings.get().streaming.gaps.jumpLargeGaps;
         const ranges = videoModel.getBufferRange();
         let nextRangeIndex;
         let seekToPosition = NaN;
@@ -231,23 +269,19 @@ function GapController() {
             const timeUntilGapEnd = seekToPosition - currentTime;

             if (jumpToStreamEnd) {
+                const nextStream = streamController.getStreamForTime(seekToPosition);
+                const internalSeek = nextStream && !!nextStream.getPreloaded();
+
                 logger.warn(`Jumping to end of stream because of gap from ${currentTime} to ${seekToPosition}. Gap duration: ${timeUntilGapEnd}`);
-                eventBus.trigger(Events.GAP_CAUSED_SEEK_TO_PERIOD_END, {
-                    seekTime: seekToPosition,
-                    duration: timeUntilGapEnd
-                });
+                playbackController.seek(seekToPosition, true, internalSeek);
             } else {
                 const isDynamic = playbackController.getIsDynamic();
                 const start = nextRangeIndex > 0 ? ranges.end(nextRangeIndex - 1) : currentTime;
-                const timeToWait = !isDynamic ? 0 : timeUntilGapEnd * 1000;
+                const timeToWait = !isDynamic ? 0 : Math.max(0, timeUntilGapEnd - GAP_JUMP_WAITING_TIME_OFFSET) * 1000;
                 jumpTimeoutHandler = window.setTimeout(() => {
                     playbackController.seek(seekToPosition, true, true);
-                    logger.warn(`Jumping gap starting at ${start} and ending at ${seekToPosition}. Jumping by: ${timeUntilGapEnd}`);
-                    eventBus.trigger(Events.GAP_CAUSED_INTERNAL_SEEK, {
-                        seekTime: seekToPosition,
-                        duration: timeUntilGapEnd
-                    });
+                    logger.warn(`Jumping gap occurring in period ${streamController.getActiveStream().getStreamId()} starting at ${start} and ending at ${seekToPosition}. Jumping by: ${timeUntilGapEnd}`);
                     jumpTimeoutHandler = null;
                 }, timeToWait);
             }
diff --git a/src/streaming/controllers/MediaController.js b/src/streaming/controllers/MediaController.js
index c8fe4d7a07..2ecafc4f49 100644
--- a/src/streaming/controllers/MediaController.js
+++ b/src/streaming/controllers/MediaController.js
@@ -44,20 +44,9 @@ function MediaController() {
         tracks,
         settings,
         initialSettings,
+        lastSelectedTracks,
         domStorage;

-    const validTrackSwitchModes = [
-        Constants.TRACK_SWITCH_MODE_ALWAYS_REPLACE,
-        Constants.TRACK_SWITCH_MODE_NEVER_REPLACE
-    ];
-
-    const validTrackSelectionModes = [
-        Constants.TRACK_SELECTION_MODE_HIGHEST_BITRATE,
-        Constants.TRACK_SELECTION_MODE_FIRST_TRACK,
-        Constants.TRACK_SELECTION_MODE_HIGHEST_EFFICIENCY,
-        Constants.TRACK_SELECTION_MODE_WIDEST_RANGE
-    ];
-
     function setup() {
         logger = Debug(context).getInstance().getLogger(instance);
         reset();
@@ -68,9 +57,9 @@ function MediaController() {
      * @param {StreamInfo} streamInfo
      * @memberof MediaController#
      */
-    function checkInitialMediaSettingsForType(type, streamInfo) {
-        let settings = getInitialSettings(type);
-        const tracksForType = getTracksFor(type, streamInfo);
+    function setInitialMediaSettingsForType(type, streamInfo) {
+        let settings = lastSelectedTracks[type] || getInitialSettings(type);
+        const tracksForType = getTracksFor(type, streamInfo.id);
         const tracks = [];

         if (!settings) {
@@ -82,17 +71,17 @@ function MediaController() {
         if (settings) {
             tracksForType.forEach(function (track) {
-                if (matchSettings(settings, track)) {
+                if (matchSettings(settings, track, !!lastSelectedTracks[type])) {
                     tracks.push(track);
                 }
             });
         }

         if (tracks.length === 0) {
-            setTrack(this.selectInitialTrack(type, tracksForType), true);
+            setTrack(selectInitialTrack(type, tracksForType), true);
        }
else { if (tracks.length > 1) { - setTrack(this.selectInitialTrack(type, tracks)); + setTrack(selectInitialTrack(type, tracks, !!lastSelectedTracks[type])); } else { setTrack(tracks[0]); } @@ -107,7 +96,7 @@ function MediaController() { if (!track) return; const mediaType = track.type; - if (!isMultiTrackSupportedByType(mediaType)) return; + if (!_isMultiTrackSupportedByType(mediaType)) return; let streamId = track.streamInfo.id; if (!tracks[streamId]) { @@ -125,36 +114,34 @@ function MediaController() { mediaTracks.push(track); let initSettings = getInitialSettings(mediaType); - if (initSettings && (matchSettings(initSettings, track)) && !getCurrentTrackFor(mediaType, track.streamInfo)) { + if (initSettings && (matchSettings(initSettings, track)) && !getCurrentTrackFor(mediaType, track.streamInfo.id)) { setTrack(track); } } /** * @param {string} type - * @param {StreamInfo} streamInfo + * @param {string} streamId * @returns {Array} * @memberof MediaController# */ - function getTracksFor(type, streamInfo) { - if (!type || !streamInfo) return []; + function getTracksFor(type, streamId) { + if (!type) return []; - const id = streamInfo.id; - - if (!tracks[id] || !tracks[id][type]) return []; + if (!tracks[streamId] || !tracks[streamId][type]) return []; - return tracks[id][type].list; + return tracks[streamId][type].list; } /** * @param {string} type - * @param {StreamInfo} streamInfo + * @param {string} streamId * @returns {Object|null} * @memberof MediaController# */ - function getCurrentTrackFor(type, streamInfo) { - if (!type || !streamInfo || (streamInfo && !tracks[streamInfo.id])) return null; - return tracks[streamInfo.id][type].current; + function getCurrentTrackFor(type, streamId) { + if (!type || !tracks[streamId]) return null; + return tracks[streamId][type].current; } /** @@ -177,24 +164,24 @@ function MediaController() { * @param {boolean} noSettingsSave specify if settings must be not be saved * @memberof MediaController# */ - function setTrack(track, noSettingsSave) { + function setTrack(track, noSettingsSave = false) { if (!track || !track.streamInfo) return; const type = track.type; const streamInfo = track.streamInfo; const id = streamInfo.id; - const current = getCurrentTrackFor(type, streamInfo); + const current = getCurrentTrackFor(type, id); if (!tracks[id] || !tracks[id][type] || isTracksEqual(track, current)) return; tracks[id][type].current = track; - if (tracks[id][type].current && !(noSettingsSave && type === Constants.FRAGMENTED_TEXT)) { + if (tracks[id][type].current) { eventBus.trigger(Events.CURRENT_TRACK_CHANGED, { oldMediaInfo: current, newMediaInfo: track, - switchMode: getSwitchMode(type) - }); + switchMode: settings.get().streaming.trackSwitchMode[type] + }, { streamId: id }); } if (!noSettingsSave) { @@ -216,6 +203,7 @@ function MediaController() { settings.audioChannelConfiguration = settings.audioChannelConfiguration[0]; } + lastSelectedTracks[type] = settings; domStorage.setSavedMediaSettings(type, settings); } } @@ -249,75 +237,12 @@ function MediaController() { domStorage.setSavedMediaSettings(Constants.FRAGMENTED_TEXT, null); } - /** - * @param {string} type - * @param {string} mode - * @memberof MediaController# - * @deprecated Please use updateSettings({streaming: { trackSwitchMode: mode } }) instead - */ - function setSwitchMode(type, mode) { - logger.warn('deprecated: Please use updateSettings({streaming: { trackSwitchMode: mode } }) instead'); - const isModeSupported = (validTrackSwitchModes.indexOf(mode) !== -1); - - if (!isModeSupported) { - 
logger.warn('Track switch mode is not supported: ' + mode); - return; - } - - let switchMode = {}; - switchMode[type] = mode; - - settings.update({ - streaming: { - trackSwitchMode: switchMode - } - }); - } - - /** - * @param {string} type - * @returns {string} mode - * @memberof MediaController# - */ - function getSwitchMode(type) { - return settings.get().streaming.trackSwitchMode[type]; - } - - /** - * @param {string} mode - * @memberof MediaController# - * @deprecated Please use updateSettings({streaming: { selectionModeForInitialTrack: mode } }) instead - */ - function setSelectionModeForInitialTrack(mode) { - logger.warn('deprecated: Please use updateSettings({streaming: { selectionModeForInitialTrack: mode } }) instead'); - const isModeSupported = (validTrackSelectionModes.indexOf(mode) !== -1); - - if (!isModeSupported) { - logger.warn('Track selection mode is not supported: ' + mode); - return; - } - - settings.update({ - streaming: { - selectionModeForInitialTrack: mode - } - }); - } - - /** - * @returns {string} mode - * @memberof MediaController# - */ - function getSelectionModeForInitialTrack() { - return settings.get().streaming.selectionModeForInitialTrack; - } - /** * @param {string} type * @returns {boolean} * @memberof MediaController# */ - function isMultiTrackSupportedByType(type) { + function _isMultiTrackSupportedByType(type) { return (type === Constants.AUDIO || type === Constants.VIDEO || type === Constants.TEXT || type === Constants.FRAGMENTED_TEXT || type === Constants.IMAGE); } @@ -340,11 +265,12 @@ function MediaController() { const sameId = t1.id === t2.id; const sameViewpoint = t1.viewpoint === t2.viewpoint; const sameLang = t1.lang === t2.lang; + const sameCodec = t1.codec === t2.codec; const sameRoles = t1.roles.toString() === t2.roles.toString(); const sameAccessibility = t1.accessibility.toString() === t2.accessibility.toString(); const sameAudioChannelConfiguration = t1.audioChannelConfiguration.toString() === t2.audioChannelConfiguration.toString(); - return (sameId && sameViewpoint && sameLang && sameRoles && sameAccessibility && sameAudioChannelConfiguration); + return (sameId && sameCodec && sameViewpoint && sameLang && sameRoles && sameAccessibility && sameAudioChannelConfiguration); } function setConfig(config) { @@ -364,6 +290,7 @@ function MediaController() { */ function reset() { tracks = {}; + lastSelectedTracks = {}; resetInitialSettings(); } @@ -376,12 +303,12 @@ function MediaController() { audioChannelConfiguration: mediaInfo.audioChannelConfiguration }; let notEmpty = settings.lang || settings.viewpoint || (settings.role && settings.role.length > 0) || - (settings.accessibility && settings.accessibility.length > 0) || (settings.audioChannelConfiguration && settings.audioChannelConfiguration.length > 0); + (settings.accessibility && settings.accessibility.length > 0) || (settings.audioChannelConfiguration && settings.audioChannelConfiguration.length > 0); return notEmpty ? 
settings : null; } - function matchSettings(settings, track) { + function matchSettings(settings, track, isTrackActive = false) { const matchLang = !settings.lang || (track.lang.match(settings.lang)); const matchIndex = (settings.index === undefined) || (settings.index === null) || (track.index === settings.index); const matchViewPoint = !settings.viewpoint || (settings.viewpoint === track.viewpoint); @@ -395,7 +322,8 @@ function MediaController() { return item === settings.audioChannelConfiguration; })[0]; - return (matchLang && matchIndex && matchViewPoint && matchRole && matchAccessibility && matchAudioChannelConfiguration); + + return (matchLang && matchIndex && matchViewPoint && (matchRole || (track.type === Constants.AUDIO && isTrackActive)) && matchAccessibility && matchAudioChannelConfiguration); } function resetInitialSettings() { @@ -406,13 +334,15 @@ function MediaController() { }; } - function getTracksWithHighestBitrate (trackArr) { + function getTracksWithHighestBitrate(trackArr) { let max = 0; let result = []; let tmp; trackArr.forEach(function (track) { - tmp = Math.max.apply(Math, track.bitrateList.map(function (obj) { return obj.bandwidth; })); + tmp = Math.max.apply(Math, track.bitrateList.map(function (obj) { + return obj.bandwidth; + })); if (tmp > max) { max = tmp; @@ -425,7 +355,7 @@ function MediaController() { return result; } - function getTracksWithHighestEfficiency (trackArr) { + function getTracksWithHighestEfficiency(trackArr) { let min = Infinity; let result = []; let tmp; @@ -449,7 +379,7 @@ function MediaController() { return result; } - function getTracksWithWidestRange (trackArr) { + function getTracksWithWidestRange(trackArr) { let max = 0; let result = []; let tmp; @@ -471,7 +401,7 @@ function MediaController() { function selectInitialTrack(type, tracks) { if (type === Constants.FRAGMENTED_TEXT) return tracks[0]; - let mode = getSelectionModeForInitialTrack(); + let mode = settings.get().streaming.selectionModeForInitialTrack; let tmpArr = []; switch (mode) { @@ -538,28 +468,23 @@ function MediaController() { } instance = { - checkInitialMediaSettingsForType: checkInitialMediaSettingsForType, - addTrack: addTrack, - getTracksFor: getTracksFor, - getCurrentTrackFor: getCurrentTrackFor, - isCurrentTrack: isCurrentTrack, - setTrack: setTrack, - setInitialSettings: setInitialSettings, - getInitialSettings: getInitialSettings, - setSwitchMode: setSwitchMode, - getSwitchMode: getSwitchMode, - selectInitialTrack: selectInitialTrack, - getTracksWithHighestBitrate: getTracksWithHighestBitrate, - getTracksWithHighestEfficiency: getTracksWithHighestEfficiency, - getTracksWithWidestRange: getTracksWithWidestRange, - setSelectionModeForInitialTrack: setSelectionModeForInitialTrack, - getSelectionModeForInitialTrack: getSelectionModeForInitialTrack, - isMultiTrackSupportedByType: isMultiTrackSupportedByType, - isTracksEqual: isTracksEqual, - matchSettings: matchSettings, - saveTextSettingsDisabled: saveTextSettingsDisabled, - setConfig: setConfig, - reset: reset + setInitialMediaSettingsForType, + addTrack, + getTracksFor, + getCurrentTrackFor, + isCurrentTrack, + setTrack, + selectInitialTrack, + setInitialSettings, + getInitialSettings, + getTracksWithHighestBitrate, + getTracksWithHighestEfficiency, + getTracksWithWidestRange, + isTracksEqual, + matchSettings, + saveTextSettingsDisabled, + setConfig, + reset }; setup(); diff --git a/src/streaming/controllers/MediaSourceController.js b/src/streaming/controllers/MediaSourceController.js index d86de0643f..0c6e40cf3f 
100644
--- a/src/streaming/controllers/MediaSourceController.js
+++ b/src/streaming/controllers/MediaSourceController.js
@@ -34,6 +34,7 @@ import Debug from '../../core/Debug';
 function MediaSourceController() {

     let instance,
+        mediaSource,
         logger;

     const context = this.context;
@@ -48,17 +49,17 @@ function MediaSourceController() {
         let hasMediaSource = ('MediaSource' in window);

         if (hasMediaSource) {
-            return new MediaSource();
+            mediaSource = new MediaSource();
         } else if (hasWebKit) {
-            return new WebKitMediaSource();
+            mediaSource = new WebKitMediaSource();
         }

-        return null;
+        return mediaSource;
     }

-    function attachMediaSource(source, videoModel) {
+    function attachMediaSource(videoModel) {
-        let objectURL = window.URL.createObjectURL(source);
+        let objectURL = window.URL.createObjectURL(mediaSource);

         videoModel.setSource(objectURL);

@@ -69,24 +70,24 @@ function MediaSourceController() {
         videoModel.setSource(null);
     }

-    function setDuration(source, value) {
-        if (!source || source.readyState !== 'open') return;
+    function setDuration(value) {
+        if (!mediaSource || mediaSource.readyState !== 'open') return;
         if (value === null && isNaN(value)) return;
-        if (source.duration === value) return;
+        if (mediaSource.duration === value) return;

-        if (!isBufferUpdating(source)) {
+        if (!isBufferUpdating(mediaSource)) {
             logger.info('Set MediaSource duration:' + value);
-            source.duration = value;
+            mediaSource.duration = value;
         } else {
-            setTimeout(setDuration.bind(null, source, value), 50);
+            setTimeout(setDuration.bind(null, value), 50); // retry while the SourceBuffers are still updating
         }
     }

-    function setSeekable(source, start, end) {
-        if (source && typeof source.setLiveSeekableRange === 'function' && typeof source.clearLiveSeekableRange === 'function' &&
-            source.readyState === 'open' && start >= 0 && start < end) {
-            source.clearLiveSeekableRange();
-            source.setLiveSeekableRange(start, end);
+    function setSeekable(start, end) {
+        if (mediaSource && typeof mediaSource.setLiveSeekableRange === 'function' && typeof mediaSource.clearLiveSeekableRange === 'function' &&
+            mediaSource.readyState === 'open' && start >= 0 && start < end) {
+            mediaSource.clearLiveSeekableRange();
+            mediaSource.setLiveSeekableRange(start, end);
         }
     }

@@ -120,12 +121,12 @@ function MediaSourceController() {
     }

     instance = {
-        createMediaSource: createMediaSource,
-        attachMediaSource: attachMediaSource,
-        detachMediaSource: detachMediaSource,
-        setDuration: setDuration,
-        setSeekable: setSeekable,
-        signalEndOfStream: signalEndOfStream
+        createMediaSource,
+        attachMediaSource,
+        detachMediaSource,
+        setDuration,
+        setSeekable,
+        signalEndOfStream
     };

     setup();
diff --git a/src/streaming/controllers/PlaybackController.js b/src/streaming/controllers/PlaybackController.js
index 3a501056d0..7114d5e570 100644
--- a/src/streaming/controllers/PlaybackController.js
+++ b/src/streaming/controllers/PlaybackController.js
@@ -49,8 +49,6 @@ function PlaybackController() {
         adapter,
         videoModel,
         timelineConverter,
-        streamSwitch,
-        streamSeekTime,
         wallclockTimeIntervalId,
         liveDelay,
         streamInfo,
@@ -64,7 +62,6 @@ function PlaybackController() {
         isLowLatencySeekingInProgress,
         playbackStalled,
         minPlaybackRateChange,
-        uriFragmentModel,
         settings;

     function setup() {
@@ -73,23 +70,37 @@ function PlaybackController() {
         reset();
     }

-    function initialize(sInfo, periodSwitch, seekTime) {
+    /**
+     * Initializes the PlaybackController. This function is called whenever the stream is switched.
+ * @param {object} sInfo + * @param {boolean} periodSwitch + */ + function initialize(sInfo, periodSwitch) { streamInfo = sInfo; + + if (periodSwitch !== true) { + _initializeForFirstStream(); + } else { + _initializeAfterStreamSwitch(); + } + } + + /** + * Initializes the PlaybackController when the first stream is to be played. + * @private + */ + function _initializeForFirstStream() { addAllListeners(); isDynamic = streamInfo.manifestInfo.isDynamic; isLowLatencySeekingInProgress = false; playbackStalled = false; - streamSwitch = periodSwitch === true; - streamSeekTime = seekTime; internalSeek = false; - const ua = typeof navigator !== 'undefined' ? navigator.userAgent.toLowerCase() : ''; - // Detect safari browser (special behavior for low latency streams) + const ua = typeof navigator !== 'undefined' ? navigator.userAgent.toLowerCase() : ''; const isSafari = /safari/.test(ua) && !/chrome/.test(ua); minPlaybackRateChange = isSafari ? 0.25 : 0.02; - eventBus.on(Events.STREAM_INITIALIZED, onStreamInitialized, this); eventBus.on(Events.DATA_UPDATE_COMPLETED, onDataUpdateCompleted, this); eventBus.on(Events.LOADING_PROGRESS, onFragmentLoadProgress, this); eventBus.on(Events.BUFFER_LEVEL_STATE_CHANGED, onBufferLevelStateChanged, this); @@ -104,56 +115,21 @@ function PlaybackController() { } } - function onStreamInitialized(e) { - // Seamless period switch - if (streamSwitch && isNaN(streamSeekTime)) return; - - // Seek new stream in priority order: - // - at seek time (streamSeekTime) when switching period - // - at start time provided in URI parameters - // - at stream/period start time (for static streams) or live start time (for dynamic streams) - let startTime = streamSeekTime; - if (isNaN(startTime)) { - if (isDynamic) { - // For dynamic stream, start by default at (live edge - live delay) - startTime = e.liveStartTime; - // If start time in URI, take min value between live edge time and time from URI (capped by DVR window range) - const dvrInfo = dashMetrics.getCurrentDVRInfo(); - const dvrWindow = dvrInfo ? dvrInfo.range : null; - if (dvrWindow) { - // #t shall be relative to period start - const startTimeFromUri = getStartTimeFromUriParameters(true); - if (!isNaN(startTimeFromUri)) { - logger.info('Start time from URI parameters: ' + startTimeFromUri); - startTime = Math.max(Math.min(startTime, startTimeFromUri), dvrWindow.start); - } - } - } else { - // For static stream, start by default at period start - startTime = streamInfo.start; - // If start time in URI, take max value between period start and time from URI (if in period range) - const startTimeFromUri = getStartTimeFromUriParameters(false); - if (!isNaN(startTimeFromUri) && startTimeFromUri < (startTime + streamInfo.duration)) { - logger.info('Start time from URI parameters: ' + startTimeFromUri); - startTime = Math.max(startTime, startTimeFromUri); - } - } - } + /** + * Initializes the PlaybackController after the stream is switched. This will only happen with multiperiod MPDs. 
+ * @private + */ + function _initializeAfterStreamSwitch() { - if (!isNaN(startTime) && startTime !== videoModel.getTime()) { - // Trigger PLAYBACK_SEEKING event for controllers - eventBus.trigger(Events.PLAYBACK_SEEKING, { seekTime: startTime }); - // Seek video model - seek(startTime, false, true); - } } - function getTimeToStreamEnd() { - return parseFloat((getStreamEndTime() - getTime()).toFixed(5)); + function getTimeToStreamEnd(sInfo = null) { + return parseFloat((getStreamEndTime(sInfo) - getTime()).toFixed(5)); } - function getStreamEndTime() { - return streamInfo.start + streamInfo.duration; + function getStreamEndTime(sInfo) { + const refInfo = sInfo ? sInfo : streamInfo; + return refInfo.start + refInfo.duration; } function play() { @@ -257,13 +233,11 @@ function PlaybackController() { let suggestedPresentationDelay = adapter.getSuggestedPresentationDelay(); - if (settings.get().streaming.lowLatencyEnabled) { - delay = 0; - } else if (mediaPlayerModel.getLiveDelay()) { + if (mediaPlayerModel.getLiveDelay()) { delay = mediaPlayerModel.getLiveDelay(); // If set by user, this value takes precedence - } else if (settings.get().streaming.liveDelayFragmentCount !== null && !isNaN(settings.get().streaming.liveDelayFragmentCount) && !isNaN(adjustedFragmentDuration)) { - delay = adjustedFragmentDuration * settings.get().streaming.liveDelayFragmentCount; - } else if (settings.get().streaming.useSuggestedPresentationDelay === true && suggestedPresentationDelay !== null && !isNaN(suggestedPresentationDelay) && suggestedPresentationDelay > 0) { + } else if (settings.get().streaming.delay.liveDelayFragmentCount !== null && !isNaN(settings.get().streaming.delay.liveDelayFragmentCount) && !isNaN(adjustedFragmentDuration)) { + delay = adjustedFragmentDuration * settings.get().streaming.delay.liveDelayFragmentCount; + } else if (settings.get().streaming.delay.useSuggestedPresentationDelay === true && suggestedPresentationDelay !== null && !isNaN(suggestedPresentationDelay) && suggestedPresentationDelay > 0) { delay = suggestedPresentationDelay; } else if (!isNaN(adjustedFragmentDuration)) { delay = adjustedFragmentDuration * FRAGMENT_DURATION_FACTOR; @@ -290,16 +264,12 @@ function PlaybackController() { return ret; } - function getLiveDelay() { - return liveDelay; + function getAvailabilityStartTime() { + return availabilityStartTime; } - function setLiveDelay(value, useMaxValue = false) { - if (useMaxValue && value < liveDelay) { - return; - } - - liveDelay = value; + function getLiveDelay() { + return liveDelay; } function getCurrentLiveLatency() { @@ -316,14 +286,12 @@ function PlaybackController() { } function reset() { + pause(); playOnceInitialized = false; - streamSwitch = false; - streamSeekTime = NaN; liveDelay = 0; availabilityStartTime = 0; seekTarget = NaN; if (videoModel) { - eventBus.off(Events.STREAM_INITIALIZED, onStreamInitialized, this); eventBus.off(Events.DATA_UPDATE_COMPLETED, onDataUpdateCompleted, this); eventBus.off(Events.BUFFER_LEVEL_STATE_CHANGED, onBufferLevelStateChanged, this); eventBus.off(Events.LOADING_PROGRESS, onFragmentLoadProgress, this); @@ -361,31 +329,11 @@ function PlaybackController() { if (config.timelineConverter) { timelineConverter = config.timelineConverter; } - if (config.uriFragmentModel) { - uriFragmentModel = config.uriFragmentModel; - } if (config.settings) { settings = config.settings; } } - function getStartTimeFromUriParameters(isDynamic) { - const fragData = uriFragmentModel.getURIFragmentData(); - if (!fragData || !fragData.t) { - return 
NaN; - } - const refStream = streamController.getStreams()[0]; - const refStreamStartTime = refStream.getStreamInfo().start; - // Consider only start time of MediaRange - // TODO: consider end time of MediaRange to stop playback at provided end time - fragData.t = fragData.t.split(',')[0]; - // "t=