diff --git a/demo/common/assets.js b/demo/common/assets.js index 3ac402f91a..cf7b289720 100644 --- a/demo/common/assets.js +++ b/demo/common/assets.js @@ -131,6 +131,9 @@ shakaAssets.Feature = { DASH: shakaDemo.MessageIds.DASH, // Set if the asset is an HLS manifest. HLS: shakaDemo.MessageIds.HLS, + + // Set if the asset has at least one image stream. + THUMBNAILS: shakaDemo.MessageIds.THUMBNAILS, }; @@ -676,6 +679,17 @@ shakaAssets.testAssets = [ .addFeature(shakaAssets.Feature.MP4) .addFeature(shakaAssets.Feature.SUBTITLES) .addFeature(shakaAssets.Feature.OFFLINE), + new ShakaDemoAssetInfo( + /* name= */ 'Tears of Steel (Thumbnails)', + /* iconUri= */ 'https://storage.googleapis.com/shaka-asset-icons/tears_of_steel.png', + /* manifestUri= */ 'https://demo.unified-streaming.com/video/tears-of-steel/tears-of-steel-tiled-thumbnails-timeline.ism/.mpd', + /* source= */ shakaAssets.Source.UNIFIED_STREAMING) + .addFeature(shakaAssets.Feature.DASH) + .addFeature(shakaAssets.Feature.HIGH_DEFINITION) + .addFeature(shakaAssets.Feature.MP4) + .addFeature(shakaAssets.Feature.SUBTITLES) + .addFeature(shakaAssets.Feature.OFFLINE) + .addFeature(shakaAssets.Feature.THUMBNAILS), // End Unified Streaming assets }}} // DASH-IF assets {{{ @@ -776,6 +790,52 @@ shakaAssets.testAssets = [ .addFeature(shakaAssets.Feature.DASH) .addFeature(shakaAssets.Feature.LIVE) .addFeature(shakaAssets.Feature.MP4), + new ShakaDemoAssetInfo( + /* name= */ 'DASH-IF THUMBNAILS - Single adaptation set, 7 tiles at 10x1, each thumb 320x180', + /* iconUri= */ 'https://storage.googleapis.com/shaka-asset-icons/dash_if_test_pattern.png', + /* manifestUri= */ 'https://dash.akamaized.net/akamai/bbb_30fps/bbb_with_tiled_thumbnails.mpd', + /* source= */ shakaAssets.Source.DASH_IF) + .addFeature(shakaAssets.Feature.DASH) + .addFeature(shakaAssets.Feature.ULTRA_HIGH_DEFINITION) + .addFeature(shakaAssets.Feature.MP4) + .addFeature(shakaAssets.Feature.THUMBNAILS), + new ShakaDemoAssetInfo( + /* name= */ 'DASH-IF THUMBNAILS - Single adaptation set, 4 tiles at 10x1, each thumb 205x115', + /* iconUri= */ 'https://storage.googleapis.com/shaka-asset-icons/dash_if_test_pattern.png', + /* manifestUri= */ 'https://dash.akamaized.net/akamai/bbb_30fps/bbb_with_4_tiles_thumbnails.mpd', + /* source= */ shakaAssets.Source.DASH_IF) + .addFeature(shakaAssets.Feature.DASH) + .addFeature(shakaAssets.Feature.ULTRA_HIGH_DEFINITION) + .addFeature(shakaAssets.Feature.MP4) + .addFeature(shakaAssets.Feature.THUMBNAILS), + new ShakaDemoAssetInfo( + /* name= */ 'DASH-IF THUMBNAILS - Single adaptation set, 1 tile at 10x20, each thumb 102x58', + /* iconUri= */ 'https://storage.googleapis.com/shaka-asset-icons/dash_if_test_pattern.png', + /* manifestUri= */ 'https://dash.akamaized.net/akamai/bbb_30fps/bbb_with_tiled_thumbnails_2.mpd', + /* source= */ shakaAssets.Source.DASH_IF) + .addFeature(shakaAssets.Feature.DASH) + .addFeature(shakaAssets.Feature.ULTRA_HIGH_DEFINITION) + .addFeature(shakaAssets.Feature.MP4) + .addFeature(shakaAssets.Feature.THUMBNAILS), + new ShakaDemoAssetInfo( + /* name= */ 'DASH-IF THUMBNAILS - Two adaptation sets with different thumb resolutions', + /* iconUri= */ 'https://storage.googleapis.com/shaka-asset-icons/dash_if_test_pattern.png', + /* manifestUri= */ 'https://dash.akamaized.net/akamai/bbb_30fps/bbb_with_multiple_tiled_thumbnails.mpd', + /* source= */ shakaAssets.Source.DASH_IF) + .addFeature(shakaAssets.Feature.DASH) + .addFeature(shakaAssets.Feature.ULTRA_HIGH_DEFINITION) + .addFeature(shakaAssets.Feature.MP4) + 
.addFeature(shakaAssets.Feature.THUMBNAILS), + new ShakaDemoAssetInfo( + /* name= */ 'DASH-IF THUMBNAILS - Live stream, Single adaptation set, 1x1 tiles (livesim)', + /* iconUri= */ 'https://storage.googleapis.com/shaka-asset-icons/dash_if_test_pattern.png', + /* manifestUri= */ 'https://livesim.dashif.org/livesim/testpic_2s/Manifest_thumbs.mpd', + /* source= */ shakaAssets.Source.DASH_IF) + .addFeature(shakaAssets.Feature.DASH) + .addFeature(shakaAssets.Feature.ULTRA_HIGH_DEFINITION) + .addFeature(shakaAssets.Feature.MP4) + .addFeature(shakaAssets.Feature.LIVE) + .addFeature(shakaAssets.Feature.THUMBNAILS), // End DASH-IF Assets }}} // bitcodin assets {{{ diff --git a/demo/common/message_ids.js b/demo/common/message_ids.js index e388e4d97f..be0e68e1cc 100644 --- a/demo/common/message_ids.js +++ b/demo/common/message_ids.js @@ -24,6 +24,7 @@ shakaDemo.MessageIds = { STORED: 'DEMO_STORED', SUBTITLES: 'DEMO_SUBTITLES', SURROUND: 'DEMO_SURROUND', + THUMBNAILS: 'DEMO_THUMBNAILS', TRICK_MODE: 'DEMO_TRICK_MODE', ULTRA_HIGH_DEFINITION: 'DEMO_ULTRA_HIGH_DEFINITION', VOD: 'DEMO_VOD', @@ -155,6 +156,7 @@ shakaDemo.MessageIds = { DELAY_LICENSE: 'DEMO_DELAY_LICENSE', DISABLE_AUDIO: 'DEMO_DISABLE_AUDIO', DISABLE_TEXT: 'DEMO_DISABLE_TEXT', + DISABLE_THUMBNAILS: 'DEMO_DISABLE_THUMBNAILS', DISABLE_VIDEO: 'DEMO_DISABLE_VIDEO', DRM_RETRY_SECTION_HEADER: 'DEMO_DRM_RETRY_SECTION_HEADER', DRM_SECTION_HEADER: 'DEMO_DRM_SECTION_HEADER', diff --git a/demo/config.js b/demo/config.js index c18f171e08..d2697c2488 100644 --- a/demo/config.js +++ b/demo/config.js @@ -208,7 +208,9 @@ shakaDemo.Config = class { .addBoolInput_(MessageIds.DISABLE_VIDEO, 'manifest.disableVideo') .addBoolInput_(MessageIds.DISABLE_TEXT, - 'manifest.disableText'); + 'manifest.disableText') + .addBoolInput_(MessageIds.DISABLE_THUMBNAILS, + 'manifest.disableThumbnails'); this.addRetrySection_('manifest', MessageIds.MANIFEST_RETRY_SECTION_HEADER); } diff --git a/demo/locales/en.json b/demo/locales/en.json index 0763a457b3..a614acaa13 100644 --- a/demo/locales/en.json +++ b/demo/locales/en.json @@ -52,6 +52,7 @@ "DEMO_DEMO_MODE_HEADER": "DEMO MODE", "DEMO_DISABLE_AUDIO": "Disable Audio", "DEMO_DISABLE_TEXT": "Disable Text", + "DEMO_DISABLE_THUMBNAILS": "Disable Thumbnails", "DEMO_DISABLE_VIDEO": "Disable Video", "DEMO_DOCUMENTATION": "Documentation", "DEMO_DRM_RETRY_SECTION_HEADER": "DRM Retry Parameters", @@ -176,6 +177,7 @@ "DEMO_SURROUND_SEARCH": "Filters for assets with at least one surround sound audio track.", "DEMO_SWITCH_INTERVAL": "Switch Interval", "DEMO_TEXT_LANGUAGE": "Preferred Text Language", + "DEMO_THUMBNAILS": "Thumbnails", "DEMO_TIMEOUT": "Timeout Factor", "DEMO_TRICK_MODE": "Special trick mode track", "DEMO_TRICK_MODE_SEARCH": "Filters for assets that have special video tracks to be used in trick mode playback (aka fast-forward).", diff --git a/demo/locales/source.json b/demo/locales/source.json index 428774128e..b391be0666 100644 --- a/demo/locales/source.json +++ b/demo/locales/source.json @@ -211,6 +211,10 @@ "description": "The name of a configuration value.", "message": "Disable Text" }, + "DEMO_DISABLE_THUMBNAILS": { + "description": "The name of a configuration value.", + "message": "Disable Thumbnails" + }, "DEMO_DISABLE_VIDEO": { "description": "The name of a configuration value.", "message": "Disable Video" @@ -707,6 +711,10 @@ "description": "The name of a configuration value.", "message": "Preferred Text Language" }, + "DEMO_THUMBNAILS": { + "description": "Text that describes an asset that has a thumbnail stream.", 
+ "message": "Thumbnails" + }, "DEMO_TIMEOUT": { "description": "The name of a configuration value.", "message": "Timeout Factor" diff --git a/demo/search.js b/demo/search.js index 2548310b5c..f8783fc983 100644 --- a/demo/search.js +++ b/demo/search.js @@ -398,6 +398,8 @@ shakaDemo.Search = class { shakaDemo.MessageIds.AD_SEARCH); this.makeBooleanInput_(specialContainer, Feature.AUDIO_ONLY, FEATURE, shakaDemo.MessageIds.AUDIO_ONLY_SEARCH); + this.makeBooleanInput_(specialContainer, Feature.THUMBNAILS, FEATURE, + shakaDemo.MessageIds.THUMBNAILS); container.appendChild(this.resultsDiv_); } diff --git a/externs/shaka/manifest.js b/externs/shaka/manifest.js index 3c59f3bc34..93e347a855 100644 --- a/externs/shaka/manifest.js +++ b/externs/shaka/manifest.js @@ -15,6 +15,7 @@ * presentationTimeline: !shaka.media.PresentationTimeline, * variants: !Array., * textStreams: !Array., + * imageStreams: !Array., * offlineSessionIds: !Array., * minBufferTime: number * }} @@ -60,6 +61,9 @@ * @property {!Array.} textStreams * Required.
* The presentation's text streams. + * @property {!Array.} imageStreams + * Required.
+ * The presentation's image streams.
 * @property {!Array.} offlineSessionIds
 * Defaults to [].
* An array of EME sessions to load for offline playback. @@ -244,7 +248,8 @@ shaka.extern.CreateSegmentIndexFunction; * channelsCount: ?number, * audioSamplingRate: ?number, * spatialAudio: boolean, - * closedCaptions: Map. + * closedCaptions: Map., + * tilesLayout: (string|undefined) * }} * * @description @@ -346,6 +351,11 @@ shaka.extern.CreateSegmentIndexFunction; * as the value. If the channel number is not provided by the description, * we'll set an 0-based index as the key. * Example: {'CC1': 'eng'; 'CC3': 'swe'}, or {'1', 'eng'; '2': 'swe'}, etc. + * @property {(string|undefined)} tilesLayout + * Image streams only.
+ * The value is a grid-item-dimension consisting of two positive decimal + * integers in the format: column-x-row ('4x3'). It describes the arrangement + * of Images in a Grid. The minimum valid LAYOUT is '1x1'. * @exportDoc */ shaka.extern.Stream; diff --git a/externs/shaka/offline.js b/externs/shaka/offline.js index 9d9be5001f..3ba632cd0a 100644 --- a/externs/shaka/offline.js +++ b/externs/shaka/offline.js @@ -120,7 +120,8 @@ shaka.extern.ManifestDB; * channelsCount: ?number, * audioSamplingRate: ?number, * spatialAudio: boolean, - * closedCaptions: Map. + * closedCaptions: Map., + * tilesLayout: (string|undefined) * }} * * @property {number} id @@ -168,7 +169,7 @@ shaka.extern.ManifestDB; * @property {?number} channelsCount * The channel count information for the audio stream. * @property {?number} audioSamplingRate - * Specifies the maximum sampling rate of the content + * Specifies the maximum sampling rate of the content. * @property {boolean} spatialAudio * Whether the stream set has spatial audio. * @property {Map.} closedCaptions @@ -177,6 +178,10 @@ shaka.extern.ManifestDB; * as the value. If the channel number is not provided by the description, * we'll set an 0-based index as the key. * Example: {'CC1': 'eng'; 'CC3': 'swe'}, or {'1', 'eng'; '2': 'swe'}, etc. + * @property {(string|undefined)} tilesLayout + * The value is a grid-item-dimension consisting of two positive decimal + * integers in the format: column-x-row ('4x3'). It describes the arrangement + * of Images in a Grid. The minimum valid LAYOUT is '1x1'. */ shaka.extern.StreamDB; diff --git a/externs/shaka/player.js b/externs/shaka/player.js index d02edaeb96..abc077a4cc 100644 --- a/externs/shaka/player.js +++ b/externs/shaka/player.js @@ -219,19 +219,21 @@ shaka.extern.BufferedInfo; * audioId: ?number, * channelsCount: ?number, * audioSamplingRate: ?number, + * tilesLayout: ?string, * audioBandwidth: ?number, * videoBandwidth: ?number, * spatialAudio: boolean, * originalVideoId: ?string, * originalAudioId: ?string, - * originalTextId: ?string + * originalTextId: ?string, + * originalImageId: ?string * }} * * @description * An object describing a media track. This object should be treated as * read-only as changing any values does not have any effect. This is the * public view of an audio/video paring (variant type) or text track (text - * type). + * type) or image track (image type). * * @property {number} id * The unique ID of the track. @@ -240,7 +242,8 @@ shaka.extern.BufferedInfo; * visible/audible in the buffer). * * @property {string} type - * The type of track, either 'variant' or 'text'. + * The type of track, either 'variant' or 'text' + * or 'image'. * @property {number} bandwidth * The bandwidth required to play the track, in bits/sec. * @@ -294,6 +297,10 @@ shaka.extern.BufferedInfo; * The count of the audio track channels. * @property {?number} audioSamplingRate * Specifies the maximum sampling rate of the content. + * @property {?string} tilesLayout + * The value is a grid-item-dimension consisting of two positive decimal + * integers in the format: column-x-row ('4x3'). It describes the arrangement + * of Images in a Grid. The minimum valid LAYOUT is '1x1'. * @property {boolean} spatialAudio * True indicates that the content has spatial audio. * This flag is based on signals from the manifest. @@ -310,6 +317,9 @@ shaka.extern.BufferedInfo; * @property {?string} originalTextId * (text tracks only) The original ID of the text track, if any, as it * appeared in the original manifest. 
+ * @property {?string} originalImageId + * (image tracks only) The original ID of the image track, if any, as it + * appeared in the original manifest. * @exportDoc */ shaka.extern.Track; @@ -681,6 +691,7 @@ shaka.extern.HlsManifestConfiguration; * disableAudio: boolean, * disableVideo: boolean, * disableText: boolean, + * disableThumbnails: boolean, * defaultPresentationDelay: number, * dash: shaka.extern.DashManifestConfiguration, * hls: shaka.extern.HlsManifestConfiguration @@ -702,6 +713,9 @@ shaka.extern.HlsManifestConfiguration; * @property {boolean} disableText * If true, the text tracks are ignored. * Defaults to false. + * @property {boolean} disableThumbnails + * If true, the image tracks are ignored. + * Defaults to false. * @property {number} defaultPresentationDelay * A default presentationDelay value. * For DASH, it's a default presentationDelay value if @@ -1015,3 +1029,28 @@ shaka.extern.PlayerConfiguration; * @exportDoc */ shaka.extern.LanguageRole; + + +/** + * @typedef {{ + * height: number, + * positionX: number, + * positionY: number, + * uris: !Array., + * width: number + * }} + * + * @property {number} height + * The thumbnail height in px. + * @property {number} positionX + * The thumbnail left position in px. + * @property {number} positionY + * The thumbnail top position in px. + * @property {!Array.} uris + * An array of URIs to attempt. They will be tried in the order they are + * given. + * @property {number} width + * The thumbnail width in px. + * @exportDoc + */ +shaka.extern.Thumbnail; diff --git a/lib/cast/cast_utils.js b/lib/cast/cast_utils.js index e4c81d8f35..fade3dce79 100644 --- a/lib/cast/cast_utils.js +++ b/lib/cast/cast_utils.js @@ -320,6 +320,8 @@ shaka.cast.CastUtils.PlayerGetterMethods = { 'getTextTracks': 2, 'getStats': 5, 'getVariantTracks': 2, + 'getImageTracks': 2, + 'getThumbnails': 2, 'isAudioOnly': 10, 'isBuffering': 1, 'isInProgress': 1, diff --git a/lib/dash/dash_parser.js b/lib/dash/dash_parser.js index e61e7d46ac..2b19c7aaf6 100644 --- a/lib/dash/dash_parser.js +++ b/lib/dash/dash_parser.js @@ -442,6 +442,7 @@ shaka.dash.DashParser = class { presentationTimeline: presentationTimeline, variants: this.periodCombiner_.getVariants(), textStreams: this.periodCombiner_.getTextStreams(), + imageStreams: this.periodCombiner_.getImageStreams(), offlineSessionIds: [], minBufferTime: minBufferTime || 0, }; @@ -463,6 +464,7 @@ shaka.dash.DashParser = class { // are added or removed. this.manifest_.variants = this.periodCombiner_.getVariants(); this.manifest_.textStreams = this.periodCombiner_.getTextStreams(); + this.manifest_.imageStreams = this.periodCombiner_.getImageStreams(); } // Add text streams to correspond to closed captions. This happens right @@ -673,6 +675,8 @@ shaka.dash.DashParser = class { this.getSetsOfType_(normalAdaptationSets, ContentType.VIDEO); const textSets = this.config_.disableText ? [] : this.getSetsOfType_(normalAdaptationSets, ContentType.TEXT); + const imageSets = this.config_.disableThumbnails ? 
[] : + this.getSetsOfType_(normalAdaptationSets, ContentType.IMAGE); if (!videoSets.length && !audioSets.length) { throw new shaka.util.Error( @@ -696,11 +700,17 @@ shaka.dash.DashParser = class { textStreams.push(...textSet.streams); } + const imageStreams = []; + for (const imageSet of imageSets) { + imageStreams.push(...imageSet.streams); + } + return { id: context.period.id, audioStreams, videoStreams, textStreams, + imageStreams, }; } @@ -733,14 +743,6 @@ shaka.dash.DashParser = class { context.adaptationSet = this.createFrame_(elem, context.period, null); - // Filter image adaptation sets, until we add thumbnail track support. - // See: https://github.com/google/shaka-player/issues/559 - const isImage = context.adaptationSet.contentType == ContentType.IMAGE; - if (isImage) { - shaka.log.warning('Skipping Image AdaptationSet', context.adaptationSet); - return null; - } - let main = false; const roleElements = XmlUtils.findChildren(elem, 'Role'); const roleValues = roleElements.map((role) => { @@ -914,9 +916,10 @@ shaka.dash.DashParser = class { }).filter((s) => !!s); if (streams.length == 0) { + const isImage = context.adaptationSet.contentType == ContentType.IMAGE; // Ignore empty AdaptationSets if ignoreEmptyAdaptationSet is true - // or they are for text content. - if (this.config_.dash.ignoreEmptyAdaptationSet || isText) { + // or they are for text/image content. + if (this.config_.dash.ignoreEmptyAdaptationSet || isText || isImage) { return null; } throw new shaka.util.Error( @@ -1017,6 +1020,7 @@ shaka.dash.DashParser = class { const contentType = context.representation.contentType; const isText = contentType == ContentType.TEXT || contentType == ContentType.APPLICATION; + const isImage = contentType == ContentType.IMAGE; try { const requestInitSegment = (uris, startByte, endByte) => { @@ -1047,8 +1051,10 @@ shaka.dash.DashParser = class { }; } } catch (error) { - if (isText && error.code == shaka.util.Error.Code.DASH_NO_SEGMENT_INFO) { - // We will ignore any DASH_NO_SEGMENT_INFO errors for text streams. + if ((isText || isImage) && + error.code == shaka.util.Error.Code.DASH_NO_SEGMENT_INFO) { + // We will ignore any DASH_NO_SEGMENT_INFO errors for text/image + // streams. return null; } @@ -1085,6 +1091,26 @@ shaka.dash.DashParser = class { forced = roles.includes('forced_subtitle'); } + let tilesLayout; + if (isImage) { + const essentialPropertyElems = + XmlUtils.findChildren(node, 'EssentialProperty'); + const thumbnailTileElem = essentialPropertyElems.find((element) => { + const expectedUris = [ + 'http://dashif.org/thumbnail_tile', + 'http://dashif.org/guidelines/thumbnail_tile', + ]; + return expectedUris.includes(element.getAttribute('schemeIdUri')); + }); + if (thumbnailTileElem) { + tilesLayout = thumbnailTileElem.getAttribute('value'); + } + // Filter image adaptation sets that has no tilesLayout. 
+ if (!tilesLayout) { + return null; + } + } + /** @type {shaka.extern.Stream} */ const stream = { id: this.globalId_++, @@ -1119,6 +1145,7 @@ shaka.dash.DashParser = class { audioSamplingRate: context.representation.audioSamplingRate, spatialAudio: spatialAudio, closedCaptions, + tilesLayout, }; return stream; } diff --git a/lib/dash/mpd_utils.js b/lib/dash/mpd_utils.js index 4c18e01656..c18200f338 100644 --- a/lib/dash/mpd_utils.js +++ b/lib/dash/mpd_utils.js @@ -268,6 +268,12 @@ shaka.dash.MpdUtils = class { const durationStr = MpdUtils.inheritAttribute(context, callback, 'duration'); let segmentDuration = XmlUtils.parsePositiveInt(durationStr || ''); + const ContentType = shaka.util.ManifestParserUtils.ContentType; + // TODO: The specification is not clear, check this once it is resolved: + // https://github.com/Dash-Industry-Forum/DASH-IF-IOP/issues/404 + if (context.representation.contentType == ContentType.IMAGE) { + segmentDuration = XmlUtils.parseFloat(durationStr || ''); + } if (segmentDuration) { segmentDuration /= timescale; } diff --git a/lib/hls/hls_parser.js b/lib/hls/hls_parser.js index ab4cebd926..e9931256ab 100644 --- a/lib/hls/hls_parser.js +++ b/lib/hls/hls_parser.js @@ -531,6 +531,7 @@ shaka.hls.HlsParser = class { presentationTimeline: this.presentationTimeline_, variants, textStreams, + imageStreams: [], offlineSessionIds: [], minBufferTime: 0, }; @@ -1393,6 +1394,7 @@ shaka.hls.HlsParser = class { spatialAudio: spatialAudio, closedCaptions, hdr: undefined, + tilesLayout: undefined, }; return { diff --git a/lib/offline/indexeddb/v1_storage_cell.js b/lib/offline/indexeddb/v1_storage_cell.js index 24dba95ab4..448fed6e0a 100644 --- a/lib/offline/indexeddb/v1_storage_cell.js +++ b/lib/offline/indexeddb/v1_storage_cell.js @@ -174,6 +174,7 @@ shaka.offline.indexeddb.V1StorageCell = class channelsCount: null, spatialAudio: false, closedCaptions: null, + tilesLayout: undefined, }; } diff --git a/lib/offline/indexeddb/v2_storage_cell.js b/lib/offline/indexeddb/v2_storage_cell.js index b3c2954f66..2ee4595beb 100644 --- a/lib/offline/indexeddb/v2_storage_cell.js +++ b/lib/offline/indexeddb/v2_storage_cell.js @@ -123,6 +123,7 @@ shaka.offline.indexeddb.V2StorageCell = class channelsCount: null, spatialAudio: false, closedCaptions: null, + tilesLayout: undefined, }; } diff --git a/lib/offline/manifest_converter.js b/lib/offline/manifest_converter.js index 7859255abe..be774b5fa2 100644 --- a/lib/offline/manifest_converter.js +++ b/lib/offline/manifest_converter.js @@ -63,6 +63,11 @@ shaka.offline.ManifestConverter = class { manifestDB.streams.filter((streamDB) => this.isText_(streamDB)) .map((streamDB) => this.fromStreamDB_(streamDB, timeline)); + /** @type {!Array.} */ + const imageStreams = + manifestDB.streams.filter((streamDB) => this.isImage_(streamDB)) + .map((streamDB) => this.fromStreamDB_(streamDB, timeline)); + const drmInfos = manifestDB.drmInfo ? 
[manifestDB.drmInfo] : []; if (manifestDB.drmInfo) { for (const variant of variants.values()) { @@ -81,6 +86,7 @@ shaka.offline.ManifestConverter = class { offlineSessionIds: manifestDB.sessionIds, variants: Array.from(variants.values()), textStreams: textStreams, + imageStreams: imageStreams, }; } @@ -194,6 +200,7 @@ shaka.offline.ManifestConverter = class { audioSamplingRate: streamDB.audioSamplingRate, spatialAudio: streamDB.spatialAudio, closedCaptions: streamDB.closedCaptions, + tilesLayout: streamDB.tilesLayout, }; return stream; @@ -271,6 +278,16 @@ shaka.offline.ManifestConverter = class { return streamDB.type == ContentType.TEXT; } + /** + * @param {shaka.extern.StreamDB} streamDB + * @return {boolean} + * @private + */ + isImage_(streamDB) { + const ContentType = shaka.util.ManifestParserUtils.ContentType; + return streamDB.type == ContentType.IMAGE; + } + /** * Creates an empty Variant. * diff --git a/lib/offline/storage.js b/lib/offline/storage.js index d526b94a65..d1f0a77892 100644 --- a/lib/offline/storage.js +++ b/lib/offline/storage.js @@ -514,6 +514,10 @@ shaka.offline.Storage = class { allTracks.push(StreamUtils.textStreamToTrack(text)); } + for (const image of manifest.imageStreams) { + allTracks.push(StreamUtils.imageStreamToTrack(image)); + } + // Let the application choose which tracks to store. const chosenTracks = await config.offline.trackSelectionCallback(allTracks); @@ -1154,6 +1158,7 @@ shaka.offline.Storage = class { audioSamplingRate: stream.audioSamplingRate, spatialAudio: stream.spatialAudio, closedCaptions: stream.closedCaptions, + tilesLayout: stream.tilesLayout, }; // Download each stream in parallel. diff --git a/lib/player.js b/lib/player.js index fa5b593b5a..42c2ed866d 100644 --- a/lib/player.js +++ b/lib/player.js @@ -3252,6 +3252,95 @@ shaka.Player = class extends shaka.util.FakeEventTarget { } } + /** + * Return a list of image tracks that can be switched to. + * + * If the player has not loaded content, this will return an empty list. + * + * @return {!Array.} + * @export + */ + getImageTracks() { + if (this.manifest_) { + const imageStreams = this.manifest_.imageStreams; + const StreamUtils = shaka.util.StreamUtils; + return imageStreams.map((image) => StreamUtils.imageStreamToTrack(image)); + } else { + return []; + } + } + + /** + * Return a Thumbnail object from a image track Id and time. + * + * If the player has not loaded content, this will return a null. 
+ * + * @param {number} trackId + * @param {number} time + * @return {!Promise.} + * @export + */ + async getThumbnails(trackId, time) { + if (this.manifest_) { + const imageStream = this.manifest_.imageStreams.find( + (stream) => stream.id == trackId); + if (!imageStream) { + return null; + } + if (!imageStream.segmentIndex) { + await imageStream.createSegmentIndex(); + } + const referencePosition = imageStream.segmentIndex.find(time); + if (referencePosition == null) { + return null; + } + const reference = imageStream.segmentIndex.get(referencePosition); + // This expression is used to detect one or more numbers (0-9) followed + // by an x and after one or more numbers (0-9) + const match = /(\d+)x(\d+)/.exec(imageStream.tilesLayout); + if (!match) { + shaka.log.warning('Tiles layout does not contain a valid format ' + + ' (columns x rows)'); + return null; + } + const fullImageWidth = imageStream.width || 0; + const fullImageHeight = imageStream.height || 0; + const columns = parseInt(match[1], 10); + const rows = parseInt(match[2], 10); + const width = fullImageWidth / columns; + const height = fullImageHeight / rows; + let positionX = 0; + let positionY = 0; + const totalImages = columns * rows; + // If the number of images in the segment is greater than 1, we have to + // find the correct image. For that we will return to the app the + // coordinates of the position of the correct image. + // Image search is always from left to right and top to bottom. + // Note: The time between images within the segment is always + // equidistant. + // + // Eg: Total images 5, tileLayout 5x1, segmentTime 5, thumbnailTime 2 + // positionX = 0.4 * fullImageWidth + // positionY = 0 + if (totalImages > 1) { + const thumbnailTime = time - reference.startTime; + const segmentTime = reference.endTime - reference.startTime; + const thumbnailPosition = + Math.floor(thumbnailTime * totalImages / segmentTime); + positionX = (thumbnailPosition % columns) / columns * fullImageWidth; + positionY = (thumbnailPosition % rows) / rows * fullImageHeight; + } + return { + height: height, + positionX: positionX, + positionY: positionY, + uris: reference.getUris(), + width: width, + }; + } + return null; + } + /** * Select a specific text track. track should come from a call to * getTextTracks. If the track is not found, this will be a diff --git a/lib/util/periods.js b/lib/util/periods.js index 21e1b5ff0e..d582ec9ba7 100644 --- a/lib/util/periods.js +++ b/lib/util/periods.js @@ -41,6 +41,9 @@ shaka.util.PeriodCombiner = class { /** @private {!Array.} */ this.textStreams_ = []; + /** @private {!Array.} */ + this.imageStreams_ = []; + /** * The IDs of the periods we have already used to generate streams. 
* This helps us identify the periods which have been added when a live @@ -54,7 +57,8 @@ shaka.util.PeriodCombiner = class { /** @override */ release() { const allStreams = - this.audioStreams_.concat(this.videoStreams_, this.textStreams_); + this.audioStreams_.concat(this.videoStreams_, this.textStreams_, + this.imageStreams_); for (const stream of allStreams) { if (stream.segmentIndex) { @@ -65,6 +69,7 @@ shaka.util.PeriodCombiner = class { this.audioStreams_ = []; this.videoStreams_ = []; this.textStreams_ = []; + this.imageStreams_ = []; this.variants_ = []; } @@ -78,6 +83,11 @@ shaka.util.PeriodCombiner = class { return this.textStreams_; } + /** @return {!Array.} */ + getImageStreams() { + return this.imageStreams_; + } + /** * @param {!Array.} periods * @param {boolean} isDynamic @@ -99,6 +109,7 @@ shaka.util.PeriodCombiner = class { this.audioStreams_ = firstPeriod.audioStreams; this.videoStreams_ = firstPeriod.videoStreams; this.textStreams_ = firstPeriod.textStreams; + this.imageStreams_ = firstPeriod.imageStreams; } else { // Find the first period we haven't seen before. Tag all the periods we // see now as "used". @@ -128,6 +139,8 @@ shaka.util.PeriodCombiner = class { (period) => period.videoStreams); const textStreamsPerPeriod = periods.map( (period) => period.textStreams); + const imageStreamsPerPeriod = periods.map( + (period) => period.imageStreams); // It's okay to have a period with no text, but our algorithm fails on any // period without matching streams. So we add dummy text streams to each @@ -157,6 +170,13 @@ shaka.util.PeriodCombiner = class { firstNewPeriodIndex, shaka.util.PeriodCombiner.cloneStream_, shaka.util.PeriodCombiner.concatenateStreams_); + + await shaka.util.PeriodCombiner.combine_( + this.imageStreams_, + imageStreamsPerPeriod, + firstNewPeriodIndex, + shaka.util.PeriodCombiner.cloneStream_, + shaka.util.PeriodCombiner.concatenateStreams_); } // Create variants for all audio/video combinations. @@ -876,12 +896,14 @@ shaka.util.PeriodCombiner = class { 'audio': shaka.util.PeriodCombiner.areAVStreamsCompatible_, 'video': shaka.util.PeriodCombiner.areAVStreamsCompatible_, 'text': shaka.util.PeriodCombiner.areTextStreamsCompatible_, + 'image': shaka.util.PeriodCombiner.areImageStreamsCompatible_, }[outputStream.type]; const isBetterMatch = { 'audio': shaka.util.PeriodCombiner.isAudioStreamBetterMatch_, 'video': shaka.util.PeriodCombiner.isVideoStreamBetterMatch_, 'text': shaka.util.PeriodCombiner.isTextStreamBetterMatch_, + 'image': shaka.util.PeriodCombiner.isImageStreamBetterMatch_, }[outputStream.type]; let best = null; @@ -975,6 +997,32 @@ shaka.util.PeriodCombiner = class { return true; } + /** + * @param {T} outputStream A image output stream + * @param {T} candidate A candidate stream to be combined with the output + * @return {boolean} True if the candidate could be combined with the + * output + * + * @template T + * Accepts either a StreamDB or Stream type. + * + * @private + */ + static areImageStreamsCompatible_(outputStream, candidate) { + // For image, we don't care about MIME type. We can always switch + // between image types. + + // The output stream should not be a dummy stream inserted to fill a period + // gap. So reject any candidate if the output has no tilesLayout. This + // would cause findMatchesInAllPeriods_ to return null and this output + // stream to be skipped (meaning no output streams based on it). 
+ if (!outputStream.tilesLayout) { + return false; + } + + return true; + } + /** * @param {T} outputStream An audio output stream * @param {T} best The best match so far for this period @@ -1253,6 +1301,35 @@ shaka.util.PeriodCombiner = class { return false; } + /** + * @param {T} outputStream A image output stream + * @param {T} best The best match so far for this period + * @param {T} candidate A candidate stream which might be better + * @return {boolean} True if the candidate is a better match + * + * @template T + * Accepts either a StreamDB or Stream type. + * + * @private + */ + static isImageStreamBetterMatch_(outputStream, best, candidate) { + // If the output stream was based on the candidate stream, the candidate + // stream should be considered a better match. We can check this by + // comparing their ids. + if (outputStream.id == candidate.id) { + return true; + } + + // If the candidate has the same MIME type, upgrade to the + // candidate. It's not required that image streams use the same format + // across periods, but it's a helpful signal. + if (candidate.mimeType == outputStream.mimeType) { + return true; + } + + return false; + } + /** * Create a dummy text StreamDB to fill in periods with no text, to avoid * failing the general flattening algorithm. @@ -1411,7 +1488,8 @@ shaka.util.PeriodCombiner = class { * id: string, * audioStreams: !Array., * videoStreams: !Array., - * textStreams: !Array. + * textStreams: !Array., + * imageStreams: !Array. * }} * * @description Contains the streams from one DASH period. @@ -1424,6 +1502,8 @@ shaka.util.PeriodCombiner = class { * The video streams from one Period. * @property {!Array.} textStreams * The text streams from one Period. + * @property {!Array.} imageStreams + * The image streams from one Period. */ shaka.util.PeriodCombiner.Period; diff --git a/lib/util/player_configuration.js b/lib/util/player_configuration.js index ed93189abc..e55f88b639 100644 --- a/lib/util/player_configuration.js +++ b/lib/util/player_configuration.js @@ -83,6 +83,7 @@ shaka.util.PlayerConfiguration = class { disableAudio: false, disableVideo: false, disableText: false, + disableThumbnails: false, defaultPresentationDelay: 0, dash: { clockSyncUri: '', diff --git a/lib/util/stream_utils.js b/lib/util/stream_utils.js index 1a7643112c..7827652b03 100644 --- a/lib/util/stream_utils.js +++ b/lib/util/stream_utils.js @@ -16,6 +16,7 @@ goog.require('shaka.util.LanguageUtils'); goog.require('shaka.util.ManifestParserUtils'); goog.require('shaka.util.MimeUtils'); goog.require('shaka.util.MultiMap'); +goog.require('shaka.util.Platform'); /** @@ -274,6 +275,7 @@ shaka.util.StreamUtils = class { shaka.util.StreamUtils.filterManifestByCurrentVariant( currentVariant, manifest); shaka.util.StreamUtils.filterTextStreams_(manifest); + shaka.util.StreamUtils.filterImageStreams_(manifest); } @@ -392,6 +394,43 @@ shaka.util.StreamUtils = class { } + /** + * Alters the given Manifest to filter out any unsupported image streams. + * + * @param {shaka.extern.Manifest} manifest + * @private + */ + static filterImageStreams_(manifest) { + // Filter image streams. + manifest.imageStreams = manifest.imageStreams.filter((stream) => { + // TODO: re-examine this and avoid allow-listing the MIME types we can + // accept. + const validMimeTypes = [ + 'image/svg+xml', + 'image/png', + 'image/jpeg', + ]; + const Platform = shaka.util.Platform; + // Add webp support to popular platforms that support it. 
+ const webpSupport = Platform.isWebOS() || + Platform.isTizen() || + Platform.isChromecast(); + if (webpSupport) { + validMimeTypes.push('image/webp'); + } + // TODO: add support to image/webp and image/avif + const keep = validMimeTypes.includes(stream.mimeType); + + if (!keep) { + shaka.log.debug('Dropping image stream. Is not supported by the ' + + 'platform.', stream); + } + + return keep; + }); + } + + /** * @param {shaka.extern.Stream} s0 * @param {shaka.extern.Stream} s1 @@ -502,11 +541,13 @@ shaka.util.StreamUtils = class { channelsCount: null, audioSamplingRate: null, spatialAudio: false, + tilesLayout: null, audioBandwidth: null, videoBandwidth: null, originalVideoId: null, originalAudioId: null, originalTextId: null, + originalImageId: null, }; if (video) { @@ -567,11 +608,60 @@ shaka.util.StreamUtils = class { channelsCount: null, audioSamplingRate: null, spatialAudio: false, + tilesLayout: null, audioBandwidth: null, videoBandwidth: null, originalVideoId: null, originalAudioId: null, originalTextId: stream.originalId, + originalImageId: null, + }; + + return track; + } + + + /** + * @param {shaka.extern.Stream} stream + * @return {shaka.extern.Track} + */ + static imageStreamToTrack(stream) { + const ContentType = shaka.util.ManifestParserUtils.ContentType; + + /** @type {shaka.extern.Track} */ + const track = { + id: stream.id, + active: false, + type: ContentType.IMAGE, + bandwidth: stream.bandwidth || 0, + language: '', + label: null, + kind: null, + width: stream.width || null, + height: stream.height || null, + frameRate: null, + pixelAspectRatio: null, + hdr: null, + mimeType: stream.mimeType, + codecs: null, + audioCodec: null, + videoCodec: null, + primary: false, + roles: [], + audioRoles: null, + forced: false, + videoId: null, + audioId: null, + channelsCount: null, + audioSamplingRate: null, + spatialAudio: false, + tilesLayout: stream.tilesLayout || null, + audioBandwidth: null, + videoBandwidth: null, + originalVideoId: null, + originalAudioId: null, + originalTextId: null, + originalImageId: stream.originalId, }; return track; @@ -683,11 +773,13 @@ shaka.util.StreamUtils = class { channelsCount: null, audioSamplingRate: null, spatialAudio: false, + tilesLayout: null, audioBandwidth: null, videoBandwidth: null, originalVideoId: null, originalAudioId: null, originalTextId: null, + originalImageId: null, }; return track; diff --git a/test/dash/dash_parser_manifest_unit.js b/test/dash/dash_parser_manifest_unit.js index 2f7e2827f3..039cbb72ea 100644 --- a/test/dash/dash_parser_manifest_unit.js +++ b/test/dash/dash_parser_manifest_unit.js @@ -36,6 +36,9 @@ describe('DashParser Manifest', () => { /** @type {!ArrayBuffer} */ let mp4Index; + /** @type {!string} */ + const thumbnailScheme = 'http://dashif.org/guidelines/thumbnail_tile'; + beforeAll(async () => { mp4Index = await shaka.test.Util.fetch(mp4IndexSegmentUri); }); @@ -1868,6 +1871,11 @@ describe('DashParser Manifest', () => { ' ', ' ', ' ', + ' ', + ' ', + ` `, // eslint-disable-line max-len + ' ', ' ', ' ', '', @@ -1882,6 +1890,90 @@ describe('DashParser Manifest', () => { expect(variant.video).toBeTruthy(); }); + it('parse single representation of image adaptation sets', async () => { + const manifestText = [ + '', + ' ', + ' ', + ' ', + ' v-sd.mp4', + ' ', + ' ', + ' ', + ' ', + ' ', + ' a-en.mp4', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ` `, // eslint-disable-line max-len + ' ', + ' ', + ' ', + '', + ].join('\n'); + + fakeNetEngine.setResponseText('dummy://foo', manifestText); + + /** @type 
{shaka.extern.Manifest} */ + const manifest = await parser.start('dummy://foo', playerInterface); + expect(manifest.imageStreams.length).toBe(1); + const imageStream = manifest.imageStreams[0]; + expect(imageStream.width).toBe(1024); + expect(imageStream.height).toBe(1152); + expect(imageStream.tilesLayout).toBe('10x20'); + }); + + + it('parse multiple representation of image adaptation sets', async () => { + const manifestText = [ + '', + ' ', + ' ', + ' ', + ' v-sd.mp4', + ' ', + ' ', + ' ', + ' ', + ' ', + ' a-en.mp4', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ` `, // eslint-disable-line max-len + ' ', + ' ', + ` `, // eslint-disable-line max-len + ' ', + ' ', + ' ', + '', + ].join('\n'); + + fakeNetEngine.setResponseText('dummy://foo', manifestText); + + /** @type {shaka.extern.Manifest} */ + const manifest = await parser.start('dummy://foo', playerInterface); + expect(manifest.imageStreams.length).toBe(2); + const firstImageStream = manifest.imageStreams[0]; + expect(firstImageStream.width).toBe(1024); + expect(firstImageStream.height).toBe(1152); + expect(firstImageStream.tilesLayout).toBe('10x20'); + const secondImageStream = manifest.imageStreams[1]; + expect(secondImageStream.width).toBe(2048); + expect(secondImageStream.height).toBe(1152); + expect(secondImageStream.tilesLayout).toBe('20x20'); + }); + // Regression #2650 in v3.0.0 // A later BaseURL was being applied to earlier Representations, specifically // in the context of SegmentTimeline. diff --git a/test/media/playhead_unit.js b/test/media/playhead_unit.js index 957a3746a9..191b7bc301 100644 --- a/test/media/playhead_unit.js +++ b/test/media/playhead_unit.js @@ -148,6 +148,7 @@ describe('Playhead', () => { manifest = { variants: [], textStreams: [], + imageStreams: [], presentationTimeline: timeline, minBufferTime: 10, offlineSessionIds: [], diff --git a/test/media/streaming_engine_integration.js b/test/media/streaming_engine_integration.js index 5829a77455..c867c89805 100644 --- a/test/media/streaming_engine_integration.js +++ b/test/media/streaming_engine_integration.js @@ -637,6 +637,7 @@ describe('StreamingEngine', () => { offlineSessionIds: [], minBufferTime: 2, textStreams: [], + imageStreams: [], variants: [{ id: 1, video: { diff --git a/test/offline/manifest_convert_unit.js b/test/offline/manifest_convert_unit.js index c9b611a5d0..25d21c895f 100644 --- a/test/offline/manifest_convert_unit.js +++ b/test/offline/manifest_convert_unit.js @@ -365,6 +365,7 @@ describe('ManifestConverter', () => { audioSamplingRate: null, spatialAudio: false, closedCaptions: null, + tilesLayout: undefined, }; } @@ -413,6 +414,7 @@ describe('ManifestConverter', () => { audioSamplingRate: null, spatialAudio: false, closedCaptions: null, + tilesLayout: undefined, }; } @@ -460,6 +462,7 @@ describe('ManifestConverter', () => { audioSamplingRate: null, spatialAudio: false, closedCaptions: null, + tilesLayout: undefined, }; } @@ -504,6 +507,7 @@ describe('ManifestConverter', () => { audioSamplingRate: streamDb.audioSamplingRate, spatialAudio: streamDb.spatialAudio, closedCaptions: streamDb.closedCaptions, + tilesLayout: streamDb.tilesLayout, }; expect(stream).toEqual(expectedStream); diff --git a/test/offline/storage_integration.js b/test/offline/storage_integration.js index b87af4b9e8..8b85aac1cf 100644 --- a/test/offline/storage_integration.js +++ b/test/offline/storage_integration.js @@ -1347,11 +1347,13 @@ filterDescribe('Storage', storageSupport, () => { channelsCount: 2, audioSamplingRate: 48000, spatialAudio: false, + tilesLayout: 
null, audioBandwidth: bandwidth * 0.33, videoBandwidth: bandwidth * 0.67, originalVideoId: videoId.toString(), originalAudioId: audioId.toString(), originalTextId: null, + originalImageId: null, }; } @@ -1387,11 +1389,13 @@ filterDescribe('Storage', storageSupport, () => { channelsCount: null, audioSamplingRate: null, spatialAudio: false, + tilesLayout: null, audioBandwidth: null, videoBandwidth: null, originalVideoId: null, originalAudioId: null, originalTextId: id.toString(), + originalImageId: null, }; } diff --git a/test/player_unit.js b/test/player_unit.js index bcb6530ba2..36efef91a2 100644 --- a/test/player_unit.js +++ b/test/player_unit.js @@ -949,6 +949,8 @@ describe('Player', () => { let variantTracks; /** @type {!Array.} */ let textTracks; + /** @type {!Array.} */ + let imageTracks; beforeEach(async () => { // A manifest we can use to test track expectations. @@ -1066,6 +1068,16 @@ describe('Player', () => { stream.kind = 'caption'; stream.roles = ['commentary']; }); + + // Image tracks + manifest.addImageStream(53, (stream) => { + stream.originalId = 'thumbnail'; + stream.width = 100; + stream.height = 200; + stream.bandwidth = 10; + stream.mimeType = 'image/jpeg'; + stream.tilesLayout = '1x1'; + }); }); variantTracks = [ @@ -1095,11 +1107,13 @@ describe('Player', () => { channelsCount: 6, audioSamplingRate: 48000, spatialAudio: false, + tilesLayout: null, audioBandwidth: 300, videoBandwidth: 1000, originalAudioId: 'audio-en-6c', originalVideoId: 'video-1kbps', originalTextId: null, + originalImageId: null, }, { id: 101, @@ -1127,11 +1141,13 @@ describe('Player', () => { channelsCount: 6, audioSamplingRate: 48000, spatialAudio: false, + tilesLayout: null, audioBandwidth: 300, videoBandwidth: 2000, originalAudioId: 'audio-en-6c', originalVideoId: 'video-2kbps', originalTextId: null, + originalImageId: null, }, { id: 102, @@ -1159,11 +1175,13 @@ describe('Player', () => { channelsCount: 2, audioSamplingRate: 48000, spatialAudio: false, + tilesLayout: null, audioBandwidth: 100, videoBandwidth: 1000, originalAudioId: 'audio-en-2c', originalVideoId: 'video-1kbps', originalTextId: null, + originalImageId: null, }, { id: 103, @@ -1191,11 +1209,13 @@ describe('Player', () => { channelsCount: 2, audioSamplingRate: 48000, spatialAudio: false, + tilesLayout: null, audioBandwidth: 100, videoBandwidth: 2000, originalAudioId: 'audio-en-2c', originalVideoId: 'video-2kbps', originalTextId: null, + originalImageId: null, }, { id: 104, @@ -1223,11 +1243,13 @@ describe('Player', () => { channelsCount: 2, audioSamplingRate: 48000, spatialAudio: false, + tilesLayout: null, audioBandwidth: 100, videoBandwidth: 1000, originalAudioId: 'audio-commentary', originalVideoId: 'video-1kbps', originalTextId: null, + originalImageId: null, }, { id: 105, @@ -1255,11 +1277,13 @@ describe('Player', () => { channelsCount: 2, audioSamplingRate: 48000, spatialAudio: false, + tilesLayout: null, audioBandwidth: 100, videoBandwidth: 2000, originalAudioId: 'audio-commentary', originalVideoId: 'video-2kbps', originalTextId: null, + originalImageId: null, }, { id: 106, @@ -1287,11 +1311,13 @@ describe('Player', () => { channelsCount: 2, audioSamplingRate: 48000, spatialAudio: false, + tilesLayout: null, audioBandwidth: 100, videoBandwidth: 1000, originalAudioId: 'audio-es', originalVideoId: 'video-1kbps', originalTextId: null, + originalImageId: null, }, { id: 107, @@ -1319,11 +1345,13 @@ describe('Player', () => { channelsCount: 2, audioSamplingRate: 48000, spatialAudio: false, + tilesLayout: null, audioBandwidth: 100, 
videoBandwidth: 2000, originalAudioId: 'audio-es', originalVideoId: 'video-2kbps', originalTextId: null, + originalImageId: null, }, ]; @@ -1346,6 +1374,7 @@ describe('Player', () => { channelsCount: null, audioSamplingRate: null, spatialAudio: false, + tilesLayout: null, audioBandwidth: null, videoBandwidth: null, bandwidth: 0, @@ -1359,6 +1388,7 @@ describe('Player', () => { originalAudioId: null, originalVideoId: null, originalTextId: 'text-es', + originalImageId: null, }, { id: 51, @@ -1378,6 +1408,7 @@ describe('Player', () => { channelsCount: null, audioSamplingRate: null, spatialAudio: false, + tilesLayout: null, audioBandwidth: null, videoBandwidth: null, bandwidth: 0, @@ -1391,6 +1422,7 @@ describe('Player', () => { originalAudioId: null, originalVideoId: null, originalTextId: 'text-en', + originalImageId: null, }, { id: 52, @@ -1410,6 +1442,7 @@ describe('Player', () => { channelsCount: null, spatialAudio: false, audioSamplingRate: null, + tilesLayout: null, audioBandwidth: null, videoBandwidth: null, bandwidth: 0, @@ -1423,6 +1456,44 @@ describe('Player', () => { originalAudioId: null, originalVideoId: null, originalTextId: 'text-commentary', + originalImageId: null, + }, + ]; + + imageTracks = [ + { + id: 53, + active: false, + type: ContentType.IMAGE, + language: '', + label: null, + kind: null, + mimeType: 'image/jpeg', + codecs: null, + audioCodec: null, + videoCodec: null, + primary: false, + roles: [], + audioRoles: null, + forced: false, + channelsCount: null, + audioSamplingRate: null, + spatialAudio: false, + tilesLayout: '1x1', + audioBandwidth: null, + videoBandwidth: null, + bandwidth: 10, + width: 100, + height: 200, + frameRate: null, + pixelAspectRatio: null, + hdr: null, + videoId: null, + audioId: null, + originalAudioId: null, + originalVideoId: null, + originalTextId: null, + originalImageId: 'thumbnail', }, ]; @@ -1444,6 +1515,7 @@ describe('Player', () => { it('returns the correct tracks', () => { expect(player.getVariantTracks()).toEqual(variantTracks); expect(player.getTextTracks()).toEqual(textTracks); + expect(player.getImageTracks()).toEqual(imageTracks); }); it('returns empty arrays before tracks can be determined', async () => { @@ -1453,6 +1525,7 @@ describe('Player', () => { // The player does not yet have a manifest. expect(player.getVariantTracks()).toEqual([]); expect(player.getTextTracks()).toEqual([]); + expect(player.getImageTracks()).toEqual([]); parser.playerInterface = playerInterface; return Promise.resolve(manifest); @@ -1462,6 +1535,7 @@ describe('Player', () => { expect(player.getVariantTracks()).toEqual(variantTracks); expect(player.getTextTracks()).toEqual(textTracks); + expect(player.getImageTracks()).toEqual(imageTracks); }); it('doesn\'t disable AbrManager if switching variants', () => { diff --git a/test/test/util/manifest_generator.js b/test/test/util/manifest_generator.js index 5ce29b58cd..a957f1ce9b 100644 --- a/test/test/util/manifest_generator.js +++ b/test/test/util/manifest_generator.js @@ -94,6 +94,9 @@ shaka.test.ManifestGenerator.Manifest = class { /** @type {!Array.} */ this.textStreams = []; + /** @type {!Array.} */ + this.imageStreams = []; + const timeline = new this.shaka_.media.PresentationTimeline(0, 0); timeline.setSegmentAvailabilityDuration(Infinity); timeline.notifyMaxSegmentDuration(10); @@ -210,6 +213,22 @@ shaka.test.ManifestGenerator.Manifest = class { this.textStreams.push(stream.build_()); } + /** + * Adds an image stream to the manifest. 
+ * + * @param {number} id + * @param {function(!shaka.test.ManifestGenerator.Stream)=} func + */ + addImageStream(id, func) { + const ContentType = shaka.util.ManifestParserUtils.ContentType; + const stream = new shaka.test.ManifestGenerator.Stream( + this, /* isPartial= */ false, id, ContentType.IMAGE, 'und'); + if (func) { + func(stream); + } + this.imageStreams.push(stream.build_()); + } + /** * Adds a "partial" stream which, when used with jasmine, will only compare * the properties that were explicitly given to it. All other properties will @@ -525,6 +544,8 @@ shaka.test.ManifestGenerator.Stream = class { this.closedCaptions = null; /** @type {(string|undefined)} */ this.hdr = undefined; + /** @type {(string|undefined)} */ + this.tilesLayout = undefined; } /** @type {shaka.extern.Stream} */ diff --git a/test/test/util/streaming_engine_util.js b/test/test/util/streaming_engine_util.js index f9db83d257..a2c0aa6ae4 100644 --- a/test/test/util/streaming_engine_util.js +++ b/test/test/util/streaming_engine_util.js @@ -286,6 +286,7 @@ shaka.test.StreamingEngineUtil = class { offlineSessionIds: [], variants: [], textStreams: [], + imageStreams: [], }; /** @type {shaka.extern.Variant} */ diff --git a/test/util/stream_utils_unit.js b/test/util/stream_utils_unit.js index 59db3d9ccb..b9ab927c1f 100644 --- a/test/util/stream_utils_unit.js +++ b/test/util/stream_utils_unit.js @@ -498,6 +498,35 @@ describe('StreamUtils', () => { expect(manifest.textStreams[0].id).toBe(1); expect(manifest.textStreams[1].id).toBe(2); }); + + it('filters image streams', async () => { + manifest = shaka.test.ManifestGenerator.generate((manifest) => { + manifest.addImageStream(1, (stream) => { + stream.mimeType = 'image/svg+xml'; + }); + manifest.addImageStream(2, (stream) => { + stream.mimeType = 'image/png'; + }); + manifest.addImageStream(3, (stream) => { + stream.mimeType = 'image/jpeg'; + }); + manifest.addImageStream(4, (stream) => { + stream.mimeType = 'image/bogus'; + }); + }); + + const noVariant = null; + await shaka.util.StreamUtils.filterManifest( + fakeDrmEngine, noVariant, manifest); + + // Covers a regression in which we would remove streams with codecs. + // The last two streams should be removed because their full MIME types + // are bogus. + expect(manifest.imageStreams.length).toBe(3); + expect(manifest.imageStreams[0].id).toBe(1); + expect(manifest.imageStreams[1].id).toBe(2); + expect(manifest.imageStreams[2].id).toBe(3); + }); }); describe('chooseCodecsAndFilterManifest', () => {
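
The player-facing surface added above is getImageTracks(), getThumbnails(trackId, time), and the manifest.disableThumbnails config flag. Below is a minimal sketch of how an application might use it to show a seek-bar preview; only the Shaka calls come from this change, while the DOM element and its styling are illustrative assumptions.

```js
// Sketch only: assumes `player` has already loaded content that carries at
// least one image (thumbnail) stream, and `previewEl` is a positioned
// HTMLElement used as the preview box.
async function showPreviewAt(player, previewEl, time) {
  // Thumbnails are parsed unless explicitly disabled with:
  //   player.configure('manifest.disableThumbnails', true);
  const imageTracks = player.getImageTracks();
  if (!imageTracks.length) {
    return;
  }

  // Pick an image track; here simply the first one.
  const thumbnail = await player.getThumbnails(imageTracks[0].id, time);
  if (!thumbnail) {
    return;  // No thumbnail segment covers this time.
  }

  // Each thumbnail is one tile inside a larger sprite image, so crop it by
  // sizing the element to the tile and offsetting the background.
  previewEl.style.width = thumbnail.width + 'px';
  previewEl.style.height = thumbnail.height + 'px';
  previewEl.style.backgroundImage = 'url(' + thumbnail.uris[0] + ')';
  previewEl.style.backgroundPosition =
      (-thumbnail.positionX) + 'px ' + (-thumbnail.positionY) + 'px';
}
```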
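The position math in getThumbnails() relies on the tilesLayout string ('columns x rows', e.g. '5x1') and on tiles being laid out left to right, then top to bottom, at equal time spacing within a segment. Here is a small standalone sketch of that mapping, for reference only; the helper name and signature are not part of the change.

```js
// Sketch: derive the size and pixel offset of the n-th tile in a sprite,
// given a tilesLayout such as '5x1' or '10x20' and the full sprite size.
function tileOffset(tilesLayout, tileIndex, fullWidth, fullHeight) {
  const match = /^(\d+)x(\d+)$/.exec(tilesLayout);
  if (!match) {
    throw new Error('Invalid tilesLayout: ' + tilesLayout);
  }
  const columns = parseInt(match[1], 10);
  const rows = parseInt(match[2], 10);
  const width = fullWidth / columns;   // one tile's width in px
  const height = fullHeight / rows;    // one tile's height in px
  const column = tileIndex % columns;  // left to right
  const row = Math.floor(tileIndex / columns);  // then top to bottom
  return {width, height, positionX: column * width, positionY: row * height};
}

// Matching the worked example in the getThumbnails() comment: 5 tiles in a
// 5x1 grid, a 5-second segment, and a time 2 seconds into it give
// tileIndex = Math.floor(2 * 5 / 5) = 2, so positionX = 0.4 * fullWidth
// and positionY = 0.
```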
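Since storage.js now passes image tracks through offline.trackSelectionCallback alongside variant and text tracks, an application can choose to keep thumbnails in downloaded content. A hedged sketch follows; the selection policy shown is only an example, not something this change prescribes.

```js
// Sketch: keep the highest-bandwidth variant plus every image track so that
// stored content retains its thumbnails. Assumes a standard Storage setup.
const storage = new shaka.offline.Storage(player);
storage.configure({
  offline: {
    trackSelectionCallback: (tracks) => {
      const variants = tracks.filter((t) => t.type == 'variant');
      const images = tracks.filter((t) => t.type == 'image');
      variants.sort((a, b) => b.bandwidth - a.bandwidth);
      return variants.slice(0, 1).concat(images);
    },
  },
});
```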