diff --git a/doc/api/Player_Errors.md b/doc/api/Player_Errors.md index 5fc9841340..c405903b0c 100644 --- a/doc/api/Player_Errors.md +++ b/doc/api/Player_Errors.md @@ -64,7 +64,7 @@ all have a `type` property equal to `"NETWORK_ERROR"`. #### codes -A NetworkError can only have the following code (`code` property): +An error of `type` `NETWORK_ERROR` can only have the following code (`code` property): - `"PIPELINE_LOAD_ERROR"`: the [Manifest](../Getting_Started/Glossary.md#manifest) or @@ -107,16 +107,30 @@ parsing) or from the browser itself (content playback). They all have a `type` property equal to `"MEDIA_ERROR"`. +Depending on its `code` property (listed below), a `MEDIA_ERROR` may also have +a supplementary `trackInfo` property, describing the track related to the issue. +The format of that property is described in the chapter below listed codes, and +the codes for which it is set are indicated in the corresponding code's +description below. + #### codes -A MediaError can have the following codes (`code` property): +An error of `type` `MEDIA_ERROR` can have the following codes (`code` property): - `"BUFFER_APPEND_ERROR"`: A media segment could not have been added to the corresponding media buffer. This often happens with malformed segments. + For those errors, you may be able to know the characteristics of the track + linked to that segment by inspecting the error's `trackInfo` property, + described below. + - `"BUFFER_FULL_ERROR"`: The needed segment could not have been added because the corresponding media buffer was full. + For those errors, you may be able to know the characteristics of the track + linked to that segment by inspecting the error's `trackInfo` property, + described below. + - `"BUFFER_TYPE_UNKNOWN"`: The type of buffer considered (e.g. "audio" / "video" / "text") has no media buffer implementation in your build. 
@@ -125,6 +139,9 @@ A MediaError can have the following codes (`code` property): [Representations](../Getting_Started/Glossary.md#representation) (read quality) in a supported codec. + For those errors, you may be able to know the characteristics of the track + linked to that codec by inspecting the error's `trackInfo` property, described below. + - `"MANIFEST_PARSE_ERROR"`: Generic error to signal than the [Manifest](../Getting_Started/Glossary.md#structure_of_a_manifest_object) could not be parsed. @@ -193,10 +210,14 @@ A MediaError can have the following codes (`code` property): This is rarely a problem and may be encountered at a very start of a content when the initial segment's start is much later than expected. -- `"NO_PLAYABLE_REPRESENTATION"`: The currently chosen Adaptation does not +- `"NO_PLAYABLE_REPRESENTATION"`: One of the currently chosen tracks does not contain any playable Representation. This usually happens when every Representation has been blacklisted due to encryption limitations. + For those errors, you may be able to know the characteristics of the + corresponding track by inspecting the error's `trackInfo` property, described + below. + - `"MANIFEST_UPDATE_ERROR"`: This error should never be emitted as it is handled internally by the RxPlayer. Please open an issue if you encounter it. @@ -211,16 +232,174 @@ A MediaError can have the following codes (`code` property): It is triggered when a time we initially thought to be in the bounds of the Manifest actually does not link to any "Period" of the Manifest. +#### `trackInfo` property + +As described in the corresponding code's documentation, a supplementary +`trackInfo` property may be set on `MEDIA_ERROR` depending on its `code` +property. + +That `trackInfo` describes, when it makes sense, the characteristics of the track +linked to an error. For example, you may want to know which video track led to a +`BUFFER_APPEND_ERROR` and thus might be linked to corrupted segments. 
+ +The `trackInfo` property has itself two sub-properties: + + - `type`: The type of track: `"audio"` for an audio track, `"text"` for a text + track, or `"video"` for a video track. + + - `track`: Characteristics of the track. Its format depends on the + `trackInfo`'s `type` property and is described below. + +##### For video tracks + +When `trackInfo.type` is set to `"video"`, `track` describes a video track. It +contains the following properties: + + - `id` (`string`): The id used to identify this track. No other + video track for the same [Period](../Getting_Started/Glossary.md#period) + will have the same `id`. + + - `label` (`string|undefined`): A human readable label that may be displayed in + the user interface providing a choice between video tracks. + + This information is usually set only if the current Manifest contains one. + + - `representations` (`Array.<Object>`): + [Representations](../Getting_Started/Glossary.md#representation) of this + video track, with attributes: + + - `id` (`string`): The id used to identify this Representation. + No other Representation from this track will have the same `id`. + + - `bitrate` (`Number`): The bitrate of this Representation, in bits per + seconds. + + - `width` (`Number|undefined`): The width of video, in pixels. + + - `height` (`Number|undefined`): The height of video, in pixels. + + - `codec` (`string|undefined`): The video codec the Representation is + in, as announced in the corresponding Manifest. + + - `frameRate` (`string|undefined`): The video frame rate. + + - `hdrInfo` (`Object|undefined`) Information about the hdr + characteristics of the track. + (see [HDR support documentation](./Miscellaneous/hdr.md#hdrinfo)) + + - `signInterpreted` (`Boolean|undefined`): If set to `true`, this track is + known to contain an interpretation in sign language. + If set to `false`, the track is known to not contain that type of content. 
+ If not set or set to undefined we don't know whether that video track + contains an interpretation in sign language. + + - `isTrickModeTrack` (`Boolean|undefined`): If set to `true`, this track + is a trick mode track. This type of tracks proposes video content that is + often encoded with a very low framerate with the purpose to be played more + efficiently at a much higher speed. + + - `trickModeTracks` (`Array.<Object> | undefined`): Trick mode video tracks + attached to this video track. + + Each of those objects contains the same properties as a regular video track + (same properties as what is documented here). + + If this property is either `undefined` or not set, then this track has no + linked trickmode video track. + +##### For audio tracks + +When `trackInfo.type` is set to `"audio"`, `track` describes an audio track. It +contains the following properties: + +- `id` (`Number|string`): The id used to identify this track. No other + audio track for the same [Period](../Getting_Started/Glossary.md#period) + will have the same `id`. + +- `language` (`string`): The language the audio track is in, as set in the + [Manifest](../Getting_Started/Glossary.md#manifest). + +- `normalized` (`string`): An attempt to translate the `language` + property into an ISO 639-3 language code (for now only support translations + from ISO 639-1 and ISO 639-3 language codes). If the translation attempt + fails (no corresponding ISO 639-3 language code is found), it will equal the + value of `language` + +- `audioDescription` (`Boolean`): Whether the track is an audio + description of what is happening at the screen. + +- `dub` (`Boolean|undefined`): If set to `true`, this audio track is a + "dub", meaning it was recorded in another language than the original. + If set to `false`, we know that this audio track is in an original language. + This property is `undefined` if we do not know whether it is in an original + language. 
+ +- `label` (`string|undefined`): A human readable label that may be displayed in + the user interface providing a choice between audio tracks. + + This information is usually set only if the current Manifest contains one. + +- `representations` (`Array.<Object>`): + [Representations](../Getting_Started/Glossary.md#representation) of this audio track, with + attributes: + + - `id` (`string`): The id used to identify this Representation. + No other Representation from this track will have the same `id`. + + - `bitrate` (`Number`): The bitrate of this Representation, in bits per + seconds. + + - `codec` (`string|undefined`): The audio codec the Representation is + in, as announced in the corresponding Manifest. + +##### For text tracks + +When `trackInfo.type` is set to `"text"`, `track` describes a text track. It +contains the following properties: + +- `id` (`string`): The id used to identify this track. No other + text track for the same [Period](../Getting_Started/Glossary.md#period) + will have the same `id`. + +- `language` (`string`): The language the text track is in, as set in the + [Manifest](../Getting_Started/Glossary.md#manifest). + +- `normalized` (`string`): An attempt to translate the `language` + property into an ISO 639-3 language code (for now only support translations + from ISO 639-1 and ISO 639-3 language codes). If the translation attempt + fails (no corresponding ISO 639-3 language code is found), it will equal the + value of `language` + +- `label` (`string|undefined`): A human readable label that may be displayed in + the user interface providing a choice between text tracks. + + This information is usually set only if the current Manifest contains one. + +- `closedCaption` (`Boolean`): Whether the track is specially adapted for + the hard of hearing or not. + +- `forced` (`Boolean`): If `true` this text track is meant to be displayed by + default if no other text track is selected. 
+ + It is often used to clarify dialogue, alternate languages, texted graphics or + location and person identification. + + ### ENCRYPTED_MEDIA_ERROR -Those errors are linked to the Encrypted Media Extensions. They concern various -DRM-related problems. +Those errors are linked to the "Encrypted Media Extensions" API. +They concern various DRM-related problems. They all have a `type` property equal to `"ENCRYPTED_MEDIA_ERROR"`. +When its code is set to `KEY_STATUS_CHANGE_ERROR`, an ENCRYPTED_MEDIA_ERROR +generally also has a `keyStatuses` property, which is documented in the +corresponding `KEY_STATUS_CHANGE_ERROR` code explanation below. + #### codes -An EncryptedMediaError can have the following codes (`code` property): +An error of `type` `ENCRYPTED_MEDIA_ERROR` can have the following codes (`code` +property): - `"INCOMPATIBLE_KEYSYSTEMS"`: None of the provided key systems was compatible with the current browser. @@ -295,7 +474,7 @@ They all have a `type` property equal to `"OTHER_ERROR"`. 
#### codes -An OtherError can have the following codes (`code` property): +An error of `type` `OTHER_ERROR` can have the following codes (`code` property): - `"PIPELINE_LOAD_ERROR"`: The [Manifest](../Getting_Started/Glossary.md#structure_of_a_manifest_object) or segment diff --git a/src/core/api/tracks_management/track_choice_manager.ts b/src/core/api/tracks_management/track_choice_manager.ts index f6b7b75b00..be92300a78 100644 --- a/src/core/api/tracks_management/track_choice_manager.ts +++ b/src/core/api/tracks_management/track_choice_manager.ts @@ -26,7 +26,6 @@ import { Representation, } from "../../../manifest"; import { - IAudioRepresentation, IAudioTrack, IAudioTrackPreference, IAvailableAudioTrack, @@ -34,7 +33,6 @@ import { IAvailableVideoTrack, ITextTrack, ITextTrackPreference, - IVideoRepresentation, IVideoTrack, IVideoTrackPreference, } from "../../../public_types"; @@ -42,6 +40,7 @@ import arrayFind from "../../../utils/array_find"; import arrayIncludes from "../../../utils/array_includes"; import isNullOrUndefined from "../../../utils/is_null_or_undefined"; import normalizeLanguage from "../../../utils/languages"; +import objectAssign from "../../../utils/object_assign"; import { ISharedReference } from "../../../utils/reference"; import SortedList from "../../../utils/sorted_list"; import takeFirstSet from "../../../utils/take_first_set"; @@ -618,19 +617,7 @@ export default class TrackChoiceManager { if (isNullOrUndefined(chosenTrack)) { return null; } - - const audioTrack : IAudioTrack = { - language: takeFirstSet(chosenTrack.language, ""), - normalized: takeFirstSet(chosenTrack.normalizedLanguage, ""), - audioDescription: chosenTrack.isAudioDescription === true, - id: chosenTrack.id, - representations: chosenTrack.representations.map(parseAudioRepresentation), - label: chosenTrack.label, - }; - if (chosenTrack.isDub === true) { - audioTrack.dub = true; - } - return audioTrack; + return chosenTrack.toAudioTrack(); } /** @@ -655,16 +642,7 @@ export 
default class TrackChoiceManager { if (isNullOrUndefined(chosenTextAdaptation)) { return null; } - - const formatted : ITextTrack = { - language: takeFirstSet(chosenTextAdaptation.language, ""), - normalized: takeFirstSet(chosenTextAdaptation.normalizedLanguage, ""), - closedCaption: chosenTextAdaptation.isClosedCaption === true, - id: chosenTextAdaptation.id, - label: chosenTextAdaptation.label, - forced: chosenTextAdaptation.isForcedSubtitles, - }; - return formatted; + return chosenTextAdaptation.toTextTrack(); } /** @@ -690,36 +668,7 @@ export default class TrackChoiceManager { return null; } const currAdaptation = chosenVideoAdaptation.adaptation; - - const trickModeTracks = currAdaptation.trickModeTracks !== undefined ? - currAdaptation.trickModeTracks.map((trickModeAdaptation) => { - const representations = trickModeAdaptation.representations - .map(parseVideoRepresentation); - const trickMode : IVideoTrack = { id: trickModeAdaptation.id, - representations, - isTrickModeTrack: true }; - if (trickModeAdaptation.isSignInterpreted === true) { - trickMode.signInterpreted = true; - } - return trickMode; - }) : - undefined; - - const videoTrack: IVideoTrack = { - id: currAdaptation.id, - representations: currAdaptation.representations.map(parseVideoRepresentation), - label: currAdaptation.label, - }; - if (currAdaptation.isSignInterpreted === true) { - videoTrack.signInterpreted = true; - } - if (currAdaptation.isTrickModeTrack === true) { - videoTrack.isTrickModeTrack = true; - } - if (trickModeTracks !== undefined) { - videoTrack.trickModeTracks = trickModeTracks; - } - return videoTrack; + return currAdaptation.toVideoTrack(); } /** @@ -743,20 +692,9 @@ export default class TrackChoiceManager { return audioInfos.adaptations .map((adaptation) => { - const formatted : IAvailableAudioTrack = { - language: takeFirstSet(adaptation.language, ""), - normalized: takeFirstSet(adaptation.normalizedLanguage, ""), - audioDescription: adaptation.isAudioDescription === true, 
- id: adaptation.id, - active: currentId === null ? false : - currentId === adaptation.id, - representations: adaptation.representations.map(parseAudioRepresentation), - label: adaptation.label, - }; - if (adaptation.isDub === true) { - formatted.dub = true; - } - return formatted; + const active = currentId === null ? false : + currentId === adaptation.id; + return objectAssign(adaptation.toAudioTrack(), { active }); }); } @@ -782,17 +720,9 @@ export default class TrackChoiceManager { return textInfos.adaptations .map((adaptation) => { - const formatted : IAvailableTextTrack = { - language: takeFirstSet(adaptation.language, ""), - normalized: takeFirstSet(adaptation.normalizedLanguage, ""), - closedCaption: adaptation.isClosedCaption === true, - id: adaptation.id, - active: currentId === null ? false : - currentId === adaptation.id, - label: adaptation.label, - forced: adaptation.isForcedSubtitles, - }; - return formatted; + const active = currentId === null ? false : + currentId === adaptation.id; + return objectAssign(adaptation.toTextTrack(), { active }); }); } @@ -817,37 +747,21 @@ export default class TrackChoiceManager { return videoInfos.adaptations .map((adaptation) => { - const trickModeTracks = adaptation.trickModeTracks !== undefined ? - adaptation.trickModeTracks.map((trickModeAdaptation) => { + const active = currentId === null ? false : + currentId === adaptation.id; + const track = adaptation.toVideoTrack(); + const trickModeTracks = track.trickModeTracks !== undefined ? + track.trickModeTracks.map((trickModeAdaptation) => { const isActive = currentId === null ? 
false : currentId === trickModeAdaptation.id; - const representations = trickModeAdaptation.representations - .map(parseVideoRepresentation); - const trickMode : IAvailableVideoTrack = { id: trickModeAdaptation.id, - representations, - isTrickModeTrack: true, - active: isActive }; - if (trickModeAdaptation.isSignInterpreted === true) { - trickMode.signInterpreted = true; - } - return trickMode; + return objectAssign(trickModeAdaptation, { active: isActive }); }) : - undefined; - - const formatted: IAvailableVideoTrack = { - id: adaptation.id, - active: currentId === null ? false : - currentId === adaptation.id, - representations: adaptation.representations.map(parseVideoRepresentation), - label: adaptation.label, - }; - if (adaptation.isSignInterpreted === true) { - formatted.signInterpreted = true; - } + []; + const availableTrack = objectAssign(track, { active }); if (trickModeTracks !== undefined) { - formatted.trickModeTracks = trickModeTracks; + availableTrack.trickModeTracks = trickModeTracks; } - return formatted; + return availableTrack; }); } @@ -1355,28 +1269,6 @@ function getPeriodItem( } } -/** - * Parse video Representation into a IVideoRepresentation. - * @param {Object} representation - * @returns {Object} - */ -function parseVideoRepresentation( - { id, bitrate, frameRate, width, height, codec, hdrInfo } : Representation -) : IVideoRepresentation { - return { id, bitrate, frameRate, width, height, codec, hdrInfo }; -} - -/** - * Parse audio Representation into a ITMAudioRepresentation. 
- * @param {Object} representation - * @returns {Object} - */ -function parseAudioRepresentation( - { id, bitrate, codec } : Representation -) : IAudioRepresentation { - return { id, bitrate, codec }; -} - function getRightVideoTrack( adaptation : Adaptation, isTrickModeEnabled : boolean diff --git a/src/core/segment_buffers/implementations/audio_video/audio_video_segment_buffer.ts b/src/core/segment_buffers/implementations/audio_video/audio_video_segment_buffer.ts index 60c022a4ca..2178aefc47 100644 --- a/src/core/segment_buffers/implementations/audio_video/audio_video_segment_buffer.ts +++ b/src/core/segment_buffers/implementations/audio_video/audio_video_segment_buffer.ts @@ -21,11 +21,8 @@ import { import config from "../../../../config"; import log from "../../../../log"; import { getLoggableSegmentId } from "../../../../manifest"; -import areArraysOfNumbersEqual from "../../../../utils/are_arrays_of_numbers_equal"; import assertUnreachable from "../../../../utils/assert_unreachable"; -import { toUint8Array } from "../../../../utils/byte_parsing"; import createCancellablePromise from "../../../../utils/create_cancellable_promise"; -import hashBuffer from "../../../../utils/hash_buffer"; import noop from "../../../../utils/noop"; import objectAssign from "../../../../utils/object_assign"; import TaskCanceller, { @@ -139,20 +136,26 @@ export default class AudioVideoSegmentBuffer extends SegmentBuffer { private _pendingTask : IAVSBPendingTask | null; /** - * Keep track of the of the latest init segment pushed in the linked - * SourceBuffer. + * Keep track of the unique identifier of the latest init segment + * pushed to the linked SourceBuffer. * - * This allows to be sure the right initialization segment is pushed before - * any chunk is. + * Such identifiers are first declared through the `declareInitSegment` + * method and the corresponding initialization segment is then pushed through + * the `pushChunk` method. 
+ * + * Keeping track of this allows to be sure the right initialization segment is + * pushed before any chunk is. * * `null` if no initialization segment have been pushed to the * `AudioVideoSegmentBuffer` yet. */ - private _lastInitSegment : { /** The init segment itself. */ - data : Uint8Array; - /** Hash of the initSegment for fast comparison */ - hash : number; } | - null; + private _lastInitSegmentUniqueId : string | null; + + /** + * Link unique identifiers for initialization segments (as communicated by + * `declareInitSegment`) to the corresponding initialization data. + */ + private _initSegmentsMap : Map; /** * @constructor @@ -174,8 +177,9 @@ export default class AudioVideoSegmentBuffer extends SegmentBuffer { this._sourceBuffer = sourceBuffer; this._queue = []; this._pendingTask = null; - this._lastInitSegment = null; + this._lastInitSegmentUniqueId = null; this.codec = codec; + this._initSegmentsMap = new Map(); const onError = this._onPendingTaskError.bind(this); const reCheck = this._flush.bind(this); @@ -198,6 +202,20 @@ export default class AudioVideoSegmentBuffer extends SegmentBuffer { }); } + public declareInitSegment( + uniqueId : string, + initSegmentData : unknown + ) : void { + assertDataIsBufferSource(initSegmentData); + this._initSegmentsMap.set(uniqueId, initSegmentData); + } + + public freeInitSegment( + uniqueId : string + ) : void { + this._initSegmentsMap.delete(uniqueId); + } + /** * Push a chunk of the media segment given to the attached SourceBuffer, in a * FIFO queue. 
@@ -229,12 +247,12 @@ export default class AudioVideoSegmentBuffer extends SegmentBuffer { infos : IPushChunkInfos, cancellationSignal : CancellationSignal ) : Promise { - assertPushedDataIsBufferSource(infos); + assertDataIsBufferSource(infos.data.chunk); log.debug("AVSB: receiving order to push data to the SourceBuffer", this.bufferType, getLoggableSegmentId(infos.inventoryInfos)); return this._addToQueue({ type: SegmentBufferOperation.Push, - value: infos }, + value: infos as IPushChunkInfos }, cancellationSignal); } @@ -350,7 +368,7 @@ export default class AudioVideoSegmentBuffer extends SegmentBuffer { * @param {Event} err */ private _onPendingTaskError(err : unknown) : void { - this._lastInitSegment = null; // initialize init segment as a security + this._lastInitSegmentUniqueId = null; // initialize init segment as a security if (this._pendingTask !== null) { const error = err instanceof Error ? err : @@ -447,7 +465,7 @@ export default class AudioVideoSegmentBuffer extends SegmentBuffer { const error = e instanceof Error ? 
e : new Error("An unknown error occured when preparing a push operation"); - this._lastInitSegment = null; // initialize init segment as a security + this._lastInitSegmentUniqueId = null; // initialize init segment as a security nextItem.reject(error); return; } @@ -557,15 +575,17 @@ export default class AudioVideoSegmentBuffer extends SegmentBuffer { this._sourceBuffer.appendWindowEnd = appendWindow[1]; } - if (data.initSegment !== null && - (hasUpdatedSourceBufferType || !this._isLastInitSegment(data.initSegment))) + if (data.initSegmentUniqueId !== null && + (hasUpdatedSourceBufferType || + !this._isLastInitSegment(data.initSegmentUniqueId))) { // Push initialization segment before the media segment - const segmentData = data.initSegment; + const segmentData = this._initSegmentsMap.get(data.initSegmentUniqueId); + if (segmentData === undefined) { + throw new Error("Invalid initialization segment uniqueId"); + } dataToPush.push(segmentData); - const initU8 = toUint8Array(segmentData); - this._lastInitSegment = { data: initU8, - hash: hashBuffer(initU8) }; + this._lastInitSegmentUniqueId = data.initSegmentUniqueId; } if (data.chunk !== null) { @@ -576,28 +596,16 @@ export default class AudioVideoSegmentBuffer extends SegmentBuffer { } /** - * Return `true` if the given `segmentData` is the same segment than the last + * Return `true` if the given `uniqueId` is the identifier of the last * initialization segment pushed to the `AudioVideoSegmentBuffer`. 
- * @param {BufferSource} segmentData + * @param {string} uniqueId * @returns {boolean} */ - private _isLastInitSegment(segmentData : BufferSource) : boolean { - if (this._lastInitSegment === null) { + private _isLastInitSegment(uniqueId : string) : boolean { + if (this._lastInitSegmentUniqueId === null) { return false; } - if (this._lastInitSegment.data === segmentData) { - return true; - } - const oldInit = this._lastInitSegment.data; - if (oldInit.byteLength === segmentData.byteLength) { - const newInitU8 = toUint8Array(segmentData); - if (hashBuffer(newInitU8) === this._lastInitSegment.hash && - areArraysOfNumbersEqual(oldInit, newInitU8)) - { - return true; - } - } - return false; + return this._lastInitSegmentUniqueId === uniqueId; } } @@ -605,27 +613,20 @@ export default class AudioVideoSegmentBuffer extends SegmentBuffer { * Throw if the given input is not in the expected format. * Allows to enforce runtime type-checking as compile-time type-checking here is * difficult to enforce. 
- * @param {Object} pushedData + * @param {Object} data */ -function assertPushedDataIsBufferSource( - pushedData : IPushChunkInfos -) : asserts pushedData is IPushChunkInfos { +function assertDataIsBufferSource( + data : unknown +) : asserts data is BufferSource { if (__ENVIRONMENT__.CURRENT_ENV === __ENVIRONMENT__.PRODUCTION as number) { return; } - const { chunk, initSegment } = pushedData.data; if ( - typeof chunk !== "object" || - typeof initSegment !== "object" || - ( - chunk !== null && - !(chunk instanceof ArrayBuffer) && - !((chunk as ArrayBufferView).buffer instanceof ArrayBuffer) - ) || + typeof data !== "object" || ( - initSegment !== null && - !(initSegment instanceof ArrayBuffer) && - !((initSegment as ArrayBufferView).buffer instanceof ArrayBuffer) + data !== null && + !(data instanceof ArrayBuffer) && + !((data as ArrayBufferView).buffer instanceof ArrayBuffer) ) ) { throw new Error("Invalid data given to the AudioVideoSegmentBuffer"); diff --git a/src/core/segment_buffers/implementations/image/image_segment_buffer.ts b/src/core/segment_buffers/implementations/image/image_segment_buffer.ts index fe0f08e798..d89d834b07 100644 --- a/src/core/segment_buffers/implementations/image/image_segment_buffer.ts +++ b/src/core/segment_buffers/implementations/image/image_segment_buffer.ts @@ -38,6 +38,22 @@ export default class ImageSegmentBuffer extends SegmentBuffer { this._buffered = new ManualTimeRanges(); } + /** + * @param {string} uniqueId + */ + public declareInitSegment(uniqueId : string): void { + log.warn("ISB: Declaring initialization segment for image SegmentBuffer", + uniqueId); + } + + /** + * @param {string} uniqueId + */ + public freeInitSegment(uniqueId : string): void { + log.warn("ISB: Freeing initialization segment for image SegmentBuffer", + uniqueId); + } + /** * @param {Object} data * @returns {Promise} diff --git a/src/core/segment_buffers/implementations/text/html/html_text_segment_buffer.ts 
b/src/core/segment_buffers/implementations/text/html/html_text_segment_buffer.ts index f3a7ecdfe2..ef294599e8 100644 --- a/src/core/segment_buffers/implementations/text/html/html_text_segment_buffer.ts +++ b/src/core/segment_buffers/implementations/text/html/html_text_segment_buffer.ts @@ -137,6 +137,22 @@ export default class HTMLTextSegmentBuffer extends SegmentBuffer { this.autoRefreshSubtitles(this._canceller.signal); } + /** + * @param {string} uniqueId + */ + public declareInitSegment(uniqueId : string): void { + log.warn("ISB: Declaring initialization segment for image SegmentBuffer", + uniqueId); + } + + /** + * @param {string} uniqueId + */ + public freeInitSegment(uniqueId : string): void { + log.warn("ISB: Freeing initialization segment for image SegmentBuffer", + uniqueId); + } + /** * Push text segment to the HTMLTextSegmentBuffer. * @param {Object} infos diff --git a/src/core/segment_buffers/implementations/text/native/native_text_segment_buffer.ts b/src/core/segment_buffers/implementations/text/native/native_text_segment_buffer.ts index 0bf209d9c8..fea7a866d9 100644 --- a/src/core/segment_buffers/implementations/text/native/native_text_segment_buffer.ts +++ b/src/core/segment_buffers/implementations/text/native/native_text_segment_buffer.ts @@ -66,6 +66,22 @@ export default class NativeTextSegmentBuffer extends SegmentBuffer { this._trackElement = trackElement; } + /** + * @param {string} uniqueId + */ + public declareInitSegment(uniqueId : string): void { + log.warn("ISB: Declaring initialization segment for image SegmentBuffer", + uniqueId); + } + + /** + * @param {string} uniqueId + */ + public freeInitSegment(uniqueId : string): void { + log.warn("ISB: Freeing initialization segment for image SegmentBuffer", + uniqueId); + } + /** * @param {Object} infos * @returns {Promise} diff --git a/src/core/segment_buffers/implementations/types.ts b/src/core/segment_buffers/implementations/types.ts index cc6d697acc..39cc1abaa4 100644 --- 
a/src/core/segment_buffers/implementations/types.ts +++ b/src/core/segment_buffers/implementations/types.ts @@ -87,6 +87,13 @@ export abstract class SegmentBuffer { this._segmentInventory = new SegmentInventory(); } + public abstract declareInitSegment( + uniqueId : string, + initSegmentData : unknown + ) : void; + + public abstract freeInitSegment(uniqueId : string) : void; + /** * Push a chunk of the media segment given to the attached buffer, in a * FIFO queue. @@ -96,7 +103,8 @@ export abstract class SegmentBuffer { * pushed. * * Depending on the type of data appended, the pushed chunk might rely on an - * initialization segment, given through the `data.initSegment` property. + * initialization segment, which had to be previously declared through the + * `declareInitSegment` method. * * Such initialization segment will be first pushed to the buffer if the * last pushed segment was associated to another initialization segment. @@ -106,7 +114,7 @@ export abstract class SegmentBuffer { * reference). * * If you don't need any initialization segment to push the wanted chunk, you - * can just set `data.initSegment` to `null`. + * can just set the corresponding property to `null`. * * You can also only push an initialization segment by setting the * `data.chunk` argument to null. @@ -230,12 +238,16 @@ export type IBufferType = "audio" | */ export interface IPushedChunkData { /** - * The whole initialization segment's data related to the chunk you want to + * The `uniqueId` of the initialization segment linked to the data you want to * push. + * + * That identifier should previously have been declared through the + * `declareInitSegment` method and not freed. + * * To set to `null` either if no initialization data is needed, or if you are * confident that the last pushed one is compatible. */ - initSegment: T | null; + initSegmentUniqueId : string | null; /** * Chunk you want to push. 
* This can be the whole decodable segment's data or just a decodable sub-part diff --git a/src/core/segment_buffers/index.ts b/src/core/segment_buffers/index.ts index 5b90fec51c..462a852b8c 100644 --- a/src/core/segment_buffers/index.ts +++ b/src/core/segment_buffers/index.ts @@ -30,6 +30,7 @@ import { import { IBufferedChunk, IChunkContext, + IInsertedChunkInfos, } from "./inventory"; import SegmentBuffersStore, { ISegmentBufferOptions, @@ -49,6 +50,7 @@ export { IBufferedChunk, IChunkContext, + IInsertedChunkInfos, IPushChunkInfos, IPushedChunkData, diff --git a/src/core/stream/adaptation/utils/create_representation_estimator.ts b/src/core/stream/adaptation/utils/create_representation_estimator.ts index 1885ab74b7..76404ec18c 100644 --- a/src/core/stream/adaptation/utils/create_representation_estimator.ts +++ b/src/core/stream/adaptation/utils/create_representation_estimator.ts @@ -86,7 +86,8 @@ export default function getRepresentationEstimate( if (newRepr.length === 0) { const noRepErr = new MediaError("NO_PLAYABLE_REPRESENTATION", "No Representation in the chosen " + - adaptation.type + " Adaptation can be played"); + adaptation.type + " Adaptation can be played", + { adaptation }); cleanUp(); onFatalError(noRepErr); return; diff --git a/src/core/stream/period/period_stream.ts b/src/core/stream/period/period_stream.ts index a06db4fd66..87a71df09c 100644 --- a/src/core/stream/period/period_stream.ts +++ b/src/core/stream/period/period_stream.ts @@ -380,7 +380,8 @@ function getFirstDeclaredMimeType(adaptation : Adaptation) : string { if (representations.length === 0) { const noRepErr = new MediaError("NO_PLAYABLE_REPRESENTATION", "No Representation in the chosen " + - adaptation.type + " Adaptation can be played"); + adaptation.type + " Adaptation can be played", + { adaptation }); throw noRepErr; } return representations[0].getMimeTypeString(); diff --git a/src/core/stream/representation/representation_stream.ts 
b/src/core/stream/representation/representation_stream.ts index a508ca4ef8..692b38c0fa 100644 --- a/src/core/stream/representation/representation_stream.ts +++ b/src/core/stream/representation/representation_stream.ts @@ -109,11 +109,17 @@ export default function RepresentationStream( segmentsLoadingCanceller.linkToSignal(globalCanceller.signal); /** Saved initialization segment state for this representation. */ - const initSegmentState : IInitSegmentState = { + const initSegmentState : IInitSegmentState = { segment: representation.index.getInitSegment(), - segmentData: null, + uniqueId: null, isLoaded: false, }; + globalCanceller.signal.register(() => { + // Free initialization segment if one has been declared + if (initSegmentState.uniqueId !== null) { + segmentBuffer.freeInitSegment(initSegmentState.uniqueId); + } + }); /** Emit the last scheduled downloading queue for segments. */ const lastSegmentQueue = createSharedReference({ @@ -125,7 +131,6 @@ export default function RepresentationStream( const hasInitSegment = initSegmentState.segment !== null; if (!hasInitSegment) { - initSegmentState.segmentData = null; initSegmentState.isLoaded = true; } @@ -338,7 +343,6 @@ export default function RepresentationStream( return ; } if (evt.segmentType === "init") { - initSegmentState.segmentData = evt.initializationData; initSegmentState.isLoaded = true; // Now that the initialization segment has been parsed - which may have @@ -350,21 +354,31 @@ export default function RepresentationStream( callbacks.encryptionDataEncountered( allEncryptionData.map(p => objectAssign({ content }, p)) ); + if (globalCanceller.isUsed()) { + return ; // previous callback has stopped everything by side-effect + } } } - pushInitSegment({ playbackObserver, - content, - segment: evt.segment, - segmentData: evt.initializationData, - segmentBuffer }, - globalCanceller.signal) - .then((result) => { - if (result !== null) { - callbacks.addedSegment(result); - } - }) - .catch(onFatalBufferError); + 
if (evt.initializationData !== null) { + const initSegmentUniqueId = representation.uniqueId; + initSegmentState.uniqueId = initSegmentUniqueId; + segmentBuffer.declareInitSegment(initSegmentUniqueId, + evt.initializationData); + pushInitSegment({ playbackObserver, + content, + initSegmentUniqueId, + segment: evt.segment, + segmentData: evt.initializationData, + segmentBuffer }, + globalCanceller.signal) + .then((result) => { + if (result !== null) { + callbacks.addedSegment(result); + } + }) + .catch(onFatalBufferError); + } // Sometimes the segment list is only known once the initialization segment // is parsed. Thus we immediately re-check if there's new segments to load. @@ -401,10 +415,10 @@ export default function RepresentationStream( } } - const initSegmentData = initSegmentState.segmentData; + const initSegmentUniqueId = initSegmentState.uniqueId; pushMediaSegment({ playbackObserver, content, - initSegmentData, + initSegmentUniqueId, parsedSegment: evt, segment: evt.segment, segmentBuffer }, @@ -440,17 +454,18 @@ export default function RepresentationStream( * Information about the initialization segment linked to the Representation * which the RepresentationStream try to download segments for. */ -interface IInitSegmentState { +interface IInitSegmentState { /** * Segment Object describing that initialization segment. * `null` if there's no initialization segment for that Representation. */ segment : ISegment | null; /** - * Initialization segment data. - * `null` either when it doesn't exist or when it has not been loaded yet. + * Unique identifier used to identify the initialization segment data, used by + * the `SegmentBuffer`. + * `null` either when it doesn't exist or when it has not been declared yet. */ - segmentData : T | null; + uniqueId : string | null; /** `true` if the initialization segment has been loaded and parsed. 
*/ isLoaded : boolean; } diff --git a/src/core/stream/representation/utils/append_segment_to_buffer.ts b/src/core/stream/representation/utils/append_segment_to_buffer.ts index ef29535890..da9a180f36 100644 --- a/src/core/stream/representation/utils/append_segment_to_buffer.ts +++ b/src/core/stream/representation/utils/append_segment_to_buffer.ts @@ -22,6 +22,7 @@ import { MediaError } from "../../../../errors"; import { CancellationError, CancellationSignal } from "../../../../utils/task_canceller"; import { IReadOnlyPlaybackObserver } from "../../../api"; import { + IInsertedChunkInfos, IPushChunkInfos, SegmentBuffer, } from "../../../segment_buffers"; @@ -41,7 +42,7 @@ import forceGarbageCollection from "./force_garbage_collection"; export default async function appendSegmentToBuffer( playbackObserver : IReadOnlyPlaybackObserver, segmentBuffer : SegmentBuffer, - dataInfos : IPushChunkInfos, + dataInfos : IPushChunkInfos & { inventoryInfos: IInsertedChunkInfos }, cancellationSignal : CancellationSignal ) : Promise { try { @@ -55,7 +56,9 @@ export default async function appendSegmentToBuffer( const reason = appendError instanceof Error ? appendError.toString() : "An unknown error happened when pushing content"; - throw new MediaError("BUFFER_APPEND_ERROR", reason); + throw new MediaError("BUFFER_APPEND_ERROR", + reason, + { adaptation: dataInfos.inventoryInfos.adaptation }); } const { position } = playbackObserver.getReference().getValue(); const currentPos = position.pending ?? position.last; @@ -66,7 +69,9 @@ export default async function appendSegmentToBuffer( const reason = err2 instanceof Error ? 
err2.toString() : "Could not clean the buffer"; - throw new MediaError("BUFFER_FULL_ERROR", reason); + throw new MediaError("BUFFER_FULL_ERROR", + reason, + { adaptation: dataInfos.inventoryInfos.adaptation }); } } } diff --git a/src/core/stream/representation/utils/push_init_segment.ts b/src/core/stream/representation/utils/push_init_segment.ts index bd3c66612a..a2ba790ab2 100644 --- a/src/core/stream/representation/utils/push_init_segment.ts +++ b/src/core/stream/representation/utils/push_init_segment.ts @@ -20,6 +20,7 @@ import Manifest, { Period, Representation, } from "../../../../manifest"; +import objectAssign from "../../../../utils/object_assign"; import { CancellationSignal } from "../../../../utils/task_canceller"; import { IReadOnlyPlaybackObserver } from "../../../api"; import { @@ -42,6 +43,7 @@ export default async function pushInitSegment( { playbackObserver, content, + initSegmentUniqueId, segment, segmentData, segmentBuffer, @@ -53,27 +55,30 @@ export default async function pushInitSegment( manifest : Manifest; period : Period; representation : Representation; }; - segmentData : T | null; + initSegmentUniqueId : string; + segmentData : T; segment : ISegment; segmentBuffer : SegmentBuffer; }, cancelSignal : CancellationSignal ) : Promise< IStreamEventAddedSegmentPayload | null > { - if (segmentData === null) { - return null; - } if (cancelSignal.cancellationError !== null) { throw cancelSignal.cancellationError; } const codec = content.representation.getMimeTypeString(); - const data : IPushedChunkData = { initSegment: segmentData, + const data : IPushedChunkData = { initSegmentUniqueId, chunk: null, timestampOffset: 0, appendWindow: [ undefined, undefined ], codec }; + const inventoryInfos = objectAssign({ segment, + chunkSize: undefined, + start: 0, + end: 0 }, + content); await appendSegmentToBuffer(playbackObserver, segmentBuffer, - { data, inventoryInfos: null }, + { data, inventoryInfos }, cancelSignal); const buffered = 
segmentBuffer.getBufferedRanges(); return { content, segment, buffered, segmentData }; diff --git a/src/core/stream/representation/utils/push_media_segment.ts b/src/core/stream/representation/utils/push_media_segment.ts index 396a0b0368..ac9e45e277 100644 --- a/src/core/stream/representation/utils/push_media_segment.ts +++ b/src/core/stream/representation/utils/push_media_segment.ts @@ -41,7 +41,7 @@ import appendSegmentToBuffer from "./append_segment_to_buffer"; export default async function pushMediaSegment( { playbackObserver, content, - initSegmentData, + initSegmentUniqueId, parsedSegment, segment, segmentBuffer } : @@ -52,7 +52,7 @@ export default async function pushMediaSegment( manifest : Manifest; period : Period; representation : Representation; }; - initSegmentData : T | null; + initSegmentUniqueId : string | null; parsedSegment : ISegmentParserParsedMediaChunk; segment : ISegment; segmentBuffer : SegmentBuffer; }, @@ -83,7 +83,7 @@ export default async function pushMediaSegment( undefined, ]; - const data = { initSegment: initSegmentData, + const data = { initSegmentUniqueId, chunk: chunkData, timestampOffset: chunkOffset, appendWindow: safeAppendWindow, diff --git a/src/errors/__tests__/media_error.test.ts b/src/errors/__tests__/media_error.test.ts index 0fee2c42eb..782601397a 100644 --- a/src/errors/__tests__/media_error.test.ts +++ b/src/errors/__tests__/media_error.test.ts @@ -19,25 +19,25 @@ import MediaError from "../media_error"; describe("errors - MediaError", () => { it("should format a MediaError", () => { const reason = "test"; - const mediaError = new MediaError("BUFFER_FULL_ERROR", reason); + const mediaError = new MediaError("MEDIA_TIME_BEFORE_MANIFEST", reason); expect(mediaError).toBeInstanceOf(Error); expect(mediaError.name).toBe("MediaError"); expect(mediaError.type).toBe("MEDIA_ERROR"); - expect(mediaError.code).toBe("BUFFER_FULL_ERROR"); + expect(mediaError.code).toBe("MEDIA_TIME_BEFORE_MANIFEST"); 
expect(mediaError.fatal).toBe(false); - expect(mediaError.message).toBe("MediaError (BUFFER_FULL_ERROR) test"); + expect(mediaError.message).toBe("MediaError (MEDIA_TIME_BEFORE_MANIFEST) test"); }); it("should be able to set it as fatal", () => { const reason = "test"; - const mediaError = new MediaError("BUFFER_APPEND_ERROR", reason); + const mediaError = new MediaError("MEDIA_TIME_AFTER_MANIFEST", reason); mediaError.fatal = true; expect(mediaError).toBeInstanceOf(Error); expect(mediaError.name).toBe("MediaError"); expect(mediaError.type).toBe("MEDIA_ERROR"); - expect(mediaError.code).toBe("BUFFER_APPEND_ERROR"); + expect(mediaError.code).toBe("MEDIA_TIME_AFTER_MANIFEST"); expect(mediaError.fatal).toBe(true); - expect(mediaError.message).toBe("MediaError (BUFFER_APPEND_ERROR) test"); + expect(mediaError.message).toBe("MediaError (MEDIA_TIME_AFTER_MANIFEST) test"); }); it("should filter in a valid error code", () => { diff --git a/src/errors/index.ts b/src/errors/index.ts index 4fd05aa36b..27441c4958 100644 --- a/src/errors/index.ts +++ b/src/errors/index.ts @@ -26,7 +26,9 @@ import { } from "./error_codes"; import formatError from "./format_error"; import isKnownError from "./is_known_error"; -import MediaError from "./media_error"; +import MediaError, { + IMediaErrorTrackContext, +} from "./media_error"; import NetworkError from "./network_error"; import OtherError from "./other_error"; import RequestError from "./request_error"; @@ -39,6 +41,7 @@ export { ErrorTypes, IErrorCode, IErrorType, + IMediaErrorTrackContext, formatError, MediaError as MediaError, NetworkError, diff --git a/src/errors/media_error.ts b/src/errors/media_error.ts index 2f5d17e3d0..78d8ffd6a3 100644 --- a/src/errors/media_error.ts +++ b/src/errors/media_error.ts @@ -14,12 +14,42 @@ * limitations under the License. 
*/ +import { Adaptation } from "../manifest"; +import { + IAudioTrack, + ITextTrack, + IVideoTrack, +} from "../public_types"; import { ErrorTypes, IMediaErrorCode, } from "./error_codes"; import errorMessage from "./error_message"; +interface IAudioTrackMediaErrorContext { + type : "audio"; + track : IAudioTrack; +} + +interface IVideoTrackMediaErrorContext { + type : "video"; + track : IVideoTrack; +} + +interface ITextTrackMediaErrorContext { + type : "text"; + track : ITextTrack; +} + +export type IMediaErrorTrackContext = IAudioTrackMediaErrorContext | + IVideoTrackMediaErrorContext | + ITextTrackMediaErrorContext; + +type ICodeWithAdaptationType = "BUFFER_APPEND_ERROR" | + "BUFFER_FULL_ERROR" | + "NO_PLAYABLE_REPRESENTATION" | + "MANIFEST_INCOMPATIBLE_CODECS_ERROR"; + /** * Error linked to the media Playback. * @@ -31,13 +61,32 @@ export default class MediaError extends Error { public readonly type : "MEDIA_ERROR"; public readonly message : string; public readonly code : IMediaErrorCode; + public readonly trackInfo : IMediaErrorTrackContext | undefined; public fatal : boolean; /** * @param {string} code * @param {string} reason + * @param {Object|undefined} [context] */ - constructor(code : IMediaErrorCode, reason : string) { + constructor( + code : ICodeWithAdaptationType, + reason : string, + context: { + adaptation : Adaptation; + } + ); + constructor( + code : Exclude, + reason : string, + ); + constructor( + code : IMediaErrorCode, + reason : string, + context? : { + adaptation? 
: Adaptation | undefined; + } | undefined + ) { super(); // @see https://stackoverflow.com/questions/41102060/typescript-extending-error-class Object.setPrototypeOf(this, MediaError.prototype); @@ -48,5 +97,22 @@ export default class MediaError extends Error { this.code = code; this.message = errorMessage(this.name, this.code, reason); this.fatal = false; + const adaptation = context?.adaptation; + if (adaptation !== undefined) { + switch (adaptation.type) { + case "audio": + this.trackInfo = { type: "audio", + track: adaptation.toAudioTrack() }; + break; + case "video": + this.trackInfo = { type: "video", + track: adaptation.toVideoTrack() }; + break; + case "text": + this.trackInfo = { type: "text", + track: adaptation.toTextTrack() }; + break; + } + } } } diff --git a/src/experimental/tools/VideoThumbnailLoader/load_and_push_segment.ts b/src/experimental/tools/VideoThumbnailLoader/load_and_push_segment.ts index 4fa77fcf53..407af6c292 100644 --- a/src/experimental/tools/VideoThumbnailLoader/load_and_push_segment.ts +++ b/src/experimental/tools/VideoThumbnailLoader/load_and_push_segment.ts @@ -33,30 +33,33 @@ export default function loadAndPushSegment( segmentInfo : ISegmentLoaderContent, segmentBuffer: AudioVideoSegmentBuffer, segmentFetcher: ISegmentFetcher, + initSegmentUniqueId : string | null, cancelSignal: CancellationSignal ): Promise { const pushOperations : Array> = []; return segmentFetcher(segmentInfo, { onChunk(parseChunk) { const parsed = parseChunk(undefined); - let isIsInitSegment : boolean; + let isInitSegment : boolean; let data : BufferSource | null; let timestampOffset : number; const codec = segmentInfo.representation.getMimeTypeString(); if (parsed.segmentType === "init") { - isIsInitSegment = true; + isInitSegment = true; data = parsed.initializationData; timestampOffset = 0; + if (initSegmentUniqueId !== null) { + segmentBuffer.declareInitSegment(initSegmentUniqueId, data); + } } else { - isIsInitSegment = false; + isInitSegment = false; 
data = parsed.chunkData; timestampOffset = parsed.chunkOffset; } const pushOperation = segmentBuffer.pushChunk({ - data: { initSegment: isIsInitSegment ? data : - null, - chunk: isIsInitSegment ? null : - data, + data: { initSegmentUniqueId, + chunk: isInitSegment ? null : + data, appendWindow: [segmentInfo.period.start, segmentInfo.period.end], timestampOffset, codec }, diff --git a/src/experimental/tools/VideoThumbnailLoader/video_thumbnail_loader.ts b/src/experimental/tools/VideoThumbnailLoader/video_thumbnail_loader.ts index 17d389011d..50596f4c49 100644 --- a/src/experimental/tools/VideoThumbnailLoader/video_thumbnail_loader.ts +++ b/src/experimental/tools/VideoThumbnailLoader/video_thumbnail_loader.ts @@ -166,7 +166,7 @@ export default class VideoThumbnailLoader { let lastRepInfo : IVideoThumbnailLoaderRepresentationInfo; if (this._lastRepresentationInfo === null) { - const cleaner = new TaskCanceller(); + const lastRepInfoCleaner = new TaskCanceller(); const segmentFetcher = createSegmentFetcher( "video", loader.video, @@ -179,13 +179,17 @@ export default class VideoThumbnailLoader { maxRetryRegular: 0, requestTimeout: config.getCurrent().DEFAULT_REQUEST_TIMEOUT } ) as ISegmentFetcher; + const initSegment = content.representation.index.getInitSegment(); + const initSegmentUniqueId = initSegment !== null ? 
+ content.representation.uniqueId : + null; const segmentBufferProm = prepareSourceBuffer( this._videoElement, content.representation.getMimeTypeString(), - cleaner.signal + lastRepInfoCleaner.signal ).then(async (segmentBuffer) => { - const initSegment = content.representation.index.getInitSegment(); - if (initSegment === null) { + if (initSegment === null || initSegmentUniqueId === null) { + lastRepInfo.initSegmentUniqueId = null; return segmentBuffer; } const segmentInfo = objectAssign({ segment: initSegment }, @@ -193,13 +197,18 @@ export default class VideoThumbnailLoader { await loadAndPushSegment(segmentInfo, segmentBuffer, lastRepInfo.segmentFetcher, - cleaner.signal); + initSegmentUniqueId, + lastRepInfoCleaner.signal); + lastRepInfoCleaner.signal.register(() => { + segmentBuffer.freeInitSegment(initSegmentUniqueId); + }); return segmentBuffer; }); lastRepInfo = { - cleaner, + cleaner: lastRepInfoCleaner, segmentBuffer: segmentBufferProm, content, + initSegmentUniqueId, segmentFetcher, pendingRequests: [], }; @@ -251,6 +260,7 @@ export default class VideoThumbnailLoader { const prom = loadAndPushSegment(segmentInfo, segmentBuffer, lastRepInfo.segmentFetcher, + lastRepInfo.initSegmentUniqueId, requestCanceller.signal) .then(unlinkSignal, (err) => { unlinkSignal(); @@ -390,6 +400,7 @@ interface IVideoThumbnailLoaderRepresentationInfo { * `pendingRequests`. 
*/ pendingRequests : IPendingRequestInfo[]; + initSegmentUniqueId : string | null; } interface IPendingRequestInfo { diff --git a/src/manifest/__tests__/manifest.test.ts b/src/manifest/__tests__/manifest.test.ts index 9faaa611ec..935b270756 100644 --- a/src/manifest/__tests__/manifest.test.ts +++ b/src/manifest/__tests__/manifest.test.ts @@ -72,7 +72,7 @@ describe("Manifest - Manifest", () => { expect(manifest.suggestedPresentationDelay).toEqual(undefined); expect(manifest.uris).toEqual([]); - expect(fakeIdGenerator).toHaveBeenCalledTimes(2); + expect(fakeIdGenerator).toHaveBeenCalled(); expect(fakeGenerateNewId).toHaveBeenCalledTimes(1); expect(fakeLogger.info).not.toHaveBeenCalled(); expect(fakeLogger.warn).not.toHaveBeenCalled(); @@ -114,7 +114,7 @@ describe("Manifest - Manifest", () => { contentWarnings: [] } ]); expect(manifest.adaptations).toEqual({}); - expect(fakeIdGenerator).toHaveBeenCalledTimes(2); + expect(fakeIdGenerator).toHaveBeenCalled(); expect(fakeGenerateNewId).toHaveBeenCalledTimes(1); expect(fakeLogger.info).not.toHaveBeenCalled(); expect(fakeLogger.warn).not.toHaveBeenCalled(); @@ -152,7 +152,7 @@ describe("Manifest - Manifest", () => { expect(fakePeriod).toHaveBeenCalledTimes(2); expect(fakePeriod).toHaveBeenCalledWith(period1, representationFilter); expect(fakePeriod).toHaveBeenCalledWith(period2, representationFilter); - expect(fakeIdGenerator).toHaveBeenCalledTimes(2); + expect(fakeIdGenerator).toHaveBeenCalled(); expect(fakeGenerateNewId).toHaveBeenCalledTimes(1); expect(fakeLogger.info).not.toHaveBeenCalled(); expect(fakeLogger.warn).not.toHaveBeenCalled(); @@ -194,7 +194,7 @@ describe("Manifest - Manifest", () => { ]); expect(manifest.adaptations).toBe(adapP1); - expect(fakeIdGenerator).toHaveBeenCalledTimes(2); + expect(fakeIdGenerator).toHaveBeenCalled(); expect(fakeGenerateNewId).toHaveBeenCalledTimes(1); expect(fakeLogger.info).not.toHaveBeenCalled(); expect(fakeLogger.warn).not.toHaveBeenCalled(); @@ -234,7 +234,7 @@ 
describe("Manifest - Manifest", () => { expect(manifest.contentWarnings).toContainEqual(new Error("0")); expect(manifest.contentWarnings).toContainEqual(new Error("1")); - expect(fakeIdGenerator).toHaveBeenCalledTimes(2); + expect(fakeIdGenerator).toHaveBeenCalled(); expect(fakeGenerateNewId).toHaveBeenCalledTimes(1); expect(fakeLogger.info).not.toHaveBeenCalled(); expect(fakeLogger.warn).not.toHaveBeenCalled(); @@ -285,7 +285,7 @@ describe("Manifest - Manifest", () => { ]); expect(manifest.suggestedPresentationDelay).toEqual(99); expect(manifest.uris).toEqual(["url1", "url2"]); - expect(fakeIdGenerator).toHaveBeenCalledTimes(2); + expect(fakeIdGenerator).toHaveBeenCalled(); expect(fakeGenerateNewId).toHaveBeenCalledTimes(1); expect(fakeLogger.info).not.toHaveBeenCalled(); expect(fakeLogger.warn).not.toHaveBeenCalled(); @@ -419,7 +419,7 @@ describe("Manifest - Manifest", () => { .toHaveBeenCalledWith(manifest.periods, newManifest.periods); expect(mockTrigger).toHaveBeenCalledTimes(1); expect(mockTrigger).toHaveBeenCalledWith("manifestUpdate", fakeReplacePeriodsRes); - expect(fakeIdGenerator).toHaveBeenCalledTimes(2); + expect(fakeIdGenerator).toHaveBeenCalled(); expect(fakeGenerateNewId).toHaveBeenCalledTimes(1); expect(fakeLogger.info).not.toHaveBeenCalled(); expect(fakeLogger.warn).not.toHaveBeenCalled(); diff --git a/src/manifest/__tests__/period.test.ts b/src/manifest/__tests__/period.test.ts index fe09ea7c27..4264cdc5da 100644 --- a/src/manifest/__tests__/period.test.ts +++ b/src/manifest/__tests__/period.test.ts @@ -149,25 +149,30 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "56", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const videoAda3 = { type: "video", id: "57", isSupported: true, - 
representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda3; } }; const video = [videoAda1, videoAda2, videoAda3]; const audioAda1 = { type: "audio", id: "58", isSupported: true, - representations: [] }; + representations: [], + toAudioTrack() { return audioAda1; } }; const audioAda2 = { type: "audio", id: "59", isSupported: true, - representations: [] }; + representations: [], + toAudioTrack() { return audioAda2; } }; const audio = [audioAda1, audioAda2]; const args = { id: "12", adaptations: { video, audio }, start: 0 }; let period = null; @@ -204,25 +209,30 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const videoAda3 = { type: "video", id: "56", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda3; } }; const video = [videoAda1, videoAda2, videoAda3]; const audioAda1 = { type: "audio", id: "57", isSupported: false, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audioAda2 = { type: "audio", id: "58", isSupported: false, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda2; } }; const audio = [audioAda1, audioAda2]; const args = { id: "12", adaptations: { video, audio }, start: 0 }; let period = null; @@ -259,25 +269,30 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [] }; + representations: [], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [] }; + representations: [], + toVideoTrack() { return videoAda2; } }; const videoAda3 = { type:
"video", id: "56", isSupported: true, - representations: [] }; + representations: [], + toVideoTrack() { return videoAda3; } }; const video = [videoAda1, videoAda2, videoAda3]; const audioAda1 = { type: "audio", id: "58", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audioAda2 = { type: "audio", id: "59", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda2; } }; const audio = [audioAda1, audioAda2]; const args = { id: "12", adaptations: { video, audio }, start: 0 }; let period = null; @@ -314,25 +329,30 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: false, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: false, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const videoAda3 = { type: "video", id: "56", isSupported: false, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda3; } }; const video = [videoAda1, videoAda2, videoAda3]; const audioAda1 = { type: "audio", id: "58", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audioAda2 = { type: "audio", id: "59", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda2; } }; const audio = [audioAda1, audioAda2]; const args = { id: "12", adaptations: { video, audio }, start: 0 }; let period = null; @@ -372,12 +392,14 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const video = [videoAda1]; const videoAda2 = { type: "video", id: "55", isSupported: false, - representations: [{}] }; + 
representations: [{}], + toVideoTrack() { return videoAda2; } }; const video2 = [videoAda2]; const args = { id: "12", adaptations: { video, video2 }, start: 0 }; const period = new Period(args); @@ -408,7 +430,8 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const video = [videoAda1]; const bar = undefined; const args = { id: "12", adaptations: { bar, video }, start: 0 }; @@ -435,11 +458,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const args = { id: "12", adaptations: { video }, start: 0 }; const period = new Period(args, representationFilter); @@ -467,11 +492,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: false, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const fooAda1 = { type: "foo", id: "12", isSupported: false, @@ -503,11 +530,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: false, - representations: [] }; + representations: [], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const fooAda1 = { type: "foo", id: "12", isSupported: false, @@ -532,11 +561,13 @@ describe("Manifest - Period", () => { const 
videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const args = { id: "12", adaptations: { video }, start: 72 }; const period = new Period(args); @@ -557,11 +588,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const args = { id: "12", adaptations: { video }, start: 0, duration: 12 }; const period = new Period(args); @@ -582,11 +615,13 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const args = { id: "12", adaptations: { video }, start: 50, duration: 12 }; const period = new Period(args); @@ -607,17 +642,20 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const audioAda1 = { type: "audio", id: "56", isSupported: true, - representations: [{}] }; + 
representations: [{}], + toAudioTrack() { return audioAda1; } }; const audio = [audioAda1]; const args = { id: "12", adaptations: { video, audio }, start: 50, duration: 12 }; @@ -643,17 +681,20 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const video = [videoAda1, videoAda2]; const audioAda1 = { type: "audio", id: "56", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audio = [audioAda1]; const args = { id: "12", adaptations: { video, audio }, start: 50, duration: 12 }; @@ -686,21 +727,25 @@ describe("Manifest - Period", () => { const videoAda1 = { type: "video", id: "54", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda1; } }; const videoAda2 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda2; } }; const videoAda3 = { type: "video", id: "55", isSupported: true, - representations: [{}] }; + representations: [{}], + toVideoTrack() { return videoAda3; } }; const video = [videoAda1, videoAda2, videoAda3]; const audioAda1 = { type: "audio", id: "56", isSupported: true, - representations: [{}] }; + representations: [{}], + toAudioTrack() { return audioAda1; } }; const audio = [audioAda1]; const args = { id: "12", adaptations: { video, audio }, start: 50, duration: 12 }; diff --git a/src/manifest/adaptation.ts b/src/manifest/adaptation.ts index 128a4a87a5..277df1e3bf 100644 --- a/src/manifest/adaptation.ts +++ b/src/manifest/adaptation.ts @@ -16,7 +16,12 @@ import log from "../log"; import { IParsedAdaptation } from "../parsers/manifest"; -import { 
IRepresentationFilter } from "../public_types"; +import { + IAudioTrack, + IRepresentationFilter, + ITextTrack, + IVideoTrack, +} from "../public_types"; import arrayFind from "../utils/array_find"; import isNullOrUndefined from "../utils/is_null_or_undefined"; import normalizeLanguage from "../utils/languages"; @@ -32,7 +37,7 @@ export const SUPPORTED_ADAPTATIONS_TYPE: IAdaptationType[] = [ "audio", /** * Normalized Adaptation structure. - * An Adaptation describes a single `Track`. For example a specific audio + * An `Adaptation` describes a single `Track`. For example a specific audio * track (in a given language) or a specific video track. * It istelf can be represented in different qualities, which we call here * `Representation`. @@ -216,4 +221,74 @@ export default class Adaptation { getRepresentation(wantedId : number|string) : Representation|undefined { return arrayFind(this.representations, ({ id }) => wantedId === id); } + + /** + * Format an `Adaptation`, generally of type `"audio"`, as an `IAudioTrack`. + * @returns {Object} + */ + public toAudioTrack() : IAudioTrack { + const formatted : IAudioTrack = { + language: this.language ?? "", + normalized: this.normalizedLanguage ?? "", + audioDescription: this.isAudioDescription === true, + id: this.id, + representations: this.representations.map(r => r.toAudioRepresentation()), + label: this.label, + }; + if (this.isDub === true) { + formatted.dub = true; + } + return formatted; + } + + /** + * Format an `Adaptation`, generally of type `"text"`, as an `ITextTrack`. + * @returns {Object} + */ + public toTextTrack() : ITextTrack { + return { + language: this.language ?? "", + normalized: this.normalizedLanguage ?? "", + closedCaption: this.isClosedCaption === true, + id: this.id, + label: this.label, + forced: this.isForcedSubtitles, + }; + } + + /** + * Format an `Adaptation`, generally of type `"video"`, as an `IVideoTrack`.
+ * @returns {Object} + */ + public toVideoTrack() : IVideoTrack { + const trickModeTracks = this.trickModeTracks !== undefined ? + this.trickModeTracks.map((trickModeAdaptation) => { + const representations = trickModeAdaptation.representations + .map(r => r.toVideoRepresentation()); + const trickMode : IVideoTrack = { id: trickModeAdaptation.id, + representations, + isTrickModeTrack: true }; + if (trickModeAdaptation.isSignInterpreted === true) { + trickMode.signInterpreted = true; + } + return trickMode; + }) : + undefined; + + const videoTrack: IVideoTrack = { + id: this.id, + representations: this.representations.map(r => r.toVideoRepresentation()), + label: this.label, + }; + if (this.isSignInterpreted === true) { + videoTrack.signInterpreted = true; + } + if (this.isTrickModeTrack === true) { + videoTrack.isTrickModeTrack = true; + } + if (trickModeTracks !== undefined) { + videoTrack.trickModeTracks = trickModeTracks; + } + return videoTrack; + } } diff --git a/src/manifest/manifest.ts b/src/manifest/manifest.ts index 9619585fd7..11cd1344e7 100644 --- a/src/manifest/manifest.ts +++ b/src/manifest/manifest.ts @@ -632,7 +632,8 @@ export default class Manifest extends EventEmitter { if (newAdaptation.representations.length > 0 && !newAdaptation.isSupported) { const error = new MediaError("MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "An Adaptation contains only incompatible codecs."); + "An Adaptation contains only incompatible codecs.", + { adaptation: newAdaptation }); this.contentWarnings.push(error); } return newAdaptation; @@ -694,7 +695,8 @@ export default class Manifest extends EventEmitter { if (newAdaptation.representations.length > 0 && !newAdaptation.isSupported) { const error = new MediaError("MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "An Adaptation contains only incompatible codecs."); + "An Adaptation contains only incompatible codecs.", + { adaptation: newAdaptation }); this.contentWarnings.push(error); } return newAdaptation; @@ -711,7 +713,7 @@ export 
default class Manifest extends EventEmitter { /** * @param {Object} newManifest - * @param {number} type + * @param {number} updateType */ private _performUpdate( newManifest : Manifest, diff --git a/src/manifest/period.ts b/src/manifest/period.ts index d019e32958..ff11856760 100644 --- a/src/manifest/period.ts +++ b/src/manifest/period.ts @@ -89,7 +89,8 @@ export default class Period { if (newAdaptation.representations.length > 0 && !newAdaptation.isSupported) { const error = new MediaError("MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "An Adaptation contains only incompatible codecs."); + "An Adaptation contains only incompatible codecs.", + { adaptation: newAdaptation }); this.contentWarnings.push(error); } return newAdaptation; diff --git a/src/manifest/representation.ts b/src/manifest/representation.ts index ae89e915d3..bb371353e5 100644 --- a/src/manifest/representation.ts +++ b/src/manifest/representation.ts @@ -21,21 +21,50 @@ import { IContentProtections, IParsedRepresentation, } from "../parsers/manifest"; -import { IHDRInformation } from "../public_types"; +import { + IAudioRepresentation, + IHDRInformation, + IVideoRepresentation, +} from "../public_types"; import areArraysOfNumbersEqual from "../utils/are_arrays_of_numbers_equal"; +import idGenerator from "../utils/id_generator"; import { IRepresentationIndex } from "./representation_index"; import { IAdaptationType, } from "./types"; +const generateRepresentationUniqueId = idGenerator(); + /** * Normalized Representation structure. * @class Representation */ class Representation { - /** ID uniquely identifying the Representation in the Adaptation. */ + /** + * ID uniquely identifying the `Representation` in its parent `Adaptation`. 
+ * + * This identifier might be linked to an identifier present in the original + * Manifest file, it is thus the identifier to use to determine if a + * `Representation` from a refreshed `Manifest` is actually the same one than + * one in the previously loaded Manifest (as long as the `Adaptation` and + * `Period` are also the same). + * + * For a globally unique identifier regardless of the `Adaptation`, `Period` + * or even `Manifest`, you can rely on `uniqueId` instead. + */ public readonly id : string; + /** + * Globally unique identifier for this `Representation` object. + * + * This identifier is guaranteed to be unique for any `Representation`s of all + * `Manifest` objects created in the current JS Realm. + * As such, it can be used as an identifier for the JS object itself, whereas + * `id` is the identifier for the original Manifest's Representation in the + * scope of its parent `Adaptation`. + */ + public readonly uniqueId : string; + /** * Interface allowing to get information about segments available for this * Representation. @@ -115,6 +144,7 @@ class Representation { */ constructor(args : IParsedRepresentation, opts : { type : IAdaptationType }) { this.id = args.id; + this.uniqueId = generateRepresentationUniqueId(); this.bitrate = args.bitrate; this.codec = args.codecs; @@ -334,6 +364,24 @@ class Representation { values: data }); return true; } + + /** + * Format Representation as an `IAudioRepresentation`. + * @returns {Object} + */ + public toAudioRepresentation(): IAudioRepresentation { + const { id, bitrate, codec } = this; + return { id, bitrate, codec }; + } + + /** + * Format Representation as an `IVideoRepresentation`. + * @returns {Object} + */ + public toVideoRepresentation(): IVideoRepresentation { + const { id, bitrate, frameRate, width, height, codec, hdrInfo } = this; + return { id, bitrate, frameRate, width, height, codec, hdrInfo }; + } } /** Protection data as returned by a Representation. 
*/ diff --git a/src/manifest/utils.ts b/src/manifest/utils.ts index cc76fdd875..c73fc49b24 100644 --- a/src/manifest/utils.ts +++ b/src/manifest/utils.ts @@ -37,9 +37,7 @@ export function areSameContent( content2: IBufferedChunkInfos ): boolean { return (content1.segment.id === content2.segment.id && - content1.representation.id === content2.representation.id && - content1.adaptation.id === content2.adaptation.id && - content1.period.id === content2.period.id); + content1.representation.uniqueId === content2.representation.uniqueId); } /** diff --git a/src/public_types.ts b/src/public_types.ts index bed8f24abf..1a8c79a5dc 100644 --- a/src/public_types.ts +++ b/src/public_types.ts @@ -24,6 +24,7 @@ import { } from "./core/decrypt"; import { IBufferType } from "./core/segment_buffers"; import { + IMediaErrorTrackContext, EncryptedMediaError, MediaError, NetworkError, @@ -33,6 +34,8 @@ import Manifest from "./manifest"; import { ILocalManifest } from "./parsers/manifest/local"; import { IMetaPlaylist } from "./parsers/manifest/metaplaylist/metaplaylist_parser"; +export { IMediaErrorTrackContext }; + /** * This file defines and exports types we want to expose to library users. * Those types are considered as part of the API. @@ -235,11 +238,13 @@ export interface IPeriod { image? : IAdaptation[]; }; } +export type IAdaptationType = "video" | "audio" | "text" | "image"; + /** Adaptation (represents a track), as documented in the API documentation. */ export interface IAdaptation { /** String identifying the Adaptation, unique per Period. */ id : string; - type : "video" | "audio" | "text" | "image"; + type : IAdaptationType; language? : string | undefined; normalizedLanguage? : string | undefined; isAudioDescription? 
: boolean | undefined; diff --git a/src/tools/TextTrackRenderer/text_track_renderer.ts b/src/tools/TextTrackRenderer/text_track_renderer.ts index 2638aa30fb..c60ba4f75e 100644 --- a/src/tools/TextTrackRenderer/text_track_renderer.ts +++ b/src/tools/TextTrackRenderer/text_track_renderer.ts @@ -78,7 +78,7 @@ export default class TextTrackRenderer { args.timeOffset : 0; this._segmentBuffer.pushChunkSync({ inventoryInfos: null, - data: { initSegment: null, + data: { initSegmentUniqueId: null, codec: args.type, timestampOffset, appendWindow: [0, Infinity], diff --git a/src/transports/smooth/isobmff/create_boxes.ts b/src/transports/smooth/isobmff/create_boxes.ts index 105353fb8e..483fb71ff1 100644 --- a/src/transports/smooth/isobmff/create_boxes.ts +++ b/src/transports/smooth/isobmff/create_boxes.ts @@ -33,7 +33,7 @@ import { * @param {Number} height * @param {Number} hRes - horizontal resolution, eg 72 * @param {Number} vRes - vertical resolution, eg 72 - * @param {string} encDepth + * @param {string} encName * @param {Number} colorDepth - eg 24 * @param {Uint8Array} avcc - Uint8Array representing the avcC atom * @returns {Uint8Array} @@ -68,7 +68,7 @@ function createAVC1Box( * @param {Number} height * @param {Number} hRes - horizontal resolution, eg 72 * @param {Number} vRes - vertical resolution, eg 72 - * @param {string} encDepth + * @param {string} encName * @param {Number} colorDepth - eg 24 * @param {Uint8Array} avcc - Uint8Array representing the avcC atom * @param {Uint8Array} sinf - Uint8Array representing the sinf atom @@ -108,8 +108,6 @@ function createENCVBox( * @param {Number} packetSize * @param {Number} sampleRate * @param {Uint8Array} esds - Uint8Array representing the esds atom - * @param {Uint8Array} [sinf] - Uint8Array representing the sinf atom, - * only if name == "enca" * @returns {Uint8Array} */ function createMP4ABox( @@ -164,7 +162,7 @@ function createENCABox( } /** - * @param {url} Uint8Array + * @param {Uint8Array} url * @returns {Uint8Array} 
*/ function createDREFBox(url : Uint8Array) : Uint8Array { @@ -415,7 +413,7 @@ function createSMHDBox() : Uint8Array { } /** - * @param {Array.} representations - arrays of Uint8Array, + * @param {Array.} reps - arrays of Uint8Array, * typically [avc1] or [encv, avc1] * @returns {Uint8Array} */