-
Notifications
You must be signed in to change notification settings - Fork 57
/
Copy pathmedia-source-respec.html
5580 lines (5540 loc) · 261 KB
/
media-source-respec.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>
Media Source Extensions™
</title>
<script src="https://www.w3.org/Tools/respec/respec-w3c" class="remove"></script>
<script src="media-source.js" class="remove"></script>
<script class="remove">
// ReSpec configuration for this document. The surrounding <script> is
// tagged class="remove", so ReSpec strips it from the published output.
// Option reference: https://respec.org/docs/
var respecConfig = {
// Maturity status: Editor's Draft.
specStatus: "ED",
// Short name that forms the /TR/ shortlink for this spec level.
shortName: "media-source-2",
// The W3C Recommendation that this document revises.
prevRecURI: "https://www.w3.org/TR/2016/REC-media-source-20161117/",
// Current editors of the specification.
editors: [
{
name: "Jean-Yves Avenard",
mailto: "[email protected]",
company: "Apple Inc.",
companyURL: "https://www.apple.com/",
w3cid: "115886",
},
{
name: "Mark Watson",
company: "Netflix Inc.",
companyURL: "https://www.netflix.com/",
w3cid: "46379",
},
],
// Past editors, with the date each stepped down.
formerEditors: [
{
name: "Matthew Wolenetz",
retiredDate: "2024-02-01",
mailto: "[email protected]",
company: "W3C Invited Expert",
},
{
name: "Jerry Smith",
retiredDate: "2017-09-01",
company: "Microsoft Corporation",
companyURL: "https://www.microsoft.com/",
},
{
name: "Aaron Colwell",
retiredDate: "2015-04-01",
company: "Google Inc.",
companyURL: "https://www.google.com/",
},
{
name: "Adrian Bateman",
retiredDate: "2015-04-01",
company: "Microsoft Corporation",
companyURL: "https://www.microsoft.com/",
},
],
// Custom option consumed by the pre/post-processors in media-source.js
// (loaded above) — presumably scopes generated definition IDs; confirm there.
mseDefGroupName: "media-source",
// name of the WG
group: "media",
// GitHub repository slug — enables issue, commit, and PR auto-linking.
github: "w3c/media-source",
// caniuse.com feature key for the browser-support panel.
caniuse: "mediasource",
// Link to the web-platform-tests suite for this specification.
testsuiteURI: "https://wpt.live/media-source/",
// Public mailing list of the Media Working Group.
wgPublicList: "public-media-wg",
// Processing hooks defined in media-source.js (loaded above).
preProcess: [mediaSourcePreProcessor],
postProcess: [mediaSourcePostProcessor],
};
</script>
<link rel="stylesheet" href="mse.css">
</head>
<body data-cite="html dom url fileapi infra">
<section id="abstract">
This specification extends {{HTMLMediaElement}} [[HTML]] to allow JavaScript to generate
media streams for playback. Allowing JavaScript to generate streams facilitates a variety of
use cases like adaptive streaming and time shifting live streams.
</section>
<section id="sotd">
<p>
On top of editorial updates, substantive changes since publication as a W3C Recommendation
in <a href="https://www.w3.org/TR/2016/REC-media-source-20161117/">November 2016</a> are:
</p>
<ul>
<li>the addition of a {{SourceBuffer/changeType()}} method to switch among codecs or
bytestreams
</li>
<li>the possibility to create and use {{MediaSource}} objects off the main thread in
dedicated workers
</li>
<li>the removal of the {{URL/createObjectURL()}} extension to the {{URL}} object following
its integration in the File API [[FILEAPI]]
</li>
<li>the addition of {{ManagedMediaSource}}, {{ManagedSourceBuffer}}, and
{{BufferedChangeEvent}} interfaces supporting power-efficient streaming and active buffered
media cleanup by the user agent
</li>
</ul>
<p>
For a full list of changes made since the previous version, see the <a href=
"https://github.com/w3c/media-source/commits/main">commits</a>.
</p>
<p>
The working group maintains <a href="https://github.com/w3c/media-source/issues">a list of
all bug reports that the editors have not yet tried to address</a>.
</p>
<p>
Implementors should be aware that this specification is not stable. <strong>Implementors
who are not taking part in the discussions are likely to find the specification changing
out from under them in incompatible ways.</strong> Vendors interested in implementing this
specification before it eventually reaches the Candidate Recommendation stage should track
the <a href="https://github.com/w3c/media-source">GitHub repository</a> and take part in
the discussions.
</p>
</section>
<section id="introduction" class="informative">
<h2>
Introduction
</h2>
<p>
This specification allows JavaScript to dynamically construct media streams for
&lt;audio&gt; and &lt;video&gt;. It defines a MediaSource object that can serve as a source
of media data for an HTMLMediaElement. MediaSource objects have one or more
{{SourceBuffer}} objects. Applications append data segments to the {{SourceBuffer}}
objects, and can adapt the quality of appended data based on system performance and other
factors. Data from the {{SourceBuffer}} objects is managed as track buffers for audio,
video and text data that is decoded and played. Byte stream specifications used with these
extensions are available in the byte stream format registry [[MSE-REGISTRY]].
</p>
<figure>
<a href=
'https://w3c.github.io/media-source/pipeline_model_description.html#pipelinedesc'><picture><img src="pipeline_model.svg"
alt="Media Source Pipeline Model Diagram"></picture></a>
<figcaption>
Media Source Pipeline Model Diagram
</figcaption>
</figure>
<section>
<h3>
Goals
</h3>
<p>
This specification was designed with the following goals in mind:
</p>
<ul>
<li>Allow JavaScript to construct media streams independent of how the media is fetched.
</li>
<li>Define a splicing and buffering model that facilitates use cases like adaptive
streaming, ad-insertion, time-shifting, and video editing.
</li>
<li>Minimize the need for media parsing in JavaScript.
</li>
<li>Leverage the browser cache as much as possible.
</li>
<li>Provide requirements for byte stream format specifications.
</li>
<li>Not require support for any particular media format or codec.
</li>
</ul>
<p>
This specification defines:
</p>
<ul>
<li>Normative behavior for user agents to enable interoperability between user agents and
web applications when processing media data.
</li>
<li>Normative requirements to enable other specifications to define media formats to be
used within this specification.
</li>
</ul>
</section>
</section>
<section>
<h2>
Definitions
</h2>
<dl>
<dt>
<dfn>Active Track Buffers</dfn>
</dt>
<dd>
<p>
The [=track buffers=] that provide [=coded frames=] for the {{AudioTrack/enabled}}
{{HTMLMediaElement/audioTracks}}, the {{VideoTrack/selected}}
{{HTMLMediaElement/videoTracks}}, and the <a def-id="texttrackmode-showing"></a> or
<a def-id="texttrackmode-hidden"></a> {{HTMLMediaElement/textTracks}}. All these
tracks are associated with {{SourceBuffer}} objects in the
{{MediaSource/activeSourceBuffers}} list.
</p>
</dd>
<dt>
<dfn>Append Window</dfn>
</dt>
<dd>
<p>
A [=presentation timestamp=] range used to filter out [=coded frames=] while
appending. The append window represents a single continuous time range with a single
start time and end time. Coded frames with [=presentation timestamp=] within this
range are allowed to be appended to the {{SourceBuffer}} while coded frames outside
this range are filtered out. The append window start and end times are controlled by
the {{SourceBuffer/appendWindowStart}} and {{SourceBuffer/appendWindowEnd}}
attributes respectively.
</p>
</dd>
<dt>
<dfn class="export">Coded Frame</dfn>
</dt>
<dd>
<p>
A unit of media data that has a [=presentation timestamp=], a [=decode timestamp=],
and a [=coded frame duration=].
</p>
</dd>
<dt>
<dfn>Coded Frame Duration</dfn>
</dt>
<dd>
<p>
The duration of a [=coded frame=]. For video and text, the duration indicates how
long the video frame or text SHOULD be displayed. For audio, the duration represents
the sum of all the samples contained within the coded frame. For example, if an audio
frame contained 441 samples @44100Hz the frame duration would be 10 milliseconds.
</p>
</dd>
<dt>
<dfn>Coded Frame End Timestamp</dfn>
</dt>
<dd>
<p>
The sum of a [=coded frame=] [=presentation timestamp=] and its [=coded frame
duration=]. It represents the [=presentation timestamp=] that immediately follows the
coded frame.
</p>
</dd>
<dt>
<dfn>Coded Frame Group</dfn>
</dt>
<dd>
<p>
A group of [=coded frames=] that are adjacent and have monotonically increasing
[=decode timestamps=] without any gaps. Discontinuities detected by the [=coded frame
processing=] algorithm and {{SourceBuffer/abort()}} calls trigger the start of a new
coded frame group.
</p>
</dd>
<dt>
<dfn>Decode Timestamp</dfn>
</dt>
<dd>
<p>
The decode timestamp indicates the latest time at which the frame needs to be decoded
assuming instantaneous decoding and rendering of this and any dependent frames (this
is equal to the [=presentation timestamp=] of the earliest frame, in [=presentation
order=], that is dependent on this frame). If frames can be decoded out of
[=presentation order=], then the decode timestamp MUST be present in or derivable
from the byte stream. The user agent MUST run the [=append error=] algorithm if this
is not the case. If frames cannot be decoded out of [=presentation order=] and a
decode timestamp is not present in the byte stream, then the decode timestamp is
equal to the [=presentation timestamp=].
</p>
</dd>
<dt>
<dfn class="export">Initialization Segment</dfn>
</dt>
<dd>
<p>
A sequence of bytes that contain all of the initialization information required to
decode a sequence of [=media segments=]. This includes codec initialization data,
[=Track ID=] mappings for multiplexed segments, and timestamp offsets (e.g., edit
lists).
</p>
<p class="note">
The [=byte stream format specifications=] in the byte stream format registry
[[MSE-REGISTRY]] contain format specific examples.
</p>
</dd>
<dt>
<dfn class="export">Media Segment</dfn>
</dt>
<dd>
<p>
A sequence of bytes that contain packetized &amp; timestamped media data for a portion of
the <a def-id="media-timeline"></a>. Media segments are always associated with the
most recently appended [=initialization segment=].
</p>
<p class="note">
The [=byte stream format specifications=] in the byte stream format registry
[[MSE-REGISTRY]] contain format specific examples.
</p>
</dd>
<dt>
<dfn id="mediasource-object-url">MediaSource object URL</dfn>
</dt>
<dd>
<p>
A {{MediaSource}} object URL is a unique [=blob URL=] created by
{{URL/createObjectURL()}}. It is used to attach a {{MediaSource}} object to an
HTMLMediaElement.
</p>
<p>
These URLs are the same as [=blob URLs=], except that anything in the definition of
that feature that refers to {{File}} and {{Blob}} objects is hereby extended to also
apply to {{MediaSource}} objects.
</p>
<p>
The [=origin=] of the MediaSource object URL is the [=relevant settings object=] of
[=this=] during the call to {{URL/createObjectURL()}}.
</p>
<p class="note">
For example, the [=origin=] of the MediaSource object URL affects the way that the
media element is <a href=
"https://html.spec.whatwg.org/multipage/canvas.html#security-with-canvas-elements">consumed
by canvas</a>.
</p>
</dd>
<dt>
<dfn id="parent-media-source">Parent Media Source</dfn>
</dt>
<dd>
<p>
The parent media source of a {{SourceBuffer}} object is the {{MediaSource}} object
that created it.
</p>
</dd>
<dt>
<dfn id="presentation-start-time">Presentation Start Time</dfn>
</dt>
<dd>
<p>
The presentation start time is the earliest time point in the presentation and
specifies the initial <a def-id="videoref" name="current-playback-position" id=
"current-playback-position">playback position</a> and <a def-id="videoref" name=
"earliest-possible-position" id="earliest-possible-position">earliest possible
position</a>. All presentations created using this specification have a presentation
start time of 0.
</p>
<p class="note">
For the purposes of determining if {{HTMLMediaElement}}'s
{{HTMLMediaElement/buffered}} contains a {{TimeRanges}} that includes the current
playback position, implementations MAY choose to allow a current playback position at
or after [=presentation start time=] and before the first {{TimeRanges}} to play the
first {{TimeRanges}} if that {{TimeRanges}} starts within a reasonably short time,
like 1 second, after [=presentation start time=]. This allowance accommodates the
reality that muxed streams commonly do not begin all tracks precisely at
[=presentation start time=]. Implementations MUST report the actual buffered range,
regardless of this allowance.
</p>
</dd>
<dt>
<dfn id="presentation-interval">Presentation Interval</dfn>
</dt>
<dd>
<p>
The presentation interval of a [=coded frame=] is the time interval from its
[=presentation timestamp=] to the [=presentation timestamp=] plus the [=coded frame
duration|coded frame's duration=]. For example, if a coded frame has a presentation
timestamp of 10 seconds and a [=coded frame duration=] of 100 milliseconds, then the
presentation interval would be [10-10.1). Note that the start of the range is
inclusive, but the end of the range is exclusive.
</p>
</dd>
<dt>
<dfn id="presentation-order">Presentation Order</dfn>
</dt>
<dd>
<p>
The order that [=coded frames=] are rendered in the presentation. The presentation
order is achieved by ordering [=coded frames=] in monotonically increasing order by
their [=presentation timestamps=].
</p>
</dd>
<dt>
<dfn id="presentation-timestamp" data-export="">Presentation Timestamp</dfn>
</dt>
<dd>
<p>
A reference to a specific time in the presentation. The presentation timestamp in a
[=coded frame=] indicates when the frame SHOULD be rendered.
</p>
</dd>
<dt>
<dfn id="random-access-point" data-export="">Random Access Point</dfn>
</dt>
<dd>
<p>
A position in a [=media segment=] where decoding and continuous playback can begin
without relying on any previous data in the segment. For video this tends to be the
location of I-frames. In the case of audio, most audio frames can be treated as a
random access point. Since video tracks tend to have a more sparse distribution of
random access points, the location of these points is usually considered the random
access points for multiplexed streams.
</p>
</dd>
<dt>
<dfn>SourceBuffer byte stream format specification</dfn>
</dt>
<dd>
<p>
The specific [=byte stream format specification=] that describes the format of the
byte stream accepted by a {{SourceBuffer}} instance. The [=byte stream format
specification=], for a {{SourceBuffer}} object, is initially selected based on the
|type:DOMString| passed to the {{MediaSource/addSourceBuffer()}} call that created
the object, and can be updated by {{SourceBuffer/changeType()}} calls on the object.
</p>
</dd>
<dt>
<dfn>`SourceBuffer` configuration</dfn>
</dt>
<dd>
<p>
A specific set of tracks distributed across one or more {{SourceBuffer}} objects
owned by a single {{MediaSource}} instance.
</p>
<p>
Implementations MUST support at least 1 {{MediaSource}} object with the following
configurations:
</p>
<ul>
<li>A single SourceBuffer with 1 audio track and/or 1 video track.
</li>
<li>Two SourceBuffers with one handling a single audio track and the other handling a
single video track.
</li>
</ul>
<p>
MediaSource objects MUST support each of the configurations above, but they are only
required to support one configuration at a time. Supporting multiple configurations
at once or additional configurations is a quality of implementation issue.
</p>
</dd>
<dt>
<dfn>Track Description</dfn>
</dt>
<dd>
<p>
A byte stream format specific structure that provides the [=Track ID=], codec
configuration, and other metadata for a single track. Each track description inside a
single [=initialization segment=] has a unique [=Track ID=]. The user agent MUST run
the [=append error=] algorithm if the [=Track ID=] is not unique within the
[=initialization segment=].
</p>
</dd>
<dt>
<dfn>Track ID</dfn>
</dt>
<dd>
<p>
A Track ID is a byte stream format specific identifier that marks sections of the
byte stream as being part of a specific track. The Track ID in a [=track
description=] identifies which sections of a [=media segment=] belong to that track.
</p>
</dd>
</dl>
</section>
<section id="mediasource" data-dfn-for="MediaSource">
<h2>
<dfn>MediaSource</dfn> interface
</h2>
<p>
The {{MediaSource}} interface represents a source of media data for an
{{HTMLMediaElement}}. It keeps track of the {{MediaSource/readyState}} for this source as
well as a list of <a>SourceBuffer</a> objects that can be used to add media data to the
presentation. MediaSource objects are created by the web application and then attached to
an HTMLMediaElement. The application uses the <a>SourceBuffer</a> objects in
{{MediaSource/sourceBuffers}} to add media data to this source. The HTMLMediaElement
fetches this media data from the <a>MediaSource</a> object when it is needed during
playback.
</p>
<p>
Each {{MediaSource}} object has a <dfn data-dfn-for="MediaSource">[[\live seekable
range]]</dfn> internal slot that stores a <a def-id="normalized-timeranges-object"></a>. It
is initialized to an empty {{TimeRanges}} object when the {{MediaSource}} object is
created, is maintained by {{MediaSource/setLiveSeekableRange()}} and
{{MediaSource/clearLiveSeekableRange()}}, and is used in [[[#htmlmediaelement-extensions]]]
to modify {{HTMLMediaElement}}'s {{HTMLMediaElement/seekable}} behavior.
</p>
<p>
Each {{MediaSource}} object has a <dfn data-dfn-for="MediaSource">[[\has ever been
attached]]</dfn> internal slot that stores a {{boolean}}. It is initialized to false when
the {{MediaSource}} object is created, and is set true in the extended
{{HTMLMediaElement}}'s [=resource fetch algorithm=] as described in the [=attaching to a
media element=] algorithm. The extended [=resource fetch algorithm=] uses this internal
slot to conditionally fail attachment of a {{MediaSource}} using a {{MediaSourceHandle}}
set on a {{HTMLMediaElement}}'s {{HTMLMediaElement/srcObject}} attribute.
</p>
<pre class="idl">
enum ReadyState {
"closed",
"open",
"ended",
};
</pre>
<dl data-dfn-for="ReadyState">
<dt>
<dfn>closed</dfn>
</dt>
<dd>
Indicates the source is not currently attached to a media element.
</dd>
<dt>
<dfn>open</dfn>
</dt>
<dd>
The source has been opened by a media element and is ready for data to be appended to the
{{SourceBuffer}} objects in {{MediaSource}}'s {{MediaSource/sourceBuffers}}.
</dd>
<dt>
<dfn>ended</dfn>
</dt>
<dd>
The source is still attached to a media element, but {{MediaSource}}'s
{{MediaSource/endOfStream()}} has been called.
</dd>
</dl>
<aside class="issue" data-number="276">
<p>
Consider adding a "`closing`" {{MediaSource/ReadyState}} to indicate the source is in the
process of being concurrently detached from a media element. This would be useful for
some implementations of {{MediaSource}} and {{SourceBuffer}} in
{{DedicatedWorkerGlobalScope}}.
</p>
</aside>
<pre class="idl">
enum EndOfStreamError {
"network",
"decode",
};
</pre>
<dl data-dfn-for="EndOfStreamError">
<dt>
<dfn>network</dfn>
</dt>
<dd>
<p>
Terminates playback and signals that a network error has occurred.
</p>
<p class="note">
JavaScript applications SHOULD use this status code to terminate playback with a
network error. For example, if a network error occurs while fetching media data.
</p>
</dd>
<dt>
<dfn>decode</dfn>
</dt>
<dd>
<p>
Terminates playback and signals that a decoding error has occurred.
</p>
<p class="note">
JavaScript applications SHOULD use this status code to terminate playback with a decode
error. For example, if a parsing error occurs while processing out-of-band media data.
</p>
</dd>
</dl>
<pre class="idl">
[Exposed=(Window,DedicatedWorker)]
interface MediaSource : EventTarget {
constructor();
[SameObject, Exposed=DedicatedWorker]
readonly attribute MediaSourceHandle handle;
readonly attribute SourceBufferList sourceBuffers;
readonly attribute SourceBufferList activeSourceBuffers;
readonly attribute ReadyState readyState;
attribute unrestricted double duration;
attribute EventHandler onsourceopen;
attribute EventHandler onsourceended;
attribute EventHandler onsourceclose;
static readonly attribute boolean canConstructInDedicatedWorker;
SourceBuffer addSourceBuffer(DOMString type);
undefined removeSourceBuffer(SourceBuffer sourceBuffer);
undefined endOfStream(optional EndOfStreamError error);
undefined setLiveSeekableRange(double start, double end);
undefined clearLiveSeekableRange();
static boolean isTypeSupported(DOMString type);
};
</pre>
<h3>
<dfn>handle</dfn> attribute
</h3>
<p>
Contains a handle useful for attachment of a dedicated worker {{MediaSource}} object to an
{{HTMLMediaElement}} via {{HTMLMediaElement/srcObject}}. The handle remains the same object
for this {{MediaSource}} object across accesses of this attribute, but it is distinct for
each {{MediaSource}} object.
</p>
<p class="note">
This specification may eventually enable visibility of this attribute on {{MediaSource}}
objects on the main Window context. If so, specification care will be necessary to prevent
potential backwards incompatible changes, such as could happen if exceptions were thrown on
accesses to this attribute.
</p>
<p>
On getting, run the following steps:
</p>
<ol>
<li>If the handle for this {{MediaSource}} object has not yet been created, then run the
following steps:
<ol>
<li>Let |created handle:MediaSourceHandle| be the result of creating a new
{{MediaSourceHandle}} object and associated resources, linked internally to this
{{MediaSource}}.
</li>
<li>Update the attribute to be |created handle|.
</li>
</ol>
</li>
<li>Return the {{MediaSourceHandle}} object that is this attribute's value.
</li>
</ol>
<h3>
<dfn>sourceBuffers</dfn> attribute
</h3>
<p>
Contains the list of {{SourceBuffer}} objects associated with this {{MediaSource}}. When
{{MediaSource}}'s {{MediaSource/readyState}} equals {{ReadyState/""closed""}} this list
will be empty. Once {{MediaSource/readyState}} transitions to {{ReadyState/""open""}}
SourceBuffer objects can be added to this list by using {{MediaSource/addSourceBuffer()}}.
</p>
<h3>
<dfn>activeSourceBuffers</dfn> attribute
</h3>
<p>
Contains the subset of {{MediaSource/sourceBuffers}} that are providing the
{{VideoTrack/selected}} video track, the {{AudioTrack/enabled}} audio track(s), and the
<a def-id="texttrackmode-showing"></a> or <a def-id="texttrackmode-hidden"></a> text
track(s).
</p>
<p>
{{SourceBuffer}} objects in this list MUST appear in the same order as they appear in the
{{MediaSource/sourceBuffers}} attribute; e.g., if only sourceBuffers[0] and
sourceBuffers[3] are in {{MediaSource/activeSourceBuffers}}, then activeSourceBuffers[0]
MUST equal sourceBuffers[0] and activeSourceBuffers[1] MUST equal sourceBuffers[3].
</p>
<p class="note">
Section [[[#active-source-buffer-changes]]] describes how this attribute gets updated.
</p>
<h3>
<dfn>readyState</dfn> attribute
</h3>
<p>
Indicates the current state of the <a>MediaSource</a> object. When the <a>MediaSource</a>
is created {{MediaSource/readyState}} MUST be set to {{ReadyState/""closed""}}.
</p>
<h3>
<dfn>duration</dfn> attribute
</h3>
<p>
Allows the web application to set the presentation duration. The duration is initially set
to NaN when the <a>MediaSource</a> object is created.
</p>
<p>
On getting, run the following steps:
</p>
<ol>
<li>If the {{MediaSource/readyState}} attribute is {{ReadyState/""closed""}} then return
NaN and abort these steps.
</li>
<li>Return the current value of the attribute.
</li>
</ol>
<p>
On setting, run the following steps:
</p>
<ol>
<li>If the value being set is negative or NaN then throw a {{TypeError}} exception and
abort these steps.
</li>
<li>If the {{MediaSource/readyState}} attribute is not {{ReadyState/""open""}} then throw
an {{InvalidStateError}} exception and abort these steps.
</li>
<li>If the {{SourceBuffer/updating}} attribute equals true on any {{SourceBuffer}} in
{{MediaSource/sourceBuffers}}, then throw an {{InvalidStateError}} exception and abort
these steps.
</li>
<li>Run the [=duration change=] algorithm with |new duration:unrestricted double| set to
the value being assigned to this attribute.
<p class="note">
The [=duration change=] algorithm will adjust |new duration| higher if there is any
currently buffered coded frame with a higher end time.
</p>
<p class="note">
{{SourceBuffer/appendBuffer()}} and {{MediaSource/endOfStream()}} can update the
duration under certain circumstances.
</p>
</li>
</ol>
<h3>
<dfn>canConstructInDedicatedWorker</dfn> attribute
</h3>
<p>
Returns true.
</p>
<p class="note">
This attribute enables main thread and dedicated worker feature detection of support for
creating and using a {{MediaSource}} object in a dedicated worker, and mitigates the need
for higher latency detection polyfills like attempting creation of a {{MediaSource}} object
from a dedicated worker, especially if the feature is not supported.
</p>
<h3>
<dfn>addSourceBuffer()</dfn> method
</h3>
<p>
Adds a new <a>SourceBuffer</a> to {{MediaSource/sourceBuffers}}.
</p>
<ol class="algorithm">
<li>If |type:DOMString| is an empty string then throw a {{TypeError}} exception and abort
these steps.
</li>
<li>If |type| contains a MIME type that is not supported or contains a MIME type that is
not supported with the types specified for the other {{SourceBuffer}} objects in
{{MediaSource/sourceBuffers}}, then throw a {{NotSupportedError}} exception and abort these
steps.
</li>
<li>If the user agent can't handle any more SourceBuffer objects or if creating a
SourceBuffer based on |type| would result in an unsupported [=SourceBuffer configuration=],
then throw a {{QuotaExceededError}} exception and abort these steps.
<p class="note">
For example, a user agent MAY throw a {{QuotaExceededError}} exception if the media
element has reached the {{HTMLMediaElement/HAVE_METADATA}} readyState. This can occur
if the user agent's media engine does not support adding more tracks during playback.
</p>
</li>
<li>If the {{MediaSource/readyState}} attribute is not in the {{ReadyState/"open"}} state
then throw an {{InvalidStateError}} exception and abort these steps.
</li>
<li>Let |buffer| be a new instance of a {{ManagedSourceBuffer}} if [=this=] is a
{{ManagedMediaSource}}, or a {{SourceBuffer}} otherwise, with their respective associated
resources.
</li>
<li>Set |buffer|'s {{SourceBuffer/[[generate timestamps flag]]}} to the value in the
"Generate Timestamps Flag" column of the [[[MSE-REGISTRY]]] entry that is associated with
|type|.
</li>
<li>If |buffer|'s {{SourceBuffer/[[generate timestamps flag]]}} is true, set |buffer|'s
{{SourceBuffer/mode}} to {{AppendMode/"sequence"}}. Otherwise, set |buffer|'s
{{SourceBuffer/mode}} to {{AppendMode/"segments"}}.
</li>
<li>[=List/Append=] |buffer| to [=this=]'s {{MediaSource/sourceBuffers}}.
</li>
<li>[=Queue a task=] to [=fire an event=] named {{addsourcebuffer}} at [=this=]'s
{{MediaSource/sourceBuffers}}.
</li>
<li>Return |buffer|.
</li>
</ol>
<h3>
<dfn>removeSourceBuffer()</dfn> method
</h3>
<p>
Removes a {{SourceBuffer}} from {{MediaSource/sourceBuffers}}.
</p>
<ol class="algorithm">
<li>If |sourceBuffer:SourceBuffer| specifies an object that is not in
{{MediaSource/sourceBuffers}} then throw a {{NotFoundError}} exception and abort these
steps.
</li>
<li>If the |sourceBuffer|.{{SourceBuffer/updating}} attribute equals true, then run the
following steps:
<ol>
<li>Abort the [=buffer append=] algorithm if it is running.
</li>
<li>Set the |sourceBuffer|.{{SourceBuffer/updating}} attribute to false.
</li>
<li>[=Queue a task=] to [=fire an event=] named {{abort}} at |sourceBuffer|.
</li>
<li>[=Queue a task=] to [=fire an event=] named {{updateend}} at |sourceBuffer|.
</li>
</ol>
</li>
<li>Let |SourceBuffer audioTracks list:AudioTrackList| equal the {{AudioTrackList}} object
returned by |sourceBuffer|.{{SourceBuffer/audioTracks}}.
</li>
<li>If the |SourceBuffer audioTracks list| is not empty, then run the following steps:
<ol>
<li>For each {{AudioTrack}} object in the |SourceBuffer audioTracks list|, run the
following steps:
<ol>
<li>Set the {{AudioTrack/sourceBuffer}} attribute on the {{AudioTrack}} object to
null.
</li>
<li>Remove the {{AudioTrack}} object from the |SourceBuffer audioTracks list|.
<p class="note">
This should trigger {{AudioTrackList}} [[HTML]] logic to [=queue a task=] to
[=fire an event=] named [=AudioTrackList/removetrack=] using {{TrackEvent}}
with the {{TrackEvent/track}} attribute initialized to the {{AudioTrack}}
object, at the |SourceBuffer audioTracks list|. If the {{AudioTrack/enabled}}
attribute on the {{AudioTrack}} object was true at the beginning of this
removal step, then this should also trigger {{AudioTrackList}} [[HTML]] logic
to [=queue a task=] to [=fire an event=] named [=AudioTrackList/change=] at the
|SourceBuffer audioTracks list|.
</p>
</li>
<li>Use the [=mirror if necessary=] algorithm to run the following steps in
{{Window}}, to remove the {{AudioTrack}} object (or instead, the {{Window}} mirror
of it if the {{MediaSource}} object was constructed in a
{{DedicatedWorkerGlobalScope}}) from the media element:
<ol>
<li>Let |HTMLMediaElement audioTracks list:AudioTrackList| equal the
{{AudioTrackList}} object returned by the {{HTMLMediaElement/audioTracks}}
attribute on the HTMLMediaElement.
</li>
<li>Remove the {{AudioTrack}} object from the |HTMLMediaElement audioTracks
list|.
<p class="note">
This should trigger {{AudioTrackList}} [[HTML]] logic to [=queue a task=]
to [=fire an event=] named [=AudioTrackList/removetrack=] using
{{TrackEvent}} with the {{TrackEvent/track}} attribute initialized to the
{{AudioTrack}} object, at the |HTMLMediaElement audioTracks list|. If the
{{AudioTrack/enabled}} attribute on the {{AudioTrack}} object was true at
the beginning of this removal step, then this should also trigger
{{AudioTrackList}} [[HTML]] logic to [=queue a task=] to [=fire an event=]
named [=AudioTrackList/change=] at the |HTMLMediaElement audioTracks list|.
</p>
</li>
</ol>
</li>
</ol>
</li>
</ol>
</li>
<li>Let |SourceBuffer videoTracks list:VideoTrackList| equal the {{VideoTrackList}} object
returned by |sourceBuffer|.{{SourceBuffer/videoTracks}}.
</li>
<li>If the |SourceBuffer videoTracks list| is not empty, then run the following steps:
<ol>
<li>For each {{VideoTrack}} object in the |SourceBuffer videoTracks list|, run the
following steps:
<ol>
<li>Set the {{VideoTrack/sourceBuffer}} attribute on the {{VideoTrack}} object to
null.
</li>
<li>Remove the {{VideoTrack}} object from the |SourceBuffer videoTracks list|.
<p class="note">
This should trigger {{VideoTrackList}} [[HTML]] logic to [=queue a task=] to
[=fire an event=] named [=VideoTrackList/removetrack=] using {{TrackEvent}}
with the {{TrackEvent/track}} attribute initialized to the {{VideoTrack}}
object, at the |SourceBuffer videoTracks list|. If the {{VideoTrack/selected}}
attribute on the {{VideoTrack}} object was true at the beginning of this
removal step, then this should also trigger {{VideoTrackList}} [[HTML]] logic
to [=queue a task=] to [=fire an event=] named [=VideoTrackList/change=] at the
|SourceBuffer videoTracks list|.
</p>
</li>
<li>Use the [=mirror if necessary=] algorithm to run the following steps in
{{Window}}, to remove the {{VideoTrack}} object (or instead, the {{Window}} mirror
of it if the {{MediaSource}} object was constructed in a
{{DedicatedWorkerGlobalScope}}) from the media element:
<ol>
<li>Let |HTMLMediaElement videoTracks list:VideoTrackList| equal the
{{VideoTrackList}} object returned by the {{HTMLMediaElement/videoTracks}}
attribute on the HTMLMediaElement.
</li>
<li>Remove the {{VideoTrack}} object from the |HTMLMediaElement videoTracks
list|.
<p class="note">
This should trigger {{VideoTrackList}} [[HTML]] logic to [=queue a task=]
to [=fire an event=] named [=VideoTrackList/removetrack=] using
{{TrackEvent}} with the {{TrackEvent/track}} attribute initialized to the
{{VideoTrack}} object, at the |HTMLMediaElement videoTracks list|. If the
{{VideoTrack/selected}} attribute on the {{VideoTrack}} object was true at
the beginning of this removal step, then this should also trigger
{{VideoTrackList}} [[HTML]] logic to [=queue a task=] to [=fire an event=]
named [=VideoTrackList/change=] at the |HTMLMediaElement videoTracks list|.
</p>
</li>
</ol>
</li>
</ol>
</li>
</ol>
</li>
<li>Let |SourceBuffer textTracks list:TextTrackList| equal the {{TextTrackList}} object
returned by |sourceBuffer|.{{SourceBuffer/textTracks}}.
</li>
<li>If the |SourceBuffer textTracks list| is not empty, then run the following steps:
<ol>
<li>For each {{TextTrack}} object in the |SourceBuffer textTracks list|, run the
following steps:
<ol>
<li>Set the {{TextTrack/sourceBuffer}} attribute on the {{TextTrack}} object to
null.
</li>
<li>Remove the {{TextTrack}} object from the |SourceBuffer textTracks list|.
<p class="note">
This should trigger {{TextTrackList}} [[HTML]] logic to [=queue a task=] to
[=fire an event=] named [=TextTrackList/removetrack=] using {{TrackEvent}} with
the {{TrackEvent/track}} attribute initialized to the {{TextTrack}} object, at
the |SourceBuffer textTracks list|. If the {{TextTrack/mode}} attribute on the
{{TextTrack}} object was <a def-id="texttrackmode-showing"></a> or <a def-id=
"texttrackmode-hidden"></a> at the beginning of this removal step, then this
should also trigger {{TextTrackList}} [[HTML]] logic to [=queue a task=] to
[=fire an event=] named [=TextTrackList/change=] at the |SourceBuffer
textTracks list|.
</p>
</li>
<li>Use the [=mirror if necessary=] algorithm to run the following steps in
{{Window}}, to remove the {{TextTrack}} object (or instead, the {{Window}} mirror
of it if the {{MediaSource}} object was constructed in a
{{DedicatedWorkerGlobalScope}}) from the media element:
<ol>
<li>Let |HTMLMediaElement textTracks list:TextTrackList| equal the
{{TextTrackList}} object returned by the {{HTMLMediaElement/textTracks}}
attribute on the HTMLMediaElement.
</li>
<li>Remove the {{TextTrack}} object from the |HTMLMediaElement textTracks
list|.
<p class="note">
This should trigger {{TextTrackList}} [[HTML]] logic to [=queue a task=] to
[=fire an event=] named [=TextTrackList/removetrack=] using {{TrackEvent}}
with the {{TrackEvent/track}} attribute initialized to the {{TextTrack}}
object, at the |HTMLMediaElement textTracks list|. If the
{{TextTrack/mode}} attribute on the {{TextTrack}} object was <a def-id=
"texttrackmode-showing"></a> or <a def-id="texttrackmode-hidden"></a> at
the beginning of this removal step, then this should also trigger
{{TextTrackList}} [[HTML]] logic to [=queue a task=] to [=fire an event=]
named [=TextTrackList/change=] at the |HTMLMediaElement textTracks list|.
</p>
</li>
</ol>
</li>
</ol>
</li>
</ol>
</li>
<li>If |sourceBuffer| is in {{MediaSource/activeSourceBuffers}}, then remove |sourceBuffer|
from {{MediaSource/activeSourceBuffers}} and [=queue a task=] to [=fire an event=] named
{{removesourcebuffer}} at the <a>SourceBufferList</a> returned by
{{MediaSource/activeSourceBuffers}}.
</li>
<li>Remove |sourceBuffer| from {{MediaSource/sourceBuffers}} and [=queue a task=] to [=fire
an event=] named {{removesourcebuffer}} at the <a>SourceBufferList</a> returned by
{{MediaSource/sourceBuffers}}.
</li>
<li>Destroy all resources for |sourceBuffer|.
</li>
</ol>
<h3>
<dfn>endOfStream()</dfn> method
</h3>
<p>
Signals the end of the stream.
</p>
<ol class="algorithm">
<li>If the {{MediaSource/readyState}} attribute is not in the {{ReadyState/"open"}} state
then throw an {{InvalidStateError}} exception and abort these steps.
</li>
<li>If the {{SourceBuffer/updating}} attribute equals true on any {{SourceBuffer}} in
{{MediaSource/sourceBuffers}}, then throw an {{InvalidStateError}} exception and abort
these steps.
</li>
<li>Run the [=end of stream=] algorithm with the error parameter set to
|error:EndOfStreamError|.
</li>
</ol>
<h3>
<dfn>setLiveSeekableRange()</dfn> method
</h3>
<p>
Updates {{MediaSource/[[live seekable range]]}} that is used in section
[[[#htmlmediaelement-extensions]]] to modify {{HTMLMediaElement}}'s
{{HTMLMediaElement/seekable}} behavior.
</p>
<ol class="method-algorithm">
<li>If the {{MediaSource/readyState}} attribute is not {{ReadyState/"open"}} then throw
an {{InvalidStateError}} exception and abort these steps.
</li>
<li>If |start:double| is negative or greater than |end:double|, then throw a {{TypeError}}
exception and abort these steps.
</li>
<li>Set {{MediaSource/[[live seekable range]]}} to be a new <a def-id=
"normalized-timeranges-object"></a> containing a single range whose start position is
|start| and end position is |end|.
</li>
</ol>
<h3>
<dfn>clearLiveSeekableRange()</dfn> method
</h3>
<p>
Updates {{MediaSource/[[live seekable range]]}} that is used in section
[[[#htmlmediaelement-extensions]]] to modify {{HTMLMediaElement}}'s
{{HTMLMediaElement/seekable}} behavior.
</p>
<ol class="method-algorithm">
<li>If the {{MediaSource/readyState}} attribute is not {{ReadyState/"open"}} then throw
an {{InvalidStateError}} exception and abort these steps.
</li>
<li>If {{MediaSource/[[live seekable range]]}} contains a range, then set
{{MediaSource/[[live seekable range]]}} to be a new empty {{TimeRanges}} object.
</li>
</ol>
<h3>
<dfn>isTypeSupported()</dfn> method
</h3>
<p>
Check to see whether the <a>MediaSource</a> is capable of creating <a>SourceBuffer</a>
objects for the specified MIME type.
</p>
<ol class="method-algorithm">
<li>If |type:DOMString| is an empty string, then return false.
</li>