diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..5be2b41
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,5 @@
+.DS_Store
+Thumbs.db
+.idea/
+cover*
+test
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..93968bf
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,14 @@
+language: go
+go:
+ - 1.x
+ - tip
+install:
+ - go get -t ./...
+ - go get golang.org/x/tools/cmd/cover
+ - go get github.com/mattn/goveralls
+matrix:
+ allow_failures:
+ - go: tip
+script:
+ - go test -race -v -coverprofile=coverage.out
+ - $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d9954a4
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Quentin Renard
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..7b15a63
--- /dev/null
+++ b/README.md
@@ -0,0 +1,94 @@
+[![GoReportCard](http://goreportcard.com/badge/github.com/asticode/go-astits)](http://goreportcard.com/report/github.com/asticode/go-astits)
+[![GoDoc](https://godoc.org/github.com/asticode/go-astits?status.svg)](https://godoc.org/github.com/asticode/go-astits)
+[![Travis](https://travis-ci.org/asticode/go-astits.svg?branch=master)](https://travis-ci.org/asticode/go-astits#)
+[![Coveralls](https://coveralls.io/repos/github/asticode/go-astits/badge.svg?branch=master)](https://coveralls.io/github/asticode/go-astits)
+
+This is a Go library to natively demux and parse MPEG Transport Streams (.ts).
+
+# Installation
+
+To install the library use the following:
+
+ go get -u github.com/asticode/go-astits/...
+
+# Before looking at the code...
+
+The transport stream is made of packets.
+Each packet has a header, an optional adaptation field and a payload.
+Several payloads can be appended and parsed as a data.
+
+```
+ TRANSPORT STREAM
+ +--------------------------------------------------------------------------------------------------+
+ | |
+
+ PACKET PACKET
+ +----------------------------------------------+----------------------------------------------+----
+ | | |
+
+ +--------+---------------------------+---------+--------+---------------------------+---------+
+ | HEADER | OPTIONAL ADAPTATION FIELD | PAYLOAD | HEADER | OPTIONAL ADAPTATION FIELD | PAYLOAD | ...
+ +--------+---------------------------+---------+--------+---------------------------+---------+
+
+ | | | |
+ +---------+ +---------+
+ | |
+ +----------------------------------------------+
+ DATA
+```
+
+# Using the library in your code
+
+WARNING: the code below doesn't handle errors for readability purposes. However you SHOULD!
+
+```go
+// Create a cancellable context in case you want to stop reading packets/data any time you want
+ctx, cancel := context.WithCancel(context.Background())
+
+// Handle SIGTERM signal
+ch := make(chan os.Signal, 1)
+signal.Notify(ch, syscall.SIGTERM)
+go func() {
+ for s := range ch {
+ cancel()
+ return
+ }
+}()
+
+// Open your file or initialize any kind of io.Reader
+f, _ := os.Open("/path/to/file.ts")
+defer f.Close()
+
+// Create the demuxer
+dmx := astits.New(ctx, f)
+for {
+ // Get the next data
+ d, _ := dmx.NextData()
+
+ // Data is a PMT data
+ if d.PMT != nil {
+ // Loop through elementary streams
+ for _, es := range d.PMT.ElementaryStreams {
+ fmt.Printf("Stream detected: %d\n", es.ElementaryPID)
+ }
+ return
+ }
+}
+```
+
+# Features and roadmap
+
+- [x] Parse PES packets
+- [x] Parse PAT packets
+- [x] Parse PMT packets
+- [x] Parse EIT packets
+- [x] Parse NIT packets
+- [x] Parse SDT packets
+- [x] Parse TOT packets
+- [ ] Parse BAT packets
+- [ ] Parse DIT packets
+- [ ] Parse RST packets
+- [ ] Parse SIT packets
+- [ ] Parse ST packets
+- [ ] Parse TDT packets
+- [ ] Parse TSDT packets
diff --git a/astits/main.go b/astits/main.go
new file mode 100644
index 0000000..c956465
--- /dev/null
+++ b/astits/main.go
@@ -0,0 +1,295 @@
+package main
+
+import (
+ "context"
+ "encoding/json"
+ "flag"
+ "fmt"
+ "io"
+ "net"
+ "net/url"
+ "os"
+ "os/signal"
+ "strconv"
+ "strings"
+ "syscall"
+
+ "github.com/asticode/go-astilog"
+ "github.com/asticode/go-astits"
+ "github.com/pkg/errors"
+)
+
+// Flags
+var (
+ ctx, cancel = context.WithCancel(context.Background())
+ format = flag.String("f", "", "the format")
+ inputPath = flag.String("i", "", "the input path")
+)
+
+func main() {
+ // Init
+ flag.Parse()
+ astilog.FlagInit()
+
+ // Handle signals
+ handleSignals()
+
+ // Build the reader
+ var r io.Reader
+ var err error
+ if r, err = buildReader(); err != nil {
+ astilog.Fatal(errors.Wrap(err, "astits: parsing input failed"))
+ }
+
+ // Make sure the reader is closed properly
+ if c, ok := r.(io.Closer); ok {
+ defer c.Close()
+ }
+
+ // Create the demuxer
+ var dmx = astits.New(ctx, r)
+
+ // Fetch the programs
+ var pgms []*Program
+ if pgms, err = programs(dmx); err != nil {
+ astilog.Fatal(errors.Wrap(err, "astits: fetching programs failed"))
+ }
+
+ // Print
+ switch *format {
+ case "json":
+ var e = json.NewEncoder(os.Stdout)
+ e.SetIndent("", " ")
+ if err = e.Encode(pgms); err != nil {
+ astilog.Fatal(errors.Wrap(err, "astits: json encoding to stdout failed"))
+ }
+ default:
+ fmt.Println("Programs are:")
+ for _, pgm := range pgms {
+ fmt.Printf("* %s\n", pgm)
+ }
+ }
+}
+
+func handleSignals() {
+	ch := make(chan os.Signal, 1)
+	signal.Notify(ch)
+	go func() {
+		for s := range ch {
+			astilog.Debugf("Received signal %s", s)
+			switch s {
+			case syscall.SIGABRT, syscall.SIGINT, syscall.SIGQUIT, syscall.SIGTERM:
+				cancel()
+				return
+			}
+		}
+	}()
+}
+
+func buildReader() (r io.Reader, err error) {
+ // Validate input
+ if len(*inputPath) <= 0 {
+ err = errors.New("Use -i to indicate an input path")
+ return
+ }
+
+ // Parse input
+ var u *url.URL
+ if u, err = url.Parse(*inputPath); err != nil {
+ err = errors.Wrap(err, "astits: parsing input path failed")
+ return
+ }
+
+ // Switch on scheme
+ switch u.Scheme {
+ case "udp":
+ // Resolve addr
+ var addr *net.UDPAddr
+ if addr, err = net.ResolveUDPAddr("udp", u.Host); err != nil {
+ err = errors.Wrapf(err, "astits: resolving udp addr %s failed", u.Host)
+ return
+ }
+
+ // Listen to multicast UDP
+ var c *net.UDPConn
+ if c, err = net.ListenMulticastUDP("udp", nil, addr); err != nil {
+ err = errors.Wrapf(err, "astits: listening on multicast udp addr %s failed", u.Host)
+ return
+ }
+ c.SetReadBuffer(1000)
+ r = c
+ default:
+ // Open file
+ var f *os.File
+ if f, err = os.Open(*inputPath); err != nil {
+ err = errors.Wrapf(err, "astits: opening %s failed", *inputPath)
+ return
+ }
+ r = f
+ }
+ return
+}
+
+func programs(dmx *astits.Demuxer) (o []*Program, err error) {
+	// Loop through data
+	var d *astits.Data
+	var pgmsToProcess = make(map[uint16]bool)
+	var pgms = make(map[uint16]*Program)
+	astilog.Debug("Fetching data...")
+	for {
+		// Get next data
+		if d, err = dmx.NextData(); err != nil {
+			if err == astits.ErrNoMorePackets {
+				var pgmsNotProcessed []string
+				for n := range pgmsToProcess {
+					pgmsNotProcessed = append(pgmsNotProcessed, strconv.Itoa(int(n)))
+				}
+				err = fmt.Errorf("astits: no PMT found for program(s) %s", strings.Join(pgmsNotProcessed, ", "))
+			} else {
+				err = errors.Wrap(err, "astits: getting next packet failed")
+			}
+			return
+		}
+
+		// Check data
+		if d.PAT != nil && len(pgms) == 0 {
+			// Build programs list
+			for _, p := range d.PAT.Programs {
+				// Program number 0 is reserved to NIT
+				if p.ProgramNumber > 0 {
+					pgmsToProcess[p.ProgramNumber] = true
+					pgms[p.ProgramNumber] = newProgram(p.ProgramNumber, p.ProgramMapID)
+				}
+			}
+		} else if d.PMT != nil {
+			// Program has already been processed
+			if _, ok := pgmsToProcess[d.PMT.ProgramNumber]; !ok {
+				continue
+			}
+
+			// Update program
+			for _, dsc := range d.PMT.ProgramDescriptors {
+				pgms[d.PMT.ProgramNumber].Descriptors = append(pgms[d.PMT.ProgramNumber].Descriptors, descriptorToString(dsc))
+			}
+
+			// Add elementary streams
+			for _, es := range d.PMT.ElementaryStreams {
+				var s = newStream(es.ElementaryPID, es.StreamType)
+				for _, d := range es.ElementaryStreamDescriptors {
+					s.Descriptors = append(s.Descriptors, descriptorToString(d))
+				}
+				pgms[d.PMT.ProgramNumber].Streams = append(pgms[d.PMT.ProgramNumber].Streams, s)
+			}
+
+			// Update list of programs to process
+			delete(pgmsToProcess, d.PMT.ProgramNumber)
+
+			// All PMTs have been processed
+			if len(pgmsToProcess) == 0 {
+				break
+			}
+		}
+	}
+
+	// Build final data
+	for _, p := range pgms {
+		o = append(o, p)
+	}
+	return
+}
+
+// Program represents a program
+type Program struct {
+ Descriptors []string `json:"descriptors,omitempty"`
+ ID uint16 `json:"id,omitempty"`
+ MapID uint16 `json:"map_id,omitempty"`
+ Streams []*Stream `json:"streams,omitempty"`
+}
+
+// Stream represents a stream
+type Stream struct {
+ Descriptors []string `json:"descriptors,omitempty"`
+ ID uint16 `json:"id,omitempty"`
+ Type uint8 `json:"type,omitempty"`
+}
+
+func newProgram(id, mapID uint16) *Program {
+ return &Program{
+ ID: id,
+ MapID: mapID,
+ }
+}
+
+func newStream(id uint16, _type uint8) *Stream {
+ return &Stream{
+ ID: id,
+ Type: _type,
+ }
+}
+
+// String implements the Stringer interface
+func (p Program) String() (o string) {
+ o = fmt.Sprintf("[%d] - Map ID: %d", p.ID, p.MapID)
+ for _, d := range p.Descriptors {
+ o += fmt.Sprintf(" - %s", d)
+ }
+ for _, s := range p.Streams {
+ o += fmt.Sprintf("\n * %s", s.String())
+ }
+ return
+}
+
+// String implements the Stringer interface
+func (s Stream) String() (o string) {
+ // Get type
+ var t = fmt.Sprintf("unlisted stream type %d", s.Type)
+ switch s.Type {
+ case astits.StreamTypeLowerBitrateVideo:
+ t = "Lower bitrate video"
+ case astits.StreamTypeMPEG1Audio:
+ t = "MPEG-1 audio"
+ case astits.StreamTypeMPEG2HalvedSampleRateAudio:
+ t = "MPEG-2 halved sample rate audio"
+ case astits.StreamTypeMPEG2PacketizedData:
+ t = "DVB subtitles/VBI or AC-3"
+ }
+
+ // Output
+ o = fmt.Sprintf("[%d] - Type: %s", s.ID, t)
+ for _, d := range s.Descriptors {
+ o += fmt.Sprintf(" - %s", d)
+ }
+ return
+}
+
+func descriptorToString(d *astits.Descriptor) string {
+ switch d.Tag {
+ case astits.DescriptorTagAC3:
+ return fmt.Sprintf("AC3 asvc: %d | bsid: %d | component type: %d | mainid: %d | info: %s", d.AC3.ASVC, d.AC3.BSID, d.AC3.ComponentType, d.AC3.MainID, d.AC3.AdditionalInfo)
+ case astits.DescriptorTagISO639LanguageAndAudioType:
+ return fmt.Sprintf("Language %s and audio type %d", d.ISO639LanguageAndAudioType.Language, d.ISO639LanguageAndAudioType.Type)
+ case astits.DescriptorTagMaximumBitrate:
+ return fmt.Sprintf("Maximum bitrate: %d", d.MaximumBitrate.Bitrate)
+ case astits.DescriptorTagNetworkName:
+ return fmt.Sprintf("Network name: %s", d.NetworkName.Name)
+ case astits.DescriptorTagService:
+ return fmt.Sprintf("Service %s from provider %s", d.Service.Name, d.Service.Provider)
+ case astits.DescriptorTagShortEvent:
+ return fmt.Sprintf("Short event %s for language %s with text %s", d.ShortEvent.EventName, d.ShortEvent.Language, d.ShortEvent.Text)
+ case astits.DescriptorTagStreamIdentifier:
+ return fmt.Sprintf("Stream identifier component tag: %d", d.StreamIdentifier.ComponentTag)
+ case astits.DescriptorTagSubtitling:
+ var os []string
+ for _, i := range d.Subtitling.Items {
+ os = append(os, fmt.Sprintf("Subtitling composition page %d and ancillary page %d: %s", i.CompositionPageID, i.AncillaryPageID, i.Language))
+ }
+ return strings.Join(os, " - ")
+ case astits.DescriptorTagTeletext:
+ var os []string
+ for _, t := range d.Teletext.Items {
+ os = append(os, fmt.Sprintf("Teletext page %01d%02d: %s", t.Magazine, t.Page, t.Language))
+ }
+ return strings.Join(os, " - ")
+ }
+ return fmt.Sprintf("unlisted descriptor tag 0x%x", d.Tag)
+}
diff --git a/clock_reference.go b/clock_reference.go
new file mode 100644
index 0000000..9d31c67
--- /dev/null
+++ b/clock_reference.go
@@ -0,0 +1,29 @@
+package astits
+
+import (
+ "time"
+)
+
+// ClockReference represents a clock reference
+// Base is based on a 90 kHz clock and extension is based on a 27 MHz clock
+type ClockReference struct {
+ Base, Extension int
+}
+
+// newClockReference builds a new clock reference
+func newClockReference(base, extension int) *ClockReference {
+ return &ClockReference{
+ Base: base,
+ Extension: extension,
+ }
+}
+
+// Duration converts the clock reference into duration
+func (p ClockReference) Duration() time.Duration {
+ return time.Duration(p.Base*1e9/90000) + time.Duration(p.Extension*1e9/27000000)
+}
+
+// Time converts the clock reference into time
+func (p ClockReference) Time() time.Time {
+ return time.Unix(0, p.Duration().Nanoseconds())
+}
diff --git a/clock_reference_test.go b/clock_reference_test.go
new file mode 100644
index 0000000..3177510
--- /dev/null
+++ b/clock_reference_test.go
@@ -0,0 +1,15 @@
+package astits
+
+import (
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+)
+
+var clockReference = newClockReference(3271034319, 58)
+
+func TestClockReference(t *testing.T) {
+ assert.Equal(t, 36344825768814*time.Nanosecond, clockReference.Duration())
+ assert.Equal(t, int64(36344), clockReference.Time().Unix())
+}
diff --git a/data.go b/data.go
new file mode 100644
index 0000000..981546d
--- /dev/null
+++ b/data.go
@@ -0,0 +1,80 @@
+package astits
+
+import "github.com/pkg/errors"
+
+// PIDs
+const (
+ PIDPAT = 0x0 // Program Association Table (PAT) contains a directory listing of all Program Map Tables.
+ PIDCAT = 0x1 // Conditional Access Table (CAT) contains a directory listing of all ITU-T Rec. H.222 entitlement management message streams used by Program Map Tables.
+ PIDTSDT = 0x2 // Transport Stream Description Table (TSDT) contains descriptors related to the overall transport stream
+ PIDNull = 0x1fff // Null Packet (used for fixed bandwidth padding)
+)
+
+// Data represents a data
+type Data struct {
+ EIT *EITData
+ NIT *NITData
+ PAT *PATData
+ PES *PESData
+ PID uint16
+ PMT *PMTData
+ SDT *SDTData
+ TOT *TOTData
+}
+
+// parseData parses a payload spanning over multiple packets and returns a set of data
+func parseData(ps []*Packet, prs PacketsParser, pm programMap) (ds []*Data, err error) {
+ // Use custom parser first
+ if prs != nil {
+ var skip bool
+ if ds, skip, err = prs(ps); err != nil {
+ err = errors.Wrap(err, "astits: custom packets parsing failed")
+ return
+ } else if skip {
+ return
+ }
+ }
+
+ // Reconstruct payload
+ var payload []byte
+ for _, p := range ps {
+ payload = append(payload, p.Payload...)
+ }
+
+ // Parse PID
+ var pid = ps[0].Header.PID
+
+ // Parse payload
+ if pid == PIDCAT {
+ // Information in a CAT payload is private and dependent on the CA system. Use the PacketsParser
+ // to parse this type of payload
+ } else if isPSIPayload(pid, pm) {
+ var psiData *PSIData
+ if psiData, err = parsePSIData(payload); err != nil {
+ err = errors.Wrap(err, "astits: parsing PSI data failed")
+ return
+ }
+ ds = psiData.toData(pid)
+ } else if isPESPayload(payload) {
+ ds = append(ds, &Data{PES: parsePESData(payload), PID: pid})
+ }
+ return
+}
+
+// isPSIPayload checks whether the payload is a PSI one
+func isPSIPayload(pid uint16, pm programMap) bool {
+ return pid == PIDPAT || // PAT
+ pm.exists(pid) || // PMT
+ ((pid >= 0x10 && pid <= 0x14) || (pid >= 0x1e && pid <= 0x1f)) //DVB
+}
+
+// isPESPayload checks whether the payload is a PES one
+func isPESPayload(i []byte) bool {
+ // Packet is not big enough
+ if len(i) < 3 {
+ return false
+ }
+
+ // Check prefix
+ return uint32(i[0])<<16|uint32(i[1])<<8|uint32(i[2]) == 1
+}
diff --git a/data_eit.go b/data_eit.go
new file mode 100644
index 0000000..383e4bf
--- /dev/null
+++ b/data_eit.go
@@ -0,0 +1,73 @@
+package astits
+
+import "time"
+
+// EITData represents an EIT data
+// Page: 36 | Chapter: 5.2.4 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+type EITData struct {
+ Events []*EITDataEvent
+ LastTableID uint8
+ OriginalNetworkID uint16
+ SegmentLastSectionNumber uint8
+ ServiceID uint16
+ TransportStreamID uint16
+}
+
+// EITDataEvent represents an EIT data event
+type EITDataEvent struct {
+ Descriptors []*Descriptor
+ Duration time.Duration
+ EventID uint16
+ HasFreeCSAMode bool // When true indicates that access to one or more streams may be controlled by a CA system.
+ RunningStatus uint8
+ StartTime time.Time
+}
+
+// parseEITSection parses an EIT section
+func parseEITSection(i []byte, offset *int, offsetSectionsEnd int, tableIDExtension uint16) (d *EITData) {
+ // Init
+ d = &EITData{ServiceID: tableIDExtension}
+
+ // Transport stream ID
+ d.TransportStreamID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Original network ID
+ d.OriginalNetworkID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Segment last section number
+ d.SegmentLastSectionNumber = uint8(i[*offset])
+ *offset += 1
+
+ // Last table ID
+ d.LastTableID = uint8(i[*offset])
+ *offset += 1
+
+ // Loop until end of section data is reached
+ for *offset < offsetSectionsEnd {
+ // Event ID
+ var e = &EITDataEvent{}
+ e.EventID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Start time
+ e.StartTime = parseDVBTime(i, offset)
+
+ // Duration
+ e.Duration = parseDVBDuration(i, offset)
+
+ // Running status
+ e.RunningStatus = uint8(i[*offset]) >> 5
+
+ // Free CA mode
+ e.HasFreeCSAMode = uint8(i[*offset]&0x10) > 0
+
+ // Descriptors
+ e.Descriptors = parseDescriptors(i, offset)
+
+ // Add event
+ d.Events = append(d.Events, e)
+ }
+ return
+}
diff --git a/data_eit_test.go b/data_eit_test.go
new file mode 100644
index 0000000..0775763
--- /dev/null
+++ b/data_eit_test.go
@@ -0,0 +1,46 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+var eit = &EITData{
+ Events: []*EITDataEvent{{
+ Descriptors: descriptors,
+ Duration: dvbDuration,
+ EventID: 6,
+ HasFreeCSAMode: true,
+ RunningStatus: 7,
+ StartTime: dvbTime,
+ }},
+ LastTableID: 5,
+ OriginalNetworkID: 3,
+ SegmentLastSectionNumber: 4,
+ ServiceID: 1,
+ TransportStreamID: 2,
+}
+
+func eitBytes() []byte {
+ w := astibinary.New()
+ w.Write(uint16(2)) // Transport stream ID
+ w.Write(uint16(3)) // Original network ID
+ w.Write(uint8(4)) // Segment last section number
+ w.Write(uint8(5)) // Last table id
+ w.Write(uint16(6)) // Event #1 id
+ w.Write(dvbTimeBytes) // Event #1 start time
+ w.Write(dvbDurationBytes) // Event #1 duration
+ w.Write("111") // Event #1 running status
+ w.Write("1") // Event #1 free CA mode
+ descriptorsBytes(w) // Event #1 descriptors
+ return w.Bytes()
+}
+
+func TestParseEITSection(t *testing.T) {
+ var offset int
+ var b = eitBytes()
+ d := parseEITSection(b, &offset, len(b), uint16(1))
+ assert.Equal(t, d, eit)
+}
diff --git a/data_nit.go b/data_nit.go
new file mode 100644
index 0000000..80e9fde
--- /dev/null
+++ b/data_nit.go
@@ -0,0 +1,49 @@
+package astits
+
+// NITData represents a NIT data
+// Page: 29 | Chapter: 5.2.1 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+type NITData struct {
+ NetworkDescriptors []*Descriptor
+ NetworkID uint16
+ TransportStreams []*NITDataTransportStream
+}
+
+// NITDataTransportStream represents a NIT data transport stream
+type NITDataTransportStream struct {
+ OriginalNetworkID uint16
+ TransportDescriptors []*Descriptor
+ TransportStreamID uint16
+}
+
+// parseNITSection parses a NIT section
+func parseNITSection(i []byte, offset *int, tableIDExtension uint16) (d *NITData) {
+ // Init
+ d = &NITData{NetworkID: tableIDExtension}
+
+ // Network descriptors
+ d.NetworkDescriptors = parseDescriptors(i, offset)
+
+ // Transport stream loop length
+ var transportStreamLoopLength = int(uint16(i[*offset]&0xf)<<8 | uint16(i[*offset+1]))
+ *offset += 2
+
+ // Transport stream loop
+ transportStreamLoopLength += *offset
+ for *offset < transportStreamLoopLength {
+ // Transport stream ID
+ var ts = &NITDataTransportStream{}
+ ts.TransportStreamID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Original network ID
+ ts.OriginalNetworkID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Transport descriptors
+ ts.TransportDescriptors = parseDescriptors(i, offset)
+
+ // Append transport stream
+ d.TransportStreams = append(d.TransportStreams, ts)
+ }
+ return
+}
diff --git a/data_nit_test.go b/data_nit_test.go
new file mode 100644
index 0000000..4216486
--- /dev/null
+++ b/data_nit_test.go
@@ -0,0 +1,38 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+var nit = &NITData{
+ NetworkDescriptors: descriptors,
+ NetworkID: 1,
+ TransportStreams: []*NITDataTransportStream{{
+ OriginalNetworkID: 3,
+ TransportDescriptors: descriptors,
+ TransportStreamID: 2,
+ }},
+}
+
+func nitBytes() []byte {
+ w := astibinary.New()
+ w.Write("0000") // Reserved for future use
+ descriptorsBytes(w) // Network descriptors
+ w.Write("0000") // Reserved for future use
+ w.Write("000000001001") // Transport stream loop length
+ w.Write(uint16(2)) // Transport stream #1 id
+ w.Write(uint16(3)) // Transport stream #1 original network id
+ w.Write("0000") // Transport stream #1 reserved for future use
+ descriptorsBytes(w) // Transport stream #1 descriptors
+ return w.Bytes()
+}
+
+func TestParseNITSection(t *testing.T) {
+ var offset int
+ var b = nitBytes()
+ d := parseNITSection(b, &offset, uint16(1))
+ assert.Equal(t, d, nit)
+}
diff --git a/data_pat.go b/data_pat.go
new file mode 100644
index 0000000..5576515
--- /dev/null
+++ b/data_pat.go
@@ -0,0 +1,30 @@
+package astits
+
+// PATData represents a PAT data
+// https://en.wikipedia.org/wiki/Program-specific_information
+type PATData struct {
+ Programs []*PATProgram
+ TransportStreamID uint16
+}
+
+// PATProgram represents a PAT program
+type PATProgram struct {
+ ProgramMapID uint16 // The packet identifier that contains the associated PMT
+ ProgramNumber uint16 // Relates to the Table ID extension in the associated PMT. A value of 0 is reserved for a NIT packet identifier.
+}
+
+// parsePATSection parses a PAT section
+func parsePATSection(i []byte, offset *int, offsetSectionsEnd int, tableIDExtension uint16) (d *PATData) {
+ // Init
+ d = &PATData{TransportStreamID: tableIDExtension}
+
+ // Loop until end of section data is reached
+ for *offset < offsetSectionsEnd {
+ d.Programs = append(d.Programs, &PATProgram{
+ ProgramMapID: uint16(i[*offset+2]&0x1f)<<8 | uint16(i[*offset+3]),
+ ProgramNumber: uint16(i[*offset])<<8 | uint16(i[*offset+1]),
+ })
+ *offset += 4
+ }
+ return
+}
diff --git a/data_pat_test.go b/data_pat_test.go
new file mode 100644
index 0000000..f040e07
--- /dev/null
+++ b/data_pat_test.go
@@ -0,0 +1,34 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+var pat = &PATData{
+ Programs: []*PATProgram{
+ {ProgramMapID: 3, ProgramNumber: 2},
+ {ProgramMapID: 5, ProgramNumber: 4},
+ },
+ TransportStreamID: 1,
+}
+
+func patBytes() []byte {
+ w := astibinary.New()
+ w.Write(uint16(2)) // Program #1 number
+ w.Write("111") // Program #1 reserved bits
+ w.Write("0000000000011") // Program #1 map ID
+ w.Write(uint16(4)) // Program #2 number
+ w.Write("111") // Program #2 reserved bits
+	w.Write("0000000000101") // Program #2 map ID
+ return w.Bytes()
+}
+
+func TestParsePATSection(t *testing.T) {
+ var offset int
+ var b = patBytes()
+ d := parsePATSection(b, &offset, len(b), uint16(1))
+ assert.Equal(t, d, pat)
+}
diff --git a/data_pes.go b/data_pes.go
new file mode 100644
index 0000000..fc91009
--- /dev/null
+++ b/data_pes.go
@@ -0,0 +1,306 @@
+package astits
+
+// P-STD buffer scales
+const (
+ PSTDBufferScale128Bytes = 0
+ PSTDBufferScale1024Bytes = 1
+)
+
+// PTS DTS indicator
+const (
+ PTSDTSIndicatorBothPresent = 3
+ PTSDTSIndicatorIsForbidden = 1
+ PTSDTSIndicatorNoPTSOrDTS = 0
+ PTSDTSIndicatorOnlyPTS = 2
+)
+
+// Stream IDs
+const (
+ StreamIDPaddingStream = 190
+ StreamIDPrivateStream2 = 191
+)
+
+// Trick mode controls
+const (
+ TrickModeControlFastForward = 0
+ TrickModeControlFastReverse = 3
+ TrickModeControlFreezeFrame = 2
+ TrickModeControlSlowMotion = 1
+ TrickModeControlSlowReverse = 4
+)
+
+// PESData represents a PES data
+// https://en.wikipedia.org/wiki/Packetized_elementary_stream
+// http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
+// http://happy.emu.id.au/lab/tut/dttb/dtbtut4b.htm
+type PESData struct {
+ Data []byte
+ Header *PESHeader
+}
+
+// PESHeader represents a packet PES header
+type PESHeader struct {
+ OptionalHeader *PESOptionalHeader
+ PacketLength uint16 // Specifies the number of bytes remaining in the packet after this field. Can be zero. If the PES packet length is set to zero, the PES packet can be of any length. A value of zero for the PES packet length can be used only when the PES packet payload is a video elementary stream.
+ StreamID uint8 // Examples: Audio streams (0xC0-0xDF), Video streams (0xE0-0xEF)
+}
+
+// PESOptionalHeader represents a PES optional header
+type PESOptionalHeader struct {
+ AdditionalCopyInfo uint8
+ CRC uint16
+ DataAlignmentIndicator bool // True indicates that the PES packet header is immediately followed by the video start code or audio syncword
+ DSMTrickMode *DSMTrickMode
+ DTS *ClockReference
+ ESCR *ClockReference
+ ESRate uint32
+ Extension2Data []byte
+ Extension2Length uint8
+ HasAdditionalCopyInfo bool
+ HasCRC bool
+ HasDSMTrickMode bool
+ HasESCR bool
+ HasESRate bool
+ HasExtension bool
+ HasExtension2 bool
+ HasOptionalFields bool
+ HasPackHeaderField bool
+ HasPrivateData bool
+ HasProgramPacketSequenceCounter bool
+ HasPSTDBuffer bool
+ HeaderLength uint8
+ IsCopyrighted bool
+ IsOriginal bool
+ MarkerBits uint8
+ MPEG1OrMPEG2ID uint8
+ OriginalStuffingLength uint8
+ PacketSequenceCounter uint8
+ PackField uint8
+ Priority bool
+ PrivateData []byte
+ PSTDBufferScale uint8
+ PSTDBufferSize uint16
+ PTS *ClockReference
+ PTSDTSIndicator uint8
+ ScramblingControl uint8
+}
+
+// DSMTrickMode represents a DSM trick mode
+// https://books.google.fr/books?id=vwUrAwAAQBAJ&pg=PT501&lpg=PT501&dq=dsm+trick+mode+control&source=bl&ots=fI-9IHXMRL&sig=PWnhxrsoMWNQcl1rMCPmJGNO9Ds&hl=fr&sa=X&ved=0ahUKEwjogafD8bjXAhVQ3KQKHeHKD5oQ6AEINDAB#v=onepage&q=dsm%20trick%20mode%20control&f=false
+type DSMTrickMode struct {
+ FieldID uint8
+ FrequencyTruncation uint8
+ IntraSliceRefresh uint8
+ RepeatControl uint8
+ TrickModeControl uint8
+}
+
+// parsePESData parses a PES data
+func parsePESData(i []byte) (d *PESData) {
+ // Init
+ d = &PESData{}
+
+ // Parse header
+ var offset, dataStart, dataEnd = 3, 0, 0
+ d.Header, dataStart, dataEnd = parsePESHeader(i, &offset)
+
+ // Parse data
+ d.Data = i[dataStart:dataEnd]
+ return
+}
+
+// hasPESOptionalHeader checks whether the data has a PES optional header
+func hasPESOptionalHeader(streamID uint8) bool {
+ return streamID != StreamIDPaddingStream && streamID != StreamIDPrivateStream2
+}
+
+// parsePESHeader parses a PES header
+func parsePESHeader(i []byte, offset *int) (h *PESHeader, dataStart, dataEnd int) {
+ // Init
+ h = &PESHeader{}
+
+ // Stream ID
+ h.StreamID = uint8(i[*offset])
+ *offset += 1
+
+ // Length
+ h.PacketLength = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Data end
+ if h.PacketLength > 0 {
+ dataEnd = *offset + int(h.PacketLength)
+ } else {
+ dataEnd = len(i)
+ }
+
+ // Optional header
+ if hasPESOptionalHeader(h.StreamID) {
+ h.OptionalHeader, dataStart = parsePESOptionalHeader(i, offset)
+ } else {
+ dataStart = *offset
+ }
+ return
+}
+
+// parsePESOptionalHeader parses a PES optional header
+func parsePESOptionalHeader(i []byte, offset *int) (h *PESOptionalHeader, dataStart int) {
+	// Init
+	h = &PESOptionalHeader{}
+
+	// Marker bits
+	h.MarkerBits = uint8(i[*offset]) >> 6
+
+	// Scrambling control
+	h.ScramblingControl = uint8(i[*offset]) >> 4 & 0x3
+
+	// Priority
+	h.Priority = uint8(i[*offset])&0x8 > 0
+
+	// Data alignment indicator
+	h.DataAlignmentIndicator = uint8(i[*offset])&0x4 > 0
+
+	// Copyrighted
+	h.IsCopyrighted = uint8(i[*offset])&0x2 > 0
+
+	// Original or copy
+	h.IsOriginal = uint8(i[*offset])&0x1 > 0
+	*offset += 1
+
+	// PTS DTS indicator
+	h.PTSDTSIndicator = uint8(i[*offset]) >> 6 & 0x3
+
+	// Flags
+	h.HasESCR = uint8(i[*offset])&0x20 > 0
+	h.HasESRate = uint8(i[*offset])&0x10 > 0
+	h.HasDSMTrickMode = uint8(i[*offset])&0x8 > 0
+	h.HasAdditionalCopyInfo = uint8(i[*offset])&0x4 > 0
+	h.HasCRC = uint8(i[*offset])&0x2 > 0
+	h.HasExtension = uint8(i[*offset])&0x1 > 0
+	*offset += 1
+
+	// Header length
+	h.HeaderLength = uint8(i[*offset])
+	*offset += 1
+
+	// Data start
+	dataStart = *offset + int(h.HeaderLength)
+
+	// PTS/DTS
+	if h.PTSDTSIndicator == PTSDTSIndicatorOnlyPTS {
+		h.PTS = parsePTSOrDTS(i[*offset:])
+		*offset += 5
+	} else if h.PTSDTSIndicator == PTSDTSIndicatorBothPresent {
+		h.PTS = parsePTSOrDTS(i[*offset:])
+		*offset += 5
+		h.DTS = parsePTSOrDTS(i[*offset:])
+		*offset += 5
+	}
+
+	// ESCR
+	if h.HasESCR {
+		h.ESCR = parseESCR(i[*offset:])
+		*offset += 6
+	}
+
+	// ES rate
+	if h.HasESRate {
+		h.ESRate = uint32(i[*offset])&0x7f<<15 | uint32(i[*offset+1])<<7 | uint32(i[*offset+2])>>1
+		*offset += 3
+	}
+
+	// Trick mode
+	if h.HasDSMTrickMode {
+		h.DSMTrickMode = parseDSMTrickMode(i[*offset])
+		*offset += 1
+	}
+
+	// Additional copy info
+	if h.HasAdditionalCopyInfo {
+		h.AdditionalCopyInfo = i[*offset] & 0x7f
+		*offset += 1
+	}
+
+	// CRC
+	if h.HasCRC {
+		h.CRC = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+		*offset += 2
+	}
+
+	// Extension
+	if h.HasExtension {
+		// Flags
+		h.HasPrivateData = i[*offset]&0x80 > 0
+		h.HasPackHeaderField = i[*offset]&0x40 > 0
+		h.HasProgramPacketSequenceCounter = i[*offset]&0x20 > 0
+		h.HasPSTDBuffer = i[*offset]&0x10 > 0
+		h.HasExtension2 = i[*offset]&0x1 > 0
+		*offset += 1
+
+		// Private data
+		if h.HasPrivateData {
+			h.PrivateData = i[*offset : *offset+16]
+			*offset += 16
+		}
+
+		// Pack field length
+		if h.HasPackHeaderField {
+			h.PackField = uint8(i[*offset])
+			*offset += 1
+		}
+
+		// Program packet sequence counter
+		if h.HasProgramPacketSequenceCounter {
+			h.PacketSequenceCounter = uint8(i[*offset]) & 0x7f
+			h.MPEG1OrMPEG2ID = uint8(i[*offset+1]) >> 6 & 0x1
+			h.OriginalStuffingLength = uint8(i[*offset+1]) & 0x3f
+			*offset += 2
+		}
+
+		// P-STD buffer
+		if h.HasPSTDBuffer {
+			h.PSTDBufferScale = i[*offset] >> 5 & 0x1
+			h.PSTDBufferSize = uint16(i[*offset])&0x1f<<8 | uint16(i[*offset+1])
+			*offset += 2
+		}
+
+		// Extension 2
+		if h.HasExtension2 {
+			// Length
+			h.Extension2Length = uint8(i[*offset]) & 0x7f
+			*offset += 2
+
+			// Data
+			h.Extension2Data = i[*offset : *offset+int(h.Extension2Length)]
+			*offset += int(h.Extension2Length)
+		}
+	}
+	return
+}
+
+// parseDSMTrickMode parses a DSM trick mode
+func parseDSMTrickMode(i byte) (m *DSMTrickMode) {
+ m = &DSMTrickMode{}
+ m.TrickModeControl = i >> 5
+ if m.TrickModeControl == TrickModeControlFastForward || m.TrickModeControl == TrickModeControlFastReverse {
+ m.FieldID = i >> 3 & 0x3
+ m.IntraSliceRefresh = i >> 2 & 0x1
+ m.FrequencyTruncation = i & 0x3
+ } else if m.TrickModeControl == TrickModeControlFreezeFrame {
+ m.FieldID = i >> 3 & 0x3
+ } else if m.TrickModeControl == TrickModeControlSlowMotion || m.TrickModeControl == TrickModeControlSlowReverse {
+ m.RepeatControl = i & 0x1f
+ }
+ return
+}
+
+// parsePTSOrDTS parses a PTS or a DTS
+func parsePTSOrDTS(i []byte) *ClockReference {
+ return newClockReference(int(uint64(i[0])>>1&0x7<<30|uint64(i[1])<<22|uint64(i[2])>>1&0x7f<<15|uint64(i[3])<<7|uint64(i[4])>>1&0x7f), 0)
+}
+
+// parseESCR parses an ESCR
+func parseESCR(i []byte) *ClockReference {
+ var escr = uint64(i[0])>>3&0x7<<39 | uint64(i[0])&0x3<<37 | uint64(i[1])<<29 | uint64(i[2])>>3<<24 | uint64(i[2])&0x3<<22 | uint64(i[3])<<14 | uint64(i[4])>>3<<9 | uint64(i[4])&0x3<<7 | uint64(i[5])>>1
+ return newClockReference(int(escr>>9), int(escr&0x1ff))
+}
diff --git a/data_pes_test.go b/data_pes_test.go
new file mode 100644
index 0000000..0a2fd66
--- /dev/null
+++ b/data_pes_test.go
@@ -0,0 +1,224 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestHasPESOptionalHeader(t *testing.T) {
+ var a []int
+ for i := 0; i <= 255; i++ {
+ if !hasPESOptionalHeader(uint8(i)) {
+ a = append(a, i)
+ }
+ }
+ assert.Equal(t, []int{StreamIDPaddingStream, StreamIDPrivateStream2}, a)
+}
+
+var dsmTrickModeSlow = &DSMTrickMode{
+ RepeatControl: 21,
+ TrickModeControl: TrickModeControlSlowMotion,
+}
+
+func dsmTrickModeSlowBytes() []byte {
+ w := astibinary.New()
+ w.Write("001") // Control
+ w.Write("10101") // Repeat control
+ return w.Bytes()
+}
+
+func TestParseDSMTrickMode(t *testing.T) {
+ // Fast
+ w := astibinary.New()
+ w.Write("011") // Control
+ w.Write("10") // Field ID
+ w.Write("1") // Intra slice refresh
+ w.Write("11") // Frequency truncation
+ assert.Equal(t, parseDSMTrickMode(w.Bytes()[0]), &DSMTrickMode{
+ FieldID: 2,
+ FrequencyTruncation: 3,
+ IntraSliceRefresh: 1,
+ TrickModeControl: TrickModeControlFastReverse,
+ })
+
+ // Freeze
+ w.Reset()
+ w.Write("010") // Control
+ w.Write("10") // Field ID
+ w.Write("000") // Reserved
+ assert.Equal(t, parseDSMTrickMode(w.Bytes()[0]), &DSMTrickMode{
+ FieldID: 2,
+ TrickModeControl: TrickModeControlFreezeFrame,
+ })
+
+ // Slow
+ assert.Equal(t, parseDSMTrickMode(dsmTrickModeSlowBytes()[0]), dsmTrickModeSlow)
+}
+
+var ptsClockReference = &ClockReference{Base: 5726623061}
+
+func ptsBytes() []byte {
+ w := astibinary.New()
+ w.Write("0010") // Flag
+ w.Write("101") // 32...30
+ w.Write("0") // Dummy
+ w.Write("010101010101010") // 29...15
+ w.Write("0") // Dummy
+ w.Write("101010101010101") // 14...0
+ w.Write("0") // Dummy
+ return w.Bytes()
+}
+
+var dtsClockReference = &ClockReference{Base: 5726623060}
+
+func dtsBytes() []byte {
+ w := astibinary.New()
+ w.Write("0010") // Flag
+ w.Write("101") // 32...30
+ w.Write("0") // Dummy
+ w.Write("010101010101010") // 29...15
+ w.Write("0") // Dummy
+ w.Write("101010101010100") // 14...0
+ w.Write("0") // Dummy
+ return w.Bytes()
+}
+
+func TestParsePTSOrDTS(t *testing.T) {
+ assert.Equal(t, parsePTSOrDTS(ptsBytes()), ptsClockReference)
+}
+
+func escrBytes() []byte {
+ w := astibinary.New()
+ w.Write("00") // Dummy
+ w.Write("011") // 32...30
+ w.Write("1") // Dummy
+ w.Write("000010111110000") // 29...15
+ w.Write("1") // Dummy
+ w.Write("000010111001111") // 14...0
+ w.Write("1") // Dummy
+ w.Write("000111010") // Ext
+ w.Write("1") // Dummy
+ return w.Bytes()
+}
+
+func TestParseESCR(t *testing.T) {
+ assert.Equal(t, parseESCR(escrBytes()), clockReference)
+}
+
+var pesWithoutHeader = &PESData{
+ Data: []byte("stuffdata"),
+ Header: &PESHeader{
+ PacketLength: 9,
+ StreamID: StreamIDPaddingStream,
+ },
+}
+
+func pesWithoutHeaderBytes() []byte {
+ w := astibinary.New()
+ w.Write("000000000000000000000001") // Prefix
+ w.Write(uint8(StreamIDPaddingStream)) // Stream ID
+ w.Write(uint16(9)) // Packet length
+ w.Write([]byte("stuff")) // Stuffing bytes
+ w.Write([]byte("datadummy")) // Data
+ return w.Bytes()
+}
+
+var pesWithHeader = &PESData{
+ Data: []byte("stuffdata"),
+ Header: &PESHeader{
+ OptionalHeader: &PESOptionalHeader{
+ AdditionalCopyInfo: 127,
+ CRC: 4,
+ DataAlignmentIndicator: true,
+ DSMTrickMode: dsmTrickModeSlow,
+ DTS: dtsClockReference,
+ ESCR: clockReference,
+ ESRate: 1398101,
+ Extension2Data: []byte("extension2"),
+ Extension2Length: 10,
+ HasAdditionalCopyInfo: true,
+ HasCRC: true,
+ HasDSMTrickMode: true,
+ HasESCR: true,
+ HasESRate: true,
+ HasExtension: true,
+ HasExtension2: true,
+ HasPackHeaderField: true,
+ HasPrivateData: true,
+ HasProgramPacketSequenceCounter: true,
+ HasPSTDBuffer: true,
+ HeaderLength: 62,
+ IsCopyrighted: true,
+ IsOriginal: true,
+ MarkerBits: 2,
+ MPEG1OrMPEG2ID: 1,
+ OriginalStuffingLength: 21,
+ PacketSequenceCounter: 85,
+ PackField: 5,
+ Priority: true,
+ PrivateData: []byte("1234567890123456"),
+ PSTDBufferScale: 1,
+ PSTDBufferSize: 5461,
+ PTSDTSIndicator: 3,
+ PTS: ptsClockReference,
+ ScramblingControl: 1,
+ },
+ StreamID: 1,
+ },
+}
+
+func pesWithHeaderBytes() []byte {
+ w := astibinary.New()
+ w.Write("000000000000000000000001") // Prefix
+ w.Write(uint8(1)) // Stream ID
+ w.Write(uint16(0)) // Packet length
+ w.Write("10") // Marker bits
+ w.Write("01") // Scrambling control
+ w.Write("1") // Priority
+ w.Write("1") // Data alignment indicator
+ w.Write("1") // Copyright
+ w.Write("1") // Original or copy
+ w.Write("11") // PTS/DTS indicator
+ w.Write("1") // ESCR flag
+ w.Write("1") // ES rate flag
+ w.Write("1") // DSM trick mode flag
+ w.Write("1") // Additional copy flag
+ w.Write("1") // CRC flag
+ w.Write("1") // Extension flag
+ w.Write(uint8(62)) // Header length
+ w.Write(ptsBytes()) // PTS
+ w.Write(dtsBytes()) // DTS
+ w.Write(escrBytes()) // ESCR
+ w.Write("101010101010101010101010") // ES rate
+ w.Write(dsmTrickModeSlowBytes()) // DSM trick mode
+ w.Write("11111111") // Additional copy info
+ w.Write(uint16(4)) // CRC
+ w.Write("1") // Private data flag
+ w.Write("1") // Pack header field flag
+ w.Write("1") // Program packet sequence counter flag
+ w.Write("1") // PSTD buffer flag
+ w.Write("000") // Dummy
+ w.Write("1") // Extension 2 flag
+ w.Write([]byte("1234567890123456")) // Private data
+ w.Write(uint8(5)) // Pack field
+ w.Write("0101010101010101") // Packet sequence counter
+ w.Write("0111010101010101") // PSTD buffer
+ w.Write("0000101000000000") // Extension 2 header
+ w.Write([]byte("extension2")) // Extension 2 data
+ w.Write([]byte("stuff")) // Optional header stuffing bytes
+ w.Write([]byte("stuff")) // Stuffing bytes
+ w.Write([]byte("data")) // Data
+ return w.Bytes()
+}
+
+func TestParsePESSection(t *testing.T) {
+ // No optional header and specific packet length
+ d := parsePESData(pesWithoutHeaderBytes())
+ assert.Equal(t, d, pesWithoutHeader)
+
+ // Optional header and no specific header length
+ d = parsePESData(pesWithHeaderBytes())
+ assert.Equal(t, d, pesWithHeader)
+}
diff --git a/data_pmt.go b/data_pmt.go
new file mode 100644
index 0000000..86f1de3
--- /dev/null
+++ b/data_pmt.go
@@ -0,0 +1,57 @@
+package astits
+
+// Stream types
+const (
+ StreamTypeLowerBitrateVideo = 27 // ITU-T Rec. H.264 and ISO/IEC 14496-10
+ StreamTypeMPEG1Audio = 3 // ISO/IEC 11172-3
+ StreamTypeMPEG2HalvedSampleRateAudio = 4 // ISO/IEC 13818-3
+ StreamTypeMPEG2PacketizedData = 6 // ITU-T Rec. H.222 and ISO/IEC 13818-1 i.e., DVB subtitles/VBI and AC-3
+)
+
+// PMTData represents a PMT data
+// https://en.wikipedia.org/wiki/Program-specific_information
+type PMTData struct {
+ ElementaryStreams []*PMTElementaryStream
+ PCRPID uint16 // The packet identifier that contains the program clock reference used to improve the random access accuracy of the stream's timing that is derived from the program timestamp. If this is unused, then it is set to 0x1FFF (all bits on).
+ ProgramDescriptors []*Descriptor // Program descriptors
+ ProgramNumber uint16
+}
+
+// PMTElementaryStream represents a PMT elementary stream
+type PMTElementaryStream struct {
+ ElementaryPID uint16 // The packet identifier that contains the stream type data.
+ ElementaryStreamDescriptors []*Descriptor // Elementary stream descriptors
+ StreamType uint8 // This defines the structure of the data contained within the elementary packet identifier.
+}
+
+// parsePMTSection parses a PMT section
+func parsePMTSection(i []byte, offset *int, offsetSectionsEnd int, tableIDExtension uint16) (d *PMTData) {
+ // Init
+ d = &PMTData{ProgramNumber: tableIDExtension}
+
+ // PCR PID
+ d.PCRPID = uint16(i[*offset]&0x1f)<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Program descriptors
+ d.ProgramDescriptors = parseDescriptors(i, offset)
+
+ // Loop until end of section data is reached
+ for *offset < offsetSectionsEnd {
+ // Stream type
+ var e = &PMTElementaryStream{}
+ e.StreamType = uint8(i[*offset])
+ *offset += 1
+
+ // Elementary PID
+ e.ElementaryPID = uint16(i[*offset]&0x1f)<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Elementary descriptors
+ e.ElementaryStreamDescriptors = parseDescriptors(i, offset)
+
+ // Add elementary stream
+ d.ElementaryStreams = append(d.ElementaryStreams, e)
+ }
+ return
+}
diff --git a/data_pmt_test.go b/data_pmt_test.go
new file mode 100644
index 0000000..53106b5
--- /dev/null
+++ b/data_pmt_test.go
@@ -0,0 +1,40 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+var pmt = &PMTData{
+ ElementaryStreams: []*PMTElementaryStream{{
+ ElementaryPID: 2730,
+ ElementaryStreamDescriptors: descriptors,
+ StreamType: StreamTypeMPEG1Audio,
+ }},
+ PCRPID: 5461,
+ ProgramDescriptors: descriptors,
+ ProgramNumber: 1,
+}
+
+func pmtBytes() []byte {
+ w := astibinary.New()
+ w.Write("111") // Reserved bits
+ w.Write("1010101010101") // PCR PID
+ w.Write("1111") // Reserved
+ descriptorsBytes(w) // Program descriptors
+ w.Write(uint8(StreamTypeMPEG1Audio)) // Stream #1 stream type
+ w.Write("111") // Stream #1 reserved
+ w.Write("0101010101010") // Stream #1 PID
+ w.Write("1111") // Stream #1 reserved
+ descriptorsBytes(w) // Stream #1 descriptors
+ return w.Bytes()
+}
+
+func TestParsePMTSection(t *testing.T) {
+ var offset int
+ var b = pmtBytes()
+ d := parsePMTSection(b, &offset, len(b), uint16(1))
+ assert.Equal(t, d, pmt)
+}
diff --git a/data_psi.go b/data_psi.go
new file mode 100644
index 0000000..bcd3e6d
--- /dev/null
+++ b/data_psi.go
@@ -0,0 +1,355 @@
+package astits
+
+import (
+ "fmt"
+
+ "github.com/asticode/go-astilog"
+ "github.com/pkg/errors"
+)
+
+// PSI table IDs
+const (
+ PSITableTypeBAT = "BAT"
+ PSITableTypeDIT = "DIT"
+ PSITableTypeEIT = "EIT"
+ PSITableTypeNIT = "NIT"
+ PSITableTypeNull = "Null"
+ PSITableTypePAT = "PAT"
+ PSITableTypePMT = "PMT"
+ PSITableTypeRST = "RST"
+ PSITableTypeSDT = "SDT"
+ PSITableTypeSIT = "SIT"
+ PSITableTypeST = "ST"
+ PSITableTypeTDT = "TDT"
+ PSITableTypeTOT = "TOT"
+ PSITableTypeUnknown = "Unknown"
+)
+
+// PSIData represents a PSI data
+// https://en.wikipedia.org/wiki/Program-specific_information
+type PSIData struct {
+ PointerField int // Present at the start of the TS packet payload signaled by the payload_unit_start_indicator bit in the TS header. Used to set packet alignment bytes or content before the start of tabled payload data.
+ Sections []*PSISection
+}
+
+// PSISection represents a PSI section
+type PSISection struct {
+ CRC32 uint32 // A checksum of the entire table excluding the pointer field, pointer filler bytes and the trailing CRC32.
+ Header *PSISectionHeader
+ Syntax *PSISectionSyntax
+}
+
+// PSISectionHeader represents a PSI section header
+type PSISectionHeader struct {
+ PrivateBit bool // The PAT, PMT, and CAT all set this to 0. Other tables set this to 1.
+ SectionLength uint16 // The number of bytes that follow for the syntax section (with CRC value) and/or table data. These bytes must not exceed a value of 1021.
+ SectionSyntaxIndicator bool // A flag that indicates if the syntax section follows the section length. The PAT, PMT, and CAT all set this to 1.
+ TableID int // Table Identifier, which defines the structure of the syntax section and other contained data. As an exception, if this is the byte that immediately follows the previous table section and is set to 0xFF, then it indicates that the repeat of table sections ends here and the rest of the TS data payload shall be stuffed with 0xFF. Consequently the value 0xFF shall not be used for the Table Identifier.
+ TableType string
+}
+
+// PSISectionSyntax represents a PSI section syntax
+type PSISectionSyntax struct {
+ Data *PSISectionSyntaxData
+ Header *PSISectionSyntaxHeader
+}
+
+// PSISectionSyntaxHeader represents a PSI section syntax header
+type PSISectionSyntaxHeader struct {
+ CurrentNextIndicator bool // Indicates if data is current in effect or is for future use. If the bit is flagged on, then the data is to be used at the present moment.
+ LastSectionNumber uint8 // This indicates which table is the last table in the sequence of tables.
+ SectionNumber uint8 // This is an index indicating which table this is in a related sequence of tables. The first table starts from 0.
+ TableIDExtension uint16 // Informational only identifier. The PAT uses this for the transport stream identifier and the PMT uses this for the Program number.
+ VersionNumber uint8 // Syntax version number. Incremented when data is changed and wrapped around on overflow for values greater than 32.
+}
+
+// PSISectionSyntaxData represents a PSI section syntax data
+type PSISectionSyntaxData struct {
+ EIT *EITData
+ NIT *NITData
+ PAT *PATData
+ PMT *PMTData
+ SDT *SDTData
+ TOT *TOTData
+}
+
+// parsePSIData parses a PSI data
+func parsePSIData(i []byte) (d *PSIData, err error) {
+ // Init data
+ d = &PSIData{}
+ var offset int
+
+ // Pointer field
+ d.PointerField = int(i[offset])
+ offset += 1
+
+ // Pointer filler bytes
+ offset += d.PointerField
+
+ // Parse sections
+ var s *PSISection
+ var stop bool
+ for offset < len(i) && !stop {
+ if s, stop, err = parsePSISection(i, &offset); err != nil {
+ err = errors.Wrap(err, "astits: parsing PSI table failed")
+ return
+ }
+ d.Sections = append(d.Sections, s)
+ }
+ return
+}
+
+// parsePSISection parses a PSI section
+func parsePSISection(i []byte, offset *int) (s *PSISection, stop bool, err error) {
+ // Init section
+ s = &PSISection{}
+
+ // Parse header
+ var offsetStart, offsetSectionsEnd, offsetEnd int
+ s.Header, offsetStart, _, offsetSectionsEnd, offsetEnd = parsePSISectionHeader(i, offset)
+
+ // Check whether we need to stop the parsing
+ if shouldStopPSIParsing(s.Header.TableType) {
+ stop = true
+ return
+ }
+
+ // Check whether there's a syntax section
+ if s.Header.SectionLength > 0 && s.Header.SectionSyntaxIndicator {
+ // Parse syntax
+ s.Syntax = parsePSISectionSyntax(i, offset, s.Header, offsetSectionsEnd)
+
+ // Process CRC32
+ if hasCRC32(s.Header.TableType) {
+ // Parse CRC32
+ s.CRC32 = parseCRC32(i[offsetSectionsEnd:offsetEnd])
+ *offset += 4
+
+ // Check CRC32
+ var c = computeCRC32(i[offsetStart:offsetSectionsEnd])
+ if c != s.CRC32 {
+ err = fmt.Errorf("astits: Table CRC32 %x != computed CRC32 %x", s.CRC32, c)
+ return
+ }
+ }
+ }
+ return
+}
+
+// parseCRC32 parses a CRC32
+func parseCRC32(i []byte) uint32 {
+ return uint32(i[len(i)-4])<<24 | uint32(i[len(i)-3])<<16 | uint32(i[len(i)-2])<<8 | uint32(i[len(i)-1])
+}
+
+// computeCRC32 computes a CRC32
+// https://stackoverflow.com/questions/35034042/how-to-calculate-crc32-in-psi-si-packet
+func computeCRC32(i []byte) (o uint32) {
+ o = uint32(0xffffffff)
+ for _, b := range i {
+ for i := 0; i < 8; i++ {
+ if (o >= uint32(0x80000000)) != (b >= uint8(0x80)) {
+ o = (o << 1) ^ 0x04C11DB7
+ } else {
+ o = o << 1
+ }
+ b <<= 1
+ }
+ }
+ return
+}
+
+// shouldStopPSIParsing checks whether the PSI parsing should be stopped
+func shouldStopPSIParsing(tableType string) bool {
+ return tableType == PSITableTypeNull || tableType == PSITableTypeUnknown
+}
+
+// parsePSISectionHeader parses a PSI section header
+func parsePSISectionHeader(i []byte, offset *int) (h *PSISectionHeader, offsetStart, offsetSectionsStart, offsetSectionsEnd, offsetEnd int) {
+ // Init
+ h = &PSISectionHeader{}
+ offsetStart = *offset
+
+ // Table ID
+ h.TableID = int(i[*offset])
+ *offset += 1
+
+ // Table type
+ h.TableType = psiTableType(h.TableID)
+
+ // Check whether we need to stop the parsing
+ if shouldStopPSIParsing(h.TableType) {
+ return
+ }
+
+ // Section syntax indicator
+ h.SectionSyntaxIndicator = i[*offset]&0x80 > 0
+
+ // Private bit
+ h.PrivateBit = i[*offset]&0x40 > 0
+
+ // Section length
+ h.SectionLength = uint16(i[*offset]&0xf)<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Offsets
+ offsetSectionsStart = *offset
+ offsetEnd = offsetSectionsStart + int(h.SectionLength)
+ offsetSectionsEnd = offsetEnd
+ if hasCRC32(h.TableType) {
+ offsetSectionsEnd -= 4
+ }
+ return
+}
+
+// hasCRC32 checks whether the table has a CRC32
+func hasCRC32(tableType string) bool {
+ return tableType == PSITableTypePAT ||
+ tableType == PSITableTypePMT ||
+ tableType == PSITableTypeEIT ||
+ tableType == PSITableTypeNIT ||
+ tableType == PSITableTypeTOT ||
+ tableType == PSITableTypeSDT
+}
+
+// psiTableType returns the psi table type based on the table id
+func psiTableType(tableID int) string {
+ switch {
+ case tableID == 0x4a:
+ return PSITableTypeBAT
+ case tableID >= 0x4e && tableID <= 0x6f:
+ return PSITableTypeEIT
+ case tableID == 0x7e:
+ return PSITableTypeDIT
+ case tableID == 0x40, tableID == 0x41:
+ return PSITableTypeNIT
+ case tableID == 0xff:
+ return PSITableTypeNull
+ case tableID == 0:
+ return PSITableTypePAT
+ case tableID == 2:
+ return PSITableTypePMT
+ case tableID == 0x71:
+ return PSITableTypeRST
+ case tableID == 0x42, tableID == 0x46:
+ return PSITableTypeSDT
+ case tableID == 0x7f:
+ return PSITableTypeSIT
+ case tableID == 0x72:
+ return PSITableTypeST
+ case tableID == 0x70:
+ return PSITableTypeTDT
+ case tableID == 0x73:
+ return PSITableTypeTOT
+ }
+ // TODO Remove this log
+ astilog.Debugf("unlisted PSI table ID %d", tableID)
+ return PSITableTypeUnknown
+}
+
+// parsePSISectionSyntax parses a PSI section syntax
+func parsePSISectionSyntax(i []byte, offset *int, h *PSISectionHeader, offsetSectionsEnd int) (s *PSISectionSyntax) {
+ // Init
+ s = &PSISectionSyntax{}
+
+ // Header
+ if hasPSISyntaxHeader(h.TableType) {
+ s.Header = parsePSISectionSyntaxHeader(i, offset)
+ }
+
+ // Parse data
+ s.Data = parsePSISectionSyntaxData(i, offset, h, s.Header, offsetSectionsEnd)
+ return
+}
+
+// hasPSISyntaxHeader checks whether the section has a syntax header
+func hasPSISyntaxHeader(tableType string) bool {
+ return tableType == PSITableTypeEIT ||
+ tableType == PSITableTypeNIT ||
+ tableType == PSITableTypePAT ||
+ tableType == PSITableTypePMT ||
+ tableType == PSITableTypeSDT
+}
+
+// parsePSISectionSyntaxHeader parses a PSI section syntax header
+func parsePSISectionSyntaxHeader(i []byte, offset *int) (h *PSISectionSyntaxHeader) {
+ // Init
+ h = &PSISectionSyntaxHeader{}
+
+ // Table ID extension
+ h.TableIDExtension = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Version number
+ h.VersionNumber = uint8(i[*offset]&0x3f) >> 1
+
+ // Current/Next indicator
+ h.CurrentNextIndicator = i[*offset]&0x1 > 0
+ *offset += 1
+
+ // Section number
+ h.SectionNumber = uint8(i[*offset])
+ *offset += 1
+
+ // Last section number
+ h.LastSectionNumber = uint8(i[*offset])
+ *offset += 1
+ return
+}
+
+// parsePSISectionSyntaxData parses a PSI section data
+func parsePSISectionSyntaxData(i []byte, offset *int, h *PSISectionHeader, sh *PSISectionSyntaxHeader, offsetSectionsEnd int) (d *PSISectionSyntaxData) {
+ // Init
+ d = &PSISectionSyntaxData{}
+
+ // Switch on table type
+ switch h.TableType {
+ case PSITableTypeBAT:
+ // TODO Parse BAT
+ case PSITableTypeDIT:
+ // TODO Parse DIT
+ case PSITableTypeEIT:
+ d.EIT = parseEITSection(i, offset, offsetSectionsEnd, sh.TableIDExtension)
+ case PSITableTypeNIT:
+ d.NIT = parseNITSection(i, offset, sh.TableIDExtension)
+ case PSITableTypePAT:
+ d.PAT = parsePATSection(i, offset, offsetSectionsEnd, sh.TableIDExtension)
+ case PSITableTypePMT:
+ d.PMT = parsePMTSection(i, offset, offsetSectionsEnd, sh.TableIDExtension)
+ case PSITableTypeRST:
+ // TODO Parse RST
+ case PSITableTypeSDT:
+ d.SDT = parseSDTSection(i, offset, offsetSectionsEnd, sh.TableIDExtension)
+ case PSITableTypeSIT:
+ // TODO Parse SIT
+ case PSITableTypeST:
+ // TODO Parse ST
+ case PSITableTypeTOT:
+ d.TOT = parseTOTSection(i, offset)
+ case PSITableTypeTDT:
+ // TODO Parse TDT
+ }
+ return
+}
+
+// toData parses the PSI tables and returns a set of Data
+func (d *PSIData) toData(pid uint16) (ds []*Data) {
+ // Loop through sections
+ for _, s := range d.Sections {
+ // Switch on table type
+ switch s.Header.TableType {
+ case PSITableTypeEIT:
+ ds = append(ds, &Data{EIT: s.Syntax.Data.EIT, PID: pid})
+ case PSITableTypeNIT:
+ ds = append(ds, &Data{NIT: s.Syntax.Data.NIT, PID: pid})
+ case PSITableTypePAT:
+ ds = append(ds, &Data{PAT: s.Syntax.Data.PAT, PID: pid})
+ case PSITableTypePMT:
+ ds = append(ds, &Data{PID: pid, PMT: s.Syntax.Data.PMT})
+ case PSITableTypeSDT:
+ ds = append(ds, &Data{PID: pid, SDT: s.Syntax.Data.SDT})
+ case PSITableTypeTOT:
+ ds = append(ds, &Data{PID: pid, TOT: s.Syntax.Data.TOT})
+ }
+ }
+ return
+}
diff --git a/data_psi_test.go b/data_psi_test.go
new file mode 100644
index 0000000..e286702
--- /dev/null
+++ b/data_psi_test.go
@@ -0,0 +1,271 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+var psi = &PSIData{
+ PointerField: 4,
+ Sections: []*PSISection{
+ {
+ CRC32: uint32(0x7ffc6102),
+ Header: &PSISectionHeader{
+ PrivateBit: true,
+ SectionLength: 30,
+ SectionSyntaxIndicator: true,
+ TableID: 78,
+ TableType: PSITableTypeEIT,
+ },
+ Syntax: &PSISectionSyntax{
+ Data: &PSISectionSyntaxData{EIT: eit},
+ Header: psiSectionSyntaxHeader,
+ },
+ },
+ {
+ CRC32: uint32(0xfebaa941),
+ Header: &PSISectionHeader{
+ PrivateBit: true,
+ SectionLength: 25,
+ SectionSyntaxIndicator: true,
+ TableID: 64,
+ TableType: PSITableTypeNIT,
+ },
+ Syntax: &PSISectionSyntax{
+ Data: &PSISectionSyntaxData{NIT: nit},
+ Header: psiSectionSyntaxHeader,
+ },
+ },
+ {
+ CRC32: uint32(0x60739f61),
+ Header: &PSISectionHeader{
+ PrivateBit: true,
+ SectionLength: 17,
+ SectionSyntaxIndicator: true,
+ TableID: 0,
+ TableType: PSITableTypePAT,
+ },
+ Syntax: &PSISectionSyntax{
+ Data: &PSISectionSyntaxData{PAT: pat},
+ Header: psiSectionSyntaxHeader,
+ },
+ },
+ {
+ CRC32: uint32(0xc68442e8),
+ Header: &PSISectionHeader{
+ PrivateBit: true,
+ SectionLength: 24,
+ SectionSyntaxIndicator: true,
+ TableID: 2,
+ TableType: PSITableTypePMT,
+ },
+ Syntax: &PSISectionSyntax{
+ Data: &PSISectionSyntaxData{PMT: pmt},
+ Header: psiSectionSyntaxHeader,
+ },
+ },
+ {
+ CRC32: uint32(0xef3751d6),
+ Header: &PSISectionHeader{
+ PrivateBit: true,
+ SectionLength: 20,
+ SectionSyntaxIndicator: true,
+ TableID: 66,
+ TableType: PSITableTypeSDT,
+ },
+ Syntax: &PSISectionSyntax{
+ Data: &PSISectionSyntaxData{SDT: sdt},
+ Header: psiSectionSyntaxHeader,
+ },
+ },
+ {
+ CRC32: uint32(0x6969b13),
+ Header: &PSISectionHeader{
+ PrivateBit: true,
+ SectionLength: 14,
+ SectionSyntaxIndicator: true,
+ TableID: 115,
+ TableType: PSITableTypeTOT,
+ },
+ Syntax: &PSISectionSyntax{
+ Data: &PSISectionSyntaxData{TOT: tot},
+ },
+ },
+ {Header: &PSISectionHeader{TableID: 254, TableType: PSITableTypeUnknown}},
+ },
+}
+
+func psiBytes() []byte {
+ w := astibinary.New()
+ w.Write(uint8(4)) // Pointer field
+ w.Write([]byte("test")) // Pointer field bytes
+ w.Write(uint8(78)) // EIT table ID
+ w.Write("1") // EIT syntax section indicator
+ w.Write("1") // EIT private bit
+ w.Write("11") // EIT reserved
+ w.Write("000000011110") // EIT section length
+ w.Write(psiSectionSyntaxHeaderBytes()) // EIT syntax section header
+ w.Write(eitBytes()) // EIT data
+ w.Write(uint32(0x7ffc6102)) // EIT CRC32
+ w.Write(uint8(64)) // NIT table ID
+ w.Write("1") // NIT syntax section indicator
+ w.Write("1") // NIT private bit
+ w.Write("11") // NIT reserved
+ w.Write("000000011001") // NIT section length
+ w.Write(psiSectionSyntaxHeaderBytes()) // NIT syntax section header
+ w.Write(nitBytes()) // NIT data
+ w.Write(uint32(0xfebaa941)) // NIT CRC32
+ w.Write(uint8(0)) // PAT table ID
+ w.Write("1") // PAT syntax section indicator
+ w.Write("1") // PAT private bit
+ w.Write("11") // PAT reserved
+ w.Write("000000010001") // PAT section length
+ w.Write(psiSectionSyntaxHeaderBytes()) // PAT syntax section header
+ w.Write(patBytes()) // PAT data
+ w.Write(uint32(0x60739f61)) // PAT CRC32
+ w.Write(uint8(2)) // PMT table ID
+ w.Write("1") // PMT syntax section indicator
+ w.Write("1") // PMT private bit
+ w.Write("11") // PMT reserved
+ w.Write("000000011000") // PMT section length
+ w.Write(psiSectionSyntaxHeaderBytes()) // PMT syntax section header
+ w.Write(pmtBytes()) // PMT data
+ w.Write(uint32(0xc68442e8)) // PMT CRC32
+ w.Write(uint8(66)) // SDT table ID
+ w.Write("1") // SDT syntax section indicator
+ w.Write("1") // SDT private bit
+ w.Write("11") // SDT reserved
+ w.Write("000000010100") // SDT section length
+ w.Write(psiSectionSyntaxHeaderBytes()) // SDT syntax section header
+ w.Write(sdtBytes()) // SDT data
+ w.Write(uint32(0xef3751d6)) // SDT CRC32
+ w.Write(uint8(115)) // TOT table ID
+ w.Write("1") // TOT syntax section indicator
+ w.Write("1") // TOT private bit
+ w.Write("11") // TOT reserved
+ w.Write("000000001110") // TOT section length
+ w.Write(totBytes()) // TOT data
+ w.Write(uint32(0x6969b13)) // TOT CRC32
+ w.Write(uint8(254)) // Unknown table ID
+ w.Write(uint8(0)) // PAT table ID
+ return w.Bytes()
+}
+
+func TestParsePSIData(t *testing.T) {
+ // Invalid CRC32
+ w := astibinary.New()
+ w.Write(uint8(0)) // Pointer field
+ w.Write(uint8(115)) // TOT table ID
+ w.Write("1") // TOT syntax section indicator
+ w.Write("1") // TOT private bit
+ w.Write("11") // TOT reserved
+ w.Write("000000001110") // TOT section length
+ w.Write(totBytes()) // TOT data
+ w.Write(uint32(32)) // TOT CRC32
+ _, err := parsePSIData(w.Bytes())
+ assert.EqualError(t, err, "astits: parsing PSI table failed: astits: Table CRC32 20 != computed CRC32 6969b13")
+
+ // Valid
+ d, err := parsePSIData(psiBytes())
+ assert.NoError(t, err)
+ assert.Equal(t, d, psi)
+}
+
+var psiSectionHeader = &PSISectionHeader{
+ PrivateBit: true,
+ SectionLength: 2730,
+ SectionSyntaxIndicator: true,
+ TableID: 0,
+ TableType: PSITableTypePAT,
+}
+
+func psiSectionHeaderBytes() []byte {
+ w := astibinary.New()
+ w.Write(uint8(0)) // Table ID
+ w.Write("1") // Syntax section indicator
+ w.Write("1") // Private bit
+ w.Write("11") // Reserved
+ w.Write("101010101010") // Section length
+ return w.Bytes()
+}
+
+func TestParsePSISectionHeader(t *testing.T) {
+ // Unknown table type
+ w := astibinary.New()
+ w.Write(uint8(254)) // Table ID
+ w.Write("1") // Syntax section indicator
+ w.Write("0000000") // Finish the byte
+ var offset int
+ d, _, _, _, _ := parsePSISectionHeader(w.Bytes(), &offset)
+ assert.Equal(t, d, &PSISectionHeader{
+ TableID: 254,
+ TableType: PSITableTypeUnknown,
+ })
+
+ // Valid table type
+ offset = 0
+ d, offsetStart, offsetSectionsStart, offsetSectionsEnd, offsetEnd := parsePSISectionHeader(psiSectionHeaderBytes(), &offset)
+ assert.Equal(t, d, psiSectionHeader)
+ assert.Equal(t, 0, offsetStart)
+ assert.Equal(t, 3, offsetSectionsStart)
+ assert.Equal(t, 2729, offsetSectionsEnd)
+ assert.Equal(t, 2733, offsetEnd)
+}
+
+func TestPSITableType(t *testing.T) {
+ assert.Equal(t, PSITableTypeBAT, psiTableType(74))
+ for i := 78; i <= 111; i++ {
+ assert.Equal(t, PSITableTypeEIT, psiTableType(i))
+ }
+ assert.Equal(t, PSITableTypeDIT, psiTableType(126))
+ for i := 64; i <= 65; i++ {
+ assert.Equal(t, PSITableTypeNIT, psiTableType(i))
+ }
+ assert.Equal(t, PSITableTypeNull, psiTableType(255))
+ assert.Equal(t, PSITableTypePAT, psiTableType(0))
+ assert.Equal(t, PSITableTypePMT, psiTableType(2))
+ assert.Equal(t, PSITableTypeRST, psiTableType(113))
+ assert.Equal(t, PSITableTypeSDT, psiTableType(66))
+ assert.Equal(t, PSITableTypeSDT, psiTableType(70))
+ assert.Equal(t, PSITableTypeSIT, psiTableType(127))
+ assert.Equal(t, PSITableTypeST, psiTableType(114))
+ assert.Equal(t, PSITableTypeTDT, psiTableType(112))
+ assert.Equal(t, PSITableTypeTOT, psiTableType(115))
+}
+
+var psiSectionSyntaxHeader = &PSISectionSyntaxHeader{
+ CurrentNextIndicator: true,
+ LastSectionNumber: 3,
+ SectionNumber: 2,
+ TableIDExtension: 1,
+ VersionNumber: 21,
+}
+
+func psiSectionSyntaxHeaderBytes() []byte {
+ w := astibinary.New()
+ w.Write(uint16(1)) // Table ID extension
+ w.Write("11") // Reserved bits
+ w.Write("10101") // Version number
+ w.Write("1") // Current/next indicator
+ w.Write(uint8(2)) // Section number
+ w.Write(uint8(3)) // Last section number
+ return w.Bytes()
+}
+
+func TestParsePSISectionSyntaxHeader(t *testing.T) {
+ var offset int
+ assert.Equal(t, psiSectionSyntaxHeader, parsePSISectionSyntaxHeader(psiSectionSyntaxHeaderBytes(), &offset))
+}
+
+func TestPSIToData(t *testing.T) {
+ assert.Equal(t, []*Data{
+ {EIT: eit, PID: 2},
+ {NIT: nit, PID: 2},
+ {PAT: pat, PID: 2},
+ {PMT: pmt, PID: 2},
+ {SDT: sdt, PID: 2},
+ {TOT: tot, PID: 2},
+ }, psi.toData(uint16(2)))
+}
diff --git a/data_sdt.go b/data_sdt.go
new file mode 100644
index 0000000..6433dc1
--- /dev/null
+++ b/data_sdt.go
@@ -0,0 +1,70 @@
+package astits
+
+// Running statuses
+const (
+ RunningStatusNotRunning = 1
+ RunningStatusPausing = 3
+ RunningStatusRunning = 4
+ RunningStatusServiceOffAir = 5
+ RunningStatusStartsInAFewSeconds = 2
+ RunningStatusUndefined = 0
+)
+
+// SDTData represents an SDT data
+// Page: 33 | Chapter: 5.2.3 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+type SDTData struct {
+ OriginalNetworkID uint16
+ Services []*SDTDataService
+ TransportStreamID uint16
+}
+
+// SDTDataService represents an SDT data service
+type SDTDataService struct {
+ Descriptors []*Descriptor
+ HasEITPresentFollowing bool // When true indicates that EIT present/following information for the service is present in the current TS
+ HasEITSchedule bool // When true indicates that EIT schedule information for the service is present in the current TS
+ HasFreeCSAMode bool // When true indicates that access to one or more streams may be controlled by a CA system.
+ RunningStatus uint8
+ ServiceID uint16
+}
+
+// parseSDTSection parses an SDT section
+func parseSDTSection(i []byte, offset *int, offsetSectionsEnd int, tableIDExtension uint16) (d *SDTData) {
+ // Init
+ d = &SDTData{TransportStreamID: tableIDExtension}
+
+ // Original network ID
+ d.OriginalNetworkID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // Reserved for future use
+ *offset += 1
+
+ // Loop until end of section data is reached
+ for *offset < offsetSectionsEnd {
+ // Service ID
+ var s = &SDTDataService{}
+ s.ServiceID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+ *offset += 2
+
+ // EIT schedule flag
+ s.HasEITSchedule = uint8(i[*offset]&0x2) > 0
+
+ // EIT present/following flag
+ s.HasEITPresentFollowing = uint8(i[*offset]&0x1) > 0
+ *offset += 1
+
+ // Running status
+ s.RunningStatus = uint8(i[*offset]) >> 5
+
+ // Free CA mode
+ s.HasFreeCSAMode = uint8(i[*offset]&0x10) > 0
+
+ // Descriptors
+ s.Descriptors = parseDescriptors(i, offset)
+
+ // Append service
+ d.Services = append(d.Services, s)
+ }
+ return
+}
diff --git a/data_sdt_test.go b/data_sdt_test.go
new file mode 100644
index 0000000..51b8e8f
--- /dev/null
+++ b/data_sdt_test.go
@@ -0,0 +1,42 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+var sdt = &SDTData{
+ OriginalNetworkID: 2,
+ Services: []*SDTDataService{{
+ Descriptors: descriptors,
+ HasEITPresentFollowing: true,
+ HasEITSchedule: true,
+ HasFreeCSAMode: true,
+ RunningStatus: 5,
+ ServiceID: 3,
+ }},
+ TransportStreamID: 1,
+}
+
+func sdtBytes() []byte {
+ w := astibinary.New()
+ w.Write(uint16(2)) // Original network ID
+ w.Write(uint8(0)) // Reserved for future use
+ w.Write(uint16(3)) // Service #1 id
+ w.Write("000000") // Service #1 reserved for future use
+ w.Write("1") // Service #1 EIT schedule flag
+ w.Write("1") // Service #1 EIT present/following flag
+ w.Write("101") // Service #1 running status
+ w.Write("1") // Service #1 free CA mode
+ descriptorsBytes(w) // Service #1 descriptors
+ return w.Bytes()
+}
+
+func TestParseSDTSection(t *testing.T) {
+ var offset int
+ var b = sdtBytes()
+ d := parseSDTSection(b, &offset, len(b), uint16(1))
+ assert.Equal(t, d, sdt)
+}
diff --git a/data_test.go b/data_test.go
new file mode 100644
index 0000000..2396e31
--- /dev/null
+++ b/data_test.go
@@ -0,0 +1,86 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestParseData(t *testing.T) {
+ // Init
+ pm := newProgramMap()
+ ps := []*Packet{}
+
+ // Custom parser
+ cds := []*Data{{PID: 1}}
+ var c = func(ps []*Packet) (o []*Data, skip bool, err error) {
+ o = cds
+ skip = true
+ return
+ }
+ ds, err := parseData(ps, c, pm)
+ assert.NoError(t, err)
+ assert.Equal(t, cds, ds)
+
+ // Do nothing for CAT
+ ps = []*Packet{{Header: &PacketHeader{PID: PIDCAT}}}
+ ds, err = parseData(ps, nil, pm)
+ assert.NoError(t, err)
+ assert.Empty(t, ds)
+
+ // PES
+ p := pesWithHeaderBytes()
+ ps = []*Packet{
+ {
+ Header: &PacketHeader{PID: uint16(256)},
+ Payload: p[:33],
+ },
+ {
+ Header: &PacketHeader{PID: uint16(256)},
+ Payload: p[33:],
+ },
+ }
+ ds, err = parseData(ps, nil, pm)
+ assert.NoError(t, err)
+ assert.Equal(t, []*Data{{PES: pesWithHeader, PID: uint16(256)}}, ds)
+
+ // PSI
+ pm.set(uint16(256), uint16(1))
+ p = psiBytes()
+ ps = []*Packet{
+ {
+ Header: &PacketHeader{PID: uint16(256)},
+ Payload: p[:33],
+ },
+ {
+ Header: &PacketHeader{PID: uint16(256)},
+ Payload: p[33:],
+ },
+ }
+ ds, err = parseData(ps, nil, pm)
+ assert.NoError(t, err)
+ assert.Equal(t, psi.toData(uint16(256)), ds)
+}
+
+func TestIsPSIPayload(t *testing.T) {
+ pm := newProgramMap()
+ var pids []int
+ for i := 0; i <= 255; i++ {
+ if isPSIPayload(uint16(i), pm) {
+ pids = append(pids, i)
+ }
+ }
+ assert.Equal(t, []int{0, 16, 17, 18, 19, 20, 30, 31}, pids)
+ pm.set(uint16(1), uint16(0))
+ assert.True(t, isPSIPayload(uint16(1), pm))
+}
+
+func TestIsPESPayload(t *testing.T) {
+ w := astibinary.New()
+ w.Write("0000000000000001")
+ assert.False(t, isPESPayload(w.Bytes()))
+ w.Reset()
+ w.Write("000000000000000000000001")
+ assert.True(t, isPESPayload(w.Bytes()))
+}
diff --git a/data_tot.go b/data_tot.go
new file mode 100644
index 0000000..97fad93
--- /dev/null
+++ b/data_tot.go
@@ -0,0 +1,23 @@
+package astits
+
+import "time"
+
+// TOTData represents a TOT data
+// Page: 39 | Chapter: 5.2.6 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+type TOTData struct {
+ Descriptors []*Descriptor
+ UTCTime time.Time
+}
+
+// parseTOTSection parses a TOT section
+func parseTOTSection(i []byte, offset *int) (d *TOTData) {
+ // Init
+ d = &TOTData{}
+
+ // UTC time
+ d.UTCTime = parseDVBTime(i, offset)
+
+ // Descriptors
+ d.Descriptors = parseDescriptors(i, offset)
+ return
+}
diff --git a/data_tot_test.go b/data_tot_test.go
new file mode 100644
index 0000000..de7c8e8
--- /dev/null
+++ b/data_tot_test.go
@@ -0,0 +1,27 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+var tot = &TOTData{
+ Descriptors: descriptors,
+ UTCTime: dvbTime,
+}
+
+func totBytes() []byte {
+ w := astibinary.New()
+ w.Write(dvbTimeBytes) // UTC time
+ w.Write("0000") // Reserved
+	descriptorsBytes(w)   // Descriptors
+ return w.Bytes()
+}
+
+func TestParseTOTSection(t *testing.T) {
+ var offset int
+ d := parseTOTSection(totBytes(), &offset)
+ assert.Equal(t, d, tot)
+}
diff --git a/demuxer.go b/demuxer.go
new file mode 100644
index 0000000..9c8b070
--- /dev/null
+++ b/demuxer.go
@@ -0,0 +1,185 @@
+package astits
+
+import (
+ "context"
+ "fmt"
+ "io"
+
+ "github.com/pkg/errors"
+)
+
+// Sync byte
+const syncByte = '\x47'
+
+// Errors
+var (
+ ErrNoMorePackets = errors.New("astits: no more packets")
+ ErrPacketMustStartWithASyncByte = errors.New("astits: packet must start with a sync byte")
+)
+
+// Demuxer represents a demuxer
+// https://en.wikipedia.org/wiki/MPEG_transport_stream
+// http://seidl.cs.vsb.cz/download/dvb/DVB_Poster.pdf
+// http://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.13.01_40/en_300468v011301o.pdf
+type Demuxer struct {
+ ctx context.Context
+ dataBuffer []*Data
+ packetBuffer *packetBuffer
+ PacketSize int
+ PacketsParser PacketsParser
+ programMap programMap
+ r io.Reader
+}
+
+// PacketsParser represents an object capable of parsing a set of packets containing a unique payload spanning over those packets
+// Use the returned skip argument to indicate whether the default process should still be executed on the set of packets
+type PacketsParser func(ps []*Packet) (ds []*Data, skip bool, err error)
+
+// New creates a new Demuxer that reads transport stream packets from r
+func New(ctx context.Context, r io.Reader) *Demuxer {
+ return &Demuxer{
+ ctx: ctx,
+ packetBuffer: newPacketBuffer(),
+ programMap: newProgramMap(),
+ r: r,
+ }
+}
+
+// autoDetectPacketSize updates the packet size based on the first bytes
+// Minimum packet size is 188 and is bounded by 2 sync bytes
+// Assumption is made that the first byte of the reader is a sync byte
+func (dmx *Demuxer) autoDetectPacketSize() (err error) {
+ // Read first bytes
+ const l = 193
+ var b = make([]byte, l)
+ if _, err = dmx.r.Read(b); err != nil {
+ err = errors.Wrapf(err, "astits: reading first %d bytes failed", l)
+ return
+ }
+
+ // Packet must start with a sync byte
+ if b[0] != syncByte {
+ err = ErrPacketMustStartWithASyncByte
+ return
+ }
+
+ // Look for sync bytes
+ for idx, b := range b {
+ if b == syncByte && idx >= 188 {
+ // Update packet size
+ dmx.PacketSize = idx
+
+ // Sync reader
+ var ls = dmx.PacketSize - (l - dmx.PacketSize)
+ if _, err = dmx.r.Read(make([]byte, ls)); err != nil {
+ err = errors.Wrapf(err, "astits: reading %d bytes to sync reader failed", ls)
+ return
+ }
+ return
+ }
+ }
+ err = fmt.Errorf("astits: only one sync byte detected in first %d bytes", l)
+ return
+}
+
+// NextPacket retrieves the next packet
+func (dmx *Demuxer) NextPacket() (p *Packet, err error) {
+ // Check ctx error
+ if err = dmx.ctx.Err(); err != nil {
+ return
+ }
+
+ // Auto detect packet size
+ if dmx.PacketSize == 0 {
+ // Auto detect packet size
+ if err = dmx.autoDetectPacketSize(); err != nil {
+ err = errors.Wrap(err, "astits: auto detecting packet size failed")
+ return
+ }
+
+ // Rewind if possible
+ if s, ok := dmx.r.(io.Seeker); ok {
+ if _, err = s.Seek(0, 0); err != nil {
+ err = errors.Wrap(err, "astits: seeking to 0 failed")
+ return
+ }
+ }
+ }
+
+ // Read
+ var b = make([]byte, dmx.PacketSize)
+ if _, err = io.ReadFull(dmx.r, b); err != nil {
+ if err == io.EOF || err == io.ErrUnexpectedEOF {
+ err = ErrNoMorePackets
+ } else {
+ err = errors.Wrapf(err, "astits: reading %d bytes failed", dmx.PacketSize)
+ }
+ return
+ }
+
+ // Parse packet
+ if p, err = parsePacket(b); err != nil {
+ err = errors.Wrap(err, "astits: building packet failed")
+ return
+ }
+ return
+}
+
+// NextData retrieves the next data
+func (dmx *Demuxer) NextData() (d *Data, err error) {
+ // Check data buffer
+ if len(dmx.dataBuffer) > 0 {
+ d = dmx.dataBuffer[0]
+ dmx.dataBuffer = dmx.dataBuffer[1:]
+ return
+ }
+
+ // Loop through packets
+ var p *Packet
+ var ps []*Packet
+ var ds []*Data
+ for {
+ // Get next packet
+ if p, err = dmx.NextPacket(); err != nil {
+ // If no more packets, we still need to dump the buffer
+ if ps = dmx.packetBuffer.dump(); err != ErrNoMorePackets || len(ps) == 0 {
+ if err == ErrNoMorePackets {
+ return
+ }
+ err = errors.Wrap(err, "astits: fetching next packet failed")
+ return
+ }
+ } else {
+ // Add packet to the buffer
+ if ps = dmx.packetBuffer.add(p); len(ps) == 0 {
+ continue
+ }
+ }
+
+ // Parse data
+ if ds, err = parseData(ps, dmx.PacketsParser, dmx.programMap); err != nil {
+ err = errors.Wrap(err, "astits: building new data failed")
+ return
+ }
+
+ // Check whether there is data to be processed
+ if len(ds) > 0 {
+ // Process data
+ d = ds[0]
+ dmx.dataBuffer = append(dmx.dataBuffer, ds[1:]...)
+
+ // Update program map
+ for _, v := range ds {
+ if v.PAT != nil {
+ for _, pgm := range v.PAT.Programs {
+ // Program number 0 is reserved to NIT
+ if pgm.ProgramNumber > 0 {
+ dmx.programMap.set(pgm.ProgramMapID, pgm.ProgramNumber)
+ }
+ }
+ }
+ }
+ return
+ }
+ }
+}
diff --git a/demuxer_test.go b/demuxer_test.go
new file mode 100644
index 0000000..9f649ba
--- /dev/null
+++ b/demuxer_test.go
@@ -0,0 +1,95 @@
+package astits
+
+import (
+ "bytes"
+ "context"
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestDemuxerAutoDetectPacketSize(t *testing.T) {
+ // Packet should start with a sync byte
+ w := astibinary.New()
+ w.Write(uint8(2))
+ w.Write(byte(syncByte))
+ dmx := New(context.Background(), bytes.NewReader(w.Bytes()))
+ err := dmx.autoDetectPacketSize()
+ assert.EqualError(t, err, ErrPacketMustStartWithASyncByte.Error())
+
+ // Valid packet size
+ w.Reset()
+ w.Write(byte(syncByte))
+ w.Write(make([]byte, 20))
+ w.Write(byte(syncByte))
+ w.Write(make([]byte, 166))
+ w.Write(byte(syncByte))
+ w.Write(make([]byte, 187))
+ w.Write([]byte("test"))
+ r := bytes.NewReader(w.Bytes())
+ dmx = New(context.Background(), r)
+ err = dmx.autoDetectPacketSize()
+ assert.NoError(t, err)
+ assert.Equal(t, 188, dmx.PacketSize)
+ assert.Equal(t, 4, r.Len())
+}
+
+func TestDemuxerNextPacket(t *testing.T) {
+ // Ctx error
+ ctx, cancel := context.WithCancel(context.Background())
+ dmx := New(ctx, bytes.NewReader([]byte{}))
+ cancel()
+ _, err := dmx.NextPacket()
+ assert.Error(t, err)
+
+ // Valid
+ w := astibinary.New()
+ b1, p1 := packet(*packetHeader, *packetAdaptationField, []byte("1"))
+ w.Write(b1)
+ b2, p2 := packet(*packetHeader, *packetAdaptationField, []byte("2"))
+ w.Write(b2)
+ dmx = New(context.Background(), bytes.NewReader(w.Bytes()))
+
+ // First packet
+ p, err := dmx.NextPacket()
+ assert.NoError(t, err)
+ assert.Equal(t, p1, p)
+ assert.Equal(t, 192, dmx.PacketSize)
+
+ // Second packet
+ p, err = dmx.NextPacket()
+ assert.NoError(t, err)
+ assert.Equal(t, p2, p)
+
+ // EOF
+ _, err = dmx.NextPacket()
+ assert.EqualError(t, err, ErrNoMorePackets.Error())
+}
+
+func TestDemuxerNextData(t *testing.T) {
+ // Init
+ w := astibinary.New()
+ b := psiBytes()
+ b1, _ := packet(PacketHeader{ContinuityCounter: uint8(0), PayloadUnitStartIndicator: true, PID: PIDPAT}, PacketAdaptationField{}, b[:147])
+ w.Write(b1)
+ b2, _ := packet(PacketHeader{ContinuityCounter: uint8(1), PID: PIDPAT}, PacketAdaptationField{}, b[147:])
+ w.Write(b2)
+ dmx := New(context.Background(), bytes.NewReader(w.Bytes()))
+
+ // Next data
+ var ds []*Data
+ for _, s := range psi.Sections {
+ if s.Header.TableType != PSITableTypeUnknown {
+ d, err := dmx.NextData()
+ assert.NoError(t, err)
+ ds = append(ds, d)
+ }
+ }
+ assert.Equal(t, psi.toData(PIDPAT), ds)
+ assert.Equal(t, map[uint16]uint16{0x3: 0x2, 0x5: 0x4}, dmx.programMap.p)
+
+ // No more packets
+ _, err := dmx.NextData()
+ assert.EqualError(t, err, ErrNoMorePackets.Error())
+}
diff --git a/descriptor.go b/descriptor.go
new file mode 100644
index 0000000..7bdeed0
--- /dev/null
+++ b/descriptor.go
@@ -0,0 +1,300 @@
+package astits
+
+import "github.com/asticode/go-astilog"
+
+// Audio types
+// Page: 683 | https://books.google.fr/books?id=6dgWB3-rChYC&printsec=frontcover&hl=fr
+const (
+ AudioTypeCleanEffects = 0x1
+ AudioTypeHearingImpaired = 0x2
+ AudioTypeVisualImpairedCommentary = 0x3
+)
+
+// Descriptor tags
+// Page: 42 | Chapter: 6.1 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+const (
+ DescriptorTagAC3 = 0x6a
+ DescriptorTagISO639LanguageAndAudioType = 0xa
+ DescriptorTagMaximumBitrate = 0xe
+ DescriptorTagNetworkName = 0x40
+ DescriptorTagService = 0x48
+ DescriptorTagShortEvent = 0x4d
+ DescriptorTagStreamIdentifier = 0x52
+ DescriptorTagSubtitling = 0x59
+ DescriptorTagTeletext = 0x56
+)
+
+// Service types
+// Page: 97 | Chapter: 6.2.33 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+// https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf / page 97
+const (
+ ServiceTypeDigitalTelevisionService = 0x1
+)
+
+// Teletext types
+// Page: 106 | Chapter: 6.2.43 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+const (
+ TeletextTypeAdditionalInformationPage = 0x3
+ TeletextTypeInitialTeletextPage = 0x1
+ TeletextTypeProgramSchedulePage = 0x4
+ TeletextTypeTeletextSubtitlePage = 0x2
+ TeletextTypeTeletextSubtitlePageForHearingImpairedPeople = 0x5
+)
+
+// Descriptor represents a descriptor
+type Descriptor struct {
+ AC3 *DescriptorAC3
+ ISO639LanguageAndAudioType *DescriptorISO639LanguageAndAudioType
+ Length uint8
+ MaximumBitrate *DescriptorMaximumBitrate
+ NetworkName *DescriptorNetworkName
+ Service *DescriptorService
+ ShortEvent *DescriptorShortEvent
+ StreamIdentifier *DescriptorStreamIdentifier
+ Subtitling *DescriptorSubtitling
+ Tag uint8 // the tag defines the structure of the contained data following the descriptor length.
+ Teletext *DescriptorTeletext
+}
+
+// DescriptorAC3 represents an AC3 descriptor
+// Page: 165 | https://books.google.fr/books?id=6dgWB3-rChYC&printsec=frontcover&hl=fr
+type DescriptorAC3 struct {
+ AdditionalInfo []byte
+ ASVC uint8
+ BSID uint8
+ ComponentType uint8
+ HasASVC bool
+ HasBSID bool
+ HasComponentType bool
+ HasMainID bool
+ MainID uint8
+}
+
+func newDescriptorAC3(i []byte) (d *DescriptorAC3) {
+ var offset int
+ d = &DescriptorAC3{}
+ d.HasComponentType = uint8(i[offset]&0x80) > 0
+ d.HasBSID = uint8(i[offset]&0x40) > 0
+ d.HasMainID = uint8(i[offset]&0x20) > 0
+ d.HasASVC = uint8(i[offset]&0x10) > 0
+ offset += 1
+ if d.HasComponentType {
+ d.ComponentType = uint8(i[offset])
+ offset += 1
+ }
+ if d.HasBSID {
+ d.BSID = uint8(i[offset])
+ offset += 1
+ }
+ if d.HasMainID {
+ d.MainID = uint8(i[offset])
+ offset += 1
+ }
+ if d.HasASVC {
+ d.ASVC = uint8(i[offset])
+ offset += 1
+ }
+ for offset < len(i) {
+ d.AdditionalInfo = append(d.AdditionalInfo, i[offset])
+ offset += 1
+ }
+ return
+}
+
+// DescriptorISO639LanguageAndAudioType represents an ISO639 language descriptor
+type DescriptorISO639LanguageAndAudioType struct {
+ Language []byte
+ Type uint8
+}
+
+func newDescriptorISO639LanguageAndAudioType(i []byte) *DescriptorISO639LanguageAndAudioType {
+ return &DescriptorISO639LanguageAndAudioType{
+ Language: i[0:3],
+ Type: uint8(i[3]),
+ }
+}
+
+// DescriptorMaximumBitrate represents a maximum bitrate descriptor
+type DescriptorMaximumBitrate struct {
+ Bitrate uint32 // In bytes/second
+}
+
+func newDescriptorMaximumBitrate(i []byte) *DescriptorMaximumBitrate {
+ return &DescriptorMaximumBitrate{Bitrate: (uint32(i[0]&0x3f)<<16 | uint32(i[1])<<8 | uint32(i[2])) * 50}
+}
+
+// DescriptorNetworkName represents a network name descriptor
+// Page: 93 | Chapter: 6.2.27 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+type DescriptorNetworkName struct{ Name []byte }
+
+func newDescriptorNetworkName(i []byte) *DescriptorNetworkName {
+ return &DescriptorNetworkName{Name: i}
+}
+
+// DescriptorService represents a service descriptor
+// Page: 96 | Chapter: 6.2.33 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+type DescriptorService struct {
+ Name []byte
+ Provider []byte
+ Type uint8
+}
+
+func newDescriptorService(i []byte) (d *DescriptorService) {
+ var offset int
+ d = &DescriptorService{Type: uint8(i[offset])}
+ offset += 1
+ var providerLength = int(i[offset])
+ offset += 1
+ d.Provider = i[offset : offset+providerLength]
+ offset += providerLength
+ var nameLength = int(i[offset])
+ offset += 1
+ d.Name = i[offset : offset+nameLength]
+ return
+}
+
+// DescriptorShortEvent represents a short event descriptor
+// Page: 99 | Chapter: 6.2.37 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+type DescriptorShortEvent struct {
+ EventName []byte
+ Language []byte
+ Text []byte
+}
+
+func newDescriptorShortEvent(i []byte) (d *DescriptorShortEvent) {
+ var offset int
+ d = &DescriptorShortEvent{}
+ d.Language = i[:3]
+ offset += 3
+ var length = int(i[offset])
+ offset += 1
+ d.EventName = i[offset : offset+length]
+ offset += length
+ length = int(i[offset])
+ offset += 1
+ d.Text = i[offset : offset+length]
+ return
+}
+
+// DescriptorStreamIdentifier represents a stream identifier descriptor
+// Page: 102 | Chapter: 6.2.39 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+type DescriptorStreamIdentifier struct{ ComponentTag uint8 }
+
+func newDescriptorStreamIdentifier(i []byte) *DescriptorStreamIdentifier {
+ return &DescriptorStreamIdentifier{ComponentTag: uint8(i[0])}
+}
+
+// DescriptorSubtitling represents a subtitling descriptor
+// Page: 103 | Chapter: 6.2.41 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+type DescriptorSubtitling struct {
+ Items []*DescriptorSubtitlingItem
+}
+
+// DescriptorSubtitlingItem represents subtitling descriptor item
+type DescriptorSubtitlingItem struct {
+ AncillaryPageID uint16
+ CompositionPageID uint16
+ Language []byte
+ Type uint8
+}
+
+func newDescriptorSubtitling(i []byte) (d *DescriptorSubtitling) {
+ d = &DescriptorSubtitling{}
+ var offset int
+ for offset < len(i) {
+ itm := &DescriptorSubtitlingItem{}
+ itm.Language = i[offset : offset+3]
+ offset += 3
+ itm.Type = uint8(i[offset])
+ offset += 1
+ itm.CompositionPageID = uint16(i[offset])<<8 | uint16(i[offset+1])
+ offset += 2
+ itm.AncillaryPageID = uint16(i[offset])<<8 | uint16(i[offset+1])
+ offset += 2
+ d.Items = append(d.Items, itm)
+ }
+ return
+}
+
+// DescriptorTeletext represents a teletext descriptor
+// Page: 105 | Chapter: 6.2.43 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+type DescriptorTeletext struct {
+ Items []*DescriptorTeletextItem
+}
+
+// DescriptorTeletextItem represents a teletext descriptor item
+type DescriptorTeletextItem struct {
+ Language []byte
+ Magazine uint8
+ Page uint8
+ Type uint8
+}
+
+func newDescriptorTeletext(i []byte) (d *DescriptorTeletext) {
+ var offset int
+ d = &DescriptorTeletext{}
+ for offset < len(i) {
+ itm := &DescriptorTeletextItem{}
+ itm.Language = i[offset : offset+3]
+ offset += 3
+ itm.Type = uint8(i[offset]) >> 3
+ itm.Magazine = uint8(i[offset] & 0x7)
+ offset += 1
+ itm.Page = uint8(i[offset])>>4*10 + uint8(i[offset]&0xf)
+ offset += 1
+ d.Items = append(d.Items, itm)
+ }
+ return
+}
+
+// parseDescriptors parses descriptors
+func parseDescriptors(i []byte, offset *int) (o []*Descriptor) {
+ // Get length
+ var length = int(uint16(i[*offset]&0xf)<<8 | uint16(i[*offset+1]))
+ *offset += 2
+
+ // Loop
+ if length > 0 {
+ length += *offset
+ for *offset < length {
+ // Init
+ var d = &Descriptor{
+ Length: uint8(i[*offset+1]),
+ Tag: uint8(i[*offset]),
+ }
+ *offset += 2
+
+ // Parse data
+ if d.Length > 0 {
+ // Switch on tag
+ var b = i[*offset : *offset+int(d.Length)]
+ switch d.Tag {
+ case DescriptorTagAC3:
+ d.AC3 = newDescriptorAC3(b)
+ case DescriptorTagISO639LanguageAndAudioType:
+ d.ISO639LanguageAndAudioType = newDescriptorISO639LanguageAndAudioType(b)
+ case DescriptorTagMaximumBitrate:
+ d.MaximumBitrate = newDescriptorMaximumBitrate(b)
+ case DescriptorTagNetworkName:
+ d.NetworkName = newDescriptorNetworkName(b)
+ case DescriptorTagService:
+ d.Service = newDescriptorService(b)
+ case DescriptorTagShortEvent:
+ d.ShortEvent = newDescriptorShortEvent(b)
+ case DescriptorTagStreamIdentifier:
+ d.StreamIdentifier = newDescriptorStreamIdentifier(b)
+ case DescriptorTagSubtitling:
+ d.Subtitling = newDescriptorSubtitling(b)
+ case DescriptorTagTeletext:
+ d.Teletext = newDescriptorTeletext(b)
+ default:
+ // TODO Remove this log
+ astilog.Debugf("unlisted descriptor tag %d", d.Tag)
+ }
+ *offset += int(d.Length)
+ }
+ o = append(o, d)
+ }
+ }
+ return
+}
diff --git a/descriptor_test.go b/descriptor_test.go
new file mode 100644
index 0000000..14ca6f5
--- /dev/null
+++ b/descriptor_test.go
@@ -0,0 +1,155 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+var descriptors = []*Descriptor{{
+ Length: 0x1,
+ StreamIdentifier: &DescriptorStreamIdentifier{ComponentTag: 0x7},
+ Tag: DescriptorTagStreamIdentifier,
+}}
+
+func descriptorsBytes(w *astibinary.Writer) {
+ w.Write("000000000011") // Overall length
+ w.Write(uint8(DescriptorTagStreamIdentifier)) // Tag
+ w.Write(uint8(1)) // Length
+ w.Write(uint8(7)) // Component tag
+}
+
+func TestParseDescriptor(t *testing.T) {
+ // Init
+ w := astibinary.New()
+ w.Write(uint16(97)) // Descriptors length
+ // AC3
+ w.Write(uint8(DescriptorTagAC3)) // Tag
+ w.Write(uint8(9)) // Length
+ w.Write("1") // Component type flag
+ w.Write("1") // BSID flag
+ w.Write("1") // MainID flag
+ w.Write("1") // ASVC flag
+ w.Write("0000") // Reserved flags
+ w.Write(uint8(1)) // Component type
+ w.Write(uint8(2)) // BSID
+ w.Write(uint8(3)) // MainID
+ w.Write(uint8(4)) // ASVC
+ w.Write([]byte("info")) // Additional info
+ // ISO639 language and audio type
+ w.Write(uint8(DescriptorTagISO639LanguageAndAudioType)) // Tag
+ w.Write(uint8(4)) // Length
+ w.Write([]byte("eng")) // Language
+ w.Write(uint8(AudioTypeCleanEffects)) // Audio type
+ // Maximum bitrate
+ w.Write(uint8(DescriptorTagMaximumBitrate)) // Tag
+ w.Write(uint8(3)) // Length
+ w.Write("000000000000000000000001") // Maximum bitrate
+ // Network name
+ w.Write(uint8(DescriptorTagNetworkName)) // Tag
+ w.Write(uint8(4)) // Length
+ w.Write([]byte("name")) // Name
+ // Service
+ w.Write(uint8(DescriptorTagService)) // Tag
+ w.Write(uint8(18)) // Length
+ w.Write(uint8(ServiceTypeDigitalTelevisionService)) // Type
+ w.Write(uint8(8)) // Provider name length
+ w.Write([]byte("provider")) // Provider name
+ w.Write(uint8(7)) // Service name length
+ w.Write([]byte("service")) // Service name
+ // Short event
+ w.Write(uint8(DescriptorTagShortEvent)) // Tag
+ w.Write(uint8(14)) // Length
+ w.Write([]byte("eng")) // Language code
+ w.Write(uint8(5)) // Event name length
+ w.Write([]byte("event")) // Event name
+ w.Write(uint8(4)) // Text length
+ w.Write([]byte("text"))
+ // Stream identifier
+ w.Write(uint8(DescriptorTagStreamIdentifier)) // Tag
+ w.Write(uint8(1)) // Length
+ w.Write(uint8(2)) // Component tag
+ // Subtitling
+ w.Write(uint8(DescriptorTagSubtitling)) // Tag
+ w.Write(uint8(16)) // Length
+ w.Write([]byte("lg1")) // Item #1 language
+ w.Write(uint8(1)) // Item #1 type
+ w.Write(uint16(2)) // Item #1 composition page
+ w.Write(uint16(3)) // Item #1 ancillary page
+ w.Write([]byte("lg2")) // Item #2 language
+ w.Write(uint8(4)) // Item #2 type
+ w.Write(uint16(5)) // Item #2 composition page
+ w.Write(uint16(6)) // Item #2 ancillary page
+ // Teletext
+ w.Write(uint8(DescriptorTagTeletext)) // Tag
+ w.Write(uint8(10)) // Length
+ w.Write([]byte("lg1")) // Item #1 language
+ w.Write("00001") // Item #1 type
+ w.Write("010") // Item #1 magazine
+ w.Write("00010010") // Item #1 page number
+ w.Write([]byte("lg2")) // Item #2 language
+ w.Write("00011") // Item #2 type
+ w.Write("100") // Item #2 magazine
+ w.Write("00100011") // Item #2 page number
+
+ // Assert
+ var offset int
+ ds := parseDescriptors(w.Bytes(), &offset)
+ assert.Equal(t, *ds[0].AC3, DescriptorAC3{
+ AdditionalInfo: []byte("info"),
+ ASVC: uint8(4),
+ BSID: uint8(2),
+ ComponentType: uint8(1),
+ HasASVC: true,
+ HasBSID: true,
+ HasComponentType: true,
+ HasMainID: true,
+ MainID: uint8(3),
+ })
+ assert.Equal(t, *ds[1].ISO639LanguageAndAudioType, DescriptorISO639LanguageAndAudioType{
+ Language: []byte("eng"),
+ Type: AudioTypeCleanEffects,
+ })
+ assert.Equal(t, *ds[2].MaximumBitrate, DescriptorMaximumBitrate{Bitrate: uint32(50)})
+ assert.Equal(t, *ds[3].NetworkName, DescriptorNetworkName{Name: []byte("name")})
+ assert.Equal(t, *ds[4].Service, DescriptorService{
+ Name: []byte("service"),
+ Provider: []byte("provider"),
+ Type: ServiceTypeDigitalTelevisionService,
+ })
+ assert.Equal(t, *ds[5].ShortEvent, DescriptorShortEvent{
+ EventName: []byte("event"),
+ Language: []byte("eng"),
+ Text: []byte("text"),
+ })
+ assert.Equal(t, *ds[6].StreamIdentifier, DescriptorStreamIdentifier{ComponentTag: 0x2})
+ assert.Equal(t, *ds[7].Subtitling, DescriptorSubtitling{Items: []*DescriptorSubtitlingItem{
+ {
+ AncillaryPageID: 3,
+ CompositionPageID: 2,
+ Language: []byte("lg1"),
+ Type: 1,
+ },
+ {
+ AncillaryPageID: 6,
+ CompositionPageID: 5,
+ Language: []byte("lg2"),
+ Type: 4,
+ },
+ }})
+ assert.Equal(t, *ds[8].Teletext, DescriptorTeletext{Items: []*DescriptorTeletextItem{
+ {
+ Language: []byte("lg1"),
+ Magazine: uint8(2),
+ Page: uint8(12),
+ Type: uint8(1),
+ },
+ {
+ Language: []byte("lg2"),
+ Magazine: uint8(4),
+ Page: uint8(23),
+ Type: uint8(3),
+ },
+ }})
+}
diff --git a/dvb.go b/dvb.go
new file mode 100644
index 0000000..b0b45c9
--- /dev/null
+++ b/dvb.go
@@ -0,0 +1,45 @@
+package astits
+
+import (
+ "fmt"
+ "time"
+)
+
+// parseDVBTime parses a DVB time
+// This field is coded as 16 bits giving the 16 LSBs of MJD followed by 24 bits coded as 6 digits in 4-bit Binary
+// Coded Decimal (BCD). If the start time is undefined (e.g. for an event in a NVOD reference service) all bits of the
+// field are set to "1".
+// I apologize for the computation which is really messy but details are given in the documentation
+// Page: 160 | Annex C | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
+func parseDVBTime(i []byte, offset *int) (t time.Time) {
+ // Date
+ var mjd = uint16(i[*offset])<<8 | uint16(i[*offset+1])
+ var yt = int((float64(mjd) - 15078.2) / 365.25)
+ var mt = int((float64(mjd) - 14956.1 - float64(int(float64(yt)*365.25))) / 30.6001)
+ var d = int(float64(mjd) - 14956 - float64(int(float64(yt)*365.25)) - float64(int(float64(mt)*30.6001)))
+ var k int
+ if mt == 14 || mt == 15 {
+ k = 1
+ }
+ var y = yt + k
+ var m = mt - 1 - k*12
+ t, _ = time.Parse("06-01-02", fmt.Sprintf("%d-%d-%d", y, m, d))
+ *offset += 2
+
+ // Time
+ t = t.Add(parseDVBDuration(i, offset))
+ return
+}
+
+// parseDVBDuration parses a duration
+// 24-bit field containing the duration of the event in hours, minutes, seconds. Format: 6 digits, 4-bit BCD = 24 bits
+func parseDVBDuration(i []byte, offset *int) (d time.Duration) {
+ d = parseDVBDurationByte(i[*offset])*time.Hour + parseDVBDurationByte(i[*offset+1])*time.Minute + parseDVBDurationByte(i[*offset+2])*time.Second
+ *offset += 3
+ return
+}
+
+// parseDVBDurationByte parses a duration byte
+func parseDVBDurationByte(i byte) time.Duration {
+ return time.Duration(uint8(i)>>4*10 + uint8(i)&0xf)
+}
diff --git a/dvb_test.go b/dvb_test.go
new file mode 100644
index 0000000..b86abe9
--- /dev/null
+++ b/dvb_test.go
@@ -0,0 +1,29 @@
+package astits
+
+import (
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+)
+
+var (
+ dvbDuration = time.Hour + 45*time.Minute + 30*time.Second
+ dvbDurationBytes = []byte{0x1, 0x45, 0x30} // 014530
+ dvbTime, _ = time.Parse("2006-01-02 15:04:05", "1993-10-13 12:45:00")
+ dvbTimeBytes = []byte{0xc0, 0x79, 0x12, 0x45, 0x0} // C079124500
+)
+
+func TestParseDVBTime(t *testing.T) {
+ var offset int
+ d := parseDVBTime(dvbTimeBytes, &offset)
+ assert.Equal(t, dvbTime, d)
+ assert.Equal(t, 5, offset)
+}
+
+func TestParseDVBDuration(t *testing.T) {
+ var offset int
+ d := parseDVBDuration(dvbDurationBytes, &offset)
+ assert.Equal(t, dvbDuration, d)
+ assert.Equal(t, 3, offset)
+}
diff --git a/packet.go b/packet.go
new file mode 100644
index 0000000..7e6b2e1
--- /dev/null
+++ b/packet.go
@@ -0,0 +1,207 @@
+package astits
+
+// Scrambling Controls
+const (
+ ScramblingControlNotScrambled = 0
+ ScramblingControlReservedForFutureUse = 1
+ ScramblingControlScrambledWithEvenKey = 2
+ ScramblingControlScrambledWithOddKey = 3
+)
+
+// Packet represents a packet
+// https://en.wikipedia.org/wiki/MPEG_transport_stream
+type Packet struct {
+ AdaptationField *PacketAdaptationField
+ Bytes []byte // This is the whole packet content
+ Header *PacketHeader
+ Payload []byte // This is only the payload content
+}
+
+// PacketHeader represents a packet header
+type PacketHeader struct {
+ ContinuityCounter uint8 // Sequence number of payload packets (0x00 to 0x0F) within each stream (except PID 8191)
+ HasAdaptationField bool
+ HasPayload bool
+ PayloadUnitStartIndicator bool // Set when a PES, PSI, or DVB-MIP packet begins immediately following the header.
+ PID uint16 // Packet Identifier, describing the payload data.
+ TransportErrorIndicator bool // Set when a demodulator can't correct errors from FEC data; indicating the packet is corrupt.
+ TransportPriority bool // Set when the current packet has a higher priority than other packets with the same PID.
+ TransportScramblingControl uint8
+}
+
+// PacketAdaptationField represents a packet adaptation field
+type PacketAdaptationField struct {
+ AdaptationExtensionField *PacketAdaptationExtensionField
+ DiscontinuityIndicator bool // Set if current TS packet is in a discontinuity state with respect to either the continuity counter or the program clock reference
+ ElementaryStreamPriorityIndicator bool // Set when this stream should be considered "high priority"
+ HasAdaptationExtensionField bool
+ HasOPCR bool
+ HasPCR bool
+ HasTransportPrivateData bool
+ HasSplicingCountdown bool
+ Length int
+ OPCR *ClockReference // Original Program clock reference. Helps when one TS is copied into another
+ PCR *ClockReference // Program clock reference
+ RandomAccessIndicator bool // Set when the stream may be decoded without errors from this point
+ SpliceCountdown int // Indicates how many TS packets from this one a splicing point occurs (Two's complement signed; may be negative)
+ TransportPrivateDataLength int
+ TransportPrivateData []byte
+}
+
+// PacketAdaptationExtensionField represents a packet adaptation extension field
+type PacketAdaptationExtensionField struct {
+ DTSNextAccessUnit *ClockReference // The PES DTS of the splice point. Split up as 3 bits, 1 marker bit (0x1), 15 bits, 1 marker bit, 15 bits, and 1 marker bit, for 33 data bits total.
+ HasLegalTimeWindow bool
+ HasPiecewiseRate bool
+ HasSeamlessSplice bool
+ LegalTimeWindowIsValid bool
+ LegalTimeWindowOffset uint16 // Extra information for rebroadcasters to determine the state of buffers when packets may be missing.
+ Length int
+ PiecewiseRate uint32 // The rate of the stream, measured in 188-byte packets, to define the end-time of the LTW.
+ SpliceType uint8 // Indicates the parameters of the H.262 splice.
+}
+
+// parsePacket parses a packet
+func parsePacket(i []byte) (p *Packet, err error) {
+ // Packet must start with a sync byte
+ if i[0] != syncByte {
+ err = ErrPacketMustStartWithASyncByte
+ return
+ }
+
+ // Init
+ p = &Packet{Bytes: i}
+
+	// In case the packet size is bigger than 188 bytes, ignore the leading extra bytes
+ i = i[len(i)-188+1:]
+
+ // Parse header
+ p.Header = parsePacketHeader(i)
+
+ // Parse adaptation field
+ if p.Header.HasAdaptationField {
+ p.AdaptationField = parsePacketAdaptationField(i[3:])
+ }
+
+ // Build payload
+ if p.Header.HasPayload {
+ p.Payload = i[payloadOffset(p.Header, p.AdaptationField):]
+ }
+ return
+}
+
+// payloadOffset returns the payload offset
+func payloadOffset(h *PacketHeader, a *PacketAdaptationField) (offset int) {
+ offset = 3
+ if h.HasAdaptationField {
+ offset += 1 + a.Length
+ }
+ return
+}
+
+// parsePacketHeader parses the packet header
+func parsePacketHeader(i []byte) *PacketHeader {
+ return &PacketHeader{
+ ContinuityCounter: uint8(i[2] & 0xf),
+ HasAdaptationField: i[2]&0x20 > 0,
+ HasPayload: i[2]&0x10 > 0,
+ PayloadUnitStartIndicator: i[0]&0x40 > 0,
+ PID: uint16(i[0]&0x1f)<<8 | uint16(i[1]),
+ TransportErrorIndicator: i[0]&0x80 > 0,
+ TransportPriority: i[0]&0x20 > 0,
+ TransportScramblingControl: uint8(i[2]) >> 6 & 0x3,
+ }
+}
+
+// parsePacketAdaptationField parses the packet adaptation field
+func parsePacketAdaptationField(i []byte) (a *PacketAdaptationField) {
+ // Init
+ a = &PacketAdaptationField{}
+ var offset int
+
+ // Length
+ a.Length = int(i[offset])
+ offset += 1
+
+ // Valid length
+ if a.Length > 0 {
+ // Flags
+ a.DiscontinuityIndicator = i[offset]&0x80 > 0
+ a.RandomAccessIndicator = i[offset]&0x40 > 0
+ a.ElementaryStreamPriorityIndicator = i[offset]&0x20 > 0
+ a.HasPCR = i[offset]&0x10 > 0
+ a.HasOPCR = i[offset]&0x08 > 0
+ a.HasSplicingCountdown = i[offset]&0x04 > 0
+ a.HasTransportPrivateData = i[offset]&0x02 > 0
+ a.HasAdaptationExtensionField = i[offset]&0x01 > 0
+ offset += 1
+
+ // PCR
+ if a.HasPCR {
+ a.PCR = parsePCR(i[offset:])
+ offset += 6
+ }
+
+ // OPCR
+ if a.HasOPCR {
+ a.OPCR = parsePCR(i[offset:])
+ offset += 6
+ }
+
+ // Splicing countdown
+ if a.HasSplicingCountdown {
+ a.SpliceCountdown = int(i[offset])
+ offset += 1
+ }
+
+ // Transport private data
+ if a.HasTransportPrivateData {
+ a.TransportPrivateDataLength = int(i[offset])
+ offset += 1
+ if a.TransportPrivateDataLength > 0 {
+ a.TransportPrivateData = i[offset : offset+a.TransportPrivateDataLength]
+ offset += a.TransportPrivateDataLength
+ }
+ }
+
+ // Adaptation extension
+ if a.HasAdaptationExtensionField {
+ a.AdaptationExtensionField = &PacketAdaptationExtensionField{Length: int(i[offset])}
+ offset += 1
+ if a.AdaptationExtensionField.Length > 0 {
+ // Basic
+ a.AdaptationExtensionField.HasLegalTimeWindow = i[offset]&0x80 > 0
+ a.AdaptationExtensionField.HasPiecewiseRate = i[offset]&0x40 > 0
+ a.AdaptationExtensionField.HasSeamlessSplice = i[offset]&0x20 > 0
+ offset += 1
+
+ // Legal time window
+ if a.AdaptationExtensionField.HasLegalTimeWindow {
+ a.AdaptationExtensionField.LegalTimeWindowIsValid = i[offset]&0x80 > 0
+ a.AdaptationExtensionField.LegalTimeWindowOffset = uint16(i[offset]&0x7f)<<8 | uint16(i[offset+1])
+ offset += 2
+ }
+
+ // Piecewise rate
+ if a.AdaptationExtensionField.HasPiecewiseRate {
+ a.AdaptationExtensionField.PiecewiseRate = uint32(i[offset]&0x3f)<<16 | uint32(i[offset+1])<<8 | uint32(i[offset+2])
+ offset += 3
+ }
+
+ // Seamless splice
+ if a.AdaptationExtensionField.HasSeamlessSplice {
+ a.AdaptationExtensionField.SpliceType = uint8(i[offset]&0xf0) >> 4
+ a.AdaptationExtensionField.DTSNextAccessUnit = parsePTSOrDTS(i[offset:])
+ }
+ }
+ }
+ }
+ return
+}
+
+// parsePCR parses a Program Clock Reference
+// Program clock reference, stored as 33 bits base, 6 bits reserved, 9 bits extension.
+func parsePCR(i []byte) *ClockReference {
+ var pcr = uint64(i[0])<<40 | uint64(i[1])<<32 | uint64(i[2])<<24 | uint64(i[3])<<16 | uint64(i[4])<<8 | uint64(i[5])
+ return newClockReference(int(pcr>>15), int(pcr&0x1ff))
+}
diff --git a/packet_buffer.go b/packet_buffer.go
new file mode 100644
index 0000000..6d33b18
--- /dev/null
+++ b/packet_buffer.go
@@ -0,0 +1,75 @@
+package astits
+
+import (
+ "sort"
+ "sync"
+)
+
+// packetBuffer represents a buffer of packets
+// It accumulates packets per PID until a complete payload unit can be handed back.
+type packetBuffer struct {
+ b map[uint16][]*Packet // Indexed by PID
+ m *sync.Mutex // Guards b; add and dump may be called concurrently
+}
+
+// newPacketBuffer creates a new packet buffer
+// Both fields are initialized eagerly since a nil map write would panic.
+func newPacketBuffer() *packetBuffer {
+ return &packetBuffer{
+ b: make(map[uint16][]*Packet),
+ m: &sync.Mutex{},
+ }
+}
+
+// add adds a new packet to the buffer
+// When p starts a new payload unit and earlier packets of the same PID were
+// buffered, those earlier packets are returned as ps (a complete payload
+// unit); otherwise ps is nil.
+func (b *packetBuffer) add(p *Packet) (ps []*Packet) {
+ // Lock
+ b.m.Lock()
+ defer b.m.Unlock()
+
+ // Init buffer or empty buffer if discontinuity
+ // A discontinuity invalidates any partially accumulated payload unit
+ if _, ok := b.b[p.Header.PID]; !ok || hasDiscontinuity(b.b[p.Header.PID], p) {
+ b.b[p.Header.PID] = []*Packet{}
+ }
+
+ // Add packet
+ // Buffering only starts on a payload unit start indicator; packets arriving
+ // mid-unit with nothing buffered yet are discarded
+ if len(b.b[p.Header.PID]) > 0 || (len(b.b[p.Header.PID]) == 0 && p.Header.PayloadUnitStartIndicator) {
+ b.b[p.Header.PID] = append(b.b[p.Header.PID], p)
+ }
+
+ // Check payload unit start indicator
+ if p.Header.PayloadUnitStartIndicator {
+ // This is the first packet in the buffer
+ if len(b.b[p.Header.PID]) == 1 {
+ return
+ }
+
+ // Extract the set of packets
+ // Everything buffered before p forms a complete payload unit; p itself
+ // starts the next unit and stays in the buffer
+ ps = b.b[p.Header.PID][:len(b.b[p.Header.PID])-1]
+ b.b[p.Header.PID] = []*Packet{p}
+ }
+ return
+}
+
+// dump dumps the packet buffer by looking for the first item with packets inside
+// PIDs are visited in ascending numeric order so the result is deterministic;
+// every visited entry — including empty ones skipped over — is removed from
+// the buffer. Returns nil when no PID has buffered packets.
+func (b *packetBuffer) dump() (ps []*Packet) {
+ b.m.Lock()
+ defer b.m.Unlock()
+ // Map iteration order is random in Go, so collect and sort the keys first
+ var keys []int
+ for k := range b.b {
+ keys = append(keys, int(k))
+ }
+ sort.Ints(keys)
+ for _, k := range keys {
+ ps = b.b[uint16(k)]
+ delete(b.b, uint16(k))
+ if len(ps) > 0 {
+ return
+ }
+ }
+ return
+}
+
+// hasDiscontinuity checks whether a packet is discontinuous with a set of packets
+// A discontinuity is either signalled explicitly via the adaptation field's
+// discontinuity indicator, or detected implicitly when the 4-bit continuity
+// counter does not follow the last buffered packet's counter (mod 16).
+func hasDiscontinuity(ps []*Packet, p *Packet) bool {
+ return (p.Header.HasAdaptationField && p.AdaptationField.DiscontinuityIndicator) ||
+ (len(ps) > 0 && p.Header.ContinuityCounter != (ps[len(ps)-1].Header.ContinuityCounter+1)%16)
+}
diff --git a/packet_buffer_test.go b/packet_buffer_test.go
new file mode 100644
index 0000000..e8ac391
--- /dev/null
+++ b/packet_buffer_test.go
@@ -0,0 +1,41 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestHasDiscontinuity(t *testing.T) {
+ // Counter wraps 15 -> 0: continuous
+ assert.False(t, hasDiscontinuity([]*Packet{{Header: &PacketHeader{ContinuityCounter: 15}}}, &Packet{Header: &PacketHeader{ContinuityCounter: 0}}))
+ // Explicit discontinuity indicator wins even when the counter is continuous
+ assert.True(t, hasDiscontinuity([]*Packet{{Header: &PacketHeader{ContinuityCounter: 15}}}, &Packet{AdaptationField: &PacketAdaptationField{DiscontinuityIndicator: true}, Header: &PacketHeader{ContinuityCounter: 0, HasAdaptationField: true}}))
+ // Counter jumps 15 -> 1: discontinuous
+ assert.True(t, hasDiscontinuity([]*Packet{{Header: &PacketHeader{ContinuityCounter: 15}}}, &Packet{Header: &PacketHeader{ContinuityCounter: 1}}))
+}
+
+func TestPacketBuffer(t *testing.T) {
+ b := newPacketBuffer()
+ // First packet has no payload unit start indicator: dropped, nothing buffered
+ ps := b.add(&Packet{Header: &PacketHeader{ContinuityCounter: 0, PID: 1}})
+ assert.Len(t, ps, 0)
+ // Payload unit starts for PIDs 1 and 2: buffering begins, nothing returned yet
+ ps = b.add(&Packet{Header: &PacketHeader{ContinuityCounter: 1, PayloadUnitStartIndicator: true, PID: 1}})
+ assert.Len(t, ps, 0)
+ ps = b.add(&Packet{Header: &PacketHeader{ContinuityCounter: 1, PayloadUnitStartIndicator: true, PID: 2}})
+ assert.Len(t, ps, 0)
+ ps = b.add(&Packet{Header: &PacketHeader{ContinuityCounter: 2, PID: 1}})
+ assert.Len(t, ps, 0)
+ // New payload unit start on PID 1 releases the 2 previously buffered packets
+ ps = b.add(&Packet{Header: &PacketHeader{ContinuityCounter: 3, PayloadUnitStartIndicator: true, PID: 1}})
+ assert.Len(t, ps, 2)
+ // Counter jump 3 -> 5 is a discontinuity: buffer for PID 1 is reset
+ ps = b.add(&Packet{Header: &PacketHeader{ContinuityCounter: 5, PID: 1}})
+ assert.Len(t, ps, 0)
+ ps = b.add(&Packet{Header: &PacketHeader{ContinuityCounter: 6, PayloadUnitStartIndicator: true, PID: 1}})
+ assert.Len(t, ps, 0)
+ ps = b.add(&Packet{Header: &PacketHeader{ContinuityCounter: 7, PID: 1}})
+ assert.Len(t, ps, 0)
+ // dump drains PIDs in ascending order: PID 1 first, then PID 2, then empty
+ ps = b.dump()
+ assert.Len(t, ps, 2)
+ assert.Equal(t, uint16(1), ps[0].Header.PID)
+ ps = b.dump()
+ assert.Len(t, ps, 1)
+ assert.Equal(t, uint16(2), ps[0].Header.PID)
+ ps = b.dump()
+ assert.Len(t, ps, 0)
+}
diff --git a/packet_test.go b/packet_test.go
new file mode 100644
index 0000000..a88d62f
--- /dev/null
+++ b/packet_test.go
@@ -0,0 +1,150 @@
+package astits
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/asticode/go-astitools/binary"
+ "github.com/stretchr/testify/assert"
+)
+
+// packet serializes a TS packet from h and a (with a payload padded to 147
+// bytes) and returns both the bytes and the *Packet expected after parsing.
+// NOTE(review): the expected Packet is built from the package-level
+// packetHeader/packetAdaptationField fixtures, not from the h/a arguments —
+// callers must pass copies of those fixtures for the expectation to hold.
+func packet(h PacketHeader, a PacketAdaptationField, i []byte) ([]byte, *Packet) {
+ w := astibinary.New()
+ w.Write(uint8(syncByte)) // Sync byte
+ w.Write([]byte("test")) // Sometimes packets are 192 bytes
+ w.Write(packetHeaderBytes(h)) // Header
+ w.Write(packetAdaptationFieldBytes(a)) // Adaptation field
+ var payload = append(i, make([]byte, 147-len(i))...) // Payload
+ w.Write(payload)
+ return w.Bytes(), &Packet{
+ AdaptationField: packetAdaptationField,
+ Bytes: w.Bytes(),
+ Header: packetHeader,
+ Payload: payload,
+ }
+}
+
+func TestParsePacket(t *testing.T) {
+ // Packet not starting with a sync
+ w := astibinary.New()
+ w.Write(uint16(1)) // Invalid sync byte
+ _, err := parsePacket(w.Bytes())
+ assert.EqualError(t, err, ErrPacketMustStartWithASyncByte.Error())
+
+ // Valid
+ // Round-trip: serialize the fixtures, parse back, expect the fixture Packet
+ b, ep := packet(*packetHeader, *packetAdaptationField, []byte("payload"))
+ p, err := parsePacket(b)
+ assert.NoError(t, err)
+ assert.Equal(t, p, ep)
+}
+
+func TestPayloadOffset(t *testing.T) {
+ // Without adaptation field the payload starts right after the 3 header bytes;
+ // with one, the adaptation field length byte plus its content shift it further
+ assert.Equal(t, 3, payloadOffset(&PacketHeader{}, nil))
+ assert.Equal(t, 6, payloadOffset(&PacketHeader{HasAdaptationField: true}, &PacketAdaptationField{Length: 2}))
+}
+
+// packetHeader is the header fixture whose serialized form is produced by
+// packetHeaderBytes; keep the two in sync.
+var packetHeader = &PacketHeader{
+ ContinuityCounter: 10,
+ HasAdaptationField: true,
+ HasPayload: true,
+ PayloadUnitStartIndicator: true,
+ PID: 5461,
+ TransportErrorIndicator: true,
+ TransportPriority: true,
+ TransportScramblingControl: ScramblingControlScrambledWithEvenKey,
+}
+
+// packetHeaderBytes serializes a packet header bit by bit.
+// NOTE(review): the transport error indicator, transport priority, scrambling
+// control and adaptation field control bits are hard-coded to match the
+// packetHeader fixture rather than derived from h — confirm before reusing
+// this helper with a different header.
+func packetHeaderBytes(h PacketHeader) []byte {
+ w := astibinary.New()
+ w.Write("1") // Transport error indicator
+ w.Write(h.PayloadUnitStartIndicator) // Payload unit start indicator
+ w.Write("1") // Transport priority
+ w.Write(fmt.Sprintf("%.13b", h.PID)) // PID
+ w.Write("10") // Scrambling control
+ w.Write("11") // Adaptation field control
+ w.Write(fmt.Sprintf("%.4b", h.ContinuityCounter)) // Continuity counter
+ return w.Bytes()
+}
+
+// TestParsePacketHeader checks that serializing the header fixture and parsing
+// it back yields the same fixture.
+func TestParsePacketHeader(t *testing.T) {
+ assert.Equal(t, packetHeader, parsePacketHeader(packetHeaderBytes(*packetHeader)))
+}
+
+// packetAdaptationField is the adaptation field fixture whose serialized form
+// is produced by packetAdaptationFieldBytes; keep the two in sync.
+var packetAdaptationField = &PacketAdaptationField{
+ AdaptationExtensionField: &PacketAdaptationExtensionField{
+ DTSNextAccessUnit: dtsClockReference,
+ HasLegalTimeWindow: true,
+ HasPiecewiseRate: true,
+ HasSeamlessSplice: true,
+ LegalTimeWindowIsValid: true,
+ LegalTimeWindowOffset: 10922,
+ Length: 11,
+ PiecewiseRate: 2796202,
+ SpliceType: 2,
+ },
+ DiscontinuityIndicator: true,
+ ElementaryStreamPriorityIndicator: true,
+ HasAdaptationExtensionField: true,
+ HasOPCR: true,
+ HasPCR: true,
+ HasTransportPrivateData: true,
+ HasSplicingCountdown: true,
+ Length: 36,
+ OPCR: pcr,
+ PCR: pcr,
+ RandomAccessIndicator: true,
+ SpliceCountdown: 2,
+ TransportPrivateDataLength: 4,
+ TransportPrivateData: []byte("test"),
+}
+
+// packetAdaptationFieldBytes serializes an adaptation field bit by bit.
+// NOTE(review): apart from the discontinuity indicator, every flag and value
+// is hard-coded to match the packetAdaptationField fixture rather than
+// derived from a — confirm before reusing with a different field.
+func packetAdaptationFieldBytes(a PacketAdaptationField) []byte {
+ w := astibinary.New()
+ w.Write(uint8(36)) // Length
+ w.Write(a.DiscontinuityIndicator) // Discontinuity indicator
+ w.Write("1") // Random access indicator
+ w.Write("1") // Elementary stream priority indicator
+ w.Write("1") // PCR flag
+ w.Write("1") // OPCR flag
+ w.Write("1") // Splicing point flag
+ w.Write("1") // Transport data flag
+ w.Write("1") // Adaptation field extension flag
+ w.Write(pcrBytes()) // PCR
+ w.Write(pcrBytes()) // OPCR
+ w.Write(uint8(2)) // Splice countdown
+ w.Write(uint8(4)) // Transport private data length
+ w.Write([]byte("test")) // Transport private data
+ w.Write(uint8(11)) // Adaptation extension length
+ w.Write("1") // LTW flag
+ w.Write("1") // Piecewise rate flag
+ w.Write("1") // Seamless splice flag
+ w.Write("11111") // Reserved
+ w.Write("1") // LTW valid flag
+ w.Write("010101010101010") // LTW offset
+ w.Write("11") // Piecewise rate reserved
+ w.Write("1010101010101010101010") // Piecewise rate
+ w.Write(dtsBytes()) // Splice type + DTS next access unit
+ w.Write([]byte("stuff")) // Stuffing bytes
+ return w.Bytes()
+}
+
+// TestParsePacketAdaptationField checks the adaptation field fixture
+// round-trips through serialization and parsing.
+func TestParsePacketAdaptationField(t *testing.T) {
+ assert.Equal(t, packetAdaptationField, parsePacketAdaptationField(packetAdaptationFieldBytes(*packetAdaptationField)))
+}
+
+// pcr is the ClockReference fixture matching pcrBytes (alternating-bit 33-bit
+// base and 9-bit extension).
+var pcr = &ClockReference{
+ Base: 5726623061,
+ Extension: 341,
+}
+
+// pcrBytes serializes the pcr fixture: 33-bit base, 6 reserved bits, 9-bit
+// extension (48 bits = 6 bytes total).
+func pcrBytes() []byte {
+ w := astibinary.New()
+ w.Write("101010101010101010101010101010101") // Base
+ w.Write("111111") // Reserved
+ w.Write("101010101") // Extension
+ return w.Bytes()
+}
+
+// TestParsePCR checks the PCR fixture round-trips through parsePCR.
+func TestParsePCR(t *testing.T) {
+ assert.Equal(t, pcr, parsePCR(pcrBytes()))
+}
diff --git a/program_map.go b/program_map.go
new file mode 100644
index 0000000..b4f77b5
--- /dev/null
+++ b/program_map.go
@@ -0,0 +1,32 @@
+package astits
+
+import "sync"
+
+// programMap represents a program ids map
+type programMap struct {
+ m *sync.Mutex // Guards p; exists and set may be called concurrently
+ p map[uint16]uint16 // map[ProgramMapID]ProgramNumber
+}
+
+// newProgramMap creates a new program ids map
+// Returned by value; copies share the same mutex and map since both fields
+// are pointers/reference types.
+func newProgramMap() programMap {
+ return programMap{
+ m: &sync.Mutex{},
+ p: make(map[uint16]uint16),
+ }
+}
+
+// exists checks whether the program with this pid exists
+func (m programMap) exists(pid uint16) (ok bool) {
+ m.m.Lock()
+ defer m.m.Unlock()
+ _, ok = m.p[pid]
+ return
+}
+
+// set sets a new program id
+// Overwrites any existing entry for pid.
+func (m programMap) set(pid, number uint16) {
+ m.m.Lock()
+ defer m.m.Unlock()
+ m.p[pid] = number
+}
diff --git a/program_map_test.go b/program_map_test.go
new file mode 100644
index 0000000..cc22f76
--- /dev/null
+++ b/program_map_test.go
@@ -0,0 +1,14 @@
+package astits
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestProgramMap checks that a pid is absent until set and present afterwards.
+func TestProgramMap(t *testing.T) {
+ pm := newProgramMap()
+ assert.False(t, pm.exists(1))
+ pm.set(1, 1)
+ assert.True(t, pm.exists(1))
+}