init
This commit is contained in:
21
vendor/github.com/asticode/go-astilog/LICENSE
generated
vendored
Normal file
21
vendor/github.com/asticode/go-astilog/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2018 Quentin Renard
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
30
vendor/github.com/asticode/go-astilog/configuration.go
generated
vendored
Normal file
30
vendor/github.com/asticode/go-astilog/configuration.go
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
package astilog
|
||||
|
||||
import "flag"
|
||||
|
||||
// Flags
|
||||
var (
|
||||
AppName = flag.String("logger-app-name", "", "the logger's app name")
|
||||
Filename = flag.String("logger-filename", "", "the logger's filename")
|
||||
Verbose = flag.Bool("v", false, "if true, then log level is debug")
|
||||
)
|
||||
|
||||
// Configuration represents the configuration of the logger
|
||||
type Configuration struct {
|
||||
AppName string `toml:"app_name"`
|
||||
DisableColors bool `toml:"disable_colors"`
|
||||
Filename string `toml:"filename"`
|
||||
Format string `toml:"format"`
|
||||
MessageKey string `toml:"message_key"`
|
||||
Out string `toml:"out"`
|
||||
Verbose bool `toml:"verbose"`
|
||||
}
|
||||
|
||||
// FlagConfig generates a Configuration based on flags
|
||||
func FlagConfig() Configuration {
|
||||
return Configuration{
|
||||
AppName: *AppName,
|
||||
Filename: *Filename,
|
||||
Verbose: *Verbose,
|
||||
}
|
||||
}
|
25
vendor/github.com/asticode/go-astilog/hooks.go
generated
vendored
Normal file
25
vendor/github.com/asticode/go-astilog/hooks.go
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
package astilog
|
||||
|
||||
import "github.com/sirupsen/logrus"
|
||||
|
||||
type withFieldHook struct {
|
||||
k, v string
|
||||
}
|
||||
|
||||
func newWithFieldHook(k, v string) *withFieldHook {
|
||||
return &withFieldHook{
|
||||
k: k,
|
||||
v: v,
|
||||
}
|
||||
}
|
||||
|
||||
func (h *withFieldHook) Fire(e *logrus.Entry) error {
|
||||
if len(h.v) > 0 {
|
||||
e.Data[h.k] = h.v
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (h *withFieldHook) Levels() []logrus.Level {
|
||||
return logrus.AllLevels
|
||||
}
|
149
vendor/github.com/asticode/go-astilog/logger.go
generated
vendored
Normal file
149
vendor/github.com/asticode/go-astilog/logger.go
generated
vendored
Normal file
@@ -0,0 +1,149 @@
|
||||
package astilog
|
||||
|
||||
import (
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/sirupsen/logrus"
|
||||
"golang.org/x/crypto/ssh/terminal"
|
||||
)
|
||||
|
||||
// Logger represents a logger
|
||||
type Logger interface {
|
||||
Debug(v ...interface{})
|
||||
Debugf(format string, v ...interface{})
|
||||
Info(v ...interface{})
|
||||
Infof(format string, v ...interface{})
|
||||
Warn(v ...interface{})
|
||||
Warnf(format string, v ...interface{})
|
||||
Error(v ...interface{})
|
||||
Errorf(format string, v ...interface{})
|
||||
Fatal(v ...interface{})
|
||||
Fatalf(format string, v ...interface{})
|
||||
}
|
||||
|
||||
// LoggerSetter represents a logger setter
|
||||
type LoggerSetter interface {
|
||||
SetLogger(l Logger)
|
||||
}
|
||||
|
||||
// Fields represents logger fields
|
||||
type Fields map[string]string
|
||||
|
||||
// LoggerWithField represents a logger that can have fields
|
||||
type LoggerWithFields interface {
|
||||
WithField(k, v string)
|
||||
WithFields(fs Fields)
|
||||
}
|
||||
|
||||
// Outs
|
||||
const (
|
||||
OutFile = "file"
|
||||
OutStdOut = "stdout"
|
||||
OutSyslog = "syslog"
|
||||
)
|
||||
|
||||
// New creates a new Logger
|
||||
func New(c Configuration) Logger {
|
||||
// Init
|
||||
var l = NewLogrus()
|
||||
|
||||
// Hooks
|
||||
l.AddHook(newWithFieldHook("app_name", c.AppName))
|
||||
|
||||
// Out
|
||||
var out string
|
||||
l.Out, out = Out(c)
|
||||
|
||||
// Formatter
|
||||
l.Formatter = Formatter(c, out)
|
||||
|
||||
// Level
|
||||
l.Level = Level(c)
|
||||
return l
|
||||
}
|
||||
|
||||
// Out returns the out based on the configuration
|
||||
func Out(c Configuration) (w io.Writer, out string) {
|
||||
switch c.Out {
|
||||
case OutStdOut:
|
||||
return stdOut(), c.Out
|
||||
case OutSyslog:
|
||||
return syslogOut(c), c.Out
|
||||
default:
|
||||
if isTerminal(os.Stdout) {
|
||||
w = stdOut()
|
||||
out = OutStdOut
|
||||
} else {
|
||||
w = syslogOut(c)
|
||||
out = OutSyslog
|
||||
}
|
||||
if len(c.Filename) > 0 {
|
||||
f, err := os.OpenFile(c.Filename, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)
|
||||
if err != nil {
|
||||
log.Println(errors.Wrapf(err, "astilog: creating %s failed", c.Filename))
|
||||
} else {
|
||||
w = f
|
||||
out = OutFile
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func isTerminal(w io.Writer) bool {
|
||||
switch v := w.(type) {
|
||||
case *os.File:
|
||||
return terminal.IsTerminal(int(v.Fd()))
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Formats
|
||||
const (
|
||||
FormatJSON = "json"
|
||||
FormatText = "text"
|
||||
)
|
||||
|
||||
// Formatter returns the formatter based on the configuration
|
||||
func Formatter(c Configuration, out string) logrus.Formatter {
|
||||
switch c.Format {
|
||||
case FormatJSON:
|
||||
return jsonFormatter(c)
|
||||
case FormatText:
|
||||
return textFormatter(c, out)
|
||||
default:
|
||||
switch out {
|
||||
case OutFile, OutStdOut:
|
||||
return textFormatter(c, out)
|
||||
default:
|
||||
return jsonFormatter(c)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func jsonFormatter(c Configuration) logrus.Formatter {
|
||||
f := &logrus.JSONFormatter{FieldMap: make(logrus.FieldMap)}
|
||||
if len(c.MessageKey) > 0 {
|
||||
f.FieldMap[logrus.FieldKeyMsg] = c.MessageKey
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
func textFormatter(c Configuration, out string) logrus.Formatter {
|
||||
return &logrus.TextFormatter{
|
||||
DisableColors: c.DisableColors || out == OutFile,
|
||||
ForceColors: !c.DisableColors && out != OutFile,
|
||||
}
|
||||
}
|
||||
|
||||
func Level(c Configuration) logrus.Level {
|
||||
if c.Verbose {
|
||||
return logrus.DebugLevel
|
||||
}
|
||||
return logrus.InfoLevel
|
||||
}
|
27
vendor/github.com/asticode/go-astilog/logrus.go
generated
vendored
Normal file
27
vendor/github.com/asticode/go-astilog/logrus.go
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
package astilog
|
||||
|
||||
import (
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// Logrus represents a logrus logger
|
||||
type Logrus struct {
|
||||
*logrus.Logger
|
||||
}
|
||||
|
||||
// NewLogrus creates a new logrus logger
|
||||
func NewLogrus() *Logrus {
|
||||
return &Logrus{Logger: logrus.New()}
|
||||
}
|
||||
|
||||
// WithField implements the LoggerWithFields interface
|
||||
func (l *Logrus) WithField(k, v string) {
|
||||
l.AddHook(newWithFieldHook(k, v))
|
||||
}
|
||||
|
||||
// WithFields implements the LoggerWithFields interface
|
||||
func (l *Logrus) WithFields(fs Fields) {
|
||||
for k, v := range fs {
|
||||
l.WithField(k, v)
|
||||
}
|
||||
}
|
22
vendor/github.com/asticode/go-astilog/nop.go
generated
vendored
Normal file
22
vendor/github.com/asticode/go-astilog/nop.go
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
package astilog
|
||||
|
||||
import "os"
|
||||
|
||||
// NopLogger returns a nop logger
|
||||
func NopLogger() Logger {
|
||||
return &nop{}
|
||||
}
|
||||
|
||||
// nop is a nop logger
|
||||
type nop struct{}
|
||||
|
||||
func (n nop) Debug(v ...interface{}) {}
|
||||
func (n nop) Debugf(format string, v ...interface{}) {}
|
||||
func (n nop) Info(v ...interface{}) {}
|
||||
func (n nop) Infof(format string, v ...interface{}) {}
|
||||
func (n nop) Warn(v ...interface{}) {}
|
||||
func (n nop) Warnf(format string, v ...interface{}) {}
|
||||
func (n nop) Error(v ...interface{}) {}
|
||||
func (n nop) Errorf(format string, v ...interface{}) {}
|
||||
func (n nop) Fatal(v ...interface{}) { os.Exit(1) }
|
||||
func (n nop) Fatalf(format string, v ...interface{}) { os.Exit(1) }
|
19
vendor/github.com/asticode/go-astilog/out.go
generated
vendored
Normal file
19
vendor/github.com/asticode/go-astilog/out.go
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
// +build windows
|
||||
|
||||
package astilog
|
||||
|
||||
import (
|
||||
"io"
|
||||
"log"
|
||||
|
||||
colorable "github.com/mattn/go-colorable"
|
||||
)
|
||||
|
||||
func stdOut() io.Writer {
|
||||
return colorable.NewColorableStdout()
|
||||
}
|
||||
|
||||
func syslogOut(c Configuration) io.Writer {
|
||||
log.Println("astilog: syslog is not implemented on this os, using stdout instead")
|
||||
return stdOut()
|
||||
}
|
26
vendor/github.com/asticode/go-astilog/out_syslog.go
generated
vendored
Normal file
26
vendor/github.com/asticode/go-astilog/out_syslog.go
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
// +build !windows
|
||||
|
||||
package astilog
|
||||
|
||||
import (
|
||||
"io"
|
||||
"log"
|
||||
"log/syslog"
|
||||
"os"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
|
||||
func stdOut() io.Writer {
|
||||
return os.Stdout
|
||||
}
|
||||
|
||||
func syslogOut(c Configuration) (w io.Writer) {
|
||||
var err error
|
||||
if w, err = syslog.New(syslog.LOG_INFO|syslog.LOG_USER, c.AppName); err != nil {
|
||||
log.Println(errors.Wrap(err, "astilog: new syslog failed"))
|
||||
return os.Stdout
|
||||
}
|
||||
return
|
||||
}
|
3
vendor/github.com/asticode/go-astilog/readme.md
generated
vendored
Normal file
3
vendor/github.com/asticode/go-astilog/readme.md
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# Astilog
|
||||
|
||||
Astilog is a wrapper on top of xlog to provide proper configuration
|
36
vendor/github.com/asticode/go-astilog/std.go
generated
vendored
Normal file
36
vendor/github.com/asticode/go-astilog/std.go
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
package astilog
|
||||
|
||||
// Global logger
|
||||
var gb = NopLogger()
|
||||
|
||||
// FlagInit initializes the package based on flags
|
||||
func FlagInit() {
|
||||
SetLogger(New(FlagConfig()))
|
||||
}
|
||||
|
||||
// SetLogger sets the global logger
|
||||
func SetLogger(l Logger) {
|
||||
gb = l
|
||||
}
|
||||
|
||||
// SetDefaultLogger sets the default logger
|
||||
func SetDefaultLogger() {
|
||||
SetLogger(New(Configuration{Verbose: true}))
|
||||
}
|
||||
|
||||
// GetLogger returns the global logger
|
||||
func GetLogger() Logger {
|
||||
return gb
|
||||
}
|
||||
|
||||
// Global logger shortcuts
|
||||
func Debug(v ...interface{}) { gb.Debug(v...) }
|
||||
func Debugf(format string, v ...interface{}) { gb.Debugf(format, v...) }
|
||||
func Info(v ...interface{}) { gb.Info(v...) }
|
||||
func Infof(format string, v ...interface{}) { gb.Infof(format, v...) }
|
||||
func Warn(v ...interface{}) { gb.Warn(v...) }
|
||||
func Warnf(format string, v ...interface{}) { gb.Warnf(format, v...) }
|
||||
func Error(v ...interface{}) { gb.Error(v...) }
|
||||
func Errorf(format string, v ...interface{}) { gb.Errorf(format, v...) }
|
||||
func Fatal(v ...interface{}) { gb.Fatal(v...) }
|
||||
func Fatalf(format string, v ...interface{}) { gb.Fatalf(format, v...) }
|
21
vendor/github.com/asticode/go-astisub/LICENSE
generated
vendored
Normal file
21
vendor/github.com/asticode/go-astisub/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2016 Quentin Renard
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
91
vendor/github.com/asticode/go-astisub/README.md
generated
vendored
Normal file
91
vendor/github.com/asticode/go-astisub/README.md
generated
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
[](http://goreportcard.com/report/github.com/asticode/go-astisub)
|
||||
[](https://godoc.org/github.com/asticode/go-astisub)
|
||||
[](https://travis-ci.org/asticode/go-astisub#)
|
||||
[](https://coveralls.io/repos/github/asticode/go-astisub)
|
||||
|
||||
This is a Golang library to manipulate subtitles.
|
||||
|
||||
It allows you to manipulate `srt`, `stl`, `ttml`, `ssa/ass` and `webvtt` files for now.
|
||||
|
||||
Available operations are `parsing`, `writing`, `syncing`, `fragmenting`, `unfragmenting`, `merging` and `optimizing`.
|
||||
|
||||
# Installation
|
||||
|
||||
To install the library and command line program, use the following:
|
||||
|
||||
go get -u github.com/asticode/go-astisub/...
|
||||
|
||||
# Using the library in your code
|
||||
|
||||
WARNING: the code below doesn't handle errors for readibility purposes. However you SHOULD!
|
||||
|
||||
```go
|
||||
// Open subtitles
|
||||
s1, _ := astisub.OpenFile("/path/to/example.ttml")
|
||||
s2, _ := astisub.ReadFromSRT(bytes.NewReader([]byte("00:01:00.000 --> 00:02:00.000\nCredits")))
|
||||
|
||||
// Add a duration to every subtitles (syncing)
|
||||
s1.Add(-2*time.Second)
|
||||
|
||||
// Fragment the subtitles
|
||||
s1.Fragment(2*time.Second)
|
||||
|
||||
// Merge subtitles
|
||||
s1.Merge(s2)
|
||||
|
||||
// Optimize subtitles
|
||||
s1.Optimize()
|
||||
|
||||
// Unfragment the subtitles
|
||||
s1.Unfragment()
|
||||
|
||||
// Write subtitles
|
||||
s1.Write("/path/to/example.srt")
|
||||
var buf = &bytes.Buffer{}
|
||||
s2.WriteToTTML(buf)
|
||||
```
|
||||
|
||||
# Using the CLI
|
||||
|
||||
If **astisub** has been installed properly you can:
|
||||
|
||||
- convert any type of subtitle to any other type of subtitle:
|
||||
|
||||
astisub convert -i example.srt -o example.ttml
|
||||
|
||||
- fragment any type of subtitle:
|
||||
|
||||
astisub fragment -i example.srt -f 2s -o example.out.srt
|
||||
|
||||
- merge any type of subtitle into any other type of subtitle:
|
||||
|
||||
astisub merge -i example.srt -i example.ttml -o example.out.srt
|
||||
|
||||
- optimize any type of subtitle:
|
||||
|
||||
astisub optimize -i example.srt -o example.out.srt
|
||||
|
||||
- unfragment any type of subtitle:
|
||||
|
||||
astisub unfragment -i example.srt -o example.out.srt
|
||||
|
||||
- sync any type of subtitle:
|
||||
|
||||
astisub sync -i example.srt -s "-2s" -o example.out.srt
|
||||
|
||||
# Features and roadmap
|
||||
|
||||
- [x] parsing
|
||||
- [x] writing
|
||||
- [x] syncing
|
||||
- [x] fragmenting/unfragmenting
|
||||
- [x] merging
|
||||
- [x] ordering
|
||||
- [x] optimizing
|
||||
- [x] .srt
|
||||
- [x] .ttml
|
||||
- [x] .vtt
|
||||
- [x] .stl
|
||||
- [x] .ssa/.ass
|
||||
- [ ] .teletext
|
||||
- [ ] .smi
|
7
vendor/github.com/asticode/go-astisub/language.go
generated
vendored
Normal file
7
vendor/github.com/asticode/go-astisub/language.go
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
package astisub
|
||||
|
||||
// Languages
|
||||
const (
|
||||
LanguageEnglish = "english"
|
||||
LanguageFrench = "french"
|
||||
)
|
135
vendor/github.com/asticode/go-astisub/srt.go
generated
vendored
Normal file
135
vendor/github.com/asticode/go-astisub/srt.go
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
package astisub
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// Constants
|
||||
const (
|
||||
srtTimeBoundariesSeparator = " --> "
|
||||
)
|
||||
|
||||
// Vars
|
||||
var (
|
||||
bytesSRTTimeBoundariesSeparator = []byte(srtTimeBoundariesSeparator)
|
||||
)
|
||||
|
||||
// parseDurationSRT parses an .srt duration
|
||||
func parseDurationSRT(i string) (time.Duration, error) {
|
||||
return parseDuration(i, ",", 3)
|
||||
}
|
||||
|
||||
// ReadFromSRT parses an .srt content
|
||||
func ReadFromSRT(i io.Reader) (o *Subtitles, err error) {
|
||||
// Init
|
||||
o = NewSubtitles()
|
||||
var scanner = bufio.NewScanner(i)
|
||||
|
||||
// Scan
|
||||
var line string
|
||||
var s = &Item{}
|
||||
for scanner.Scan() {
|
||||
// Fetch line
|
||||
line = scanner.Text()
|
||||
|
||||
// Line contains time boundaries
|
||||
if strings.Contains(line, srtTimeBoundariesSeparator) {
|
||||
// Remove last item of previous subtitle since it's the index
|
||||
s.Lines = s.Lines[:len(s.Lines)-1]
|
||||
|
||||
// Remove trailing empty lines
|
||||
if len(s.Lines) > 0 {
|
||||
for i := len(s.Lines) - 1; i >= 0; i-- {
|
||||
if len(s.Lines[i].Items) > 0 {
|
||||
for j := len(s.Lines[i].Items) - 1; j >= 0; j-- {
|
||||
if len(s.Lines[i].Items[j].Text) == 0 {
|
||||
s.Lines[i].Items = s.Lines[i].Items[:j]
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
if len(s.Lines[i].Items) == 0 {
|
||||
s.Lines = s.Lines[:i]
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Init subtitle
|
||||
s = &Item{}
|
||||
|
||||
// Fetch time boundaries
|
||||
boundaries := strings.Split(line, srtTimeBoundariesSeparator)
|
||||
if s.StartAt, err = parseDurationSRT(boundaries[0]); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: parsing srt duration %s failed", boundaries[0])
|
||||
return
|
||||
}
|
||||
if s.EndAt, err = parseDurationSRT(boundaries[1]); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: parsing srt duration %s failed", boundaries[1])
|
||||
return
|
||||
}
|
||||
|
||||
// Append subtitle
|
||||
o.Items = append(o.Items, s)
|
||||
} else {
|
||||
// Add text
|
||||
s.Lines = append(s.Lines, Line{Items: []LineItem{{Text: line}}})
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// formatDurationSRT formats an .srt duration
|
||||
func formatDurationSRT(i time.Duration) string {
|
||||
return formatDuration(i, ",", 3)
|
||||
}
|
||||
|
||||
// WriteToSRT writes subtitles in .srt format
|
||||
func (s Subtitles) WriteToSRT(o io.Writer) (err error) {
|
||||
// Do not write anything if no subtitles
|
||||
if len(s.Items) == 0 {
|
||||
err = ErrNoSubtitlesToWrite
|
||||
return
|
||||
}
|
||||
|
||||
// Add BOM header
|
||||
var c []byte
|
||||
c = append(c, BytesBOM...)
|
||||
|
||||
// Loop through subtitles
|
||||
for k, v := range s.Items {
|
||||
// Add time boundaries
|
||||
c = append(c, []byte(strconv.Itoa(k+1))...)
|
||||
c = append(c, bytesLineSeparator...)
|
||||
c = append(c, []byte(formatDurationSRT(v.StartAt))...)
|
||||
c = append(c, bytesSRTTimeBoundariesSeparator...)
|
||||
c = append(c, []byte(formatDurationSRT(v.EndAt))...)
|
||||
c = append(c, bytesLineSeparator...)
|
||||
|
||||
// Loop through lines
|
||||
for _, l := range v.Lines {
|
||||
c = append(c, []byte(l.String())...)
|
||||
c = append(c, bytesLineSeparator...)
|
||||
}
|
||||
|
||||
// Add new line
|
||||
c = append(c, bytesLineSeparator...)
|
||||
}
|
||||
|
||||
// Remove last new line
|
||||
c = c[:len(c)-1]
|
||||
|
||||
// Write
|
||||
if _, err = o.Write(c); err != nil {
|
||||
err = errors.Wrap(err, "astisub: writing failed")
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
1254
vendor/github.com/asticode/go-astisub/ssa.go
generated
vendored
Normal file
1254
vendor/github.com/asticode/go-astisub/ssa.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
860
vendor/github.com/asticode/go-astisub/stl.go
generated
vendored
Normal file
860
vendor/github.com/asticode/go-astisub/stl.go
generated
vendored
Normal file
@@ -0,0 +1,860 @@
|
||||
package astisub
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/asticode/go-astitools/byte"
|
||||
"github.com/asticode/go-astitools/map"
|
||||
"github.com/asticode/go-astitools/ptr"
|
||||
"github.com/pkg/errors"
|
||||
"golang.org/x/text/unicode/norm"
|
||||
)
|
||||
|
||||
// https://tech.ebu.ch/docs/tech/tech3264.pdf
|
||||
// https://github.com/yanncoupin/stl2srt/blob/master/to_srt.py
|
||||
|
||||
// STL block sizes
|
||||
const (
|
||||
stlBlockSizeGSI = 1024
|
||||
stlBlockSizeTTI = 128
|
||||
)
|
||||
|
||||
// STL character code table number
|
||||
const (
|
||||
stlCharacterCodeTableNumberLatin uint16 = 12336
|
||||
stlCharacterCodeTableNumberLatinCyrillic = 12337
|
||||
stlCharacterCodeTableNumberLatinArabic = 12338
|
||||
stlCharacterCodeTableNumberLatinGreek = 12339
|
||||
stlCharacterCodeTableNumberLatinHebrew = 12340
|
||||
)
|
||||
|
||||
// STL character code tables
|
||||
// TODO Add missing tables
|
||||
var (
|
||||
stlCharacterCodeTables = map[uint16]*astimap.Map{
|
||||
stlCharacterCodeTableNumberLatin: astimap.NewMap(0x0, "").
|
||||
Set(0x20, " ").Set(0x21, "!").Set(0x22, "\"").Set(0x23, "#").
|
||||
Set(0x24, "¤").Set(0x25, "%").Set(0x26, "&").Set(0x27, "'").
|
||||
Set(0x28, "(").Set(0x29, ")").Set(0x2a, "*").Set(0x2b, "+").
|
||||
Set(0x2c, ",").Set(0x2d, "-").Set(0x2e, ".").Set(0x2f, "/").
|
||||
Set(0x30, "0").Set(0x31, "1").Set(0x32, "2").Set(0x33, "3").
|
||||
Set(0x34, "4").Set(0x35, "5").Set(0x36, "6").Set(0x37, "7").
|
||||
Set(0x38, "8").Set(0x39, "9").Set(0x3a, ":").Set(0x3b, ";").
|
||||
Set(0x3c, "<").Set(0x3d, "=").Set(0x3e, ">").Set(0x3f, "?").
|
||||
Set(0x40, "@").Set(0x41, "A").Set(0x42, "B").Set(0x43, "C").
|
||||
Set(0x44, "D").Set(0x45, "E").Set(0x46, "F").Set(0x47, "G").
|
||||
Set(0x48, "H").Set(0x49, "I").Set(0x4a, "J").Set(0x4b, "K").
|
||||
Set(0x4c, "L").Set(0x4d, "M").Set(0x4e, "N").Set(0x4f, "O").
|
||||
Set(0x50, "P").Set(0x51, "Q").Set(0x52, "R").Set(0x53, "S").
|
||||
Set(0x54, "T").Set(0x55, "U").Set(0x56, "V").Set(0x57, "W").
|
||||
Set(0x58, "X").Set(0x59, "Y").Set(0x5a, "Z").Set(0x5b, "[").
|
||||
Set(0x5c, "\\").Set(0x5d, "]").Set(0x5e, "^").Set(0x5f, "_").
|
||||
Set(0x60, "`").Set(0x61, "a").Set(0x62, "b").Set(0x63, "c").
|
||||
Set(0x64, "d").Set(0x65, "e").Set(0x66, "f").Set(0x67, "g").
|
||||
Set(0x68, "h").Set(0x69, "i").Set(0x6a, "j").Set(0x6b, "k").
|
||||
Set(0x6c, "l").Set(0x6d, "m").Set(0x6e, "n").Set(0x6f, "o").
|
||||
Set(0x70, "p").Set(0x71, "q").Set(0x72, "r").Set(0x73, "s").
|
||||
Set(0x74, "t").Set(0x75, "u").Set(0x76, "v").Set(0x77, "w").
|
||||
Set(0x78, "x").Set(0x79, "y").Set(0x7a, "z").Set(0x7b, "{").
|
||||
Set(0x7c, "|").Set(0x7d, "}").Set(0x7e, "~").
|
||||
Set(0xa0, string([]byte{0xC2, 0xA0})).Set(0xa1, "¡").Set(0xa2, "¢").
|
||||
Set(0xa3, "£").Set(0xa4, "$").Set(0xa5, "¥").Set(0xa7, "§").
|
||||
Set(0xa9, "‘").Set(0xaa, "“").Set(0xab, "«").Set(0xac, "←").
|
||||
Set(0xad, "↑").Set(0xae, "→").Set(0xaf, "↓").
|
||||
Set(0xb0, "°").Set(0xb1, "±").Set(0xb2, "²").Set(0xb3, "³").
|
||||
Set(0xb4, "×").Set(0xb5, "µ").Set(0xb6, "¶").Set(0xb7, "·").
|
||||
Set(0xb8, "÷").Set(0xb9, "’").Set(0xba, "”").Set(0xbb, "»").
|
||||
Set(0xbc, "¼").Set(0xbd, "½").Set(0xbe, "¾").Set(0xbf, "¿").
|
||||
Set(0xc1, string([]byte{0xCC, 0x80})).Set(0xc2, string([]byte{0xCC, 0x81})).
|
||||
Set(0xc3, string([]byte{0xCC, 0x82})).Set(0xc4, string([]byte{0xCC, 0x83})).
|
||||
Set(0xc5, string([]byte{0xCC, 0x84})).Set(0xc6, string([]byte{0xCC, 0x86})).
|
||||
Set(0xc7, string([]byte{0xCC, 0x87})).Set(0xc8, string([]byte{0xCC, 0x88})).
|
||||
Set(0xca, string([]byte{0xCC, 0x8A})).Set(0xcb, string([]byte{0xCC, 0xA7})).
|
||||
Set(0xcd, string([]byte{0xCC, 0x8B})).Set(0xce, string([]byte{0xCC, 0xA8})).
|
||||
Set(0xcf, string([]byte{0xCC, 0x8C})).
|
||||
Set(0xd0, "―").Set(0xd1, "¹").Set(0xd2, "®").Set(0xd3, "©").
|
||||
Set(0xd4, "™").Set(0xd5, "♪").Set(0xd6, "¬").Set(0xd7, "¦").
|
||||
Set(0xdc, "⅛").Set(0xdd, "⅜").Set(0xde, "⅝").Set(0xdf, "⅞").
|
||||
Set(0xe0, "Ω").Set(0xe1, "Æ").Set(0xe2, "Đ").Set(0xe3, "ª").
|
||||
Set(0xe4, "Ħ").Set(0xe6, "IJ").Set(0xe7, "Ŀ").Set(0xe8, "Ł").
|
||||
Set(0xe9, "Ø").Set(0xea, "Œ").Set(0xeb, "º").Set(0xec, "Þ").
|
||||
Set(0xed, "Ŧ").Set(0xee, "Ŋ").Set(0xef, "ʼn").
|
||||
Set(0xf0, "ĸ").Set(0xf1, "æ").Set(0xf2, "đ").Set(0xf3, "ð").
|
||||
Set(0xf4, "ħ").Set(0xf5, "ı").Set(0xf6, "ij").Set(0xf7, "ŀ").
|
||||
Set(0xf8, "ł").Set(0xf9, "ø").Set(0xfa, "œ").Set(0xfb, "ß").
|
||||
Set(0xfc, "þ").Set(0xfd, "ŧ").Set(0xfe, "ŋ").Set(0xff, string([]byte{0xC2, 0xAD})),
|
||||
}
|
||||
)
|
||||
|
||||
// STL code page numbers
|
||||
const (
|
||||
stlCodePageNumberCanadaFrench uint32 = 3683891
|
||||
stlCodePageNumberMultilingual = 3683632
|
||||
stlCodePageNumberNordic = 3683893
|
||||
stlCodePageNumberPortugal = 3683888
|
||||
stlCodePageNumberUnitedStates = 3420983
|
||||
)
|
||||
|
||||
// STL comment flag
|
||||
const (
|
||||
stlCommentFlagTextContainsSubtitleData = '\x00'
|
||||
stlCommentFlagTextContainsCommentsNotIntendedForTransmission = '\x01'
|
||||
)
|
||||
|
||||
// STL country codes
|
||||
const (
|
||||
stlCountryCodeFrance = "FRA"
|
||||
)
|
||||
|
||||
// STL cumulative status
|
||||
const (
|
||||
stlCumulativeStatusFirstSubtitleOfACumulativeSet = '\x01'
|
||||
stlCumulativeStatusIntermediateSubtitleOfACumulativeSet = '\x02'
|
||||
stlCumulativeStatusLastSubtitleOfACumulativeSet = '\x03'
|
||||
stlCumulativeStatusSubtitleNotPartOfACumulativeSet = '\x00'
|
||||
)
|
||||
|
||||
// STL display standard code
|
||||
const (
|
||||
stlDisplayStandardCodeOpenSubtitling = "0"
|
||||
stlDisplayStandardCodeLevel1Teletext = "1"
|
||||
stlDisplayStandardCodeLevel2Teletext = "2"
|
||||
)
|
||||
|
||||
// STL framerate mapping
|
||||
var stlFramerateMapping = astimap.NewMap("STL25.01", 25).
|
||||
Set("STL25.01", 25).
|
||||
Set("STL30.01", 30)
|
||||
|
||||
// STL justification code
|
||||
const (
|
||||
stlJustificationCodeCentredText = '\x02'
|
||||
stlJustificationCodeLeftJustifiedText = '\x01'
|
||||
stlJustificationCodeRightJustifiedText = '\x03'
|
||||
stlJustificationCodeUnchangedPresentation = '\x00'
|
||||
)
|
||||
|
||||
// STL language codes
|
||||
const (
|
||||
stlLanguageCodeEnglish = "09"
|
||||
stlLanguageCodeFrench = "0F"
|
||||
)
|
||||
|
||||
// STL language mapping
|
||||
var stlLanguageMapping = astimap.NewMap(stlLanguageCodeEnglish, LanguageEnglish).
|
||||
Set(stlLanguageCodeFrench, LanguageFrench)
|
||||
|
||||
// STL timecode status
|
||||
const (
|
||||
stlTimecodeStatusNotIntendedForUse = "0"
|
||||
stlTimecodeStatusIntendedForUse = "1"
|
||||
)
|
||||
|
||||
// TTI Special Extension Block Number
|
||||
const extensionBlockNumberReservedUserData = 0xfe
|
||||
|
||||
// ReadFromSTL parses an .stl content
|
||||
func ReadFromSTL(i io.Reader) (o *Subtitles, err error) {
|
||||
// Init
|
||||
o = NewSubtitles()
|
||||
|
||||
// Read GSI block
|
||||
var b []byte
|
||||
if b, err = readNBytes(i, stlBlockSizeGSI); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Parse GSI block
|
||||
var g *gsiBlock
|
||||
if g, err = parseGSIBlock(b); err != nil {
|
||||
err = errors.Wrap(err, "astisub: building gsi block failed")
|
||||
return
|
||||
}
|
||||
|
||||
// Create character handler
|
||||
var ch *stlCharacterHandler
|
||||
if ch, err = newSTLCharacterHandler(g.characterCodeTableNumber); err != nil {
|
||||
err = errors.Wrap(err, "astisub: creating stl character handler failed")
|
||||
return
|
||||
}
|
||||
|
||||
// Update metadata
|
||||
// TODO Add more STL fields to metadata
|
||||
o.Metadata = &Metadata{
|
||||
Framerate: g.framerate,
|
||||
Language: stlLanguageMapping.B(g.languageCode).(string),
|
||||
STLPublisher: g.publisher,
|
||||
Title: g.originalProgramTitle,
|
||||
}
|
||||
|
||||
// Parse Text and Timing Information (TTI) blocks.
|
||||
for {
|
||||
// Read TTI block
|
||||
if b, err = readNBytes(i, stlBlockSizeTTI); err != nil {
|
||||
if err == io.EOF {
|
||||
err = nil
|
||||
break
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Parse TTI block
|
||||
var t = parseTTIBlock(b, g.framerate)
|
||||
|
||||
if t.extensionBlockNumber != extensionBlockNumberReservedUserData {
|
||||
|
||||
// Create item
|
||||
var i = &Item{
|
||||
EndAt: t.timecodeOut - g.timecodeStartOfProgramme,
|
||||
StartAt: t.timecodeIn - g.timecodeStartOfProgramme,
|
||||
}
|
||||
|
||||
// Loop through rows
|
||||
for _, text := range bytes.Split(t.text, []byte{0x8a}) {
|
||||
parseTeletextRow(i, ch, func() styler { return newSTLStyler() }, text)
|
||||
}
|
||||
|
||||
// Append item
|
||||
o.Items = append(o.Items, i)
|
||||
}
|
||||
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// readNBytes reads n bytes
|
||||
func readNBytes(i io.Reader, c int) (o []byte, err error) {
|
||||
o = make([]byte, c)
|
||||
var n int
|
||||
if n, err = i.Read(o); err != nil || n != len(o) {
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
return
|
||||
}
|
||||
err = errors.Wrapf(err, "astisub: reading %d bytes failed", c)
|
||||
return
|
||||
}
|
||||
err = fmt.Errorf("astisub: Read %d bytes, should have read %d", n, c)
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// gsiBlock represents a GSI block
|
||||
type gsiBlock struct {
|
||||
characterCodeTableNumber uint16
|
||||
codePageNumber uint32
|
||||
countryOfOrigin string
|
||||
creationDate time.Time
|
||||
diskSequenceNumber int
|
||||
displayStandardCode string
|
||||
editorContactDetails string
|
||||
editorName string
|
||||
framerate int
|
||||
languageCode string
|
||||
maximumNumberOfDisplayableCharactersInAnyTextRow int
|
||||
maximumNumberOfDisplayableRows int
|
||||
originalEpisodeTitle string
|
||||
originalProgramTitle string
|
||||
publisher string
|
||||
revisionDate time.Time
|
||||
revisionNumber int
|
||||
subtitleListReferenceCode string
|
||||
timecodeFirstInCue time.Duration
|
||||
timecodeStartOfProgramme time.Duration
|
||||
timecodeStatus string
|
||||
totalNumberOfDisks int
|
||||
totalNumberOfSubtitleGroups int
|
||||
totalNumberOfSubtitles int
|
||||
totalNumberOfTTIBlocks int
|
||||
translatedEpisodeTitle string
|
||||
translatedProgramTitle string
|
||||
translatorContactDetails string
|
||||
translatorName string
|
||||
userDefinedArea string
|
||||
}
|
||||
|
||||
// newGSIBlock builds the subtitles GSI block
|
||||
func newGSIBlock(s Subtitles) (g *gsiBlock) {
|
||||
// Init
|
||||
g = &gsiBlock{
|
||||
characterCodeTableNumber: stlCharacterCodeTableNumberLatin,
|
||||
codePageNumber: stlCodePageNumberMultilingual,
|
||||
countryOfOrigin: stlCountryCodeFrance,
|
||||
creationDate: Now(),
|
||||
diskSequenceNumber: 1,
|
||||
displayStandardCode: stlDisplayStandardCodeLevel1Teletext,
|
||||
framerate: 25,
|
||||
languageCode: stlLanguageCodeFrench,
|
||||
maximumNumberOfDisplayableCharactersInAnyTextRow: 40,
|
||||
maximumNumberOfDisplayableRows: 23,
|
||||
subtitleListReferenceCode: "12345678",
|
||||
timecodeStatus: stlTimecodeStatusIntendedForUse,
|
||||
totalNumberOfDisks: 1,
|
||||
totalNumberOfSubtitleGroups: 1,
|
||||
totalNumberOfSubtitles: len(s.Items),
|
||||
totalNumberOfTTIBlocks: len(s.Items),
|
||||
}
|
||||
|
||||
// Add metadata
|
||||
if s.Metadata != nil {
|
||||
g.framerate = s.Metadata.Framerate
|
||||
g.languageCode = stlLanguageMapping.A(s.Metadata.Language).(string)
|
||||
g.originalProgramTitle = s.Metadata.Title
|
||||
g.publisher = s.Metadata.STLPublisher
|
||||
}
|
||||
|
||||
// Timecode first in cue
|
||||
if len(s.Items) > 0 {
|
||||
g.timecodeFirstInCue = s.Items[0].StartAt
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// parseGSIBlock parses a GSI block
|
||||
func parseGSIBlock(b []byte) (g *gsiBlock, err error) {
|
||||
// Init
|
||||
g = &gsiBlock{
|
||||
characterCodeTableNumber: binary.BigEndian.Uint16(b[12:14]),
|
||||
countryOfOrigin: string(bytes.TrimSpace(b[274:277])),
|
||||
codePageNumber: binary.BigEndian.Uint32(append([]byte{0x0}, b[0:3]...)),
|
||||
displayStandardCode: string(bytes.TrimSpace([]byte{b[11]})),
|
||||
editorName: string(bytes.TrimSpace(b[309:341])),
|
||||
editorContactDetails: string(bytes.TrimSpace(b[341:373])),
|
||||
framerate: stlFramerateMapping.B(string(b[3:11])).(int),
|
||||
languageCode: string(bytes.TrimSpace(b[14:16])),
|
||||
originalEpisodeTitle: string(bytes.TrimSpace(b[48:80])),
|
||||
originalProgramTitle: string(bytes.TrimSpace(b[16:48])),
|
||||
publisher: string(bytes.TrimSpace(b[277:309])),
|
||||
subtitleListReferenceCode: string(bytes.TrimSpace(b[208:224])),
|
||||
timecodeStatus: string(bytes.TrimSpace([]byte{b[255]})),
|
||||
translatedEpisodeTitle: string(bytes.TrimSpace(b[80:112])),
|
||||
translatedProgramTitle: string(bytes.TrimSpace(b[112:144])),
|
||||
translatorContactDetails: string(bytes.TrimSpace(b[176:208])),
|
||||
translatorName: string(bytes.TrimSpace(b[144:176])),
|
||||
userDefinedArea: string(bytes.TrimSpace(b[448:])),
|
||||
}
|
||||
|
||||
// Creation date
|
||||
if v := strings.TrimSpace(string(b[224:230])); len(v) > 0 {
|
||||
if g.creationDate, err = time.Parse("060102", v); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: parsing date %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Revision date
|
||||
if v := strings.TrimSpace(string(b[230:236])); len(v) > 0 {
|
||||
if g.revisionDate, err = time.Parse("060102", v); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: parsing date %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Revision number
|
||||
if v := strings.TrimSpace(string(b[236:238])); len(v) > 0 {
|
||||
if g.revisionNumber, err = strconv.Atoi(v); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Total number of TTI blocks
|
||||
if v := strings.TrimSpace(string(b[238:243])); len(v) > 0 {
|
||||
if g.totalNumberOfTTIBlocks, err = strconv.Atoi(v); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Total number of subtitles
|
||||
if v := strings.TrimSpace(string(b[243:248])); len(v) > 0 {
|
||||
if g.totalNumberOfSubtitles, err = strconv.Atoi(v); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Total number of subtitle groups
|
||||
if v := strings.TrimSpace(string(b[248:251])); len(v) > 0 {
|
||||
if g.totalNumberOfSubtitleGroups, err = strconv.Atoi(v); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Maximum number of displayable characters in any text row
|
||||
if v := strings.TrimSpace(string(b[251:253])); len(v) > 0 {
|
||||
if g.maximumNumberOfDisplayableCharactersInAnyTextRow, err = strconv.Atoi(v); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Maximum number of displayable rows
|
||||
if v := strings.TrimSpace(string(b[253:255])); len(v) > 0 {
|
||||
if g.maximumNumberOfDisplayableRows, err = strconv.Atoi(v); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Timecode start of programme
|
||||
if v := strings.TrimSpace(string(b[256:264])); len(v) > 0 {
|
||||
if g.timecodeStartOfProgramme, err = parseDurationSTL(v, g.framerate); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: parsing of stl duration %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Timecode first in cue
|
||||
if v := strings.TrimSpace(string(b[264:272])); len(v) > 0 {
|
||||
if g.timecodeFirstInCue, err = parseDurationSTL(v, g.framerate); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: parsing of stl duration %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Total number of disks
|
||||
if v := strings.TrimSpace(string(b[272])); len(v) > 0 {
|
||||
if g.totalNumberOfDisks, err = strconv.Atoi(v); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Disk sequence number
|
||||
if v := strings.TrimSpace(string(b[273])); len(v) > 0 {
|
||||
if g.diskSequenceNumber, err = strconv.Atoi(v); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", v)
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// bytes transforms the GSI block into []byte
|
||||
func (b gsiBlock) bytes() (o []byte) {
|
||||
bs := make([]byte, 4)
|
||||
binary.BigEndian.PutUint32(bs, b.codePageNumber)
|
||||
o = append(o, astibyte.ToLength(bs[1:], ' ', 3)...) // Code page number
|
||||
o = append(o, astibyte.ToLength([]byte(stlFramerateMapping.A(b.framerate).(string)), ' ', 8)...) // Disk format code
|
||||
o = append(o, astibyte.ToLength([]byte(b.displayStandardCode), ' ', 1)...) // Display standard code
|
||||
binary.BigEndian.PutUint16(bs, b.characterCodeTableNumber)
|
||||
o = append(o, astibyte.ToLength(bs[:2], ' ', 2)...) // Character code table number
|
||||
o = append(o, astibyte.ToLength([]byte(b.languageCode), ' ', 2)...) // Language code
|
||||
o = append(o, astibyte.ToLength([]byte(b.originalProgramTitle), ' ', 32)...) // Original program title
|
||||
o = append(o, astibyte.ToLength([]byte(b.originalEpisodeTitle), ' ', 32)...) // Original episode title
|
||||
o = append(o, astibyte.ToLength([]byte(b.translatedProgramTitle), ' ', 32)...) // Translated program title
|
||||
o = append(o, astibyte.ToLength([]byte(b.translatedEpisodeTitle), ' ', 32)...) // Translated episode title
|
||||
o = append(o, astibyte.ToLength([]byte(b.translatorName), ' ', 32)...) // Translator's name
|
||||
o = append(o, astibyte.ToLength([]byte(b.translatorContactDetails), ' ', 32)...) // Translator's contact details
|
||||
o = append(o, astibyte.ToLength([]byte(b.subtitleListReferenceCode), ' ', 16)...) // Subtitle list reference code
|
||||
o = append(o, astibyte.ToLength([]byte(b.creationDate.Format("060102")), ' ', 6)...) // Creation date
|
||||
o = append(o, astibyte.ToLength([]byte(b.revisionDate.Format("060102")), ' ', 6)...) // Revision date
|
||||
o = append(o, astibyte.ToLength(astibyte.PadLeft([]byte(strconv.Itoa(b.revisionNumber)), '0', 2), '0', 2)...) // Revision number
|
||||
o = append(o, astibyte.ToLength(astibyte.PadLeft([]byte(strconv.Itoa(b.totalNumberOfTTIBlocks)), '0', 5), '0', 5)...) // Total number of TTI blocks
|
||||
o = append(o, astibyte.ToLength(astibyte.PadLeft([]byte(strconv.Itoa(b.totalNumberOfSubtitles)), '0', 5), '0', 5)...) // Total number of subtitles
|
||||
o = append(o, astibyte.ToLength(astibyte.PadLeft([]byte(strconv.Itoa(b.totalNumberOfSubtitleGroups)), '0', 3), '0', 3)...) // Total number of subtitle groups
|
||||
o = append(o, astibyte.ToLength(astibyte.PadLeft([]byte(strconv.Itoa(b.maximumNumberOfDisplayableCharactersInAnyTextRow)), '0', 2), '0', 2)...) // Maximum number of displayable characters in any text row
|
||||
o = append(o, astibyte.ToLength(astibyte.PadLeft([]byte(strconv.Itoa(b.maximumNumberOfDisplayableRows)), '0', 2), '0', 2)...) // Maximum number of displayable rows
|
||||
o = append(o, astibyte.ToLength([]byte(b.timecodeStatus), ' ', 1)...) // Timecode status
|
||||
o = append(o, astibyte.ToLength([]byte(formatDurationSTL(b.timecodeStartOfProgramme, b.framerate)), ' ', 8)...) // Timecode start of a programme
|
||||
o = append(o, astibyte.ToLength([]byte(formatDurationSTL(b.timecodeFirstInCue, b.framerate)), ' ', 8)...) // Timecode first in cue
|
||||
o = append(o, astibyte.ToLength([]byte(strconv.Itoa(b.totalNumberOfDisks)), ' ', 1)...) // Total number of disks
|
||||
o = append(o, astibyte.ToLength([]byte(strconv.Itoa(b.diskSequenceNumber)), ' ', 1)...) // Disk sequence number
|
||||
o = append(o, astibyte.ToLength([]byte(b.countryOfOrigin), ' ', 3)...) // Country of origin
|
||||
o = append(o, astibyte.ToLength([]byte(b.publisher), ' ', 32)...) // Publisher
|
||||
o = append(o, astibyte.ToLength([]byte(b.editorName), ' ', 32)...) // Editor's name
|
||||
o = append(o, astibyte.ToLength([]byte(b.editorContactDetails), ' ', 32)...) // Editor's contact details
|
||||
o = append(o, astibyte.ToLength([]byte{}, ' ', 75+576)...) // Spare bytes + user defined area // // Editor's contact details
|
||||
return
|
||||
}
|
||||
|
||||
// parseDurationSTL parses a STL duration
|
||||
func parseDurationSTL(i string, framerate int) (d time.Duration, err error) {
|
||||
// Parse hours
|
||||
var hours, hoursString = 0, i[0:2]
|
||||
if hours, err = strconv.Atoi(hoursString); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", hoursString)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse minutes
|
||||
var minutes, minutesString = 0, i[2:4]
|
||||
if minutes, err = strconv.Atoi(minutesString); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", minutesString)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse seconds
|
||||
var seconds, secondsString = 0, i[4:6]
|
||||
if seconds, err = strconv.Atoi(secondsString); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", secondsString)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse frames
|
||||
var frames, framesString = 0, i[6:8]
|
||||
if frames, err = strconv.Atoi(framesString); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", framesString)
|
||||
return
|
||||
}
|
||||
|
||||
// Set duration
|
||||
d = time.Duration(hours)*time.Hour + time.Duration(minutes)*time.Minute + time.Duration(seconds)*time.Second + time.Duration(1e9*frames/framerate)*time.Nanosecond
|
||||
return
|
||||
}
|
||||
|
||||
// formatDurationSTL formats a STL duration
|
||||
func formatDurationSTL(d time.Duration, framerate int) (o string) {
|
||||
// Add hours
|
||||
if d.Hours() < 10 {
|
||||
o += "0"
|
||||
}
|
||||
var delta = int(math.Floor(d.Hours()))
|
||||
o += strconv.Itoa(delta)
|
||||
d -= time.Duration(delta) * time.Hour
|
||||
|
||||
// Add minutes
|
||||
if d.Minutes() < 10 {
|
||||
o += "0"
|
||||
}
|
||||
delta = int(math.Floor(d.Minutes()))
|
||||
o += strconv.Itoa(delta)
|
||||
d -= time.Duration(delta) * time.Minute
|
||||
|
||||
// Add seconds
|
||||
if d.Seconds() < 10 {
|
||||
o += "0"
|
||||
}
|
||||
delta = int(math.Floor(d.Seconds()))
|
||||
o += strconv.Itoa(delta)
|
||||
d -= time.Duration(delta) * time.Second
|
||||
|
||||
// Add frames
|
||||
var frames = int(int(d.Nanoseconds()) * framerate / 1e9)
|
||||
if frames < 10 {
|
||||
o += "0"
|
||||
}
|
||||
o += strconv.Itoa(frames)
|
||||
return
|
||||
}
|
||||
|
||||
// ttiBlock represents a TTI block
|
||||
type ttiBlock struct {
|
||||
commentFlag byte
|
||||
cumulativeStatus byte
|
||||
extensionBlockNumber int
|
||||
justificationCode byte
|
||||
subtitleGroupNumber int
|
||||
subtitleNumber int
|
||||
text []byte
|
||||
timecodeIn time.Duration
|
||||
timecodeOut time.Duration
|
||||
verticalPosition int
|
||||
}
|
||||
|
||||
// newTTIBlock builds an item TTI block
|
||||
func newTTIBlock(i *Item, idx int) (t *ttiBlock) {
|
||||
// Init
|
||||
t = &ttiBlock{
|
||||
commentFlag: stlCommentFlagTextContainsSubtitleData,
|
||||
cumulativeStatus: stlCumulativeStatusSubtitleNotPartOfACumulativeSet,
|
||||
extensionBlockNumber: 255,
|
||||
justificationCode: stlJustificationCodeLeftJustifiedText,
|
||||
subtitleGroupNumber: 0,
|
||||
subtitleNumber: idx,
|
||||
timecodeIn: i.StartAt,
|
||||
timecodeOut: i.EndAt,
|
||||
verticalPosition: 20,
|
||||
}
|
||||
|
||||
// Add text
|
||||
var lines []string
|
||||
for _, l := range i.Lines {
|
||||
lines = append(lines, l.String())
|
||||
}
|
||||
t.text = []byte(strings.Join(lines, "\n"))
|
||||
return
|
||||
}
|
||||
|
||||
// parseTTIBlock parses a TTI block
|
||||
func parseTTIBlock(p []byte, framerate int) *ttiBlock {
|
||||
return &ttiBlock{
|
||||
commentFlag: p[15],
|
||||
cumulativeStatus: p[4],
|
||||
extensionBlockNumber: int(uint8(p[3])),
|
||||
justificationCode: p[14],
|
||||
subtitleGroupNumber: int(uint8(p[0])),
|
||||
subtitleNumber: int(binary.LittleEndian.Uint16(p[1:3])),
|
||||
text: p[16:128],
|
||||
timecodeIn: parseDurationSTLBytes(p[5:9], framerate),
|
||||
timecodeOut: parseDurationSTLBytes(p[9:13], framerate),
|
||||
verticalPosition: int(uint8(p[13])),
|
||||
}
|
||||
}
|
||||
|
||||
// bytes transforms the TTI block into []byte
|
||||
func (t *ttiBlock) bytes(g *gsiBlock) (o []byte) {
|
||||
o = append(o, byte(uint8(t.subtitleGroupNumber))) // Subtitle group number
|
||||
var b = make([]byte, 2)
|
||||
binary.LittleEndian.PutUint16(b, uint16(t.subtitleNumber))
|
||||
o = append(o, b...) // Subtitle number
|
||||
o = append(o, byte(uint8(t.extensionBlockNumber))) // Extension block number
|
||||
o = append(o, t.cumulativeStatus) // Cumulative status
|
||||
o = append(o, formatDurationSTLBytes(t.timecodeIn, g.framerate)...) // Timecode in
|
||||
o = append(o, formatDurationSTLBytes(t.timecodeOut, g.framerate)...) // Timecode out
|
||||
o = append(o, byte(uint8(t.verticalPosition))) // Vertical position
|
||||
o = append(o, t.justificationCode) // Justification code
|
||||
o = append(o, t.commentFlag) // Comment flag
|
||||
o = append(o, astibyte.ToLength(encodeTextSTL(string(t.text)), '\x8f', 112)...) // Text field
|
||||
return
|
||||
}
|
||||
|
||||
// formatDurationSTLBytes formats a STL duration in bytes
|
||||
func formatDurationSTLBytes(d time.Duration, framerate int) (o []byte) {
|
||||
// Add hours
|
||||
var hours = int(math.Floor(d.Hours()))
|
||||
o = append(o, byte(uint8(hours)))
|
||||
d -= time.Duration(hours) * time.Hour
|
||||
|
||||
// Add minutes
|
||||
var minutes = int(math.Floor(d.Minutes()))
|
||||
o = append(o, byte(uint8(minutes)))
|
||||
d -= time.Duration(minutes) * time.Minute
|
||||
|
||||
// Add seconds
|
||||
var seconds = int(math.Floor(d.Seconds()))
|
||||
o = append(o, byte(uint8(seconds)))
|
||||
d -= time.Duration(seconds) * time.Second
|
||||
|
||||
// Add frames
|
||||
var frames = int(int(d.Nanoseconds()) * framerate / 1e9)
|
||||
o = append(o, byte(uint8(frames)))
|
||||
return
|
||||
}
|
||||
|
||||
// parseDurationSTLBytes parses a STL duration in bytes
|
||||
func parseDurationSTLBytes(b []byte, framerate int) time.Duration {
|
||||
return time.Duration(uint8(b[0]))*time.Hour + time.Duration(uint8(b[1]))*time.Minute + time.Duration(uint8(b[2]))*time.Second + time.Duration(1e9*int(uint8(b[3]))/framerate)*time.Nanosecond
|
||||
}
|
||||
|
||||
type stlCharacterHandler struct {
|
||||
accent string
|
||||
c uint16
|
||||
m *astimap.Map
|
||||
}
|
||||
|
||||
func newSTLCharacterHandler(characterCodeTable uint16) (*stlCharacterHandler, error) {
|
||||
if v, ok := stlCharacterCodeTables[characterCodeTable]; ok {
|
||||
return &stlCharacterHandler{
|
||||
c: characterCodeTable,
|
||||
m: v,
|
||||
}, nil
|
||||
}
|
||||
return nil, fmt.Errorf("astisub: table doesn't exist for character code table %d", characterCodeTable)
|
||||
}
|
||||
|
||||
// TODO Use this instead of encodeTextSTL => use in teletext process like for decode
|
||||
// TODO Test
|
||||
func (h *stlCharacterHandler) encode(i []byte) byte {
|
||||
return ' '
|
||||
}
|
||||
|
||||
func (h *stlCharacterHandler) decode(i byte) (o []byte) {
|
||||
k := int(i)
|
||||
if !h.m.InA(k) {
|
||||
return
|
||||
}
|
||||
v := h.m.B(k).(string)
|
||||
if len(h.accent) > 0 {
|
||||
o = norm.NFC.Bytes([]byte(v + h.accent))
|
||||
h.accent = ""
|
||||
return
|
||||
} else if h.c == stlCharacterCodeTableNumberLatin && k >= 0xc0 && k <= 0xcf {
|
||||
h.accent = v
|
||||
return
|
||||
}
|
||||
return []byte(v)
|
||||
}
|
||||
|
||||
type stlStyler struct {
|
||||
boxing *bool
|
||||
italics *bool
|
||||
underline *bool
|
||||
}
|
||||
|
||||
func newSTLStyler() *stlStyler {
|
||||
return &stlStyler{}
|
||||
}
|
||||
|
||||
func (s *stlStyler) parseSpacingAttribute(i byte) {
|
||||
switch i {
|
||||
case 0x80:
|
||||
s.italics = astiptr.Bool(true)
|
||||
case 0x81:
|
||||
s.italics = astiptr.Bool(false)
|
||||
case 0x82:
|
||||
s.underline = astiptr.Bool(true)
|
||||
case 0x83:
|
||||
s.underline = astiptr.Bool(false)
|
||||
case 0x84:
|
||||
s.boxing = astiptr.Bool(true)
|
||||
case 0x85:
|
||||
s.boxing = astiptr.Bool(false)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *stlStyler) hasBeenSet() bool {
|
||||
return s.italics != nil || s.boxing != nil || s.underline != nil
|
||||
}
|
||||
|
||||
func (s *stlStyler) hasChanged(sa *StyleAttributes) bool {
|
||||
return s.boxing != sa.STLBoxing || s.italics != sa.STLItalics || s.underline != sa.STLUnderline
|
||||
}
|
||||
|
||||
func (s *stlStyler) propagateStyleAttributes(sa *StyleAttributes) {
|
||||
sa.propagateSTLAttributes()
|
||||
}
|
||||
|
||||
func (s *stlStyler) update(sa *StyleAttributes) {
|
||||
if s.boxing != nil && s.boxing != sa.STLBoxing {
|
||||
sa.STLBoxing = s.boxing
|
||||
}
|
||||
if s.italics != nil && s.italics != sa.STLItalics {
|
||||
sa.STLItalics = s.italics
|
||||
}
|
||||
if s.underline != nil && s.underline != sa.STLUnderline {
|
||||
sa.STLUnderline = s.underline
|
||||
}
|
||||
}
|
||||
|
||||
// WriteToSTL writes subtitles in .stl format
|
||||
func (s Subtitles) WriteToSTL(o io.Writer) (err error) {
|
||||
// Do not write anything if no subtitles
|
||||
if len(s.Items) == 0 {
|
||||
err = ErrNoSubtitlesToWrite
|
||||
return
|
||||
}
|
||||
|
||||
// Write GSI block
|
||||
var g = newGSIBlock(s)
|
||||
if _, err = o.Write(g.bytes()); err != nil {
|
||||
err = errors.Wrap(err, "astisub: writing gsi block failed")
|
||||
return
|
||||
}
|
||||
|
||||
// Loop through items
|
||||
for idx, item := range s.Items {
|
||||
// Write tti block
|
||||
if _, err = o.Write(newTTIBlock(item, idx+1).bytes(g)); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: writing tti block #%d failed", idx+1)
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// TODO Remove below
|
||||
|
||||
// STL unicode diacritic
|
||||
var stlUnicodeDiacritic = astimap.NewMap(byte('\x00'), "\x00").
|
||||
Set(byte('\xc1'), "\u0300"). // Grave accent
|
||||
Set(byte('\xc2'), "\u0301"). // Acute accent
|
||||
Set(byte('\xc3'), "\u0302"). // Circumflex
|
||||
Set(byte('\xc4'), "\u0303"). // Tilde
|
||||
Set(byte('\xc5'), "\u0304"). // Macron
|
||||
Set(byte('\xc6'), "\u0306"). // Breve
|
||||
Set(byte('\xc7'), "\u0307"). // Dot
|
||||
Set(byte('\xc8'), "\u0308"). // Umlaut
|
||||
Set(byte('\xca'), "\u030a"). // Ring
|
||||
Set(byte('\xcb'), "\u0327"). // Cedilla
|
||||
Set(byte('\xcd'), "\u030B"). // Double acute accent
|
||||
Set(byte('\xce'), "\u0328"). // Ogonek
|
||||
Set(byte('\xcf'), "\u030c") // Caron
|
||||
|
||||
// STL unicode mapping
|
||||
var stlUnicodeMapping = astimap.NewMap(byte('\x00'), "\x00").
|
||||
Set(byte('\x8a'), "\u000a"). // Line break
|
||||
Set(byte('\xa8'), "\u00a4"). // ¤
|
||||
Set(byte('\xa9'), "\u2018"). // ‘
|
||||
Set(byte('\xaa'), "\u201C"). // “
|
||||
Set(byte('\xab'), "\u00AB"). // «
|
||||
Set(byte('\xac'), "\u2190"). // ←
|
||||
Set(byte('\xad'), "\u2191"). // ↑
|
||||
Set(byte('\xae'), "\u2192"). // →
|
||||
Set(byte('\xaf'), "\u2193"). // ↓
|
||||
Set(byte('\xb4'), "\u00D7"). // ×
|
||||
Set(byte('\xb8'), "\u00F7"). // ÷
|
||||
Set(byte('\xb9'), "\u2019"). // ’
|
||||
Set(byte('\xba'), "\u201D"). // ”
|
||||
Set(byte('\xbc'), "\u00BC"). // ¼
|
||||
Set(byte('\xbd'), "\u00BD"). // ½
|
||||
Set(byte('\xbe'), "\u00BE"). // ¾
|
||||
Set(byte('\xbf'), "\u00BF"). // ¿
|
||||
Set(byte('\xd0'), "\u2015"). // ―
|
||||
Set(byte('\xd1'), "\u00B9"). // ¹
|
||||
Set(byte('\xd2'), "\u00AE"). // ®
|
||||
Set(byte('\xd3'), "\u00A9"). // ©
|
||||
Set(byte('\xd4'), "\u2122"). // ™
|
||||
Set(byte('\xd5'), "\u266A"). // ♪
|
||||
Set(byte('\xd6'), "\u00AC"). // ¬
|
||||
Set(byte('\xd7'), "\u00A6"). // ¦
|
||||
Set(byte('\xdc'), "\u215B"). // ⅛
|
||||
Set(byte('\xdd'), "\u215C"). // ⅜
|
||||
Set(byte('\xde'), "\u215D"). // ⅝
|
||||
Set(byte('\xdf'), "\u215E"). // ⅞
|
||||
Set(byte('\xe0'), "\u2126"). // Ohm Ω
|
||||
Set(byte('\xe1'), "\u00C6"). // Æ
|
||||
Set(byte('\xe2'), "\u0110"). // Đ
|
||||
Set(byte('\xe3'), "\u00AA"). // ª
|
||||
Set(byte('\xe4'), "\u0126"). // Ħ
|
||||
Set(byte('\xe6'), "\u0132"). // IJ
|
||||
Set(byte('\xe7'), "\u013F"). // Ŀ
|
||||
Set(byte('\xe8'), "\u0141"). // Ł
|
||||
Set(byte('\xe9'), "\u00D8"). // Ø
|
||||
Set(byte('\xea'), "\u0152"). // Œ
|
||||
Set(byte('\xeb'), "\u00BA"). // º
|
||||
Set(byte('\xec'), "\u00DE"). // Þ
|
||||
Set(byte('\xed'), "\u0166"). // Ŧ
|
||||
Set(byte('\xee'), "\u014A"). // Ŋ
|
||||
Set(byte('\xef'), "\u0149"). // ʼn
|
||||
Set(byte('\xf0'), "\u0138"). // ĸ
|
||||
Set(byte('\xf1'), "\u00E6"). // æ
|
||||
Set(byte('\xf2'), "\u0111"). // đ
|
||||
Set(byte('\xf3'), "\u00F0"). // ð
|
||||
Set(byte('\xf4'), "\u0127"). // ħ
|
||||
Set(byte('\xf5'), "\u0131"). // ı
|
||||
Set(byte('\xf6'), "\u0133"). // ij
|
||||
Set(byte('\xf7'), "\u0140"). // ŀ
|
||||
Set(byte('\xf8'), "\u0142"). // ł
|
||||
Set(byte('\xf9'), "\u00F8"). // ø
|
||||
Set(byte('\xfa'), "\u0153"). // œ
|
||||
Set(byte('\xfb'), "\u00DF"). // ß
|
||||
Set(byte('\xfc'), "\u00FE"). // þ
|
||||
Set(byte('\xfd'), "\u0167"). // ŧ
|
||||
Set(byte('\xfe'), "\u014B"). // ŋ
|
||||
Set(byte('\xff'), "\u00AD") // Soft hyphen
|
||||
|
||||
// encodeTextSTL encodes the STL text
|
||||
func encodeTextSTL(i string) (o []byte) {
|
||||
i = string(norm.NFD.Bytes([]byte(i)))
|
||||
for _, c := range i {
|
||||
if stlUnicodeMapping.InB(string(c)) {
|
||||
o = append(o, stlUnicodeMapping.A(string(c)).(byte))
|
||||
} else if stlUnicodeDiacritic.InB(string(c)) {
|
||||
o = append(o[:len(o)-1], stlUnicodeDiacritic.A(string(c)).(byte), o[len(o)-1])
|
||||
} else {
|
||||
o = append(o, byte(c))
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
702
vendor/github.com/asticode/go-astisub/subtitles.go
generated
vendored
Normal file
702
vendor/github.com/asticode/go-astisub/subtitles.go
generated
vendored
Normal file
@@ -0,0 +1,702 @@
|
||||
package astisub
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// Bytes
|
||||
var (
|
||||
BytesBOM = []byte{239, 187, 191}
|
||||
bytesLineSeparator = []byte("\n")
|
||||
bytesSpace = []byte(" ")
|
||||
)
|
||||
|
||||
// Colors
|
||||
var (
|
||||
ColorBlack = &Color{}
|
||||
ColorBlue = &Color{Blue: 255}
|
||||
ColorCyan = &Color{Blue: 255, Green: 255}
|
||||
ColorGray = &Color{Blue: 128, Green: 128, Red: 128}
|
||||
ColorGreen = &Color{Green: 128}
|
||||
ColorLime = &Color{Green: 255}
|
||||
ColorMagenta = &Color{Blue: 255, Red: 255}
|
||||
ColorMaroon = &Color{Red: 128}
|
||||
ColorNavy = &Color{Blue: 128}
|
||||
ColorOlive = &Color{Green: 128, Red: 128}
|
||||
ColorPurple = &Color{Blue: 128, Red: 128}
|
||||
ColorRed = &Color{Red: 255}
|
||||
ColorSilver = &Color{Blue: 192, Green: 192, Red: 192}
|
||||
ColorTeal = &Color{Blue: 128, Green: 128}
|
||||
ColorYellow = &Color{Green: 255, Red: 255}
|
||||
ColorWhite = &Color{Blue: 255, Green: 255, Red: 255}
|
||||
)
|
||||
|
||||
// Errors
|
||||
var (
|
||||
ErrInvalidExtension = errors.New("astisub: invalid extension")
|
||||
ErrNoSubtitlesToWrite = errors.New("astisub: no subtitles to write")
|
||||
)
|
||||
|
||||
// Now allows testing functions using it
|
||||
var Now = func() time.Time {
|
||||
return time.Now()
|
||||
}
|
||||
|
||||
// Options represents open or write options
|
||||
type Options struct {
|
||||
Filename string
|
||||
Teletext TeletextOptions
|
||||
}
|
||||
|
||||
// Open opens a subtitle reader based on options
|
||||
func Open(o Options) (s *Subtitles, err error) {
|
||||
// Open the file
|
||||
var f *os.File
|
||||
if f, err = os.Open(o.Filename); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: opening %s failed", o.Filename)
|
||||
return
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
// Parse the content
|
||||
switch filepath.Ext(o.Filename) {
|
||||
case ".srt":
|
||||
s, err = ReadFromSRT(f)
|
||||
case ".ssa", ".ass":
|
||||
s, err = ReadFromSSA(f)
|
||||
case ".stl":
|
||||
s, err = ReadFromSTL(f)
|
||||
case ".ts":
|
||||
s, err = ReadFromTeletext(f, o.Teletext)
|
||||
case ".ttml":
|
||||
s, err = ReadFromTTML(f)
|
||||
case ".vtt":
|
||||
s, err = ReadFromWebVTT(f)
|
||||
default:
|
||||
err = ErrInvalidExtension
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// OpenFile opens a file regardless of other options
|
||||
func OpenFile(filename string) (*Subtitles, error) {
|
||||
return Open(Options{Filename: filename})
|
||||
}
|
||||
|
||||
// Subtitles represents an ordered list of items with formatting
|
||||
type Subtitles struct {
|
||||
Items []*Item
|
||||
Metadata *Metadata
|
||||
Regions map[string]*Region
|
||||
Styles map[string]*Style
|
||||
}
|
||||
|
||||
// NewSubtitles creates new subtitles
|
||||
func NewSubtitles() *Subtitles {
|
||||
return &Subtitles{
|
||||
Regions: make(map[string]*Region),
|
||||
Styles: make(map[string]*Style),
|
||||
}
|
||||
}
|
||||
|
||||
// Item represents a text to show between 2 time boundaries with formatting
|
||||
type Item struct {
|
||||
Comments []string
|
||||
EndAt time.Duration
|
||||
InlineStyle *StyleAttributes
|
||||
Lines []Line
|
||||
Region *Region
|
||||
StartAt time.Duration
|
||||
Style *Style
|
||||
}
|
||||
|
||||
// String implements the Stringer interface
|
||||
func (i Item) String() string {
|
||||
var os []string
|
||||
for _, l := range i.Lines {
|
||||
os = append(os, l.String())
|
||||
}
|
||||
return strings.Join(os, " - ")
|
||||
}
|
||||
|
||||
// Color represents a color
|
||||
type Color struct {
|
||||
Alpha, Blue, Green, Red uint8
|
||||
}
|
||||
|
||||
// newColorFromSSAString builds a new color based on an SSA string
|
||||
func newColorFromSSAString(s string, base int) (c *Color, err error) {
|
||||
var i int64
|
||||
if i, err = strconv.ParseInt(s, base, 64); err != nil {
|
||||
err = errors.Wrapf(err, "parsing int %s with base %d failed", s, base)
|
||||
return
|
||||
}
|
||||
c = &Color{
|
||||
Alpha: uint8(i>>24) & 0xff,
|
||||
Blue: uint8(i>>16) & 0xff,
|
||||
Green: uint8(i>>8) & 0xff,
|
||||
Red: uint8(i) & 0xff,
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// SSAString expresses the color as an SSA string
|
||||
func (c *Color) SSAString() string {
|
||||
return fmt.Sprintf("%.8x", uint32(c.Alpha)<<24|uint32(c.Blue)<<16|uint32(c.Green)<<8|uint32(c.Red))
|
||||
}
|
||||
|
||||
// TTMLString expresses the color as a TTML string
|
||||
func (c *Color) TTMLString() string {
|
||||
return fmt.Sprintf("%.6x", uint32(c.Red)<<16|uint32(c.Green)<<8|uint32(c.Blue))
|
||||
}
|
||||
|
||||
// StyleAttributes represents style attributes
|
||||
type StyleAttributes struct {
|
||||
SSAAlignment *int
|
||||
SSAAlphaLevel *float64
|
||||
SSAAngle *float64 // degrees
|
||||
SSABackColour *Color
|
||||
SSABold *bool
|
||||
SSABorderStyle *int
|
||||
SSAEffect string
|
||||
SSAEncoding *int
|
||||
SSAFontName string
|
||||
SSAFontSize *float64
|
||||
SSAItalic *bool
|
||||
SSALayer *int
|
||||
SSAMarginLeft *int // pixels
|
||||
SSAMarginRight *int // pixels
|
||||
SSAMarginVertical *int // pixels
|
||||
SSAMarked *bool
|
||||
SSAOutline *int // pixels
|
||||
SSAOutlineColour *Color
|
||||
SSAPrimaryColour *Color
|
||||
SSAScaleX *float64 // %
|
||||
SSAScaleY *float64 // %
|
||||
SSASecondaryColour *Color
|
||||
SSAShadow *int // pixels
|
||||
SSASpacing *int // pixels
|
||||
SSAStrikeout *bool
|
||||
SSAUnderline *bool
|
||||
STLBoxing *bool
|
||||
STLItalics *bool
|
||||
STLUnderline *bool
|
||||
TeletextColor *Color
|
||||
TeletextDoubleHeight *bool
|
||||
TeletextDoubleSize *bool
|
||||
TeletextDoubleWidth *bool
|
||||
TeletextSpacesAfter *int
|
||||
TeletextSpacesBefore *int
|
||||
// TODO Use pointers with real types below
|
||||
TTMLBackgroundColor string // https://htmlcolorcodes.com/fr/
|
||||
TTMLColor string
|
||||
TTMLDirection string
|
||||
TTMLDisplay string
|
||||
TTMLDisplayAlign string
|
||||
TTMLExtent string
|
||||
TTMLFontFamily string
|
||||
TTMLFontSize string
|
||||
TTMLFontStyle string
|
||||
TTMLFontWeight string
|
||||
TTMLLineHeight string
|
||||
TTMLOpacity string
|
||||
TTMLOrigin string
|
||||
TTMLOverflow string
|
||||
TTMLPadding string
|
||||
TTMLShowBackground string
|
||||
TTMLTextAlign string
|
||||
TTMLTextDecoration string
|
||||
TTMLTextOutline string
|
||||
TTMLUnicodeBidi string
|
||||
TTMLVisibility string
|
||||
TTMLWrapOption string
|
||||
TTMLWritingMode string
|
||||
TTMLZIndex int
|
||||
WebVTTAlign string
|
||||
WebVTTLine string
|
||||
WebVTTLines int
|
||||
WebVTTPosition string
|
||||
WebVTTRegionAnchor string
|
||||
WebVTTScroll string
|
||||
WebVTTSize string
|
||||
WebVTTVertical string
|
||||
WebVTTViewportAnchor string
|
||||
WebVTTWidth string
|
||||
}
|
||||
|
||||
func (sa *StyleAttributes) propagateSSAAttributes() {}
|
||||
|
||||
func (sa *StyleAttributes) propagateSTLAttributes() {}
|
||||
|
||||
func (sa *StyleAttributes) propagateTeletextAttributes() {
|
||||
if sa.TeletextColor != nil {
|
||||
sa.TTMLColor = "#" + sa.TeletextColor.TTMLString()
|
||||
}
|
||||
}
|
||||
|
||||
func (sa *StyleAttributes) propagateTTMLAttributes() {}
|
||||
|
||||
func (sa *StyleAttributes) propagateWebVTTAttributes() {}
|
||||
|
||||
// Metadata represents metadata
|
||||
// TODO Merge attributes
|
||||
type Metadata struct {
|
||||
Comments []string
|
||||
Framerate int
|
||||
Language string
|
||||
SSACollisions string
|
||||
SSAOriginalEditing string
|
||||
SSAOriginalScript string
|
||||
SSAOriginalTiming string
|
||||
SSAOriginalTranslation string
|
||||
SSAPlayDepth *int
|
||||
SSAPlayResX, SSAPlayResY *int
|
||||
SSAScriptType string
|
||||
SSAScriptUpdatedBy string
|
||||
SSASynchPoint string
|
||||
SSATimer *float64
|
||||
SSAUpdateDetails string
|
||||
SSAWrapStyle string
|
||||
STLPublisher string
|
||||
Title string
|
||||
TTMLCopyright string
|
||||
}
|
||||
|
||||
// Region represents a subtitle's region
|
||||
type Region struct {
|
||||
ID string
|
||||
InlineStyle *StyleAttributes
|
||||
Style *Style
|
||||
}
|
||||
|
||||
// Style represents a subtitle's style
|
||||
type Style struct {
|
||||
ID string
|
||||
InlineStyle *StyleAttributes
|
||||
Style *Style
|
||||
}
|
||||
|
||||
// Line represents a set of formatted line items
|
||||
type Line struct {
|
||||
Items []LineItem
|
||||
VoiceName string
|
||||
}
|
||||
|
||||
// String implements the Stringer interface
|
||||
func (l Line) String() string {
|
||||
var texts []string
|
||||
for _, i := range l.Items {
|
||||
texts = append(texts, i.Text)
|
||||
}
|
||||
return strings.Join(texts, " ")
|
||||
}
|
||||
|
||||
// LineItem represents a formatted line item
|
||||
type LineItem struct {
|
||||
InlineStyle *StyleAttributes
|
||||
Style *Style
|
||||
Text string
|
||||
}
|
||||
|
||||
// Add adds a duration to each time boundary. As in the time package, the duration can be negative.
|
||||
func (s *Subtitles) Add(d time.Duration) {
|
||||
for idx := 0; idx < len(s.Items); idx++ {
|
||||
s.Items[idx].EndAt += d
|
||||
s.Items[idx].StartAt += d
|
||||
if s.Items[idx].EndAt <= 0 && s.Items[idx].StartAt <= 0 {
|
||||
s.Items = append(s.Items[:idx], s.Items[idx+1:]...)
|
||||
idx--
|
||||
} else if s.Items[idx].StartAt <= 0 {
|
||||
s.Items[idx].StartAt = time.Duration(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Duration returns the subtitles duration
|
||||
func (s Subtitles) Duration() time.Duration {
|
||||
if len(s.Items) == 0 {
|
||||
return time.Duration(0)
|
||||
}
|
||||
return s.Items[len(s.Items)-1].EndAt
|
||||
}
|
||||
|
||||
// ForceDuration updates the subtitles duration.
|
||||
// If requested duration is bigger, then we create a dummy item.
|
||||
// If requested duration is smaller, then we remove useless items and we cut the last item or add a dummy item.
|
||||
func (s *Subtitles) ForceDuration(d time.Duration, addDummyItem bool) {
|
||||
// Requested duration is the same as the subtitles' one
|
||||
if s.Duration() == d {
|
||||
return
|
||||
}
|
||||
|
||||
// Requested duration is smaller than the subtitles' one
|
||||
if s.Duration() > d {
|
||||
// Find last item before input duration and update end at
|
||||
var lastIndex = -1
|
||||
for index, i := range s.Items {
|
||||
// Start at is bigger than input duration, we've found the last item
|
||||
if i.StartAt >= d {
|
||||
lastIndex = index
|
||||
break
|
||||
} else if i.EndAt > d {
|
||||
s.Items[index].EndAt = d
|
||||
}
|
||||
}
|
||||
|
||||
// Last index has been found
|
||||
if lastIndex != -1 {
|
||||
s.Items = s.Items[:lastIndex]
|
||||
}
|
||||
}
|
||||
|
||||
// Add dummy item with the minimum duration possible
|
||||
if addDummyItem && s.Duration() < d {
|
||||
s.Items = append(s.Items, &Item{EndAt: d, Lines: []Line{{Items: []LineItem{{Text: "..."}}}}, StartAt: d - time.Millisecond})
|
||||
}
|
||||
}
|
||||
|
||||
// Fragment fragments subtitles with a specific fragment duration
|
||||
func (s *Subtitles) Fragment(f time.Duration) {
|
||||
// Nothing to fragment
|
||||
if len(s.Items) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Here we want to simulate fragments of duration f until there are no subtitles left in that period of time
|
||||
var fragmentStartAt, fragmentEndAt = time.Duration(0), f
|
||||
for fragmentStartAt < s.Items[len(s.Items)-1].EndAt {
|
||||
// We loop through subtitles and process the ones that either contain the fragment start at,
|
||||
// or contain the fragment end at
|
||||
//
|
||||
// It's useless processing subtitles contained between fragment start at and end at
|
||||
// |____________________| <- subtitle
|
||||
// | |
|
||||
// fragment start at fragment end at
|
||||
for i, sub := range s.Items {
|
||||
// Init
|
||||
var newSub = &Item{}
|
||||
*newSub = *sub
|
||||
|
||||
// A switch is more readable here
|
||||
switch {
|
||||
// Subtitle contains fragment start at
|
||||
// |____________________| <- subtitle
|
||||
// | |
|
||||
// fragment start at fragment end at
|
||||
case sub.StartAt < fragmentStartAt && sub.EndAt > fragmentStartAt:
|
||||
sub.StartAt = fragmentStartAt
|
||||
newSub.EndAt = fragmentStartAt
|
||||
// Subtitle contains fragment end at
|
||||
// |____________________| <- subtitle
|
||||
// | |
|
||||
// fragment start at fragment end at
|
||||
case sub.StartAt < fragmentEndAt && sub.EndAt > fragmentEndAt:
|
||||
sub.StartAt = fragmentEndAt
|
||||
newSub.EndAt = fragmentEndAt
|
||||
default:
|
||||
continue
|
||||
}
|
||||
|
||||
// Insert new sub
|
||||
s.Items = append(s.Items[:i], append([]*Item{newSub}, s.Items[i:]...)...)
|
||||
}
|
||||
|
||||
// Update fragments boundaries
|
||||
fragmentStartAt += f
|
||||
fragmentEndAt += f
|
||||
}
|
||||
|
||||
// Order
|
||||
s.Order()
|
||||
}
|
||||
|
||||
// IsEmpty returns whether the subtitles are empty
|
||||
func (s Subtitles) IsEmpty() bool {
|
||||
return len(s.Items) == 0
|
||||
}
|
||||
|
||||
// Merge merges subtitles i into subtitles
|
||||
func (s *Subtitles) Merge(i *Subtitles) {
|
||||
// Append items
|
||||
s.Items = append(s.Items, i.Items...)
|
||||
s.Order()
|
||||
|
||||
// Add regions
|
||||
for _, region := range i.Regions {
|
||||
if _, ok := s.Regions[region.ID]; !ok {
|
||||
s.Regions[region.ID] = region
|
||||
}
|
||||
}
|
||||
|
||||
// Add styles
|
||||
for _, style := range i.Styles {
|
||||
if _, ok := s.Styles[style.ID]; !ok {
|
||||
s.Styles[style.ID] = style
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Optimize optimizes subtitles
|
||||
func (s *Subtitles) Optimize() {
|
||||
// Nothing to optimize
|
||||
if len(s.Items) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Remove unused regions and style
|
||||
s.removeUnusedRegionsAndStyles()
|
||||
}
|
||||
|
||||
// removeUnusedRegionsAndStyles removes unused regions and styles
|
||||
func (s *Subtitles) removeUnusedRegionsAndStyles() {
|
||||
// Loop through items
|
||||
var usedRegions, usedStyles = make(map[string]bool), make(map[string]bool)
|
||||
for _, item := range s.Items {
|
||||
// Add region
|
||||
if item.Region != nil {
|
||||
usedRegions[item.Region.ID] = true
|
||||
}
|
||||
|
||||
// Add style
|
||||
if item.Style != nil {
|
||||
usedStyles[item.Style.ID] = true
|
||||
}
|
||||
|
||||
// Loop through lines
|
||||
for _, line := range item.Lines {
|
||||
// Loop through line items
|
||||
for _, lineItem := range line.Items {
|
||||
// Add style
|
||||
if lineItem.Style != nil {
|
||||
usedStyles[lineItem.Style.ID] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Loop through regions
|
||||
for id, region := range s.Regions {
|
||||
if _, ok := usedRegions[region.ID]; ok {
|
||||
if region.Style != nil {
|
||||
usedStyles[region.Style.ID] = true
|
||||
}
|
||||
} else {
|
||||
delete(s.Regions, id)
|
||||
}
|
||||
}
|
||||
|
||||
// Loop through style
|
||||
for id, style := range s.Styles {
|
||||
if _, ok := usedStyles[style.ID]; !ok {
|
||||
delete(s.Styles, id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Order orders items
|
||||
func (s *Subtitles) Order() {
|
||||
// Nothing to do if there are fewer than 2 items
|
||||
if len(s.Items) <= 1 {
|
||||
return
|
||||
}
|
||||
|
||||
// Order
|
||||
var swapped = true
|
||||
for swapped {
|
||||
swapped = false
|
||||
for index := 1; index < len(s.Items); index++ {
|
||||
if s.Items[index-1].StartAt > s.Items[index].StartAt {
|
||||
var tmp = s.Items[index-1]
|
||||
s.Items[index-1] = s.Items[index]
|
||||
s.Items[index] = tmp
|
||||
swapped = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// RemoveStyling removes the styling from the subtitles
|
||||
func (s *Subtitles) RemoveStyling() {
|
||||
s.Regions = map[string]*Region{}
|
||||
s.Styles = map[string]*Style{}
|
||||
for _, i := range s.Items {
|
||||
i.Region = nil
|
||||
i.Style = nil
|
||||
i.InlineStyle = nil
|
||||
for idxLine, l := range i.Lines {
|
||||
for idxLineItem := range l.Items {
|
||||
i.Lines[idxLine].Items[idxLineItem].InlineStyle = nil
|
||||
i.Lines[idxLine].Items[idxLineItem].Style = nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Unfragment unfragments subtitles
|
||||
func (s *Subtitles) Unfragment() {
|
||||
// Nothing to do if there are fewer than 2 items
|
||||
if len(s.Items) <= 1 {
|
||||
return
|
||||
}
|
||||
|
||||
// Loop through items
|
||||
for i := 0; i < len(s.Items)-1; i++ {
|
||||
for j := i + 1; j < len(s.Items); j++ {
|
||||
// Items are the same
|
||||
if s.Items[i].String() == s.Items[j].String() && s.Items[i].EndAt == s.Items[j].StartAt {
|
||||
s.Items[i].EndAt = s.Items[j].EndAt
|
||||
s.Items = append(s.Items[:j], s.Items[j+1:]...)
|
||||
j--
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Order
|
||||
s.Order()
|
||||
}
|
||||
|
||||
// Write writes subtitles to a file
|
||||
func (s Subtitles) Write(dst string) (err error) {
|
||||
// Create the file
|
||||
var f *os.File
|
||||
if f, err = os.Create(dst); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: creating %s failed", dst)
|
||||
return
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
// Write the content
|
||||
switch filepath.Ext(dst) {
|
||||
case ".srt":
|
||||
err = s.WriteToSRT(f)
|
||||
case ".ssa", ".ass":
|
||||
err = s.WriteToSSA(f)
|
||||
case ".stl":
|
||||
err = s.WriteToSTL(f)
|
||||
case ".ttml":
|
||||
err = s.WriteToTTML(f)
|
||||
case ".vtt":
|
||||
err = s.WriteToWebVTT(f)
|
||||
default:
|
||||
err = ErrInvalidExtension
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// parseDuration parses a duration in "00:00:00.000", "00:00:00,000" or "0:00:00:00" format
|
||||
func parseDuration(i, millisecondSep string, numberOfMillisecondDigits int) (o time.Duration, err error) {
|
||||
// Split milliseconds
|
||||
var parts = strings.Split(i, millisecondSep)
|
||||
var milliseconds int
|
||||
var s string
|
||||
if len(parts) >= 2 {
|
||||
// Invalid number of millisecond digits
|
||||
s = strings.TrimSpace(parts[len(parts)-1])
|
||||
if len(s) > 3 {
|
||||
err = fmt.Errorf("astisub: Invalid number of millisecond digits detected in %s", i)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse milliseconds
|
||||
if milliseconds, err = strconv.Atoi(s); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", s)
|
||||
return
|
||||
}
|
||||
milliseconds *= int(math.Pow10(numberOfMillisecondDigits - len(s)))
|
||||
s = strings.Join(parts[:len(parts)-1], millisecondSep)
|
||||
} else {
|
||||
s = i
|
||||
}
|
||||
|
||||
// Split hours, minutes and seconds
|
||||
parts = strings.Split(strings.TrimSpace(s), ":")
|
||||
var partSeconds, partMinutes, partHours string
|
||||
if len(parts) == 2 {
|
||||
partSeconds = parts[1]
|
||||
partMinutes = parts[0]
|
||||
} else if len(parts) == 3 {
|
||||
partSeconds = parts[2]
|
||||
partMinutes = parts[1]
|
||||
partHours = parts[0]
|
||||
} else {
|
||||
err = fmt.Errorf("astisub: No hours, minutes or seconds detected in %s", i)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse seconds
|
||||
var seconds int
|
||||
s = strings.TrimSpace(partSeconds)
|
||||
if seconds, err = strconv.Atoi(s); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", s)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse minutes
|
||||
var minutes int
|
||||
s = strings.TrimSpace(partMinutes)
|
||||
if minutes, err = strconv.Atoi(s); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", s)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse hours
|
||||
var hours int
|
||||
if len(partHours) > 0 {
|
||||
s = strings.TrimSpace(partHours)
|
||||
if hours, err = strconv.Atoi(s); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi of %s failed", s)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Generate output
|
||||
o = time.Duration(milliseconds)*time.Millisecond + time.Duration(seconds)*time.Second + time.Duration(minutes)*time.Minute + time.Duration(hours)*time.Hour
|
||||
return
|
||||
}
|
||||
|
||||
// formatDuration formats a duration
|
||||
func formatDuration(i time.Duration, millisecondSep string, numberOfMillisecondDigits int) (s string) {
|
||||
// Parse hours
|
||||
var hours = int(i / time.Hour)
|
||||
var n = i % time.Hour
|
||||
if hours < 10 {
|
||||
s += "0"
|
||||
}
|
||||
s += strconv.Itoa(hours) + ":"
|
||||
|
||||
// Parse minutes
|
||||
var minutes = int(n / time.Minute)
|
||||
n = i % time.Minute
|
||||
if minutes < 10 {
|
||||
s += "0"
|
||||
}
|
||||
s += strconv.Itoa(minutes) + ":"
|
||||
|
||||
// Parse seconds
|
||||
var seconds = int(n / time.Second)
|
||||
n = i % time.Second
|
||||
if seconds < 10 {
|
||||
s += "0"
|
||||
}
|
||||
s += strconv.Itoa(seconds) + millisecondSep
|
||||
|
||||
// Parse milliseconds
|
||||
var milliseconds = float64(n/time.Millisecond) / float64(1000)
|
||||
s += fmt.Sprintf("%."+strconv.Itoa(numberOfMillisecondDigits)+"f", milliseconds)[2:]
|
||||
return
|
||||
}
|
||||
|
||||
// appendStringToBytesWithNewLine adds a string to bytes then adds a new line
|
||||
func appendStringToBytesWithNewLine(i []byte, s string) (o []byte) {
|
||||
o = append(i, []byte(s)...)
|
||||
o = append(o, bytesLineSeparator...)
|
||||
return
|
||||
}
|
1004
vendor/github.com/asticode/go-astisub/teletext.go
generated
vendored
Normal file
File diff suppressed because it is too large
673
vendor/github.com/asticode/go-astisub/ttml.go
generated
vendored
Normal file
@@ -0,0 +1,673 @@
|
||||
package astisub
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"sort"
|
||||
|
||||
"github.com/asticode/go-astitools/map"
|
||||
"github.com/asticode/go-astitools/string"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// https://www.w3.org/TR/ttaf1-dfxp/
|
||||
// http://www.skynav.com:8080/ttv/check
|
||||
// https://www.speechpad.com/captions/ttml
|
||||
|
||||
// TTML languages
|
||||
const (
|
||||
ttmlLanguageEnglish = "en"
|
||||
ttmlLanguageFrench = "fr"
|
||||
)
|
||||
|
||||
// TTML language mapping
|
||||
var ttmlLanguageMapping = astimap.NewMap(ttmlLanguageEnglish, LanguageEnglish).
|
||||
Set(ttmlLanguageFrench, LanguageFrench)
|
||||
|
||||
// TTML Clock Time Frames and Offset Time
|
||||
var (
|
||||
ttmlRegexpClockTimeFrames = regexp.MustCompile("\\:[\\d]+$")
|
||||
ttmlRegexpOffsetTime = regexp.MustCompile("^(\\d+)(\\.(\\d+))?(h|m|s|ms|f|t)$")
|
||||
)
|
||||
|
||||
// TTMLIn represents an input TTML that must be unmarshaled
|
||||
// We split it from the output TTML as we can't add strict namespace without breaking retrocompatibility
|
||||
type TTMLIn struct {
|
||||
Framerate int `xml:"frameRate,attr"`
|
||||
Lang string `xml:"lang,attr"`
|
||||
Metadata TTMLInMetadata `xml:"head>metadata"`
|
||||
Regions []TTMLInRegion `xml:"head>layout>region"`
|
||||
Styles []TTMLInStyle `xml:"head>styling>style"`
|
||||
Subtitles []TTMLInSubtitle `xml:"body>div>p"`
|
||||
XMLName xml.Name `xml:"tt"`
|
||||
}
|
||||
|
||||
// metadata returns the Metadata of the TTML
|
||||
func (t TTMLIn) metadata() *Metadata {
|
||||
return &Metadata{
|
||||
Framerate: t.Framerate,
|
||||
Language: ttmlLanguageMapping.B(astistring.ToLength(t.Lang, " ", 2)).(string),
|
||||
Title: t.Metadata.Title,
|
||||
TTMLCopyright: t.Metadata.Copyright,
|
||||
}
|
||||
}
|
||||
|
||||
// TTMLInMetadata represents an input TTML Metadata
|
||||
type TTMLInMetadata struct {
|
||||
Copyright string `xml:"copyright"`
|
||||
Title string `xml:"title"`
|
||||
}
|
||||
|
||||
// TTMLInStyleAttributes represents input TTML style attributes
|
||||
type TTMLInStyleAttributes struct {
|
||||
BackgroundColor string `xml:"backgroundColor,attr,omitempty"`
|
||||
Color string `xml:"color,attr,omitempty"`
|
||||
Direction string `xml:"direction,attr,omitempty"`
|
||||
Display string `xml:"display,attr,omitempty"`
|
||||
DisplayAlign string `xml:"displayAlign,attr,omitempty"`
|
||||
Extent string `xml:"extent,attr,omitempty"`
|
||||
FontFamily string `xml:"fontFamily,attr,omitempty"`
|
||||
FontSize string `xml:"fontSize,attr,omitempty"`
|
||||
FontStyle string `xml:"fontStyle,attr,omitempty"`
|
||||
FontWeight string `xml:"fontWeight,attr,omitempty"`
|
||||
LineHeight string `xml:"lineHeight,attr,omitempty"`
|
||||
Opacity string `xml:"opacity,attr,omitempty"`
|
||||
Origin string `xml:"origin,attr,omitempty"`
|
||||
Overflow string `xml:"overflow,attr,omitempty"`
|
||||
Padding string `xml:"padding,attr,omitempty"`
|
||||
ShowBackground string `xml:"showBackground,attr,omitempty"`
|
||||
TextAlign string `xml:"textAlign,attr,omitempty"`
|
||||
TextDecoration string `xml:"textDecoration,attr,omitempty"`
|
||||
TextOutline string `xml:"textOutline,attr,omitempty"`
|
||||
UnicodeBidi string `xml:"unicodeBidi,attr,omitempty"`
|
||||
Visibility string `xml:"visibility,attr,omitempty"`
|
||||
WrapOption string `xml:"wrapOption,attr,omitempty"`
|
||||
WritingMode string `xml:"writingMode,attr,omitempty"`
|
||||
ZIndex int `xml:"zIndex,attr,omitempty"`
|
||||
}
|
||||
|
||||
// styleAttributes converts TTMLInStyleAttributes into a StyleAttributes
|
||||
func (s TTMLInStyleAttributes) styleAttributes() (o *StyleAttributes) {
|
||||
o = &StyleAttributes{
|
||||
TTMLBackgroundColor: s.BackgroundColor,
|
||||
TTMLColor: s.Color,
|
||||
TTMLDirection: s.Direction,
|
||||
TTMLDisplay: s.Display,
|
||||
TTMLDisplayAlign: s.DisplayAlign,
|
||||
TTMLExtent: s.Extent,
|
||||
TTMLFontFamily: s.FontFamily,
|
||||
TTMLFontSize: s.FontSize,
|
||||
TTMLFontStyle: s.FontStyle,
|
||||
TTMLFontWeight: s.FontWeight,
|
||||
TTMLLineHeight: s.LineHeight,
|
||||
TTMLOpacity: s.Opacity,
|
||||
TTMLOrigin: s.Origin,
|
||||
TTMLOverflow: s.Overflow,
|
||||
TTMLPadding: s.Padding,
|
||||
TTMLShowBackground: s.ShowBackground,
|
||||
TTMLTextAlign: s.TextAlign,
|
||||
TTMLTextDecoration: s.TextDecoration,
|
||||
TTMLTextOutline: s.TextOutline,
|
||||
TTMLUnicodeBidi: s.UnicodeBidi,
|
||||
TTMLVisibility: s.Visibility,
|
||||
TTMLWrapOption: s.WrapOption,
|
||||
TTMLWritingMode: s.WritingMode,
|
||||
TTMLZIndex: s.ZIndex,
|
||||
}
|
||||
o.propagateTTMLAttributes()
|
||||
return
|
||||
}
|
||||
|
||||
// TTMLInHeader represents an input TTML header
|
||||
type TTMLInHeader struct {
|
||||
ID string `xml:"id,attr,omitempty"`
|
||||
Style string `xml:"style,attr,omitempty"`
|
||||
TTMLInStyleAttributes
|
||||
}
|
||||
|
||||
// TTMLInRegion represents an input TTML region
|
||||
type TTMLInRegion struct {
|
||||
TTMLInHeader
|
||||
XMLName xml.Name `xml:"region"`
|
||||
}
|
||||
|
||||
// TTMLInStyle represents an input TTML style
|
||||
type TTMLInStyle struct {
|
||||
TTMLInHeader
|
||||
XMLName xml.Name `xml:"style"`
|
||||
}
|
||||
|
||||
// TTMLInSubtitle represents an input TTML subtitle
|
||||
type TTMLInSubtitle struct {
|
||||
Begin *TTMLInDuration `xml:"begin,attr,omitempty"`
|
||||
End *TTMLInDuration `xml:"end,attr,omitempty"`
|
||||
ID string `xml:"id,attr,omitempty"`
|
||||
Items string `xml:",innerxml"` // We must store inner XML here since there's no tag to describe both any tag and chardata
|
||||
Region string `xml:"region,attr,omitempty"`
|
||||
Style string `xml:"style,attr,omitempty"`
|
||||
TTMLInStyleAttributes
|
||||
}
|
||||
|
||||
// TTMLInItems represents input TTML items
|
||||
type TTMLInItems []TTMLInItem
|
||||
|
||||
// UnmarshalXML implements the XML unmarshaler interface
|
||||
func (i *TTMLInItems) UnmarshalXML(d *xml.Decoder, start xml.StartElement) (err error) {
|
||||
// Get next tokens
|
||||
var t xml.Token
|
||||
for {
|
||||
// Get next token
|
||||
if t, err = d.Token(); err != nil {
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
err = errors.Wrap(err, "astisub: getting next token failed")
|
||||
return
|
||||
}
|
||||
|
||||
// Start element
|
||||
if se, ok := t.(xml.StartElement); ok {
|
||||
var e = TTMLInItem{}
|
||||
if err = d.DecodeElement(&e, &se); err != nil {
|
||||
err = errors.Wrap(err, "astisub: decoding xml.StartElement failed")
|
||||
return
|
||||
}
|
||||
*i = append(*i, e)
|
||||
} else if b, ok := t.(xml.CharData); ok {
|
||||
var str = strings.TrimSpace(string(b))
|
||||
if len(str) > 0 {
|
||||
*i = append(*i, TTMLInItem{Text: str})
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// TTMLInItem represents an input TTML item
|
||||
type TTMLInItem struct {
|
||||
Style string `xml:"style,attr,omitempty"`
|
||||
Text string `xml:",chardata"`
|
||||
TTMLInStyleAttributes
|
||||
XMLName xml.Name
|
||||
}
|
||||
|
||||
// TTMLInDuration represents an input TTML duration
|
||||
type TTMLInDuration struct {
|
||||
d time.Duration
|
||||
frames, framerate int // Framerate is in frame/s
|
||||
}
|
||||
|
||||
// UnmarshalText implements the TextUnmarshaler interface
|
||||
// Possible formats are:
|
||||
// - hh:mm:ss.mmm
|
||||
// - hh:mm:ss:fff (fff being frames)
|
||||
func (d *TTMLInDuration) UnmarshalText(i []byte) (err error) {
|
||||
var text = string(i)
|
||||
if matches := ttmlRegexpOffsetTime.FindStringSubmatch(text); matches != nil {
|
||||
metric := matches[4]
|
||||
value, err := strconv.Atoi(matches[1])
|
||||
|
||||
if err != nil {
|
||||
err = errors.Wrapf(err, "astisub: failed to parse value %s", matches[1])
|
||||
return err
|
||||
}
|
||||
|
||||
d.d = time.Duration(0)
|
||||
|
||||
var (
|
||||
nsBase int64
|
||||
fraction int
|
||||
fractionBase float64
|
||||
)
|
||||
|
||||
if len(matches[3]) > 0 {
|
||||
fraction, err = strconv.Atoi(matches[3])
|
||||
fractionBase = math.Pow10(len(matches[3]))
|
||||
|
||||
if err != nil {
|
||||
err = errors.Wrapf(err, "astisub: failed to parse fraction %s", matches[3])
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
switch metric {
|
||||
case "h":
|
||||
nsBase = time.Hour.Nanoseconds()
|
||||
case "m":
|
||||
nsBase = time.Minute.Nanoseconds()
|
||||
case "s":
|
||||
nsBase = time.Second.Nanoseconds()
|
||||
case "ms":
|
||||
nsBase = time.Millisecond.Nanoseconds()
|
||||
case "f":
|
||||
nsBase = time.Second.Nanoseconds()
|
||||
d.frames = value % d.framerate
|
||||
value = value / d.framerate
|
||||
// TODO: fraction of frames
|
||||
case "t":
|
||||
// TODO: implement ticks
|
||||
return errors.New("astisub: offset time in ticks not implemented")
|
||||
}
|
||||
|
||||
d.d += time.Duration(nsBase * int64(value))
|
||||
|
||||
if fractionBase > 0 {
|
||||
d.d += time.Duration(nsBase * int64(fraction) / int64(fractionBase))
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
}
|
||||
if indexes := ttmlRegexpClockTimeFrames.FindStringIndex(text); indexes != nil {
|
||||
// Parse frames
|
||||
var s = text[indexes[0]+1 : indexes[1]]
|
||||
if d.frames, err = strconv.Atoi(s); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: atoi %s failed", s)
|
||||
return
|
||||
}
|
||||
|
||||
// Update text
|
||||
text = text[:indexes[0]] + ".000"
|
||||
}
|
||||
|
||||
d.d, err = parseDuration(text, ".", 3)
|
||||
return
|
||||
}
|
||||
|
||||
// duration returns the input TTML Duration's time.Duration
|
||||
func (d TTMLInDuration) duration() time.Duration {
|
||||
if d.framerate > 0 {
|
||||
return d.d + time.Duration(float64(d.frames)/float64(d.framerate)*1e9)*time.Nanosecond
|
||||
}
|
||||
return d.d
|
||||
}
|
||||
|
||||
// ReadFromTTML parses a .ttml content
|
||||
func ReadFromTTML(i io.Reader) (o *Subtitles, err error) {
|
||||
// Init
|
||||
o = NewSubtitles()
|
||||
|
||||
// Unmarshal XML
|
||||
var ttml TTMLIn
|
||||
if err = xml.NewDecoder(i).Decode(&ttml); err != nil {
|
||||
err = errors.Wrap(err, "astisub: xml decoding failed")
|
||||
return
|
||||
}
|
||||
|
||||
// Add metadata
|
||||
o.Metadata = ttml.metadata()
|
||||
|
||||
// Loop through styles
|
||||
var parentStyles = make(map[string]*Style)
|
||||
for _, ts := range ttml.Styles {
|
||||
var s = &Style{
|
||||
ID: ts.ID,
|
||||
InlineStyle: ts.TTMLInStyleAttributes.styleAttributes(),
|
||||
}
|
||||
o.Styles[s.ID] = s
|
||||
if len(ts.Style) > 0 {
|
||||
parentStyles[ts.Style] = s
|
||||
}
|
||||
}
|
||||
|
||||
// Take care of parent styles
|
||||
for id, s := range parentStyles {
|
||||
if _, ok := o.Styles[id]; !ok {
|
||||
err = fmt.Errorf("astisub: Style %s requested by style %s doesn't exist", id, s.ID)
|
||||
return
|
||||
}
|
||||
s.Style = o.Styles[id]
|
||||
}
|
||||
|
||||
// Loop through regions
|
||||
for _, tr := range ttml.Regions {
|
||||
var r = &Region{
|
||||
ID: tr.ID,
|
||||
InlineStyle: tr.TTMLInStyleAttributes.styleAttributes(),
|
||||
}
|
||||
if len(tr.Style) > 0 {
|
||||
if _, ok := o.Styles[tr.Style]; !ok {
|
||||
err = fmt.Errorf("astisub: Style %s requested by region %s doesn't exist", tr.Style, r.ID)
|
||||
return
|
||||
}
|
||||
r.Style = o.Styles[tr.Style]
|
||||
}
|
||||
o.Regions[r.ID] = r
|
||||
}
|
||||
|
||||
// Loop through subtitles
|
||||
for _, ts := range ttml.Subtitles {
|
||||
// Init item
|
||||
ts.Begin.framerate = ttml.Framerate
|
||||
ts.End.framerate = ttml.Framerate
|
||||
var s = &Item{
|
||||
EndAt: ts.End.duration(),
|
||||
InlineStyle: ts.TTMLInStyleAttributes.styleAttributes(),
|
||||
StartAt: ts.Begin.duration(),
|
||||
}
|
||||
|
||||
// Add region
|
||||
if len(ts.Region) > 0 {
|
||||
if _, ok := o.Regions[ts.Region]; !ok {
|
||||
err = fmt.Errorf("astisub: Region %s requested by subtitle between %s and %s doesn't exist", ts.Region, s.StartAt, s.EndAt)
|
||||
return
|
||||
}
|
||||
s.Region = o.Regions[ts.Region]
|
||||
}
|
||||
|
||||
// Add style
|
||||
if len(ts.Style) > 0 {
|
||||
if _, ok := o.Styles[ts.Style]; !ok {
|
||||
err = fmt.Errorf("astisub: Style %s requested by subtitle between %s and %s doesn't exist", ts.Style, s.StartAt, s.EndAt)
|
||||
return
|
||||
}
|
||||
s.Style = o.Styles[ts.Style]
|
||||
}
|
||||
|
||||
// Unmarshal items
|
||||
var items = TTMLInItems{}
|
||||
if err = xml.Unmarshal([]byte("<span>"+ts.Items+"</span>"), &items); err != nil {
|
||||
err = errors.Wrap(err, "astisub: unmarshaling items failed")
|
||||
return
|
||||
}
|
||||
|
||||
// Loop through texts
|
||||
var l = &Line{}
|
||||
for _, tt := range items {
|
||||
// New line specified with the "br" tag
|
||||
if strings.ToLower(tt.XMLName.Local) == "br" {
|
||||
s.Lines = append(s.Lines, *l)
|
||||
l = &Line{}
|
||||
continue
|
||||
}
|
||||
|
||||
// New line decoded as a line break. This can happen if there's a "br" tag within the text since
|
||||
// since the go xml unmarshaler will unmarshal a "br" tag as a line break if the field has the
|
||||
// chardata xml tag.
|
||||
for idx, li := range strings.Split(tt.Text, "\n") {
|
||||
// New line
|
||||
if idx > 0 {
|
||||
s.Lines = append(s.Lines, *l)
|
||||
l = &Line{}
|
||||
}
|
||||
|
||||
// Init line item
|
||||
var t = LineItem{
|
||||
InlineStyle: tt.TTMLInStyleAttributes.styleAttributes(),
|
||||
Text: strings.TrimSpace(li),
|
||||
}
|
||||
|
||||
// Add style
|
||||
if len(tt.Style) > 0 {
|
||||
if _, ok := o.Styles[tt.Style]; !ok {
|
||||
err = fmt.Errorf("astisub: Style %s requested by item with text %s doesn't exist", tt.Style, tt.Text)
|
||||
return
|
||||
}
|
||||
t.Style = o.Styles[tt.Style]
|
||||
}
|
||||
|
||||
// Append items
|
||||
l.Items = append(l.Items, t)
|
||||
}
|
||||
|
||||
}
|
||||
s.Lines = append(s.Lines, *l)
|
||||
|
||||
// Append subtitle
|
||||
o.Items = append(o.Items, s)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// TTMLOut represents an output TTML that must be marshaled
|
||||
// We split it from the input TTML as this time we'll add strict namespaces
|
||||
type TTMLOut struct {
|
||||
Lang string `xml:"xml:lang,attr,omitempty"`
|
||||
Metadata *TTMLOutMetadata `xml:"head>metadata,omitempty"`
|
||||
Styles []TTMLOutStyle `xml:"head>styling>style,omitempty"` //!\\ Order is important! Keep Styling above Layout
|
||||
Regions []TTMLOutRegion `xml:"head>layout>region,omitempty"`
|
||||
Subtitles []TTMLOutSubtitle `xml:"body>div>p,omitempty"`
|
||||
XMLName xml.Name `xml:"http://www.w3.org/ns/ttml tt"`
|
||||
XMLNamespaceTTM string `xml:"xmlns:ttm,attr"`
|
||||
XMLNamespaceTTS string `xml:"xmlns:tts,attr"`
|
||||
}
|
||||
|
||||
// TTMLOutMetadata represents an output TTML Metadata
|
||||
type TTMLOutMetadata struct {
|
||||
Copyright string `xml:"ttm:copyright,omitempty"`
|
||||
Title string `xml:"ttm:title,omitempty"`
|
||||
}
|
||||
|
||||
// TTMLOutStyleAttributes represents output TTML style attributes
|
||||
type TTMLOutStyleAttributes struct {
|
||||
BackgroundColor string `xml:"tts:backgroundColor,attr,omitempty"`
|
||||
Color string `xml:"tts:color,attr,omitempty"`
|
||||
Direction string `xml:"tts:direction,attr,omitempty"`
|
||||
Display string `xml:"tts:display,attr,omitempty"`
|
||||
DisplayAlign string `xml:"tts:displayAlign,attr,omitempty"`
|
||||
Extent string `xml:"tts:extent,attr,omitempty"`
|
||||
FontFamily string `xml:"tts:fontFamily,attr,omitempty"`
|
||||
FontSize string `xml:"tts:fontSize,attr,omitempty"`
|
||||
FontStyle string `xml:"tts:fontStyle,attr,omitempty"`
|
||||
FontWeight string `xml:"tts:fontWeight,attr,omitempty"`
|
||||
LineHeight string `xml:"tts:lineHeight,attr,omitempty"`
|
||||
Opacity string `xml:"tts:opacity,attr,omitempty"`
|
||||
Origin string `xml:"tts:origin,attr,omitempty"`
|
||||
Overflow string `xml:"tts:overflow,attr,omitempty"`
|
||||
Padding string `xml:"tts:padding,attr,omitempty"`
|
||||
ShowBackground string `xml:"tts:showBackground,attr,omitempty"`
|
||||
TextAlign string `xml:"tts:textAlign,attr,omitempty"`
|
||||
TextDecoration string `xml:"tts:textDecoration,attr,omitempty"`
|
||||
TextOutline string `xml:"tts:textOutline,attr,omitempty"`
|
||||
UnicodeBidi string `xml:"tts:unicodeBidi,attr,omitempty"`
|
||||
Visibility string `xml:"tts:visibility,attr,omitempty"`
|
||||
WrapOption string `xml:"tts:wrapOption,attr,omitempty"`
|
||||
WritingMode string `xml:"tts:writingMode,attr,omitempty"`
|
||||
ZIndex int `xml:"tts:zIndex,attr,omitempty"`
|
||||
}
|
||||
|
||||
// ttmlOutStyleAttributesFromStyleAttributes converts StyleAttributes into a TTMLOutStyleAttributes
|
||||
func ttmlOutStyleAttributesFromStyleAttributes(s *StyleAttributes) TTMLOutStyleAttributes {
|
||||
if s == nil {
|
||||
return TTMLOutStyleAttributes{}
|
||||
}
|
||||
return TTMLOutStyleAttributes{
|
||||
BackgroundColor: s.TTMLBackgroundColor,
|
||||
Color: s.TTMLColor,
|
||||
Direction: s.TTMLDirection,
|
||||
Display: s.TTMLDisplay,
|
||||
DisplayAlign: s.TTMLDisplayAlign,
|
||||
Extent: s.TTMLExtent,
|
||||
FontFamily: s.TTMLFontFamily,
|
||||
FontSize: s.TTMLFontSize,
|
||||
FontStyle: s.TTMLFontStyle,
|
||||
FontWeight: s.TTMLFontWeight,
|
||||
LineHeight: s.TTMLLineHeight,
|
||||
Opacity: s.TTMLOpacity,
|
||||
Origin: s.TTMLOrigin,
|
||||
Overflow: s.TTMLOverflow,
|
||||
Padding: s.TTMLPadding,
|
||||
ShowBackground: s.TTMLShowBackground,
|
||||
TextAlign: s.TTMLTextAlign,
|
||||
TextDecoration: s.TTMLTextDecoration,
|
||||
TextOutline: s.TTMLTextOutline,
|
||||
UnicodeBidi: s.TTMLUnicodeBidi,
|
||||
Visibility: s.TTMLVisibility,
|
||||
WrapOption: s.TTMLWrapOption,
|
||||
WritingMode: s.TTMLWritingMode,
|
||||
ZIndex: s.TTMLZIndex,
|
||||
}
|
||||
}
|
||||
|
||||
// TTMLOutHeader represents an output TTML header
|
||||
type TTMLOutHeader struct {
|
||||
ID string `xml:"xml:id,attr,omitempty"`
|
||||
Style string `xml:"style,attr,omitempty"`
|
||||
TTMLOutStyleAttributes
|
||||
}
|
||||
|
||||
// TTMLOutRegion represents an output TTML region
|
||||
type TTMLOutRegion struct {
|
||||
TTMLOutHeader
|
||||
XMLName xml.Name `xml:"region"`
|
||||
}
|
||||
|
||||
// TTMLOutStyle represents an output TTML style
|
||||
type TTMLOutStyle struct {
|
||||
TTMLOutHeader
|
||||
XMLName xml.Name `xml:"style"`
|
||||
}
|
||||
|
||||
// TTMLOutSubtitle represents an output TTML subtitle
|
||||
type TTMLOutSubtitle struct {
|
||||
Begin TTMLOutDuration `xml:"begin,attr"`
|
||||
End TTMLOutDuration `xml:"end,attr"`
|
||||
ID string `xml:"id,attr,omitempty"`
|
||||
Items []TTMLOutItem
|
||||
Region string `xml:"region,attr,omitempty"`
|
||||
Style string `xml:"style,attr,omitempty"`
|
||||
TTMLOutStyleAttributes
|
||||
}
|
||||
|
||||
// TTMLOutItem represents an output TTML Item
|
||||
type TTMLOutItem struct {
|
||||
Style string `xml:"style,attr,omitempty"`
|
||||
Text string `xml:",chardata"`
|
||||
TTMLOutStyleAttributes
|
||||
XMLName xml.Name
|
||||
}
|
||||
|
||||
// TTMLOutDuration represents an output TTML duration
|
||||
type TTMLOutDuration time.Duration
|
||||
|
||||
// MarshalText implements the TextMarshaler interface
|
||||
func (t TTMLOutDuration) MarshalText() ([]byte, error) {
|
||||
return []byte(formatDuration(time.Duration(t), ".", 3)), nil
|
||||
}
|
||||
|
||||
// WriteToTTML writes subtitles in .ttml format
|
||||
func (s Subtitles) WriteToTTML(o io.Writer) (err error) {
|
||||
// Do not write anything if no subtitles
|
||||
if len(s.Items) == 0 {
|
||||
return ErrNoSubtitlesToWrite
|
||||
}
|
||||
|
||||
// Init TTML
|
||||
var ttml = TTMLOut{
|
||||
XMLNamespaceTTM: "http://www.w3.org/ns/ttml#metadata",
|
||||
XMLNamespaceTTS: "http://www.w3.org/ns/ttml#styling",
|
||||
}
|
||||
|
||||
// Add metadata
|
||||
if s.Metadata != nil {
|
||||
ttml.Lang = ttmlLanguageMapping.A(s.Metadata.Language).(string)
|
||||
if len(s.Metadata.TTMLCopyright) > 0 || len(s.Metadata.Title) > 0 {
|
||||
ttml.Metadata = &TTMLOutMetadata{
|
||||
Copyright: s.Metadata.TTMLCopyright,
|
||||
Title: s.Metadata.Title,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add regions
|
||||
var k []string
|
||||
for _, region := range s.Regions {
|
||||
k = append(k, region.ID)
|
||||
}
|
||||
sort.Strings(k)
|
||||
for _, id := range k {
|
||||
var ttmlRegion = TTMLOutRegion{TTMLOutHeader: TTMLOutHeader{
|
||||
ID: s.Regions[id].ID,
|
||||
TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(s.Regions[id].InlineStyle),
|
||||
}}
|
||||
if s.Regions[id].Style != nil {
|
||||
ttmlRegion.Style = s.Regions[id].Style.ID
|
||||
}
|
||||
ttml.Regions = append(ttml.Regions, ttmlRegion)
|
||||
}
|
||||
|
||||
// Add styles
|
||||
k = []string{}
|
||||
for _, style := range s.Styles {
|
||||
k = append(k, style.ID)
|
||||
}
|
||||
sort.Strings(k)
|
||||
for _, id := range k {
|
||||
var ttmlStyle = TTMLOutStyle{TTMLOutHeader: TTMLOutHeader{
|
||||
ID: s.Styles[id].ID,
|
||||
TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(s.Styles[id].InlineStyle),
|
||||
}}
|
||||
if s.Styles[id].Style != nil {
|
||||
ttmlStyle.Style = s.Styles[id].Style.ID
|
||||
}
|
||||
ttml.Styles = append(ttml.Styles, ttmlStyle)
|
||||
}
|
||||
|
||||
// Add items
|
||||
for _, item := range s.Items {
|
||||
// Init subtitle
|
||||
var ttmlSubtitle = TTMLOutSubtitle{
|
||||
Begin: TTMLOutDuration(item.StartAt),
|
||||
End: TTMLOutDuration(item.EndAt),
|
||||
TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(item.InlineStyle),
|
||||
}
|
||||
|
||||
// Add region
|
||||
if item.Region != nil {
|
||||
ttmlSubtitle.Region = item.Region.ID
|
||||
}
|
||||
|
||||
// Add style
|
||||
if item.Style != nil {
|
||||
ttmlSubtitle.Style = item.Style.ID
|
||||
}
|
||||
|
||||
// Add lines
|
||||
for _, line := range item.Lines {
|
||||
// Loop through line items
|
||||
for _, lineItem := range line.Items {
|
||||
// Init ttml item
|
||||
var ttmlItem = TTMLOutItem{
|
||||
Text: lineItem.Text,
|
||||
TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(lineItem.InlineStyle),
|
||||
XMLName: xml.Name{Local: "span"},
|
||||
}
|
||||
|
||||
// Add style
|
||||
if lineItem.Style != nil {
|
||||
ttmlItem.Style = lineItem.Style.ID
|
||||
}
|
||||
|
||||
// Add ttml item
|
||||
ttmlSubtitle.Items = append(ttmlSubtitle.Items, ttmlItem)
|
||||
}
|
||||
|
||||
// Add line break
|
||||
ttmlSubtitle.Items = append(ttmlSubtitle.Items, TTMLOutItem{XMLName: xml.Name{Local: "br"}})
|
||||
}
|
||||
|
||||
// Remove last line break
|
||||
if len(ttmlSubtitle.Items) > 0 {
|
||||
ttmlSubtitle.Items = ttmlSubtitle.Items[:len(ttmlSubtitle.Items)-1]
|
||||
}
|
||||
|
||||
// Append subtitle
|
||||
ttml.Subtitles = append(ttml.Subtitles, ttmlSubtitle)
|
||||
}
|
||||
|
||||
// Marshal XML
|
||||
var e = xml.NewEncoder(o)
|
||||
e.Indent("", " ")
|
||||
if err = e.Encode(ttml); err != nil {
|
||||
err = errors.Wrap(err, "astisub: xml encoding failed")
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
315
vendor/github.com/asticode/go-astisub/webvtt.go
generated
vendored
Normal file
@@ -0,0 +1,315 @@
|
||||
package astisub
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"sort"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// https://www.w3.org/TR/webvtt1/
|
||||
|
||||
// Constants
|
||||
const (
|
||||
webvttBlockNameComment = "comment"
|
||||
webvttBlockNameRegion = "region"
|
||||
webvttBlockNameStyle = "style"
|
||||
webvttBlockNameText = "text"
|
||||
webvttTimeBoundariesSeparator = " --> "
|
||||
)
|
||||
|
||||
// Vars
|
||||
var (
|
||||
bytesWebVTTTimeBoundariesSeparator = []byte(webvttTimeBoundariesSeparator)
|
||||
)
|
||||
|
||||
// parseDurationWebVTT parses a .vtt duration
|
||||
func parseDurationWebVTT(i string) (time.Duration, error) {
|
||||
return parseDuration(i, ".", 3)
|
||||
}
|
||||
|
||||
// ReadFromWebVTT parses a .vtt content
|
||||
// TODO Tags (u, i, b)
|
||||
// TODO Class
|
||||
// TODO Speaker name
|
||||
func ReadFromWebVTT(i io.Reader) (o *Subtitles, err error) {
|
||||
// Init
|
||||
o = NewSubtitles()
|
||||
var scanner = bufio.NewScanner(i)
|
||||
var line string
|
||||
|
||||
// Skip the header
|
||||
for scanner.Scan() {
|
||||
line = scanner.Text()
|
||||
line = strings.TrimPrefix(line, string(BytesBOM))
|
||||
if len(line) > 0 && line == "WEBVTT" {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Scan
|
||||
var item = &Item{}
|
||||
var blockName string
|
||||
var comments []string
|
||||
for scanner.Scan() {
|
||||
// Fetch line
|
||||
line = scanner.Text()
|
||||
// Check prefixes
|
||||
switch {
|
||||
// Comment
|
||||
case strings.HasPrefix(line, "NOTE "):
|
||||
blockName = webvttBlockNameComment
|
||||
comments = append(comments, strings.TrimPrefix(line, "NOTE "))
|
||||
// Empty line
|
||||
case len(line) == 0:
|
||||
// Reset block name
|
||||
blockName = ""
|
||||
// Region
|
||||
case strings.HasPrefix(line, "Region: "):
|
||||
// Add region styles
|
||||
var r = &Region{InlineStyle: &StyleAttributes{}}
|
||||
for _, part := range strings.Split(strings.TrimPrefix(line, "Region: "), " ") {
|
||||
// Split on "="
|
||||
var split = strings.Split(part, "=")
|
||||
if len(split) <= 1 {
|
||||
err = fmt.Errorf("astisub: Invalid region style %s", part)
|
||||
return
|
||||
}
|
||||
|
||||
// Switch on key
|
||||
switch split[0] {
|
||||
case "id":
|
||||
r.ID = split[1]
|
||||
case "lines":
|
||||
if r.InlineStyle.WebVTTLines, err = strconv.Atoi(split[1]); err != nil {
|
||||
err = errors.Wrapf(err, "atoi of %s failed", split[1])
|
||||
return
|
||||
}
|
||||
case "regionanchor":
|
||||
r.InlineStyle.WebVTTRegionAnchor = split[1]
|
||||
case "scroll":
|
||||
r.InlineStyle.WebVTTScroll = split[1]
|
||||
case "viewportanchor":
|
||||
r.InlineStyle.WebVTTViewportAnchor = split[1]
|
||||
case "width":
|
||||
r.InlineStyle.WebVTTWidth = split[1]
|
||||
}
|
||||
}
|
||||
r.InlineStyle.propagateWebVTTAttributes()
|
||||
|
||||
// Add region
|
||||
o.Regions[r.ID] = r
|
||||
// Style
|
||||
case strings.HasPrefix(line, "STYLE "):
|
||||
blockName = webvttBlockNameStyle
|
||||
// Time boundaries
|
||||
case strings.Contains(line, webvttTimeBoundariesSeparator):
|
||||
// Set block name
|
||||
blockName = webvttBlockNameText
|
||||
|
||||
// Init new item
|
||||
item = &Item{
|
||||
Comments: comments,
|
||||
InlineStyle: &StyleAttributes{},
|
||||
}
|
||||
|
||||
// Split line on time boundaries
|
||||
var parts = strings.Split(line, webvttTimeBoundariesSeparator)
|
||||
// Split line on space to catch inline styles as well
|
||||
var partsRight = strings.Split(parts[1], " ")
|
||||
|
||||
// Parse time boundaries
|
||||
if item.StartAt, err = parseDurationWebVTT(parts[0]); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: parsing webvtt duration %s failed", parts[0])
|
||||
return
|
||||
}
|
||||
if item.EndAt, err = parseDurationWebVTT(partsRight[0]); err != nil {
|
||||
err = errors.Wrapf(err, "astisub: parsing webvtt duration %s failed", partsRight[0])
|
||||
return
|
||||
}
|
||||
|
||||
// Parse style
|
||||
if len(partsRight) > 1 {
|
||||
// Add styles
|
||||
for index := 1; index < len(partsRight); index++ {
|
||||
// Split line on ":"
|
||||
var split = strings.Split(partsRight[index], ":")
|
||||
if len(split) <= 1 {
|
||||
err = fmt.Errorf("astisub: Invalid inline style %s", partsRight[index])
|
||||
return
|
||||
}
|
||||
|
||||
// Switch on key
|
||||
switch split[0] {
|
||||
case "align":
|
||||
item.InlineStyle.WebVTTAlign = split[1]
|
||||
case "line":
|
||||
item.InlineStyle.WebVTTLine = split[1]
|
||||
case "position":
|
||||
item.InlineStyle.WebVTTPosition = split[1]
|
||||
case "region":
|
||||
if _, ok := o.Regions[split[1]]; !ok {
|
||||
err = fmt.Errorf("astisub: Unknown region %s", split[1])
|
||||
return
|
||||
}
|
||||
item.Region = o.Regions[split[1]]
|
||||
case "size":
|
||||
item.InlineStyle.WebVTTSize = split[1]
|
||||
case "vertical":
|
||||
item.InlineStyle.WebVTTVertical = split[1]
|
||||
}
|
||||
}
|
||||
}
|
||||
item.InlineStyle.propagateWebVTTAttributes()
|
||||
|
||||
// Reset comments
|
||||
comments = []string{}
|
||||
|
||||
// Append item
|
||||
o.Items = append(o.Items, item)
|
||||
// Text
|
||||
default:
|
||||
// Switch on block name
|
||||
switch blockName {
|
||||
case webvttBlockNameComment:
|
||||
comments = append(comments, line)
|
||||
case webvttBlockNameStyle:
|
||||
// TODO Do something with the style
|
||||
case webvttBlockNameText:
|
||||
item.Lines = append(item.Lines, Line{Items: []LineItem{{Text: line}}})
|
||||
default:
|
||||
// This is the ID
|
||||
// TODO Do something with the id
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// formatDurationWebVTT formats a .vtt duration
|
||||
func formatDurationWebVTT(i time.Duration) string {
|
||||
return formatDuration(i, ".", 3)
|
||||
}
|
||||
|
||||
// WriteToWebVTT writes subtitles in .vtt format
|
||||
func (s Subtitles) WriteToWebVTT(o io.Writer) (err error) {
|
||||
// Do not write anything if no subtitles
|
||||
if len(s.Items) == 0 {
|
||||
err = ErrNoSubtitlesToWrite
|
||||
return
|
||||
}
|
||||
|
||||
// Add header
|
||||
var c []byte
|
||||
c = append(c, []byte("WEBVTT\n\n")...)
|
||||
|
||||
// Add regions
|
||||
var k []string
|
||||
for _, region := range s.Regions {
|
||||
k = append(k, region.ID)
|
||||
}
|
||||
sort.Strings(k)
|
||||
for _, id := range k {
|
||||
c = append(c, []byte("Region: id="+s.Regions[id].ID)...)
|
||||
if s.Regions[id].InlineStyle.WebVTTLines != 0 {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("lines="+strconv.Itoa(s.Regions[id].InlineStyle.WebVTTLines))...)
|
||||
}
|
||||
if s.Regions[id].InlineStyle.WebVTTRegionAnchor != "" {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("regionanchor="+s.Regions[id].InlineStyle.WebVTTRegionAnchor)...)
|
||||
}
|
||||
if s.Regions[id].InlineStyle.WebVTTScroll != "" {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("scroll="+s.Regions[id].InlineStyle.WebVTTScroll)...)
|
||||
}
|
||||
if s.Regions[id].InlineStyle.WebVTTViewportAnchor != "" {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("viewportanchor="+s.Regions[id].InlineStyle.WebVTTViewportAnchor)...)
|
||||
}
|
||||
if s.Regions[id].InlineStyle.WebVTTWidth != "" {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("width="+s.Regions[id].InlineStyle.WebVTTWidth)...)
|
||||
}
|
||||
c = append(c, bytesLineSeparator...)
|
||||
}
|
||||
if len(s.Regions) > 0 {
|
||||
c = append(c, bytesLineSeparator...)
|
||||
}
|
||||
|
||||
// Loop through subtitles
|
||||
for index, item := range s.Items {
|
||||
// Add comments
|
||||
if len(item.Comments) > 0 {
|
||||
c = append(c, []byte("NOTE ")...)
|
||||
for _, comment := range item.Comments {
|
||||
c = append(c, []byte(comment)...)
|
||||
c = append(c, bytesLineSeparator...)
|
||||
}
|
||||
c = append(c, bytesLineSeparator...)
|
||||
}
|
||||
|
||||
// Add time boundaries
|
||||
c = append(c, []byte(strconv.Itoa(index+1))...)
|
||||
c = append(c, bytesLineSeparator...)
|
||||
c = append(c, []byte(formatDurationWebVTT(item.StartAt))...)
|
||||
c = append(c, bytesWebVTTTimeBoundariesSeparator...)
|
||||
c = append(c, []byte(formatDurationWebVTT(item.EndAt))...)
|
||||
|
||||
// Add styles
|
||||
if item.InlineStyle != nil {
|
||||
if item.InlineStyle.WebVTTAlign != "" {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("align:"+item.InlineStyle.WebVTTAlign)...)
|
||||
}
|
||||
if item.InlineStyle.WebVTTLine != "" {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("line:"+item.InlineStyle.WebVTTLine)...)
|
||||
}
|
||||
if item.InlineStyle.WebVTTPosition != "" {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("position:"+item.InlineStyle.WebVTTPosition)...)
|
||||
}
|
||||
if item.Region != nil {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("region:"+item.Region.ID)...)
|
||||
}
|
||||
if item.InlineStyle.WebVTTSize != "" {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("size:"+item.InlineStyle.WebVTTSize)...)
|
||||
}
|
||||
if item.InlineStyle.WebVTTVertical != "" {
|
||||
c = append(c, bytesSpace...)
|
||||
c = append(c, []byte("vertical:"+item.InlineStyle.WebVTTVertical)...)
|
||||
}
|
||||
}
|
||||
|
||||
// Add new line
|
||||
c = append(c, bytesLineSeparator...)
|
||||
|
||||
// Loop through lines
|
||||
for _, l := range item.Lines {
|
||||
c = append(c, []byte(l.String())...)
|
||||
c = append(c, bytesLineSeparator...)
|
||||
}
|
||||
|
||||
// Add new line
|
||||
c = append(c, bytesLineSeparator...)
|
||||
}
|
||||
|
||||
// Remove last new line
|
||||
c = c[:len(c)-1]
|
||||
|
||||
// Write
|
||||
if _, err = o.Write(c); err != nil {
|
||||
err = errors.Wrap(err, "astisub: writing failed")
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
21
vendor/github.com/asticode/go-astitools/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2018 Quentin Renard
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
35
vendor/github.com/asticode/go-astitools/bits/hamming.go
generated
vendored
Normal file
@@ -0,0 +1,35 @@
package astibits

var hamming84tab = [256]uint8{
	0x01, 0xff, 0xff, 0x08, 0xff, 0x0c, 0x04, 0xff, 0xff, 0x08, 0x08, 0x08, 0x06, 0xff, 0xff, 0x08,
	0xff, 0x0a, 0x02, 0xff, 0x06, 0xff, 0xff, 0x0f, 0x06, 0xff, 0xff, 0x08, 0x06, 0x06, 0x06, 0xff,
	0xff, 0x0a, 0x04, 0xff, 0x04, 0xff, 0x04, 0x04, 0x00, 0xff, 0xff, 0x08, 0xff, 0x0d, 0x04, 0xff,
	0x0a, 0x0a, 0xff, 0x0a, 0xff, 0x0a, 0x04, 0xff, 0xff, 0x0a, 0x03, 0xff, 0x06, 0xff, 0xff, 0x0e,
	0x01, 0x01, 0x01, 0xff, 0x01, 0xff, 0xff, 0x0f, 0x01, 0xff, 0xff, 0x08, 0xff, 0x0d, 0x05, 0xff,
	0x01, 0xff, 0xff, 0x0f, 0xff, 0x0f, 0x0f, 0x0f, 0xff, 0x0b, 0x03, 0xff, 0x06, 0xff, 0xff, 0x0f,
	0x01, 0xff, 0xff, 0x09, 0xff, 0x0d, 0x04, 0xff, 0xff, 0x0d, 0x03, 0xff, 0x0d, 0x0d, 0xff, 0x0d,
	0xff, 0x0a, 0x03, 0xff, 0x07, 0xff, 0xff, 0x0f, 0x03, 0xff, 0x03, 0x03, 0xff, 0x0d, 0x03, 0xff,
	0xff, 0x0c, 0x02, 0xff, 0x0c, 0x0c, 0xff, 0x0c, 0x00, 0xff, 0xff, 0x08, 0xff, 0x0c, 0x05, 0xff,
	0x02, 0xff, 0x02, 0x02, 0xff, 0x0c, 0x02, 0xff, 0xff, 0x0b, 0x02, 0xff, 0x06, 0xff, 0xff, 0x0e,
	0x00, 0xff, 0xff, 0x09, 0xff, 0x0c, 0x04, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0xff, 0xff, 0x0e,
	0xff, 0x0a, 0x02, 0xff, 0x07, 0xff, 0xff, 0x0e, 0x00, 0xff, 0xff, 0x0e, 0xff, 0x0e, 0x0e, 0x0e,
	0x01, 0xff, 0xff, 0x09, 0xff, 0x0c, 0x05, 0xff, 0xff, 0x0b, 0x05, 0xff, 0x05, 0xff, 0x05, 0x05,
	0xff, 0x0b, 0x02, 0xff, 0x07, 0xff, 0xff, 0x0f, 0x0b, 0x0b, 0xff, 0x0b, 0xff, 0x0b, 0x05, 0xff,
	0xff, 0x09, 0x09, 0x09, 0x07, 0xff, 0xff, 0x09, 0x00, 0xff, 0xff, 0x09, 0xff, 0x0d, 0x05, 0xff,
	0x07, 0xff, 0xff, 0x09, 0x07, 0x07, 0x07, 0xff, 0xff, 0x0b, 0x03, 0xff, 0x07, 0xff, 0xff, 0x0e,
}

func Hamming84Decode(i uint8) (o uint8, ok bool) {
	o = hamming84tab[i]
	if o == 0xff {
		return
	}
	ok = true
	return
}

func Hamming2418Decode(i uint32) (o uint32, ok bool) {
	o = i
	ok = true
	return
}
26
vendor/github.com/asticode/go-astitools/bits/parity.go
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
package astibits
|
||||
|
||||
var parityTab = [256]uint8{
|
||||
0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
|
||||
0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
|
||||
0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
|
||||
0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
|
||||
0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
|
||||
0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
|
||||
0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
|
||||
0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
|
||||
0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
|
||||
0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
|
||||
0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
|
||||
0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
|
||||
0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
|
||||
0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
|
||||
0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
|
||||
0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
|
||||
}
|
||||
|
||||
func Parity(i uint8) (o uint8, ok bool) {
|
||||
ok = parityTab[i] == 1
|
||||
o = i & 0x7f
|
||||
return
|
||||
}
|
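parityTab is a per-byte parity lookup, so Parity reports whether a byte has odd parity (as required for teletext character bytes) and strips the parity bit by masking with 0x7f. A short usage sketch (illustrative, not part of the vendored file):

```go
package main

import (
	"fmt"

	astibits "github.com/asticode/go-astitools/bits"
)

func main() {
	// 'A' (0x41) has an even number of set bits and fails the odd-parity check;
	// 'A'|0x80 (0xc1) has an odd number of set bits and passes, yielding 'A' back.
	for _, b := range []uint8{'A', 'A' | 0x80} {
		if v, ok := astibits.Parity(b); ok {
			fmt.Printf("%#x: odd parity, 7-bit value %q\n", b, v)
		} else {
			fmt.Printf("%#x: parity check failed\n", b)
		}
	}
}
```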
17
vendor/github.com/asticode/go-astitools/byte/length.go
generated
vendored
Normal file
@@ -0,0 +1,17 @@
package astibyte

// ToLength forces the length of a []byte
func ToLength(i []byte, rpl byte, length int) []byte {
    if len(i) == length {
        return i
    } else if len(i) > length {
        return i[:length]
    } else {
        var o = make([]byte, length)
        o = i
        for idx := 0; idx < length-len(i); idx++ {
            o = append(o, rpl)
        }
        return o
    }
}
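ToLength either truncates the slice or pads it with rpl bytes until it reaches the requested length. For example (illustrative sketch, not part of the vendored file):

```go
package main

import (
	"fmt"

	astibyte "github.com/asticode/go-astitools/byte"
)

func main() {
	fmt.Printf("%q\n", astibyte.ToLength([]byte("abc"), '.', 5))    // "abc.." (padded)
	fmt.Printf("%q\n", astibyte.ToLength([]byte("abcdef"), '.', 5)) // "abcde" (truncated)
}
```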
29
vendor/github.com/asticode/go-astitools/byte/pad.go
generated
vendored
Normal file
@@ -0,0 +1,29 @@
package astibyte

// PadLeft adds n rpl to the left of i so that len is length
func PadLeft(i []byte, rpl byte, length int) []byte {
    if len(i) >= length {
        return i
    } else {
        var o = make([]byte, length)
        o = i
        for idx := 0; idx < length-len(i); idx++ {
            o = append([]byte{rpl}, o...)
        }
        return o
    }
}

// PadRight adds n rpl to the right of i so that len is length
func PadRight(i []byte, rpl byte, length int) []byte {
    if len(i) >= length {
        return i
    } else {
        var o = make([]byte, length)
        o = i
        for idx := 0; idx < length-len(i); idx++ {
            o = append(o, rpl)
        }
        return o
    }
}
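PadLeft and PadRight never truncate: they prepend or append rpl bytes until the requested length is reached. For example (illustrative sketch, not part of the vendored file):

```go
package main

import (
	"fmt"

	astibyte "github.com/asticode/go-astitools/byte"
)

func main() {
	fmt.Printf("%q\n", astibyte.PadLeft([]byte("7"), '0', 3))  // "007"
	fmt.Printf("%q\n", astibyte.PadRight([]byte("7"), '0', 3)) // "700"
}
```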
54
vendor/github.com/asticode/go-astitools/map/map.go
generated
vendored
Normal file
@@ -0,0 +1,54 @@
package astimap

// Map represents a bi-directional map
type Map struct {
    defaultA interface{}
    defaultB interface{}
    mapAToB  map[interface{}]interface{}
    mapBToA  map[interface{}]interface{}
}

// NewMap builds a new *Map
func NewMap(defaultA, defaultB interface{}) *Map {
    return &Map{
        defaultA: defaultA,
        defaultB: defaultB,
        mapAToB:  make(map[interface{}]interface{}),
        mapBToA:  make(map[interface{}]interface{}),
    }
}

// A retrieves a based on b
func (m *Map) A(b interface{}) interface{} {
    if a, ok := m.mapBToA[b]; ok {
        return a
    }
    return m.defaultA
}

// B retrieves b based on a
func (m *Map) B(a interface{}) interface{} {
    if b, ok := m.mapAToB[a]; ok {
        return b
    }
    return m.defaultB
}

// InA checks whether a exists
func (m *Map) InA(a interface{}) (ok bool) {
    _, ok = m.mapAToB[a]
    return
}

// InB checks whether b exists
func (m *Map) InB(b interface{}) (ok bool) {
    _, ok = m.mapBToA[b]
    return
}

// Set sets a key/value couple
func (m *Map) Set(a, b interface{}) *Map {
    m.mapAToB[a] = b
    m.mapBToA[b] = a
    return m
}
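astimap.Map keeps both directions of a mapping in sync, so lookups work by key or by value, with per-direction defaults when a lookup misses. Since Set returns the *Map, entries can be chained (illustrative sketch, not part of the vendored file):

```go
package main

import (
	"fmt"

	astimap "github.com/asticode/go-astitools/map"
)

func main() {
	m := astimap.NewMap("unknown", -1).
		Set("video", 0x1b).
		Set("audio", 0x0f)

	fmt.Println(m.B("video")) // 27: forward lookup
	fmt.Println(m.A(0x0f))    // audio: reverse lookup
	fmt.Println(m.A(0x42))    // unknown: falls back to defaultA
}
```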
43
vendor/github.com/asticode/go-astitools/ptr/astiptr.go
generated
vendored
Normal file
@@ -0,0 +1,43 @@
package astiptr

import "time"

// Bool transforms a bool into a *bool
func Bool(i bool) *bool {
    return &i
}

// Duration transforms a time.Duration into a *time.Duration
func Duration(i time.Duration) *time.Duration {
    return &i
}

// Float transforms a float64 into a *float64
func Float(i float64) *float64 {
    return &i
}

// Int transforms an int into an *int
func Int(i int) *int {
    return &i
}

// Int64 transforms an int64 into an *int64
func Int64(i int64) *int64 {
    return &i
}

// Str transforms a string into a *string
func Str(i string) *string {
    return &i
}

// UInt8 transforms a uint8 into a *uint8
func UInt8(i uint8) *uint8 {
    return &i
}

// UInt32 transforms a uint32 into a *uint32
func UInt32(i uint32) *uint32 {
    return &i
}
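These helpers exist because Go does not allow taking the address of a literal, which makes optional (pointer) struct fields awkward to fill in. A sketch with a hypothetical options struct (not part of the vendored file):

```go
package main

import (
	"fmt"
	"time"

	astiptr "github.com/asticode/go-astitools/ptr"
)

// options is a hypothetical struct using pointers to distinguish "unset" from zero values.
type options struct {
	Name    *string
	Timeout *time.Duration
}

func main() {
	o := options{
		Name:    astiptr.Str("demo"),
		Timeout: astiptr.Duration(5 * time.Second),
	}
	fmt.Println(*o.Name, *o.Timeout)
}
```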
15
vendor/github.com/asticode/go-astitools/string/length.go
generated
vendored
Normal file
@@ -0,0 +1,15 @@
package astistring

// ToLength forces the length of a string
func ToLength(i, rpl string, length int) string {
    if len(i) == length {
        return i
    } else if len(i) > length {
        return i[:length]
    } else {
        for idx := 0; idx <= length-len(i); idx++ {
            i += rpl
        }
        return i
    }
}
35
vendor/github.com/asticode/go-astitools/string/rand.go
generated
vendored
Normal file
@@ -0,0 +1,35 @@
package astistring

import (
    "math/rand"
    "time"
)

const (
    letterBytes   = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    letterIdxBits = 6                    // 6 bits to represent a letter index
    letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits
    letterIdxMax  = 63 / letterIdxBits   // # of letter indices fitting in 63 bits
)

var src = rand.NewSource(time.Now().UnixNano())

// RandomString generates a random string
// https://stackoverflow.com/questions/22892120/how-to-generate-a-random-string-of-a-fixed-length-in-golang
func RandomString(n int) string {
    b := make([]byte, n)
    // A src.Int63() generates 63 random bits, enough for letterIdxMax characters!
    for i, cache, remain := n-1, src.Int63(), letterIdxMax; i >= 0; {
        if remain == 0 {
            cache, remain = src.Int63(), letterIdxMax
        }
        if idx := int(cache & letterIdxMask); idx < len(letterBytes) {
            b[i] = letterBytes[idx]
            i--
        }
        cache >>= letterIdxBits
        remain--
    }

    return string(b)
}
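RandomString draws letters from letterBytes using the bit-masking trick from the linked Stack Overflow answer: a single 63-bit src.Int63() value yields up to ten 6-bit letter indices before a new random value is needed. Usage is a one-liner (illustrative, not part of the vendored file):

```go
package main

import (
	"fmt"

	astistring "github.com/asticode/go-astitools/string"
)

func main() {
	// Prints 16 pseudo-random letters, e.g. usable as a temporary identifier.
	fmt.Println(astistring.RandomString(16))
}
```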
21
vendor/github.com/asticode/go-astits/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2017 Quentin Renard

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
123
vendor/github.com/asticode/go-astits/README.md
generated
vendored
Normal file
@@ -0,0 +1,123 @@
[](http://goreportcard.com/report/github.com/asticode/go-astits)
[](https://godoc.org/github.com/asticode/go-astits)
[](https://travis-ci.org/asticode/go-astits#)
[](https://coveralls.io/github/asticode/go-astits)

This is a Golang library to natively parse and demux MPEG Transport Streams (ts).

WARNING: this library is not yet production ready. For instance, while parsing a slice of bytes, it doesn't check whether
the length of the slice is sufficient and rather panics on purpose. Use at your own risk!

# Installation

To install the library use the following:

    go get -u github.com/asticode/go-astits/...

# Before looking at the code...

The transport stream is made of packets.<br>
Each packet has a header, an optional adaptation field and a payload.<br>
Several payloads can be appended and parsed as a data.

```
                                          TRANSPORT STREAM
+--------------------------------------------------------------------------------------------------+
|                                                                                                  |

                     PACKET                                          PACKET
+----------------------------------------------+----------------------------------------------+----
|                                              |                                              |

+--------+---------------------------+---------+--------+---------------------------+---------+
| HEADER | OPTIONAL ADAPTATION FIELD | PAYLOAD | HEADER | OPTIONAL ADAPTATION FIELD | PAYLOAD | ...
+--------+---------------------------+---------+--------+---------------------------+---------+

                                     |         |                                    |         |
                                     +---------+                                    +---------+
                                          |                                              |
                                          +----------------------------------------------+
                                                               DATA
```

# Using the library in your code

WARNING: the code below doesn't handle errors for readability purposes. However, you SHOULD!

```go
// Create a cancellable context in case you want to stop reading packets/data any time you want
ctx, cancel := context.WithCancel(context.Background())

// Handle SIGTERM signal
ch := make(chan os.Signal, 1)
signal.Notify(ch, syscall.SIGTERM)
go func() {
    <-ch
    cancel()
}()

// Open your file or initialize any kind of io.Reader
f, _ := os.Open("/path/to/file.ts")
defer f.Close()

// Create the demuxer
dmx := astits.New(ctx, f)
for {
    // Get the next data
    d, _ := dmx.NextData()

    // Data is a PMT data
    if d.PMT != nil {
        // Loop through elementary streams
        for _, es := range d.PMT.ElementaryStreams {
            fmt.Printf("Stream detected: %d\n", es.ElementaryPID)
        }
        return
    }
}
```

## Options

In order to pass options to the demuxer, look for the methods prefixed with `Opt` and add them upon calling `New`:

```go
// This is your custom packets parser
p := func(ps []*astits.Packet) (ds []*astits.Data, skip bool, err error) {
    // This is your logic
    skip = true
    return
}

// Now you can create a demuxer with the proper options
dmx := New(ctx, f, OptPacketSize(192), OptPacketsParser(p))
```

# CLI

This library provides a CLI that will automatically get installed in `GOPATH/bin` on `go get` execution.

## List streams

    $ astits -i <path to your file> -f <format: text|json (default: text)>

## List data

    $ astits data -i <path to your file> -d <data type: eit|nit|... (repeatable argument | if empty, all data types are shown)>

# Features and roadmap

- [x] Parse PES packets
- [x] Parse PAT packets
- [x] Parse PMT packets
- [x] Parse EIT packets
- [x] Parse NIT packets
- [x] Parse SDT packets
- [x] Parse TOT packets
- [ ] Parse BAT packets
- [ ] Parse DIT packets
- [ ] Parse RST packets
- [ ] Parse SIT packets
- [ ] Parse ST packets
- [ ] Parse TDT packets
29
vendor/github.com/asticode/go-astits/clock_reference.go
generated
vendored
Normal file
@@ -0,0 +1,29 @@
package astits

import (
    "time"
)

// ClockReference represents a clock reference
// Base is based on a 90 kHz clock and extension is based on a 27 MHz clock
type ClockReference struct {
    Base, Extension int
}

// newClockReference builds a new clock reference
func newClockReference(base, extension int) *ClockReference {
    return &ClockReference{
        Base:      base,
        Extension: extension,
    }
}

// Duration converts the clock reference into duration
func (p ClockReference) Duration() time.Duration {
    return time.Duration(p.Base*1e9/90000) + time.Duration(p.Extension*1e9/27000000)
}

// Time converts the clock reference into time
func (p ClockReference) Time() time.Time {
    return time.Unix(0, p.Duration().Nanoseconds())
}
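A ClockReference therefore encodes MPEG timestamps exactly: Base counts ticks of the 90 kHz clock and Extension ticks of the 27 MHz clock, and Duration converts both to nanoseconds. For instance (illustrative sketch, not part of the vendored file; exact on 64-bit platforms):

```go
package main

import (
	"fmt"

	"github.com/asticode/go-astits"
)

func main() {
	// 270000 ticks of the 90 kHz clock correspond to 3 seconds.
	cr := astits.ClockReference{Base: 270000}
	fmt.Println(cr.Duration()) // 3s
}
```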
95
vendor/github.com/asticode/go-astits/data.go
generated
vendored
Normal file
@@ -0,0 +1,95 @@
package astits

import (
    "github.com/pkg/errors"
)

// PIDs
const (
    PIDPAT  = 0x0    // Program Association Table (PAT) contains a directory listing of all Program Map Tables.
    PIDCAT  = 0x1    // Conditional Access Table (CAT) contains a directory listing of all ITU-T Rec. H.222 entitlement management message streams used by Program Map Tables.
    PIDTSDT = 0x2    // Transport Stream Description Table (TSDT) contains descriptors related to the overall transport stream
    PIDNull = 0x1fff // Null Packet (used for fixed bandwidth padding)
)

// Data represents a data
type Data struct {
    EIT         *EITData
    FirstPacket *Packet
    NIT         *NITData
    PAT         *PATData
    PES         *PESData
    PID         uint16
    PMT         *PMTData
    SDT         *SDTData
    TOT         *TOTData
}

// parseData parses a payload spanning over multiple packets and returns a set of data
func parseData(ps []*Packet, prs PacketsParser, pm programMap) (ds []*Data, err error) {
    // Use custom parser first
    if prs != nil {
        var skip bool
        if ds, skip, err = prs(ps); err != nil {
            err = errors.Wrap(err, "astits: custom packets parsing failed")
            return
        } else if skip {
            return
        }
    }

    // Reconstruct payload
    var l int
    for _, p := range ps {
        l += len(p.Payload)
    }
    var payload = make([]byte, l)
    var c int
    for _, p := range ps {
        c += copy(payload[c:], p.Payload)
    }

    // Parse PID
    var pid = ps[0].Header.PID

    // Parse payload
    if pid == PIDCAT {
        // Information in a CAT payload is private and dependent on the CA system. Use the PacketsParser
        // to parse this type of payload
    } else if isPSIPayload(pid, pm) {
        var psiData *PSIData
        if psiData, err = parsePSIData(payload); err != nil {
            err = errors.Wrap(err, "astits: parsing PSI data failed")
            return
        }
        ds = psiData.toData(ps[0], pid)
    } else if isPESPayload(payload) {
        d, err := parsePESData(payload)
        if err == nil {
            ds = append(ds, &Data{
                FirstPacket: ps[0],
                PES:         d,
                PID:         pid,
            })
        }
    }
    return
}

// isPSIPayload checks whether the payload is a PSI one
func isPSIPayload(pid uint16, pm programMap) bool {
    return pid == PIDPAT || // PAT
        pm.exists(pid) || // PMT
        ((pid >= 0x10 && pid <= 0x14) || (pid >= 0x1e && pid <= 0x1f)) // DVB
}

// isPESPayload checks whether the payload is a PES one
func isPESPayload(i []byte) bool {
    // Packet is not big enough
    if len(i) < 3 {
        return false
    }

    // Check prefix
    return uint32(i[0])<<16|uint32(i[1])<<8|uint32(i[2]) == 1
}
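isPESPayload only checks the three-byte PES start-code prefix 0x000001. The same check can be applied to any raw payload (illustrative sketch re-implementing the unexported helper, not part of the vendored file):

```go
package main

import "fmt"

func main() {
	// A payload beginning with the 0x000001 start code is treated as PES data.
	payload := []byte{0x00, 0x00, 0x01, 0xe0, 0x00, 0x00}
	isPES := len(payload) >= 3 &&
		uint32(payload[0])<<16|uint32(payload[1])<<8|uint32(payload[2]) == 1
	fmt.Println(isPES) // true
}
```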
73
vendor/github.com/asticode/go-astits/data_eit.go
generated
vendored
Normal file
@@ -0,0 +1,73 @@
package astits

import "time"

// EITData represents an EIT data
// Page: 36 | Chapter: 5.2.4 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type EITData struct {
    Events                   []*EITDataEvent
    LastTableID              uint8
    OriginalNetworkID        uint16
    SegmentLastSectionNumber uint8
    ServiceID                uint16
    TransportStreamID        uint16
}

// EITDataEvent represents an EIT data event
type EITDataEvent struct {
    Descriptors    []*Descriptor
    Duration       time.Duration
    EventID        uint16
    HasFreeCSAMode bool // When true indicates that access to one or more streams may be controlled by a CA system.
    RunningStatus  uint8
    StartTime      time.Time
}

// parseEITSection parses an EIT section
func parseEITSection(i []byte, offset *int, offsetSectionsEnd int, tableIDExtension uint16) (d *EITData) {
    // Init
    d = &EITData{ServiceID: tableIDExtension}

    // Transport stream ID
    d.TransportStreamID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
    *offset += 2

    // Original network ID
    d.OriginalNetworkID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
    *offset += 2

    // Segment last section number
    d.SegmentLastSectionNumber = uint8(i[*offset])
    *offset += 1

    // Last table ID
    d.LastTableID = uint8(i[*offset])
    *offset += 1

    // Loop until end of section data is reached
    for *offset < offsetSectionsEnd {
        // Event ID
        var e = &EITDataEvent{}
        e.EventID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
        *offset += 2

        // Start time
        e.StartTime = parseDVBTime(i, offset)

        // Duration
        e.Duration = parseDVBDurationSeconds(i, offset)

        // Running status
        e.RunningStatus = uint8(i[*offset]) >> 5

        // Free CA mode
        e.HasFreeCSAMode = uint8(i[*offset]&0x10) > 0

        // Descriptors
        e.Descriptors = parseDescriptors(i, offset)

        // Add event
        d.Events = append(d.Events, e)
    }
    return
}
49
vendor/github.com/asticode/go-astits/data_nit.go
generated
vendored
Normal file
@@ -0,0 +1,49 @@
package astits

// NITData represents a NIT data
// Page: 29 | Chapter: 5.2.1 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type NITData struct {
    NetworkDescriptors []*Descriptor
    NetworkID          uint16
    TransportStreams   []*NITDataTransportStream
}

// NITDataTransportStream represents a NIT data transport stream
type NITDataTransportStream struct {
    OriginalNetworkID    uint16
    TransportDescriptors []*Descriptor
    TransportStreamID    uint16
}

// parseNITSection parses a NIT section
func parseNITSection(i []byte, offset *int, tableIDExtension uint16) (d *NITData) {
    // Init
    d = &NITData{NetworkID: tableIDExtension}

    // Network descriptors
    d.NetworkDescriptors = parseDescriptors(i, offset)

    // Transport stream loop length
    var transportStreamLoopLength = int(uint16(i[*offset]&0xf)<<8 | uint16(i[*offset+1]))
    *offset += 2

    // Transport stream loop
    transportStreamLoopLength += *offset
    for *offset < transportStreamLoopLength {
        // Transport stream ID
        var ts = &NITDataTransportStream{}
        ts.TransportStreamID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
        *offset += 2

        // Original network ID
        ts.OriginalNetworkID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
        *offset += 2

        // Transport descriptors
        ts.TransportDescriptors = parseDescriptors(i, offset)

        // Append transport stream
        d.TransportStreams = append(d.TransportStreams, ts)
    }
    return
}
30
vendor/github.com/asticode/go-astits/data_pat.go
generated
vendored
Normal file
@@ -0,0 +1,30 @@
package astits

// PATData represents a PAT data
// https://en.wikipedia.org/wiki/Program-specific_information
type PATData struct {
    Programs          []*PATProgram
    TransportStreamID uint16
}

// PATProgram represents a PAT program
type PATProgram struct {
    ProgramMapID  uint16 // The packet identifier that contains the associated PMT
    ProgramNumber uint16 // Relates to the Table ID extension in the associated PMT. A value of 0 is reserved for a NIT packet identifier.
}

// parsePATSection parses a PAT section
func parsePATSection(i []byte, offset *int, offsetSectionsEnd int, tableIDExtension uint16) (d *PATData) {
    // Init
    d = &PATData{TransportStreamID: tableIDExtension}

    // Loop until end of section data is reached
    for *offset < offsetSectionsEnd {
        d.Programs = append(d.Programs, &PATProgram{
            ProgramMapID:  uint16(i[*offset+2]&0x1f)<<8 | uint16(i[*offset+3]),
            ProgramNumber: uint16(i[*offset])<<8 | uint16(i[*offset+1]),
        })
        *offset += 4
    }
    return
}
322
vendor/github.com/asticode/go-astits/data_pes.go
generated
vendored
Normal file
@@ -0,0 +1,322 @@
package astits

import (
    "fmt"

    "github.com/pkg/errors"
)

// P-STD buffer scales
const (
    PSTDBufferScale128Bytes  = 0
    PSTDBufferScale1024Bytes = 1
)

// PTS DTS indicator
const (
    PTSDTSIndicatorBothPresent = 3
    PTSDTSIndicatorIsForbidden = 1
    PTSDTSIndicatorNoPTSOrDTS  = 0
    PTSDTSIndicatorOnlyPTS     = 2
)

// Stream IDs
const (
    StreamIDPrivateStream1 = 189
    StreamIDPaddingStream  = 190
    StreamIDPrivateStream2 = 191
)

// Trick mode controls
const (
    TrickModeControlFastForward = 0
    TrickModeControlFastReverse = 3
    TrickModeControlFreezeFrame = 2
    TrickModeControlSlowMotion  = 1
    TrickModeControlSlowReverse = 4
)

// PESData represents a PES data
// https://en.wikipedia.org/wiki/Packetized_elementary_stream
// http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
// http://happy.emu.id.au/lab/tut/dttb/dtbtut4b.htm
type PESData struct {
    Data   []byte
    Header *PESHeader
}

// PESHeader represents a packet PES header
type PESHeader struct {
    OptionalHeader *PESOptionalHeader
    PacketLength   uint16 // Specifies the number of bytes remaining in the packet after this field. Can be zero. If the PES packet length is set to zero, the PES packet can be of any length. A value of zero for the PES packet length can be used only when the PES packet payload is a video elementary stream.
    StreamID       uint8  // Examples: Audio streams (0xC0-0xDF), Video streams (0xE0-0xEF)
}

// PESOptionalHeader represents a PES optional header
type PESOptionalHeader struct {
    AdditionalCopyInfo              uint8
    CRC                             uint16
    DataAlignmentIndicator          bool // True indicates that the PES packet header is immediately followed by the video start code or audio syncword
    DSMTrickMode                    *DSMTrickMode
    DTS                             *ClockReference
    ESCR                            *ClockReference
    ESRate                          uint32
    Extension2Data                  []byte
    Extension2Length                uint8
    HasAdditionalCopyInfo           bool
    HasCRC                          bool
    HasDSMTrickMode                 bool
    HasESCR                         bool
    HasESRate                       bool
    HasExtension                    bool
    HasExtension2                   bool
    HasOptionalFields               bool
    HasPackHeaderField              bool
    HasPrivateData                  bool
    HasProgramPacketSequenceCounter bool
    HasPSTDBuffer                   bool
    HeaderLength                    uint8
    IsCopyrighted                   bool
    IsOriginal                      bool
    MarkerBits                      uint8
    MPEG1OrMPEG2ID                  uint8
    OriginalStuffingLength          uint8
    PacketSequenceCounter           uint8
    PackField                       uint8
    Priority                        bool
    PrivateData                     []byte
    PSTDBufferScale                 uint8
    PSTDBufferSize                  uint16
    PTS                             *ClockReference
    PTSDTSIndicator                 uint8
    ScramblingControl               uint8
}

// DSMTrickMode represents a DSM trick mode
// https://books.google.fr/books?id=vwUrAwAAQBAJ&pg=PT501&lpg=PT501&dq=dsm+trick+mode+control&source=bl&ots=fI-9IHXMRL&sig=PWnhxrsoMWNQcl1rMCPmJGNO9Ds&hl=fr&sa=X&ved=0ahUKEwjogafD8bjXAhVQ3KQKHeHKD5oQ6AEINDAB#v=onepage&q=dsm%20trick%20mode%20control&f=false
type DSMTrickMode struct {
    FieldID             uint8
    FrequencyTruncation uint8
    IntraSliceRefresh   uint8
    RepeatControl       uint8
    TrickModeControl    uint8
}

// parsePESData parses a PES data
func parsePESData(i []byte) (d *PESData, err error) {
    // Init
    d = &PESData{}

    // Parse header
    var offset, dataStart, dataEnd = 3, 0, 0
    if d.Header, dataStart, dataEnd, err = parsePESHeader(i, &offset); err != nil {
        err = errors.Wrap(err, "astits: parsing PES header failed")
        return
    }

    // Parse data
    d.Data = i[dataStart:dataEnd]
    return
}

// hasPESOptionalHeader checks whether the data has a PES optional header
func hasPESOptionalHeader(streamID uint8) bool {
    return streamID != StreamIDPaddingStream && streamID != StreamIDPrivateStream2
}

// parsePESHeader parses a PES header
func parsePESHeader(i []byte, offset *int) (h *PESHeader, dataStart, dataEnd int, err error) {
    // Init
    h = &PESHeader{}

    // Stream ID
    h.StreamID = uint8(i[*offset])
    *offset += 1

    // Length
    h.PacketLength = uint16(i[*offset])<<8 | uint16(i[*offset+1])
    *offset += 2

    // Data end
    if h.PacketLength > 0 {
        dataEnd = *offset + int(h.PacketLength)
    } else {
        dataEnd = len(i)
    }

    // Check for incomplete data
    if dataEnd > len(i) {
        err = fmt.Errorf("astits: pes dataEnd (%d) > len(i) (%d)", dataEnd, len(i))
        return
    }

    // Optional header
    if hasPESOptionalHeader(h.StreamID) {
        h.OptionalHeader, dataStart = parsePESOptionalHeader(i, offset)
    } else {
        dataStart = *offset
    }
    return
}

// parsePESOptionalHeader parses a PES optional header
func parsePESOptionalHeader(i []byte, offset *int) (h *PESOptionalHeader, dataStart int) {
    // Init
    h = &PESOptionalHeader{}

    // Marker bits
    h.MarkerBits = uint8(i[*offset]) >> 6

    // Scrambling control
    h.ScramblingControl = uint8(i[*offset]) >> 4 & 0x3

    // Priority
    h.Priority = uint8(i[*offset])&0x8 > 0

    // Data alignment indicator
    h.DataAlignmentIndicator = uint8(i[*offset])&0x4 > 0

    // Copyrighted
    h.IsCopyrighted = uint(i[*offset])&0x2 > 0

    // Original or copy
    h.IsOriginal = uint8(i[*offset])&0x1 > 0
    *offset += 1

    // PTS DTS indicator
    h.PTSDTSIndicator = uint8(i[*offset]) >> 6 & 0x3

    // Flags
    h.HasESCR = uint8(i[*offset])&0x20 > 0
    h.HasESRate = uint8(i[*offset])&0x10 > 0
    h.HasDSMTrickMode = uint8(i[*offset])&0x8 > 0
    h.HasAdditionalCopyInfo = uint8(i[*offset])&0x4 > 0
    h.HasCRC = uint8(i[*offset])&0x2 > 0
    h.HasExtension = uint8(i[*offset])&0x1 > 0
    *offset += 1

    // Header length
    h.HeaderLength = uint8(i[*offset])
    *offset += 1

    // Data start
    dataStart = *offset + int(h.HeaderLength)

    // PTS/DTS
    if h.PTSDTSIndicator == PTSDTSIndicatorOnlyPTS {
        h.PTS = parsePTSOrDTS(i[*offset:])
        *offset += 5
    } else if h.PTSDTSIndicator == PTSDTSIndicatorBothPresent {
        h.PTS = parsePTSOrDTS(i[*offset:])
        *offset += 5
        h.DTS = parsePTSOrDTS(i[*offset:])
        *offset += 5
    }

    // ESCR
    if h.HasESCR {
        h.ESCR = parseESCR(i[*offset:])
        *offset += 6
    }

    // ES rate
    if h.HasESRate {
        h.ESRate = uint32(i[*offset])&0x7f<<15 | uint32(i[*offset+1])<<7 | uint32(i[*offset+2])>>1
        *offset += 3
    }

    // Trick mode
    if h.HasDSMTrickMode {
        h.DSMTrickMode = parseDSMTrickMode(i[*offset])
        *offset += 1
    }

    // Additional copy info
    if h.HasAdditionalCopyInfo {
        h.AdditionalCopyInfo = i[*offset] & 0x7f
        *offset += 1
    }

    // CRC
    if h.HasCRC {
        h.CRC = uint16(i[*offset])>>8 | uint16(i[*offset+1])
        *offset += 2
    }

    // Extension
    if h.HasExtension {
        // Flags
        h.HasPrivateData = i[*offset]&0x80 > 0
        h.HasPackHeaderField = i[*offset]&0x40 > 0
        h.HasProgramPacketSequenceCounter = i[*offset]&0x20 > 0
        h.HasPSTDBuffer = i[*offset]&0x10 > 0
        h.HasExtension2 = i[*offset]&0x1 > 0
        *offset += 1

        // Private data
        if h.HasPrivateData {
            h.PrivateData = i[*offset : *offset+16]
            *offset += 16
        }

        // Pack field length
        if h.HasPackHeaderField {
            h.PackField = uint8(i[*offset])
            *offset += 1
        }

        // Program packet sequence counter
        if h.HasProgramPacketSequenceCounter {
            h.PacketSequenceCounter = uint8(i[*offset]) & 0x7f
            h.MPEG1OrMPEG2ID = uint8(i[*offset+1]) >> 6 & 0x1
            h.OriginalStuffingLength = uint8(i[*offset+1]) & 0x3f
            *offset += 2
        }

        // P-STD buffer
        if h.HasPSTDBuffer {
            h.PSTDBufferScale = i[*offset] >> 5 & 0x1
            h.PSTDBufferSize = uint16(i[*offset])&0x1f<<8 | uint16(i[*offset+1])
            *offset += 2
        }

        // Extension 2
        if h.HasExtension2 {
            // Length
            h.Extension2Length = uint8(i[*offset]) & 0x7f
            *offset += 2

            // Data
            h.Extension2Data = i[*offset : *offset+int(h.Extension2Length)]
            *offset += int(h.Extension2Length)
        }
    }
    return
}

// parseDSMTrickMode parses a DSM trick mode
func parseDSMTrickMode(i byte) (m *DSMTrickMode) {
    m = &DSMTrickMode{}
    m.TrickModeControl = i >> 5
    if m.TrickModeControl == TrickModeControlFastForward || m.TrickModeControl == TrickModeControlFastReverse {
        m.FieldID = i >> 3 & 0x3
        m.IntraSliceRefresh = i >> 2 & 0x1
        m.FrequencyTruncation = i & 0x3
    } else if m.TrickModeControl == TrickModeControlFreezeFrame {
        m.FieldID = i >> 3 & 0x3
    } else if m.TrickModeControl == TrickModeControlSlowMotion || m.TrickModeControl == TrickModeControlSlowReverse {
        m.RepeatControl = i & 0x1f
    }
    return
}

// parsePTSOrDTS parses a PTS or a DTS
func parsePTSOrDTS(i []byte) *ClockReference {
    return newClockReference(int(uint64(i[0])>>1&0x7<<30|uint64(i[1])<<22|uint64(i[2])>>1&0x7f<<15|uint64(i[3])<<7|uint64(i[4])>>1&0x7f), 0)
}

// parseESCR parses an ESCR
func parseESCR(i []byte) *ClockReference {
    var escr = uint64(i[0])>>3&0x7<<39 | uint64(i[0])&0x3<<37 | uint64(i[1])<<29 | uint64(i[2])>>3<<24 | uint64(i[2])&0x3<<22 | uint64(i[3])<<14 | uint64(i[4])>>3<<9 | uint64(i[4])&0x3<<7 | uint64(i[5])>>1
    return newClockReference(int(escr>>9), int(escr&0x1ff))
}
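parsePTSOrDTS reassembles the 33-bit PTS/DTS value, which counts ticks of the 90 kHz clock and is stored in ClockReference.Base with a zero extension. Converting such a timestamp to a wall-clock duration is then just ClockReference.Duration (illustrative sketch, not part of the vendored file):

```go
package main

import (
	"fmt"

	"github.com/asticode/go-astits"
)

func main() {
	// 8100000 ticks of the 90 kHz clock = 90 seconds of presentation time.
	pts := astits.ClockReference{Base: 8100000}
	fmt.Println(pts.Duration()) // 1m30s
}
```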
57
vendor/github.com/asticode/go-astits/data_pmt.go
generated
vendored
Normal file
@@ -0,0 +1,57 @@
package astits

// Stream types
const (
    StreamTypeLowerBitrateVideo          = 27 // ITU-T Rec. H.264 and ISO/IEC 14496-10
    StreamTypeMPEG1Audio                 = 3  // ISO/IEC 11172-3
    StreamTypeMPEG2HalvedSampleRateAudio = 4  // ISO/IEC 13818-3
    StreamTypeMPEG2PacketizedData        = 6  // ITU-T Rec. H.222 and ISO/IEC 13818-1 i.e., DVB subtitles/VBI and AC-3
)

// PMTData represents a PMT data
// https://en.wikipedia.org/wiki/Program-specific_information
type PMTData struct {
    ElementaryStreams  []*PMTElementaryStream
    PCRPID             uint16        // The packet identifier that contains the program clock reference used to improve the random access accuracy of the stream's timing that is derived from the program timestamp. If this is unused, then it is set to 0x1FFF (all bits on).
    ProgramDescriptors []*Descriptor // Program descriptors
    ProgramNumber      uint16
}

// PMTElementaryStream represents a PMT elementary stream
type PMTElementaryStream struct {
    ElementaryPID               uint16        // The packet identifier that contains the stream type data.
    ElementaryStreamDescriptors []*Descriptor // Elementary stream descriptors
    StreamType                  uint8         // This defines the structure of the data contained within the elementary packet identifier.
}

// parsePMTSection parses a PMT section
func parsePMTSection(i []byte, offset *int, offsetSectionsEnd int, tableIDExtension uint16) (d *PMTData) {
    // Init
    d = &PMTData{ProgramNumber: tableIDExtension}

    // PCR PID
    d.PCRPID = uint16(i[*offset]&0x1f)<<8 | uint16(i[*offset+1])
    *offset += 2

    // Program descriptors
    d.ProgramDescriptors = parseDescriptors(i, offset)

    // Loop until end of section data is reached
    for *offset < offsetSectionsEnd {
        // Stream type
        var e = &PMTElementaryStream{}
        e.StreamType = uint8(i[*offset])
        *offset += 1

        // Elementary PID
        e.ElementaryPID = uint16(i[*offset]&0x1f)<<8 | uint16(i[*offset+1])
        *offset += 2

        // Elementary descriptors
        e.ElementaryStreamDescriptors = parseDescriptors(i, offset)

        // Add elementary stream
        d.ElementaryStreams = append(d.ElementaryStreams, e)
    }
    return
}
356
vendor/github.com/asticode/go-astits/data_psi.go
generated
vendored
Normal file
@@ -0,0 +1,356 @@
package astits

import (
    "fmt"

    "github.com/asticode/go-astilog"
    "github.com/pkg/errors"
)

// PSI table IDs
const (
    PSITableTypeBAT     = "BAT"
    PSITableTypeDIT     = "DIT"
    PSITableTypeEIT     = "EIT"
    PSITableTypeNIT     = "NIT"
    PSITableTypeNull    = "Null"
    PSITableTypePAT     = "PAT"
    PSITableTypePMT     = "PMT"
    PSITableTypeRST     = "RST"
    PSITableTypeSDT     = "SDT"
    PSITableTypeSIT     = "SIT"
    PSITableTypeST      = "ST"
    PSITableTypeTDT     = "TDT"
    PSITableTypeTOT     = "TOT"
    PSITableTypeUnknown = "Unknown"
)

// PSIData represents a PSI data
// https://en.wikipedia.org/wiki/Program-specific_information
type PSIData struct {
    PointerField int // Present at the start of the TS packet payload signaled by the payload_unit_start_indicator bit in the TS header. Used to set packet alignment bytes or content before the start of tabled payload data.
    Sections     []*PSISection
}

// PSISection represents a PSI section
type PSISection struct {
    CRC32  uint32 // A checksum of the entire table excluding the pointer field, pointer filler bytes and the trailing CRC32.
    Header *PSISectionHeader
    Syntax *PSISectionSyntax
}

// PSISectionHeader represents a PSI section header
type PSISectionHeader struct {
    PrivateBit             bool   // The PAT, PMT, and CAT all set this to 0. Other tables set this to 1.
    SectionLength          uint16 // The number of bytes that follow for the syntax section (with CRC value) and/or table data. These bytes must not exceed a value of 1021.
    SectionSyntaxIndicator bool   // A flag that indicates if the syntax section follows the section length. The PAT, PMT, and CAT all set this to 1.
    TableID                int    // Table Identifier, that defines the structure of the syntax section and other contained data. As an exception, if this is the byte that immediately follow previous table section and is set to 0xFF, then it indicates that the repeat of table section end here and the rest of TS data payload shall be stuffed with 0xFF. Consequently the value 0xFF shall not be used for the Table Identifier.
    TableType              string
}

// PSISectionSyntax represents a PSI section syntax
type PSISectionSyntax struct {
    Data   *PSISectionSyntaxData
    Header *PSISectionSyntaxHeader
}

// PSISectionSyntaxHeader represents a PSI section syntax header
type PSISectionSyntaxHeader struct {
    CurrentNextIndicator bool   // Indicates if data is current in effect or is for future use. If the bit is flagged on, then the data is to be used at the present moment.
    LastSectionNumber    uint8  // This indicates which table is the last table in the sequence of tables.
    SectionNumber        uint8  // This is an index indicating which table this is in a related sequence of tables. The first table starts from 0.
    TableIDExtension     uint16 // Informational only identifier. The PAT uses this for the transport stream identifier and the PMT uses this for the Program number.
    VersionNumber        uint8  // Syntax version number. Incremented when data is changed and wrapped around on overflow for values greater than 32.
}

// PSISectionSyntaxData represents a PSI section syntax data
type PSISectionSyntaxData struct {
    EIT *EITData
    NIT *NITData
    PAT *PATData
    PMT *PMTData
    SDT *SDTData
    TOT *TOTData
}

// parsePSIData parses a PSI data
func parsePSIData(i []byte) (d *PSIData, err error) {
    // Init data
    d = &PSIData{}
    var offset int

    // Pointer field
    d.PointerField = int(i[offset])
    offset += 1

    // Pointer filler bytes
    offset += d.PointerField

    // Parse sections
    var s *PSISection
    var stop bool
    for offset < len(i) && !stop {
        if s, stop, err = parsePSISection(i, &offset); err != nil {
            err = errors.Wrap(err, "astits: parsing PSI table failed")
            return
        }
        d.Sections = append(d.Sections, s)
    }
    return
}

// parsePSISection parses a PSI section
func parsePSISection(i []byte, offset *int) (s *PSISection, stop bool, err error) {
    // Init section
    s = &PSISection{}

    // Parse header
    var offsetStart, offsetSectionsEnd, offsetEnd int
    s.Header, offsetStart, _, offsetSectionsEnd, offsetEnd = parsePSISectionHeader(i, offset)

    // Check whether we need to stop the parsing
    if shouldStopPSIParsing(s.Header.TableType) {
        stop = true
        return
    }

    // Check whether there's a syntax section
    if s.Header.SectionLength > 0 {
        // Parse syntax
        s.Syntax = parsePSISectionSyntax(i, offset, s.Header, offsetSectionsEnd)

        // Process CRC32
        if hasCRC32(s.Header.TableType) {
            // Parse CRC32
            s.CRC32 = parseCRC32(i[offsetSectionsEnd:offsetEnd])
            *offset += 4

            // Check CRC32
            var c = computeCRC32(i[offsetStart:offsetSectionsEnd])
            if c != s.CRC32 {
                err = fmt.Errorf("astits: Table CRC32 %x != computed CRC32 %x", s.CRC32, c)
                return
            }
        }
    }
    return
}

// parseCRC32 parses a CRC32
func parseCRC32(i []byte) uint32 {
    return uint32(i[len(i)-4])<<24 | uint32(i[len(i)-3])<<16 | uint32(i[len(i)-2])<<8 | uint32(i[len(i)-1])
}

// computeCRC32 computes a CRC32
// https://stackoverflow.com/questions/35034042/how-to-calculate-crc32-in-psi-si-packet
func computeCRC32(i []byte) (o uint32) {
    o = uint32(0xffffffff)
    for _, b := range i {
        for i := 0; i < 8; i++ {
            if (o >= uint32(0x80000000)) != (b >= uint8(0x80)) {
                o = (o << 1) ^ 0x04C11DB7
            } else {
                o = o << 1
            }
            b <<= 1
        }
    }
    return
}

// shouldStopPSIParsing checks whether the PSI parsing should be stopped
func shouldStopPSIParsing(tableType string) bool {
    return tableType == PSITableTypeNull || tableType == PSITableTypeUnknown
}

// parsePSISectionHeader parses a PSI section header
func parsePSISectionHeader(i []byte, offset *int) (h *PSISectionHeader, offsetStart, offsetSectionsStart, offsetSectionsEnd, offsetEnd int) {
    // Init
    h = &PSISectionHeader{}
    offsetStart = *offset

    // Table ID
    h.TableID = int(i[*offset])
    *offset += 1

    // Table type
    h.TableType = psiTableType(h.TableID)

    // Check whether we need to stop the parsing
    if shouldStopPSIParsing(h.TableType) {
        return
    }

    // Section syntax indicator
    h.SectionSyntaxIndicator = i[*offset]&0x80 > 0

    // Private bit
    h.PrivateBit = i[*offset]&0x40 > 0

    // Section length
    h.SectionLength = uint16(i[*offset]&0xf)<<8 | uint16(i[*offset+1])
    *offset += 2

    // Offsets
    offsetSectionsStart = *offset
    offsetEnd = offsetSectionsStart + int(h.SectionLength)
    offsetSectionsEnd = offsetEnd
    if hasCRC32(h.TableType) {
        offsetSectionsEnd -= 4
    }
    return
}

// hasCRC32 checks whether the table has a CRC32
func hasCRC32(tableType string) bool {
    return tableType == PSITableTypePAT ||
        tableType == PSITableTypePMT ||
        tableType == PSITableTypeEIT ||
        tableType == PSITableTypeNIT ||
        tableType == PSITableTypeTOT ||
        tableType == PSITableTypeSDT
}

// psiTableType returns the psi table type based on the table id
// Page: 28 | https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
func psiTableType(tableID int) string {
    switch {
    case tableID == 0x4a:
        return PSITableTypeBAT
    case tableID >= 0x4e && tableID <= 0x6f:
        return PSITableTypeEIT
    case tableID == 0x7e:
        return PSITableTypeDIT
    case tableID == 0x40, tableID == 0x41:
        return PSITableTypeNIT
    case tableID == 0xff:
        return PSITableTypeNull
    case tableID == 0:
        return PSITableTypePAT
    case tableID == 2:
        return PSITableTypePMT
    case tableID == 0x71:
        return PSITableTypeRST
    case tableID == 0x42, tableID == 0x46:
        return PSITableTypeSDT
    case tableID == 0x7f:
        return PSITableTypeSIT
    case tableID == 0x72:
        return PSITableTypeST
    case tableID == 0x70:
        return PSITableTypeTDT
    case tableID == 0x73:
        return PSITableTypeTOT
    }
    // TODO Remove this log
    astilog.Debugf("astits: unlisted PSI table ID %d", tableID)
    return PSITableTypeUnknown
}

// parsePSISectionSyntax parses a PSI section syntax
func parsePSISectionSyntax(i []byte, offset *int, h *PSISectionHeader, offsetSectionsEnd int) (s *PSISectionSyntax) {
    // Init
    s = &PSISectionSyntax{}

    // Header
    if hasPSISyntaxHeader(h.TableType) {
        s.Header = parsePSISectionSyntaxHeader(i, offset)
    }

    // Parse data
    s.Data = parsePSISectionSyntaxData(i, offset, h, s.Header, offsetSectionsEnd)
    return
}

// hasPSISyntaxHeader checks whether the section has a syntax header
func hasPSISyntaxHeader(tableType string) bool {
    return tableType == PSITableTypeEIT ||
        tableType == PSITableTypeNIT ||
        tableType == PSITableTypePAT ||
        tableType == PSITableTypePMT ||
        tableType == PSITableTypeSDT
}

// parsePSISectionSyntaxHeader parses a PSI section syntax header
func parsePSISectionSyntaxHeader(i []byte, offset *int) (h *PSISectionSyntaxHeader) {
    // Init
    h = &PSISectionSyntaxHeader{}

    // Table ID extension
    h.TableIDExtension = uint16(i[*offset])<<8 | uint16(i[*offset+1])
    *offset += 2

    // Version number
    h.VersionNumber = uint8(i[*offset]&0x3f) >> 1

    // Current/Next indicator
    h.CurrentNextIndicator = i[*offset]&0x1 > 0
    *offset += 1

    // Section number
    h.SectionNumber = uint8(i[*offset])
    *offset += 1

    // Last section number
    h.LastSectionNumber = uint8(i[*offset])
    *offset += 1
    return
}

// parsePSISectionSyntaxData parses a PSI section data
func parsePSISectionSyntaxData(i []byte, offset *int, h *PSISectionHeader, sh *PSISectionSyntaxHeader, offsetSectionsEnd int) (d *PSISectionSyntaxData) {
    // Init
    d = &PSISectionSyntaxData{}

    // Switch on table type
    switch h.TableType {
    case PSITableTypeBAT:
        // TODO Parse BAT
    case PSITableTypeDIT:
        // TODO Parse DIT
    case PSITableTypeEIT:
        d.EIT = parseEITSection(i, offset, offsetSectionsEnd, sh.TableIDExtension)
    case PSITableTypeNIT:
        d.NIT = parseNITSection(i, offset, sh.TableIDExtension)
    case PSITableTypePAT:
        d.PAT = parsePATSection(i, offset, offsetSectionsEnd, sh.TableIDExtension)
    case PSITableTypePMT:
        d.PMT = parsePMTSection(i, offset, offsetSectionsEnd, sh.TableIDExtension)
    case PSITableTypeRST:
        // TODO Parse RST
    case PSITableTypeSDT:
        d.SDT = parseSDTSection(i, offset, offsetSectionsEnd, sh.TableIDExtension)
    case PSITableTypeSIT:
        // TODO Parse SIT
    case PSITableTypeST:
        // TODO Parse ST
    case PSITableTypeTOT:
        d.TOT = parseTOTSection(i, offset)
    case PSITableTypeTDT:
        // TODO Parse TDT
    }
    return
}

// toData parses the PSI tables and returns a set of Data
func (d *PSIData) toData(firstPacket *Packet, pid uint16) (ds []*Data) {
    // Loop through sections
    for _, s := range d.Sections {
        // Switch on table type
        switch s.Header.TableType {
        case PSITableTypeEIT:
            ds = append(ds, &Data{EIT: s.Syntax.Data.EIT, FirstPacket: firstPacket, PID: pid})
        case PSITableTypeNIT:
            ds = append(ds, &Data{FirstPacket: firstPacket, NIT: s.Syntax.Data.NIT, PID: pid})
        case PSITableTypePAT:
            ds = append(ds, &Data{FirstPacket: firstPacket, PAT: s.Syntax.Data.PAT, PID: pid})
        case PSITableTypePMT:
            ds = append(ds, &Data{FirstPacket: firstPacket, PID: pid, PMT: s.Syntax.Data.PMT})
        case PSITableTypeSDT:
            ds = append(ds, &Data{FirstPacket: firstPacket, PID: pid, SDT: s.Syntax.Data.SDT})
        case PSITableTypeTOT:
            ds = append(ds, &Data{FirstPacket: firstPacket, PID: pid, TOT: s.Syntax.Data.TOT})
        }
    }
    return
}
70
vendor/github.com/asticode/go-astits/data_sdt.go
generated
vendored
Normal file
@@ -0,0 +1,70 @@
package astits

// Running statuses
const (
    RunningStatusNotRunning          = 1
    RunningStatusPausing             = 3
    RunningStatusRunning             = 4
    RunningStatusServiceOffAir       = 5
    RunningStatusStartsInAFewSeconds = 2
    RunningStatusUndefined           = 0
)

// SDTData represents an SDT data
// Page: 33 | Chapter: 5.2.3 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type SDTData struct {
    OriginalNetworkID uint16
    Services          []*SDTDataService
    TransportStreamID uint16
}

// SDTDataService represents an SDT data service
type SDTDataService struct {
    Descriptors            []*Descriptor
    HasEITPresentFollowing bool // When true indicates that EIT present/following information for the service is present in the current TS
    HasEITSchedule         bool // When true indicates that EIT schedule information for the service is present in the current TS
    HasFreeCSAMode         bool // When true indicates that access to one or more streams may be controlled by a CA system.
    RunningStatus          uint8
    ServiceID              uint16
}

// parseSDTSection parses an SDT section
func parseSDTSection(i []byte, offset *int, offsetSectionsEnd int, tableIDExtension uint16) (d *SDTData) {
    // Init
    d = &SDTData{TransportStreamID: tableIDExtension}

    // Original network ID
    d.OriginalNetworkID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
    *offset += 2

    // Reserved for future use
    *offset += 1

    // Loop until end of section data is reached
    for *offset < offsetSectionsEnd {
        // Service ID
        var s = &SDTDataService{}
        s.ServiceID = uint16(i[*offset])<<8 | uint16(i[*offset+1])
        *offset += 2

        // EIT schedule flag
        s.HasEITSchedule = uint8(i[*offset]&0x2) > 0

        // EIT present/following flag
        s.HasEITPresentFollowing = uint8(i[*offset]&0x1) > 0
        *offset += 1

        // Running status
        s.RunningStatus = uint8(i[*offset]) >> 5

        // Free CA mode
        s.HasFreeCSAMode = uint8(i[*offset]&0x10) > 0

        // Descriptors
        s.Descriptors = parseDescriptors(i, offset)

        // Append service
        d.Services = append(d.Services, s)
    }
    return
}
23
vendor/github.com/asticode/go-astits/data_tot.go
generated
vendored
Normal file
@@ -0,0 +1,23 @@
package astits

import "time"

// TOTData represents a TOT data
// Page: 39 | Chapter: 5.2.6 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type TOTData struct {
    Descriptors []*Descriptor
    UTCTime     time.Time
}

// parseTOTSection parses a TOT section
func parseTOTSection(i []byte, offset *int) (d *TOTData) {
    // Init
    d = &TOTData{}

    // UTC time
    d.UTCTime = parseDVBTime(i, offset)

    // Descriptors
    d.Descriptors = parseDescriptors(i, offset)
    return
}
163
vendor/github.com/asticode/go-astits/demuxer.go
generated
vendored
Normal file
163
vendor/github.com/asticode/go-astits/demuxer.go
generated
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
package astits
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// Sync byte
|
||||
const syncByte = '\x47'
|
||||
|
||||
// Errors
|
||||
var (
|
||||
ErrNoMorePackets = errors.New("astits: no more packets")
|
||||
ErrPacketMustStartWithASyncByte = errors.New("astits: packet must start with a sync byte")
|
||||
)
|
||||
|
||||
// Demuxer represents a demuxer
|
||||
// https://en.wikipedia.org/wiki/MPEG_transport_stream
|
||||
// http://seidl.cs.vsb.cz/download/dvb/DVB_Poster.pdf
|
||||
// http://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.13.01_40/en_300468v011301o.pdf
|
||||
type Demuxer struct {
|
||||
ctx context.Context
|
||||
dataBuffer []*Data
|
||||
optPacketSize int
|
||||
optPacketsParser PacketsParser
|
||||
packetBuffer *packetBuffer
|
||||
packetPool *packetPool
|
||||
programMap programMap
|
||||
r io.Reader
|
||||
}
|
||||
|
||||
// PacketsParser represents an object capable of parsing a set of packets containing a unique payload spanning over those packets
|
||||
// Use the skip returned argument to indicate whether the default process should still be executed on the set of packets
|
||||
type PacketsParser func(ps []*Packet) (ds []*Data, skip bool, err error)
|
||||
|
||||
// New creates a new transport stream based on a reader
func New(ctx context.Context, r io.Reader, opts ...func(*Demuxer)) (d *Demuxer) {
	// Init
	d = &Demuxer{
		ctx:        ctx,
		packetPool: newPacketPool(),
		programMap: newProgramMap(),
		r:          r,
	}

	// Apply options
	for _, opt := range opts {
		opt(d)
	}
	return
}

// OptPacketSize returns the option to set the packet size
func OptPacketSize(packetSize int) func(*Demuxer) {
	return func(d *Demuxer) {
		d.optPacketSize = packetSize
	}
}

// OptPacketsParser returns the option to set the packets parser
func OptPacketsParser(p PacketsParser) func(*Demuxer) {
	return func(d *Demuxer) {
		d.optPacketsParser = p
	}
}

// NextPacket retrieves the next packet
func (dmx *Demuxer) NextPacket() (p *Packet, err error) {
	// Check ctx error
	// TODO Handle ctx error another way since if the read blocks, everything blocks
	// Maybe execute everything in a goroutine and listen the ctx channel in the same for loop
	if err = dmx.ctx.Err(); err != nil {
		return
	}

	// Create packet buffer if not exists
	if dmx.packetBuffer == nil {
		if dmx.packetBuffer, err = newPacketBuffer(dmx.r, dmx.optPacketSize); err != nil {
			err = errors.Wrap(err, "astits: creating packet buffer failed")
			return
		}
	}

	// Fetch next packet from buffer
	if p, err = dmx.packetBuffer.next(); err != nil {
		if err != ErrNoMorePackets {
			err = errors.Wrap(err, "astits: fetching next packet from buffer failed")
		}
		return
	}
	return
}

// NextData retrieves the next data
func (dmx *Demuxer) NextData() (d *Data, err error) {
	// Check data buffer
	if len(dmx.dataBuffer) > 0 {
		d = dmx.dataBuffer[0]
		dmx.dataBuffer = dmx.dataBuffer[1:]
		return
	}

	// Loop through packets
	var p *Packet
	var ps []*Packet
	var ds []*Data
	for {
		// Get next packet
		if p, err = dmx.NextPacket(); err != nil {
			// We don't dump the packet pool since we don't want incomplete data
			if err == ErrNoMorePackets {
				return
			}
			err = errors.Wrap(err, "astits: fetching next packet failed")
			return
		}

		// Add packet to the pool
		if ps = dmx.packetPool.add(p); len(ps) == 0 {
			continue
		}

		// Parse data
		if ds, err = parseData(ps, dmx.optPacketsParser, dmx.programMap); err != nil {
			err = errors.Wrap(err, "astits: building new data failed")
			return
		}

		// Check whether there is data to be processed
		if len(ds) > 0 {
			// Process data
			d = ds[0]
			dmx.dataBuffer = append(dmx.dataBuffer, ds[1:]...)

			// Update program map
			for _, v := range ds {
				if v.PAT != nil {
					for _, pgm := range v.PAT.Programs {
						// Program number 0 is reserved to NIT
						if pgm.ProgramNumber > 0 {
							dmx.programMap.set(pgm.ProgramMapID, pgm.ProgramNumber)
						}
					}
				}
			}
			return
		}
	}
}

// Rewind rewinds the demuxer reader
func (dmx *Demuxer) Rewind() (n int64, err error) {
	dmx.dataBuffer = []*Data{}
	dmx.packetBuffer = nil
	dmx.packetPool = newPacketPool()
	if n, err = rewind(dmx.r); err != nil {
		err = errors.Wrap(err, "astits: rewinding reader failed")
		return
	}
	return
}
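
// Minimal usage sketch (assuming a file-backed, seekable source; the file name
// and the error handling style are illustrative assumptions):
//
//	f, _ := os.Open("stream.ts")
//	defer f.Close()
//	dmx := New(context.Background(), f, OptPacketSize(188))
//	for {
//		d, err := dmx.NextData()
//		if err == ErrNoMorePackets {
//			break
//		} else if err != nil {
//			// handle err
//		}
//		// Inspect the returned *Data here (e.g. d.PAT, as used above).
//	}
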
918
vendor/github.com/asticode/go-astits/descriptor.go
generated
vendored
Normal file
918
vendor/github.com/asticode/go-astits/descriptor.go
generated
vendored
Normal file
@@ -0,0 +1,918 @@
package astits

import (
	"time"

	"github.com/asticode/go-astilog"
)

// Audio types
// Page: 683 | https://books.google.fr/books?id=6dgWB3-rChYC&printsec=frontcover&hl=fr
const (
	AudioTypeCleanEffects             = 0x1
	AudioTypeHearingImpaired          = 0x2
	AudioTypeVisualImpairedCommentary = 0x3
)

// Data stream alignments
// Page: 85 | Chapter: 2.6.11 | Link: http://ecee.colorado.edu/~ecen5653/ecen5653/papers/iso13818-1.pdf
const (
	DataStreamAligmentAudioSyncWord          = 0x1
	DataStreamAligmentVideoSliceOrAccessUnit = 0x1
	DataStreamAligmentVideoAccessUnit        = 0x2
	DataStreamAligmentVideoGOPOrSEQ          = 0x3
	DataStreamAligmentVideoSEQ               = 0x4
)

// Descriptor tags
// Page: 42 | Chapter: 6.1 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
const (
	DescriptorTagAC3                        = 0x6a
	DescriptorTagAVCVideo                   = 0x28
	DescriptorTagComponent                  = 0x50
	DescriptorTagContent                    = 0x54
	DescriptorTagDataStreamAlignment        = 0x6
	DescriptorTagEnhancedAC3                = 0x7a
	DescriptorTagExtendedEvent              = 0x4e
	DescriptorTagExtension                  = 0x7f
	DescriptorTagISO639LanguageAndAudioType = 0xa
	DescriptorTagLocalTimeOffset            = 0x58
	DescriptorTagMaximumBitrate             = 0xe
	DescriptorTagNetworkName                = 0x40
	DescriptorTagParentalRating             = 0x55
	DescriptorTagPrivateDataIndicator       = 0xf
	DescriptorTagPrivateDataSpecifier       = 0x5f
	DescriptorTagRegistration               = 0x5
	DescriptorTagService                    = 0x48
	DescriptorTagShortEvent                 = 0x4d
	DescriptorTagStreamIdentifier           = 0x52
	DescriptorTagSubtitling                 = 0x59
	DescriptorTagTeletext                   = 0x56
	DescriptorTagVBIData                    = 0x45
	DescriptorTagVBITeletext                = 0x46
)

// Descriptor extension tags
// Page: 111 | Chapter: 6.1 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
const (
	DescriptorTagExtensionSupplementaryAudio = 0x6
)

// Service types
// Page: 97 | Chapter: 6.2.33 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
// https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf / page 97
const (
	ServiceTypeDigitalTelevisionService = 0x1
)

// Teletext types
// Page: 106 | Chapter: 6.2.43 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
const (
	TeletextTypeAdditionalInformationPage                    = 0x3
	TeletextTypeInitialTeletextPage                          = 0x1
	TeletextTypeProgramSchedulePage                          = 0x4
	TeletextTypeTeletextSubtitlePage                         = 0x2
	TeletextTypeTeletextSubtitlePageForHearingImpairedPeople = 0x5
)

// VBI data service id
// Page: 109 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
const (
	VBIDataServiceIDClosedCaptioning     = 0x6
	VBIDataServiceIDEBUTeletext          = 0x1
	VBIDataServiceIDInvertedTeletext     = 0x2
	VBIDataServiceIDMonochrome442Samples = 0x7
	VBIDataServiceIDVPS                  = 0x4
	VBIDataServiceIDWSS                  = 0x5
)

// Descriptor represents a descriptor
// TODO Handle UTF8
type Descriptor struct {
	AC3                        *DescriptorAC3
	AVCVideo                   *DescriptorAVCVideo
	Component                  *DescriptorComponent
	Content                    *DescriptorContent
	DataStreamAlignment        *DescriptorDataStreamAlignment
	EnhancedAC3                *DescriptorEnhancedAC3
	ExtendedEvent              *DescriptorExtendedEvent
	Extension                  *DescriptorExtension
	ISO639LanguageAndAudioType *DescriptorISO639LanguageAndAudioType
	Length                     uint8
	LocalTimeOffset            *DescriptorLocalTimeOffset
	MaximumBitrate             *DescriptorMaximumBitrate
	NetworkName                *DescriptorNetworkName
	ParentalRating             *DescriptorParentalRating
	PrivateDataIndicator       *DescriptorPrivateDataIndicator
	PrivateDataSpecifier       *DescriptorPrivateDataSpecifier
	Registration               *DescriptorRegistration
	Service                    *DescriptorService
	ShortEvent                 *DescriptorShortEvent
	StreamIdentifier           *DescriptorStreamIdentifier
	Subtitling                 *DescriptorSubtitling
	Tag                        uint8 // the tag defines the structure of the contained data following the descriptor length.
	Teletext                   *DescriptorTeletext
	UserDefined                []byte
	VBIData                    *DescriptorVBIData
	VBITeletext                *DescriptorTeletext
}

// DescriptorAC3 represents an AC3 descriptor
// Page: 165 | https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorAC3 struct {
	AdditionalInfo   []byte
	ASVC             uint8
	BSID             uint8
	ComponentType    uint8
	HasASVC          bool
	HasBSID          bool
	HasComponentType bool
	HasMainID        bool
	MainID           uint8
}

func newDescriptorAC3(i []byte) (d *DescriptorAC3) {
	var offset int
	d = &DescriptorAC3{}
	d.HasComponentType = uint8(i[offset]&0x80) > 0
	d.HasBSID = uint8(i[offset]&0x40) > 0
	d.HasMainID = uint8(i[offset]&0x20) > 0
	d.HasASVC = uint8(i[offset]&0x10) > 0
	offset += 1
	if d.HasComponentType {
		d.ComponentType = uint8(i[offset])
		offset += 1
	}
	if d.HasBSID {
		d.BSID = uint8(i[offset])
		offset += 1
	}
	if d.HasMainID {
		d.MainID = uint8(i[offset])
		offset += 1
	}
	if d.HasASVC {
		d.ASVC = uint8(i[offset])
		offset += 1
	}
	for offset < len(i) {
		d.AdditionalInfo = append(d.AdditionalInfo, i[offset])
		offset += 1
	}
	return
}

// DescriptorAVCVideo represents an AVC video descriptor
// No doc found unfortunately, basing the implementation on https://github.com/gfto/bitstream/blob/master/mpeg/psi/desc_28.h
type DescriptorAVCVideo struct {
	AVC24HourPictureFlag bool
	AVCStillPresent      bool
	CompatibleFlags      uint8
	ConstraintSet0Flag   bool
	ConstraintSet1Flag   bool
	ConstraintSet2Flag   bool
	LevelIDC             uint8
	ProfileIDC           uint8
}

func newDescriptorAVCVideo(i []byte) (d *DescriptorAVCVideo) {
	// Init
	d = &DescriptorAVCVideo{}
	var offset int

	// Profile idc
	d.ProfileIDC = uint8(i[offset])
	offset += 1

	// Flags
	d.ConstraintSet0Flag = i[offset]&0x80 > 0
	d.ConstraintSet1Flag = i[offset]&0x40 > 0
	d.ConstraintSet2Flag = i[offset]&0x20 > 0
	d.CompatibleFlags = i[offset] & 0x1f
	offset += 1

	// Level idc
	d.LevelIDC = uint8(i[offset])
	offset += 1

	// AVC still present
	d.AVCStillPresent = i[offset]&0x80 > 0

	// AVC 24 hour picture flag
	d.AVC24HourPictureFlag = i[offset]&0x40 > 0
	return
}

// DescriptorComponent represents a component descriptor
// Page: 51 | https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorComponent struct {
	ComponentTag       uint8
	ComponentType      uint8
	ISO639LanguageCode []byte
	StreamContent      uint8
	StreamContentExt   uint8
	Text               []byte
}

func newDescriptorComponent(i []byte) (d *DescriptorComponent) {
	// Init
	d = &DescriptorComponent{}
	var offset int

	// Stream content ext
	d.StreamContentExt = uint8(i[offset] >> 4)

	// Stream content
	d.StreamContent = uint8(i[offset] & 0xf)
	offset += 1

	// Component type
	d.ComponentType = uint8(i[offset])
	offset += 1

	// Component tag
	d.ComponentTag = uint8(i[offset])
	offset += 1

	// ISO639 language code
	d.ISO639LanguageCode = i[offset : offset+3]
	offset += 3

	// Text
	for offset < len(i) {
		d.Text = append(d.Text, i[offset])
		offset += 1
	}
	return
}

// DescriptorContent represents a content descriptor
// Page: 58 | https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorContent struct {
	Items []*DescriptorContentItem
}

// DescriptorContentItem represents a content item descriptor
// Check page 59 of https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf for content nibble
// levels associations
type DescriptorContentItem struct {
	ContentNibbleLevel1 uint8
	ContentNibbleLevel2 uint8
	UserByte            uint8
}

func newDescriptorContent(i []byte) (d *DescriptorContent) {
	// Init
	d = &DescriptorContent{}
	var offset int

	// Add items
	for offset < len(i) {
		d.Items = append(d.Items, &DescriptorContentItem{
			ContentNibbleLevel1: uint8(i[offset] >> 4),
			ContentNibbleLevel2: uint8(i[offset] & 0xf),
			UserByte:            uint8(i[offset+1]),
		})
		offset += 2
	}
	return
}

// DescriptorDataStreamAlignment represents a data stream alignment descriptor
type DescriptorDataStreamAlignment struct {
	Type uint8
}

func newDescriptorDataStreamAlignment(i []byte) *DescriptorDataStreamAlignment {
	return &DescriptorDataStreamAlignment{Type: uint8(i[0])}
}

// DescriptorEnhancedAC3 represents an enhanced AC3 descriptor
// Page: 166 | https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorEnhancedAC3 struct {
	AdditionalInfo   []byte
	ASVC             uint8
	BSID             uint8
	ComponentType    uint8
	HasASVC          bool
	HasBSID          bool
	HasComponentType bool
	HasMainID        bool
	HasSubStream1    bool
	HasSubStream2    bool
	HasSubStream3    bool
	MainID           uint8
	MixInfoExists    bool
	SubStream1       uint8
	SubStream2       uint8
	SubStream3       uint8
}

func newDescriptorEnhancedAC3(i []byte) (d *DescriptorEnhancedAC3) {
	var offset int
	d = &DescriptorEnhancedAC3{}
	d.HasComponentType = uint8(i[offset]&0x80) > 0
	d.HasBSID = uint8(i[offset]&0x40) > 0
	d.HasMainID = uint8(i[offset]&0x20) > 0
	d.HasASVC = uint8(i[offset]&0x10) > 0
	d.MixInfoExists = uint8(i[offset]&0x8) > 0
	d.HasSubStream1 = uint8(i[offset]&0x4) > 0
	d.HasSubStream2 = uint8(i[offset]&0x2) > 0
	d.HasSubStream3 = uint8(i[offset]&0x1) > 0
	offset += 1
	if d.HasComponentType {
		d.ComponentType = uint8(i[offset])
		offset += 1
	}
	if d.HasBSID {
		d.BSID = uint8(i[offset])
		offset += 1
	}
	if d.HasMainID {
		d.MainID = uint8(i[offset])
		offset += 1
	}
	if d.HasASVC {
		d.ASVC = uint8(i[offset])
		offset += 1
	}
	if d.HasSubStream1 {
		d.SubStream1 = uint8(i[offset])
		offset += 1
	}
	if d.HasSubStream2 {
		d.SubStream2 = uint8(i[offset])
		offset += 1
	}
	if d.HasSubStream3 {
		d.SubStream3 = uint8(i[offset])
		offset += 1
	}
	for offset < len(i) {
		d.AdditionalInfo = append(d.AdditionalInfo, i[offset])
		offset += 1
	}
	return
}

// DescriptorExtendedEvent represents an extended event descriptor
type DescriptorExtendedEvent struct {
	ISO639LanguageCode   []byte
	Items                []*DescriptorExtendedEventItem
	LastDescriptorNumber uint8
	Number               uint8
	Text                 []byte
}

// DescriptorExtendedEventItem represents an extended event item descriptor
type DescriptorExtendedEventItem struct {
	Content     []byte
	Description []byte
}

func newDescriptorExtendedEvent(i []byte) (d *DescriptorExtendedEvent) {
	// Init
	d = &DescriptorExtendedEvent{}
	var offset int

	// Number
	d.Number = uint8(i[offset] >> 4)

	// Last descriptor number
	d.LastDescriptorNumber = uint8(i[offset] & 0xf)
	offset += 1

	// ISO 639 language code
	d.ISO639LanguageCode = i[offset : offset+3]
	offset += 3

	// Items length
	var itemsLength = int(i[offset])
	offset += 1

	// Items
	var offsetEnd = offset + itemsLength
	for offset < offsetEnd {
		d.Items = append(d.Items, newDescriptorExtendedEventItem(i, &offset))
	}

	// Text length
	var textLength = int(i[offset])
	offset += 1

	// Text
	offsetEnd = offset + textLength
	for offset < offsetEnd {
		d.Text = append(d.Text, i[offset])
		offset += 1
	}
	return
}

func newDescriptorExtendedEventItem(i []byte, offset *int) (d *DescriptorExtendedEventItem) {
	// Init
	d = &DescriptorExtendedEventItem{}

	// Description length
	var descriptionLength = int(i[*offset])
	*offset += 1

	// Description
	var offsetEnd = *offset + descriptionLength
	for *offset < offsetEnd {
		d.Description = append(d.Description, i[*offset])
		*offset += 1
	}

	// Content length
	var contentLength = int(i[*offset])
	*offset += 1

	// Content
	offsetEnd = *offset + contentLength
	for *offset < offsetEnd {
		d.Content = append(d.Content, i[*offset])
		*offset += 1
	}
	return
}

// DescriptorExtension represents an extension descriptor
// Page: 72 | https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorExtension struct {
	SupplementaryAudio *DescriptorExtensionSupplementaryAudio
	Tag                uint8
}

func newDescriptorExtension(i []byte) (d *DescriptorExtension) {
	// Init
	d = &DescriptorExtension{Tag: uint8(i[0])}

	// Switch on tag
	var b = i[1:]
	switch d.Tag {
	case DescriptorTagExtensionSupplementaryAudio:
		d.SupplementaryAudio = newDescriptorExtensionSupplementaryAudio(b)
	default:
		// TODO Remove this log
		astilog.Debugf("astits: unlisted extension tag 0x%x", d.Tag)
	}
	return
}

// DescriptorExtensionSupplementaryAudio represents a supplementary audio extension descriptor
// Page: 130 | https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorExtensionSupplementaryAudio struct {
	EditorialClassification uint8
	HasLanguageCode         bool
	LanguageCode            []byte
	MixType                 bool
	PrivateData             []byte
}

func newDescriptorExtensionSupplementaryAudio(i []byte) (d *DescriptorExtensionSupplementaryAudio) {
	// Init
	d = &DescriptorExtensionSupplementaryAudio{}
	var offset int

	// Mix type
	d.MixType = i[offset]&0x80 > 0

	// Editorial classification
	d.EditorialClassification = uint8(i[offset] >> 2 & 0x1f)

	// Language code flag
	d.HasLanguageCode = i[offset]&0x1 > 0
	offset += 1

	// Language code
	if d.HasLanguageCode {
		d.LanguageCode = i[offset : offset+3]
		offset += 3
	}

	// Private data
	for offset < len(i) {
		d.PrivateData = append(d.PrivateData, i[offset])
		offset += 1
	}
	return
}

// DescriptorISO639LanguageAndAudioType represents an ISO639 language descriptor
type DescriptorISO639LanguageAndAudioType struct {
	Language []byte
	Type     uint8
}

func newDescriptorISO639LanguageAndAudioType(i []byte) *DescriptorISO639LanguageAndAudioType {
	return &DescriptorISO639LanguageAndAudioType{
		Language: i[0:3],
		Type:     uint8(i[3]),
	}
}

// DescriptorLocalTimeOffset represents a local time offset descriptor
// Page: 84 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorLocalTimeOffset struct {
	Items []*DescriptorLocalTimeOffsetItem
}

// DescriptorLocalTimeOffsetItem represents a local time offset item descriptor
type DescriptorLocalTimeOffsetItem struct {
	CountryCode             []byte
	CountryRegionID         uint8
	LocalTimeOffset         time.Duration
	LocalTimeOffsetPolarity bool
	NextTimeOffset          time.Duration
	TimeOfChange            time.Time
}

func newDescriptorLocalTimeOffset(i []byte) (d *DescriptorLocalTimeOffset) {
	// Init
	d = &DescriptorLocalTimeOffset{}
	var offset int

	// Add items
	for offset < len(i) {
		// Init
		var itm = &DescriptorLocalTimeOffsetItem{}
		d.Items = append(d.Items, itm)

		// Country code
		itm.CountryCode = i[offset : offset+3]
		offset += 3

		// Country region ID
		itm.CountryRegionID = uint8(i[offset] >> 2)

		// Local time offset polarity
		itm.LocalTimeOffsetPolarity = i[offset]&0x1 > 0
		offset += 1

		// Local time offset
		itm.LocalTimeOffset = parseDVBDurationMinutes(i, &offset)

		// Time of change
		itm.TimeOfChange = parseDVBTime(i, &offset)

		// Next time offset
		itm.NextTimeOffset = parseDVBDurationMinutes(i, &offset)
	}
	return
}

// DescriptorMaximumBitrate represents a maximum bitrate descriptor
type DescriptorMaximumBitrate struct {
	Bitrate uint32 // In bytes/second
}

func newDescriptorMaximumBitrate(i []byte) *DescriptorMaximumBitrate {
	return &DescriptorMaximumBitrate{Bitrate: (uint32(i[0]&0x3f)<<16 | uint32(i[1])<<8 | uint32(i[2])) * 50}
}
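
// Worked example: raw bytes 0xC0 0x00 0x10 give a 22-bit value of 0x10 (16),
// so Bitrate = 16 * 50 = 800 bytes/second (the top two bits of i[0] are reserved).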

// DescriptorNetworkName represents a network name descriptor
// Page: 93 | Chapter: 6.2.27 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorNetworkName struct {
	Name []byte
}

func newDescriptorNetworkName(i []byte) *DescriptorNetworkName {
	return &DescriptorNetworkName{Name: i}
}

// DescriptorParentalRating represents a parental rating descriptor
// Page: 93 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorParentalRating struct {
	Items []*DescriptorParentalRatingItem
}

// DescriptorParentalRatingItem represents a parental rating item descriptor
type DescriptorParentalRatingItem struct {
	CountryCode []byte
	Rating      uint8
}

// MinimumAge returns the minimum age for the parental rating
func (d DescriptorParentalRatingItem) MinimumAge() int {
	// Undefined or user defined ratings
	if d.Rating == 0 || d.Rating > 0x10 {
		return 0
	}
	return int(d.Rating) + 3
}
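
// Worked example: Rating 0x05 yields a minimum age of 5 + 3 = 8 years; 0x00 and
// values above 0x10 are undefined or user defined and map to 0.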

func newDescriptorParentalRating(i []byte) (d *DescriptorParentalRating) {
	// Init
	d = &DescriptorParentalRating{}
	var offset int

	// Add items
	for offset < len(i) {
		d.Items = append(d.Items, &DescriptorParentalRatingItem{
			CountryCode: i[offset : offset+3],
			Rating:      uint8(i[offset+3]),
		})
		offset += 4
	}
	return
}

// DescriptorPrivateDataIndicator represents a private data indicator descriptor
type DescriptorPrivateDataIndicator struct {
	Indicator uint32
}

func newDescriptorPrivateDataIndicator(i []byte) *DescriptorPrivateDataIndicator {
	return &DescriptorPrivateDataIndicator{Indicator: uint32(i[0])<<24 | uint32(i[1])<<16 | uint32(i[2])<<8 | uint32(i[3])}
}

// DescriptorPrivateDataSpecifier represents a private data specifier descriptor
type DescriptorPrivateDataSpecifier struct {
	Specifier uint32
}

func newDescriptorPrivateDataSpecifier(i []byte) *DescriptorPrivateDataSpecifier {
	return &DescriptorPrivateDataSpecifier{Specifier: uint32(i[0])<<24 | uint32(i[1])<<16 | uint32(i[2])<<8 | uint32(i[3])}
}

// DescriptorRegistration represents a registration descriptor
// Page: 84 | http://ecee.colorado.edu/~ecen5653/ecen5653/papers/iso13818-1.pdf
type DescriptorRegistration struct {
	AdditionalIdentificationInfo []byte
	FormatIdentifier             uint32
}

func newDescriptorRegistration(i []byte) (d *DescriptorRegistration) {
	d = &DescriptorRegistration{}
	d.FormatIdentifier = uint32(i[0])<<24 | uint32(i[1])<<16 | uint32(i[2])<<8 | uint32(i[3])
	var offset = 4
	for offset < len(i) {
		d.AdditionalIdentificationInfo = append(d.AdditionalIdentificationInfo, i[offset])
		offset += 1
	}
	return
}

// DescriptorService represents a service descriptor
// Page: 96 | Chapter: 6.2.33 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorService struct {
	Name     []byte
	Provider []byte
	Type     uint8
}

func newDescriptorService(i []byte) (d *DescriptorService) {
	var offset int
	d = &DescriptorService{Type: uint8(i[offset])}
	offset += 1
	var providerLength = int(i[offset])
	offset += 1
	d.Provider = i[offset : offset+providerLength]
	offset += providerLength
	var nameLength = int(i[offset])
	offset += 1
	d.Name = i[offset : offset+nameLength]
	return
}

// DescriptorShortEvent represents a short event descriptor
// Page: 99 | Chapter: 6.2.37 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorShortEvent struct {
	EventName []byte
	Language  []byte
	Text      []byte
}

func newDescriptorShortEvent(i []byte) (d *DescriptorShortEvent) {
	var offset int
	d = &DescriptorShortEvent{}
	d.Language = i[:3]
	offset += 3
	var length = int(i[offset])
	offset += 1
	d.EventName = i[offset : offset+length]
	offset += length
	length = int(i[offset])
	offset += 1
	d.Text = i[offset : offset+length]
	return
}

// DescriptorStreamIdentifier represents a stream identifier descriptor
// Page: 102 | Chapter: 6.2.39 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorStreamIdentifier struct{ ComponentTag uint8 }

func newDescriptorStreamIdentifier(i []byte) *DescriptorStreamIdentifier {
	return &DescriptorStreamIdentifier{ComponentTag: uint8(i[0])}
}

// DescriptorSubtitling represents a subtitling descriptor
// Page: 103 | Chapter: 6.2.41 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorSubtitling struct {
	Items []*DescriptorSubtitlingItem
}

// DescriptorSubtitlingItem represents a subtitling descriptor item
type DescriptorSubtitlingItem struct {
	AncillaryPageID   uint16
	CompositionPageID uint16
	Language          []byte
	Type              uint8
}

func newDescriptorSubtitling(i []byte) (d *DescriptorSubtitling) {
	d = &DescriptorSubtitling{}
	var offset int
	for offset < len(i) {
		itm := &DescriptorSubtitlingItem{}
		itm.Language = i[offset : offset+3]
		offset += 3
		itm.Type = uint8(i[offset])
		offset += 1
		itm.CompositionPageID = uint16(i[offset])<<8 | uint16(i[offset+1])
		offset += 2
		itm.AncillaryPageID = uint16(i[offset])<<8 | uint16(i[offset+1])
		offset += 2
		d.Items = append(d.Items, itm)
	}
	return
}

// DescriptorTeletext represents a teletext descriptor
// Page: 105 | Chapter: 6.2.43 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorTeletext struct {
	Items []*DescriptorTeletextItem
}

// DescriptorTeletextItem represents a teletext descriptor item
type DescriptorTeletextItem struct {
	Language []byte
	Magazine uint8
	Page     uint8
	Type     uint8
}

func newDescriptorTeletext(i []byte) (d *DescriptorTeletext) {
	var offset int
	d = &DescriptorTeletext{}
	for offset < len(i) {
		itm := &DescriptorTeletextItem{}
		itm.Language = i[offset : offset+3]
		offset += 3
		itm.Type = uint8(i[offset]) >> 3
		itm.Magazine = uint8(i[offset] & 0x7)
		offset += 1
		itm.Page = uint8(i[offset])>>4*10 + uint8(i[offset]&0xf)
		offset += 1
		d.Items = append(d.Items, itm)
	}
	return
}
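
// Worked example: with the raw page byte 0x21, Page = 2*10 + 1 = 21, i.e. the
// two nibbles are read as binary coded decimal digits.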

// DescriptorVBIData represents a VBI data descriptor
// Page: 108 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
type DescriptorVBIData struct {
	Services []*DescriptorVBIDataService
}

// DescriptorVBIDataService represents a vbi data service descriptor
type DescriptorVBIDataService struct {
	DataServiceID uint8
	Descriptors   []*DescriptorVBIDataDescriptor
}

// DescriptorVBIDataDescriptor represents a vbi data descriptor item
type DescriptorVBIDataDescriptor struct {
	FieldParity bool
	LineOffset  uint8
}

func newDescriptorVBIData(i []byte) (d *DescriptorVBIData) {
	// Init
	d = &DescriptorVBIData{}
	var offset int

	// Items
	for offset < len(i) {
		// Init
		var srv = &DescriptorVBIDataService{}

		// Data service ID
		srv.DataServiceID = uint8(i[offset])
		offset += 1

		// Data service descriptor length
		var dataServiceDescriptorLength = int(i[offset])
		offset += 1

		// Data service descriptor
		var offsetEnd = offset + dataServiceDescriptorLength
		for offset < offsetEnd {
			if srv.DataServiceID == VBIDataServiceIDClosedCaptioning ||
				srv.DataServiceID == VBIDataServiceIDEBUTeletext ||
				srv.DataServiceID == VBIDataServiceIDInvertedTeletext ||
				srv.DataServiceID == VBIDataServiceIDMonochrome442Samples ||
				srv.DataServiceID == VBIDataServiceIDVPS ||
				srv.DataServiceID == VBIDataServiceIDWSS {
				srv.Descriptors = append(srv.Descriptors, &DescriptorVBIDataDescriptor{
					FieldParity: i[offset]&0x20 > 0,
					LineOffset:  uint8(i[offset] & 0x1f),
				})
				offset += 1
			}
		}

		// Append service
		d.Services = append(d.Services, srv)
	}
	return
}

// parseDescriptors parses descriptors
func parseDescriptors(i []byte, offset *int) (o []*Descriptor) {
	// Get length
	var length = int(uint16(i[*offset]&0xf)<<8 | uint16(i[*offset+1]))
	*offset += 2

	// Loop
	if length > 0 {
		length += *offset
		for *offset < length {
			// Init
			var d = &Descriptor{
				Length: uint8(i[*offset+1]),
				Tag:    uint8(i[*offset]),
			}
			*offset += 2

			// Parse data
			if d.Length > 0 {
				// Get descriptor content
				var b = i[*offset : *offset+int(d.Length)]

				// User defined
				if d.Tag >= 0x80 && d.Tag <= 0xfe {
					d.UserDefined = make([]byte, len(b))
					copy(d.UserDefined, b)
				} else {
					// Switch on tag
					switch d.Tag {
					case DescriptorTagAC3:
						d.AC3 = newDescriptorAC3(b)
					case DescriptorTagAVCVideo:
						d.AVCVideo = newDescriptorAVCVideo(b)
					case DescriptorTagComponent:
						d.Component = newDescriptorComponent(b)
					case DescriptorTagContent:
						d.Content = newDescriptorContent(b)
					case DescriptorTagDataStreamAlignment:
						d.DataStreamAlignment = newDescriptorDataStreamAlignment(b)
					case DescriptorTagEnhancedAC3:
						d.EnhancedAC3 = newDescriptorEnhancedAC3(b)
					case DescriptorTagExtendedEvent:
						d.ExtendedEvent = newDescriptorExtendedEvent(b)
					case DescriptorTagExtension:
						d.Extension = newDescriptorExtension(b)
					case DescriptorTagISO639LanguageAndAudioType:
						d.ISO639LanguageAndAudioType = newDescriptorISO639LanguageAndAudioType(b)
					case DescriptorTagLocalTimeOffset:
						d.LocalTimeOffset = newDescriptorLocalTimeOffset(b)
					case DescriptorTagMaximumBitrate:
						d.MaximumBitrate = newDescriptorMaximumBitrate(b)
					case DescriptorTagNetworkName:
						d.NetworkName = newDescriptorNetworkName(b)
					case DescriptorTagParentalRating:
						d.ParentalRating = newDescriptorParentalRating(b)
					case DescriptorTagPrivateDataIndicator:
						d.PrivateDataIndicator = newDescriptorPrivateDataIndicator(b)
					case DescriptorTagPrivateDataSpecifier:
						d.PrivateDataSpecifier = newDescriptorPrivateDataSpecifier(b)
					case DescriptorTagRegistration:
						d.Registration = newDescriptorRegistration(b)
					case DescriptorTagService:
						d.Service = newDescriptorService(b)
					case DescriptorTagShortEvent:
						d.ShortEvent = newDescriptorShortEvent(b)
					case DescriptorTagStreamIdentifier:
						d.StreamIdentifier = newDescriptorStreamIdentifier(b)
					case DescriptorTagSubtitling:
						d.Subtitling = newDescriptorSubtitling(b)
					case DescriptorTagTeletext:
						d.Teletext = newDescriptorTeletext(b)
					case DescriptorTagVBIData:
						d.VBIData = newDescriptorVBIData(b)
					case DescriptorTagVBITeletext:
						d.VBITeletext = newDescriptorTeletext(b)
					default:
						// TODO Remove this log
						astilog.Debugf("astits: unlisted descriptor tag 0x%x", d.Tag)
					}
				}
				*offset += int(d.Length)
			}
			o = append(o, d)
		}
	}
	return
}
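
// Worked example for the descriptors length read at the top of parseDescriptors:
// bytes 0xF0 0x0A give length = ((0xF0 & 0xf) << 8) | 0x0A = 10, so the loop
// parses descriptors out of the following 10 bytes.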
53
vendor/github.com/asticode/go-astits/dvb.go
generated
vendored
Normal file
53
vendor/github.com/asticode/go-astits/dvb.go
generated
vendored
Normal file
@@ -0,0 +1,53 @@
package astits

import (
	"fmt"
	"time"
)

// parseDVBTime parses a DVB time
// This field is coded as 16 bits giving the 16 LSBs of MJD followed by 24 bits coded as 6 digits in 4-bit Binary
// Coded Decimal (BCD). If the start time is undefined (e.g. for an event in a NVOD reference service) all bits of the
// field are set to "1".
// I apologize for the computation which is really messy but details are given in the documentation
// Page: 160 | Annex C | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
func parseDVBTime(i []byte, offset *int) (t time.Time) {
	// Date
	var mjd = uint16(i[*offset])<<8 | uint16(i[*offset+1])
	var yt = int((float64(mjd) - 15078.2) / 365.25)
	var mt = int((float64(mjd) - 14956.1 - float64(int(float64(yt)*365.25))) / 30.6001)
	var d = int(float64(mjd) - 14956 - float64(int(float64(yt)*365.25)) - float64(int(float64(mt)*30.6001)))
	var k int
	if mt == 14 || mt == 15 {
		k = 1
	}
	var y = yt + k
	var m = mt - 1 - k*12
	t, _ = time.Parse("06-01-02", fmt.Sprintf("%d-%d-%d", y, m, d))
	*offset += 2

	// Time
	t = t.Add(parseDVBDurationSeconds(i, offset))
	return
}

// parseDVBDurationMinutes parses a minutes duration
// 16 bit field containing the duration of the event in hours, minutes. format: 4 digits, 4-bit BCD = 16 bit
func parseDVBDurationMinutes(i []byte, offset *int) (d time.Duration) {
	d = parseDVBDurationByte(i[*offset])*time.Hour + parseDVBDurationByte(i[*offset+1])*time.Minute
	*offset += 2
	return
}

// parseDVBDurationSeconds parses a seconds duration
// 24 bit field containing the duration of the event in hours, minutes, seconds. format: 6 digits, 4-bit BCD = 24 bit
func parseDVBDurationSeconds(i []byte, offset *int) (d time.Duration) {
	d = parseDVBDurationByte(i[*offset])*time.Hour + parseDVBDurationByte(i[*offset+1])*time.Minute + parseDVBDurationByte(i[*offset+2])*time.Second
	*offset += 3
	return
}

// parseDVBDurationByte parses a duration byte
func parseDVBDurationByte(i byte) time.Duration {
	return time.Duration(uint8(i)>>4*10 + uint8(i)&0xf)
}
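
// Worked example: the three BCD bytes 0x01 0x45 0x30 decode to 1 hour,
// 45 minutes and 30 seconds in parseDVBDurationSeconds (each byte is
// tens-nibble*10 + units-nibble).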
207
vendor/github.com/asticode/go-astits/packet.go
generated
vendored
Normal file
207
vendor/github.com/asticode/go-astits/packet.go
generated
vendored
Normal file
@@ -0,0 +1,207 @@
package astits

// Scrambling Controls
const (
	ScramblingControlNotScrambled         = 0
	ScramblingControlReservedForFutureUse = 1
	ScramblingControlScrambledWithEvenKey = 2
	ScramblingControlScrambledWithOddKey  = 3
)

// Packet represents a packet
// https://en.wikipedia.org/wiki/MPEG_transport_stream
type Packet struct {
	AdaptationField *PacketAdaptationField
	Bytes           []byte // This is the whole packet content
	Header          *PacketHeader
	Payload         []byte // This is only the payload content
}

// PacketHeader represents a packet header
type PacketHeader struct {
	ContinuityCounter          uint8 // Sequence number of payload packets (0x00 to 0x0F) within each stream (except PID 8191)
	HasAdaptationField         bool
	HasPayload                 bool
	PayloadUnitStartIndicator  bool   // Set when a PES, PSI, or DVB-MIP packet begins immediately following the header.
	PID                        uint16 // Packet Identifier, describing the payload data.
	TransportErrorIndicator    bool   // Set when a demodulator can't correct errors from FEC data; indicating the packet is corrupt.
	TransportPriority          bool   // Set when the current packet has a higher priority than other packets with the same PID.
	TransportScramblingControl uint8
}

// PacketAdaptationField represents a packet adaptation field
type PacketAdaptationField struct {
	AdaptationExtensionField          *PacketAdaptationExtensionField
	DiscontinuityIndicator            bool // Set if current TS packet is in a discontinuity state with respect to either the continuity counter or the program clock reference
	ElementaryStreamPriorityIndicator bool // Set when this stream should be considered "high priority"
	HasAdaptationExtensionField       bool
	HasOPCR                           bool
	HasPCR                            bool
	HasTransportPrivateData           bool
	HasSplicingCountdown              bool
	Length                            int
	OPCR                              *ClockReference // Original Program clock reference. Helps when one TS is copied into another
	PCR                               *ClockReference // Program clock reference
	RandomAccessIndicator             bool            // Set when the stream may be decoded without errors from this point
	SpliceCountdown                   int             // Indicates how many TS packets from this one a splicing point occurs (Two's complement signed; may be negative)
	TransportPrivateDataLength        int
	TransportPrivateData              []byte
}

// PacketAdaptationExtensionField represents a packet adaptation extension field
type PacketAdaptationExtensionField struct {
	DTSNextAccessUnit      *ClockReference // The PES DTS of the splice point. Split up as 3 bits, 1 marker bit (0x1), 15 bits, 1 marker bit, 15 bits, and 1 marker bit, for 33 data bits total.
	HasLegalTimeWindow     bool
	HasPiecewiseRate       bool
	HasSeamlessSplice      bool
	LegalTimeWindowIsValid bool
	LegalTimeWindowOffset  uint16 // Extra information for rebroadcasters to determine the state of buffers when packets may be missing.
	Length                 int
	PiecewiseRate          uint32 // The rate of the stream, measured in 188-byte packets, to define the end-time of the LTW.
	SpliceType             uint8  // Indicates the parameters of the H.262 splice.
}

// parsePacket parses a packet
func parsePacket(i []byte) (p *Packet, err error) {
	// Packet must start with a sync byte
	if i[0] != syncByte {
		err = ErrPacketMustStartWithASyncByte
		return
	}

	// Init
	p = &Packet{Bytes: i}

	// In case the packet size is bigger than 188 bytes, we don't care about the extra leading bytes
	i = i[len(i)-188+1:]

	// Parse header
	p.Header = parsePacketHeader(i)

	// Parse adaptation field
	if p.Header.HasAdaptationField {
		p.AdaptationField = parsePacketAdaptationField(i[3:])
	}

	// Build payload
	if p.Header.HasPayload {
		p.Payload = i[payloadOffset(p.Header, p.AdaptationField):]
	}
	return
}

// payloadOffset returns the payload offset
func payloadOffset(h *PacketHeader, a *PacketAdaptationField) (offset int) {
	offset = 3
	if h.HasAdaptationField {
		offset += 1 + a.Length
	}
	return
}

// parsePacketHeader parses the packet header
func parsePacketHeader(i []byte) *PacketHeader {
	return &PacketHeader{
		ContinuityCounter:          uint8(i[2] & 0xf),
		HasAdaptationField:         i[2]&0x20 > 0,
		HasPayload:                 i[2]&0x10 > 0,
		PayloadUnitStartIndicator:  i[0]&0x40 > 0,
		PID:                        uint16(i[0]&0x1f)<<8 | uint16(i[1]),
		TransportErrorIndicator:    i[0]&0x80 > 0,
		TransportPriority:          i[0]&0x20 > 0,
		TransportScramblingControl: uint8(i[2]) >> 6 & 0x3,
	}
}

// parsePacketAdaptationField parses the packet adaptation field
func parsePacketAdaptationField(i []byte) (a *PacketAdaptationField) {
	// Init
	a = &PacketAdaptationField{}
	var offset int

	// Length
	a.Length = int(i[offset])
	offset += 1

	// Valid length
	if a.Length > 0 {
		// Flags
		a.DiscontinuityIndicator = i[offset]&0x80 > 0
		a.RandomAccessIndicator = i[offset]&0x40 > 0
		a.ElementaryStreamPriorityIndicator = i[offset]&0x20 > 0
		a.HasPCR = i[offset]&0x10 > 0
		a.HasOPCR = i[offset]&0x08 > 0
		a.HasSplicingCountdown = i[offset]&0x04 > 0
		a.HasTransportPrivateData = i[offset]&0x02 > 0
		a.HasAdaptationExtensionField = i[offset]&0x01 > 0
		offset += 1

		// PCR
		if a.HasPCR {
			a.PCR = parsePCR(i[offset:])
			offset += 6
		}

		// OPCR
		if a.HasOPCR {
			a.OPCR = parsePCR(i[offset:])
			offset += 6
		}

		// Splicing countdown
		if a.HasSplicingCountdown {
			a.SpliceCountdown = int(i[offset])
			offset += 1
		}

		// Transport private data
		if a.HasTransportPrivateData {
			a.TransportPrivateDataLength = int(i[offset])
			offset += 1
			if a.TransportPrivateDataLength > 0 {
				a.TransportPrivateData = i[offset : offset+a.TransportPrivateDataLength]
				offset += a.TransportPrivateDataLength
			}
		}

		// Adaptation extension
		if a.HasAdaptationExtensionField {
			a.AdaptationExtensionField = &PacketAdaptationExtensionField{Length: int(i[offset])}
			offset += 1
			if a.AdaptationExtensionField.Length > 0 {
				// Basic
				a.AdaptationExtensionField.HasLegalTimeWindow = i[offset]&0x80 > 0
				a.AdaptationExtensionField.HasPiecewiseRate = i[offset]&0x40 > 0
				a.AdaptationExtensionField.HasSeamlessSplice = i[offset]&0x20 > 0
				offset += 1

				// Legal time window
				if a.AdaptationExtensionField.HasLegalTimeWindow {
					a.AdaptationExtensionField.LegalTimeWindowIsValid = i[offset]&0x80 > 0
					a.AdaptationExtensionField.LegalTimeWindowOffset = uint16(i[offset]&0x7f)<<8 | uint16(i[offset+1])
					offset += 2
				}

				// Piecewise rate
				if a.AdaptationExtensionField.HasPiecewiseRate {
					a.AdaptationExtensionField.PiecewiseRate = uint32(i[offset]&0x3f)<<16 | uint32(i[offset+1])<<8 | uint32(i[offset+2])
					offset += 3
				}

				// Seamless splice
				if a.AdaptationExtensionField.HasSeamlessSplice {
					a.AdaptationExtensionField.SpliceType = uint8(i[offset]&0xf0) >> 4
					a.AdaptationExtensionField.DTSNextAccessUnit = parsePTSOrDTS(i[offset:])
				}
			}
		}
	}
	return
}

// parsePCR parses a Program Clock Reference
// Program clock reference, stored as 33 bits base, 6 bits reserved, 9 bits extension.
func parsePCR(i []byte) *ClockReference {
	var pcr = uint64(i[0])<<40 | uint64(i[1])<<32 | uint64(i[2])<<24 | uint64(i[3])<<16 | uint64(i[4])<<8 | uint64(i[5])
	return newClockReference(int(pcr>>15), int(pcr&0x1ff))
}
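
// Worked example for parsePCR: the 6 bytes form a 48-bit value in which the
// top 33 bits (pcr >> 15) are the PCR base and the low 9 bits (pcr & 0x1ff)
// are the PCR extension; the 6 bits in between are reserved and dropped.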
111
vendor/github.com/asticode/go-astits/packet_buffer.go
generated
vendored
Normal file
111
vendor/github.com/asticode/go-astits/packet_buffer.go
generated
vendored
Normal file
@@ -0,0 +1,111 @@
package astits

import (
	"fmt"
	"io"

	"github.com/pkg/errors"
)

// packetBuffer represents a packet buffer
type packetBuffer struct {
	b          []*Packet
	packetSize int
	r          io.Reader
}

// newPacketBuffer creates a new packet buffer
func newPacketBuffer(r io.Reader, packetSize int) (pb *packetBuffer, err error) {
	// Init
	pb = &packetBuffer{
		packetSize: packetSize,
		r:          r,
	}

	// Packet size is not set
	if pb.packetSize == 0 {
		// Auto detect packet size
		if pb.packetSize, err = autoDetectPacketSize(r); err != nil {
			err = errors.Wrap(err, "astits: auto detecting packet size failed")
			return
		}
	}
	return
}

// autoDetectPacketSize updates the packet size based on the first bytes
// Minimum packet size is 188 and is bounded by 2 sync bytes
// Assumption is made that the first byte of the reader is a sync byte
func autoDetectPacketSize(r io.Reader) (packetSize int, err error) {
	// Read first bytes
	const l = 193
	var b = make([]byte, l)
	if _, err = r.Read(b); err != nil {
		err = errors.Wrapf(err, "astits: reading first %d bytes failed", l)
		return
	}

	// Packet must start with a sync byte
	if b[0] != syncByte {
		err = ErrPacketMustStartWithASyncByte
		return
	}

	// Look for sync bytes
	for idx, b := range b {
		if b == syncByte && idx >= 188 {
			// Update packet size
			packetSize = idx

			// Rewind or sync reader
			var n int64
			if n, err = rewind(r); err != nil {
				err = errors.Wrap(err, "astits: rewinding failed")
				return
			} else if n == -1 {
				var ls = packetSize - (l - packetSize)
				if _, err = r.Read(make([]byte, ls)); err != nil {
					err = errors.Wrapf(err, "astits: reading %d bytes to sync reader failed", ls)
					return
				}
			}
			return
		}
	}
	err = fmt.Errorf("astits: only one sync byte detected in first %d bytes", l)
	return
}
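
// Worked example: with 192-byte packets the second sync byte sits at index 192,
// so packetSize = 192. If the reader cannot be rewound, 193 bytes have already
// been consumed, so ls = 192 - (193 - 192) = 191 more bytes are read to land
// exactly on the start of the third packet (2 * 192 = 384).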

// rewind rewinds the reader if possible, otherwise n = -1
func rewind(r io.Reader) (n int64, err error) {
	if s, ok := r.(io.Seeker); ok {
		if n, err = s.Seek(0, 0); err != nil {
			err = errors.Wrap(err, "astits: seeking to 0 failed")
			return
		}
		return
	}
	n = -1
	return
}

// next fetches the next packet from the buffer
func (pb *packetBuffer) next() (p *Packet, err error) {
	// Read
	var b = make([]byte, pb.packetSize)
	if _, err = io.ReadFull(pb.r, b); err != nil {
		if err == io.EOF || err == io.ErrUnexpectedEOF {
			err = ErrNoMorePackets
		} else {
			err = errors.Wrapf(err, "astits: reading %d bytes failed", pb.packetSize)
		}
		return
	}

	// Parse packet
	if p, err = parsePacket(b); err != nil {
		err = errors.Wrap(err, "astits: building packet failed")
		return
	}
	return
}
101
vendor/github.com/asticode/go-astits/packet_pool.go
generated
vendored
Normal file
101
vendor/github.com/asticode/go-astits/packet_pool.go
generated
vendored
Normal file
@@ -0,0 +1,101 @@
package astits

import (
	"sort"
	"sync"
)

// packetPool represents a pool of packets
type packetPool struct {
	b map[uint16][]*Packet // Indexed by PID
	m *sync.Mutex
}

// newPacketPool creates a new packet pool
func newPacketPool() *packetPool {
	return &packetPool{
		b: make(map[uint16][]*Packet),
		m: &sync.Mutex{},
	}
}

// add adds a new packet to the pool
func (b *packetPool) add(p *Packet) (ps []*Packet) {
	// Throw away packet if error indicator
	if p.Header.TransportErrorIndicator {
		return
	}

	// Throw away packets that don't have a payload until we figure out what we're going to do with them
	// TODO figure out what we're going to do with them :D
	if !p.Header.HasPayload {
		return
	}

	// Lock
	b.m.Lock()
	defer b.m.Unlock()

	// Init buffer
	var mps []*Packet
	var ok bool
	if mps, ok = b.b[p.Header.PID]; !ok {
		mps = []*Packet{}
	}

	// Empty buffer if we detect a discontinuity
	if hasDiscontinuity(mps, p) {
		mps = []*Packet{}
	}

	// Throw away packet if it's the same as the previous one
	if isSameAsPrevious(mps, p) {
		return
	}

	// Add packet
	if len(mps) > 0 || (len(mps) == 0 && p.Header.PayloadUnitStartIndicator) {
		mps = append(mps, p)
	}

	// Check payload unit start indicator
	if p.Header.PayloadUnitStartIndicator && len(mps) > 1 {
		ps = mps[:len(mps)-1]
		mps = []*Packet{p}
	}

	// Assign
	b.b[p.Header.PID] = mps
	return
}

// dump dumps the packet pool by looking for the first item with packets inside
func (b *packetPool) dump() (ps []*Packet) {
	b.m.Lock()
	defer b.m.Unlock()
	var keys []int
	for k := range b.b {
		keys = append(keys, int(k))
	}
	sort.Ints(keys)
	for _, k := range keys {
		ps = b.b[uint16(k)]
		delete(b.b, uint16(k))
		if len(ps) > 0 {
			return
		}
	}
	return
}

// hasDiscontinuity checks whether a packet is discontinuous with a set of packets
func hasDiscontinuity(ps []*Packet, p *Packet) bool {
	return (p.Header.HasAdaptationField && p.AdaptationField.DiscontinuityIndicator) ||
		(len(ps) > 0 && p.Header.HasPayload && p.Header.ContinuityCounter != (ps[len(ps)-1].Header.ContinuityCounter+1)%16) ||
		(len(ps) > 0 && !p.Header.HasPayload && p.Header.ContinuityCounter != ps[len(ps)-1].Header.ContinuityCounter)
}

// isSameAsPrevious checks whether a packet is the same as the last packet of a set of packets
func isSameAsPrevious(ps []*Packet, p *Packet) bool {
	return len(ps) > 0 && p.Header.HasPayload && p.Header.ContinuityCounter == ps[len(ps)-1].Header.ContinuityCounter
}
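
// Worked example for hasDiscontinuity: if the last buffered packet for a PID
// has ContinuityCounter 15 and the next payload-carrying packet has counter 0,
// (15+1)%16 == 0 so the stream is continuous; a counter of 2 instead would be
// flagged as a discontinuity and the buffered packets for that PID dropped.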
32
vendor/github.com/asticode/go-astits/program_map.go
generated
vendored
Normal file
32
vendor/github.com/asticode/go-astits/program_map.go
generated
vendored
Normal file
@@ -0,0 +1,32 @@
package astits

import "sync"

// programMap represents a program ids map
type programMap struct {
	m *sync.Mutex
	p map[uint16]uint16 // map[ProgramMapID]ProgramNumber
}

// newProgramMap creates a new program ids map
func newProgramMap() programMap {
	return programMap{
		m: &sync.Mutex{},
		p: make(map[uint16]uint16),
	}
}

// exists checks whether the program with this pid exists
func (m programMap) exists(pid uint16) (ok bool) {
	m.m.Lock()
	defer m.m.Unlock()
	_, ok = m.p[pid]
	return
}

// set sets a new program id
func (m programMap) set(pid, number uint16) {
	m.m.Lock()
	defer m.m.Unlock()
	m.p[pid] = number
}