Add 'mp4/' from commit 'bd71ca9823ec91410ccdf4d2ed783ba44b8a14d7'

git-subtree-dir: mp4
git-subtree-mainline: 8bb2ec1a5cdf4acae1ca4dfa09dd876b850bea24
git-subtree-split: bd71ca9823ec91410ccdf4d2ed783ba44b8a14d7
nareix 2016-07-01 21:31:47 +08:00
commit 92528599ba
16 changed files with 5576 additions and 0 deletions

44
mp4/atom/atom.go Normal file

@ -0,0 +1,44 @@
package atom
import (
"io"
)
func WalkFile(w Walker, r io.Reader) (err error) {
var moov *Movie
var moof *MovieFrag
for {
var lr *io.LimitedReader
var cc4 string
if lr, cc4, err = ReadAtomHeader(r, ""); err != nil {
return
}
switch cc4 {
case "moov":
if moov, err = ReadMovie(lr); err != nil {
return
}
WalkMovie(w, moov)
case "moof":
if moof, err = ReadMovieFrag(lr); err != nil {
return
}
WalkMovieFrag(w, moof)
case "mdat":
w.StartStruct("MovieData")
w.Name("Length")
w.Int64(lr.N)
w.EndStruct()
}
if _, err = ReadDummy(r, int(lr.N)); err != nil {
return
}
}
return
}
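
WalkFile drives a Walker over every top-level atom: 'moov' and 'moof' boxes are parsed into structs and walked, 'mdat' is reported only by its length, and whatever the handlers did not consume is skipped. A minimal usage sketch (not part of this commit), assuming the subtree is importable as github.com/nareix/mp4/atom and that movie.mp4 is a local file:

package main

import (
	"io"
	"log"
	"os"

	"github.com/nareix/mp4/atom"
)

func main() {
	f, err := os.Open("movie.mp4") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Dumper implements Walker and prints each visited field indented by nesting depth.
	// WalkFile only stops when ReadAtomHeader fails, so io.EOF is the normal exit.
	if err := atom.WalkFile(&atom.Dumper{W: os.Stdout}, f); err != nil && err != io.EOF {
		log.Fatal(err)
	}
}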

95
mp4/atom/dumper.go Normal file

@ -0,0 +1,95 @@
package atom
import (
"encoding/hex"
"fmt"
"io"
"strings"
)
type Walker interface {
FilterArrayItem(string, string, int, int) bool
ArrayLeft(int, int)
StartStruct(string)
EndStruct()
Name(string)
Int(int)
Int64(int64)
HexInt(int)
Fixed(Fixed)
String(string)
Bytes([]byte)
TimeStamp(TimeStamp)
Println(msg ...interface{})
}
type Dumper struct {
W io.Writer
depth int
name string
arrlen int
arridx int
}
func (self Dumper) tab() string {
return strings.Repeat(" ", self.depth*2)
}
func (self Dumper) Println(msg ...interface{}) {
fmt.Fprintln(self.W, self.tab()+fmt.Sprint(msg...))
}
func (self *Dumper) ArrayLeft(i int, n int) {
self.Println(fmt.Sprintf("... total %d elements", n))
}
func (self *Dumper) FilterArrayItem(name string, field string, i int, n int) bool {
if n > 20 && i > 20 {
return false
}
return true
}
func (self *Dumper) EndArray() {
}
func (self *Dumper) StartStruct(name string) {
self.depth++
self.Println(fmt.Sprintf("[%s]", name))
}
func (self *Dumper) EndStruct() {
self.depth--
}
func (self *Dumper) Name(name string) {
self.name = name
}
func (self Dumper) Int(val int) {
self.Int64(int64(val))
}
func (self Dumper) Int64(val int64) {
self.Println(fmt.Sprintf("%s: %d", self.name, val))
}
func (self Dumper) HexInt(val int) {
self.Println(fmt.Sprintf("%s: %x", self.name, val))
}
func (self Dumper) String(val string) {
self.Println(fmt.Sprintf("%s: %s", self.name, val))
}
func (self Dumper) Fixed(val Fixed) {
self.Println(fmt.Sprintf("%s: %d", self.name, FixedToInt(val)))
}
func (self Dumper) Bytes(val []byte) {
self.Println(fmt.Sprintf("%s: %s", self.name, hex.EncodeToString(val)))
}
func (self Dumper) TimeStamp(val TimeStamp) {
self.Println(fmt.Sprintf("%s: %d", self.name, int(val)))
}
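
Any type with these methods can drive the Walk* functions; Dumper is just the printing implementation. A cheap way to customize output is to embed *Dumper and shadow a single method. A sketch (not part of this commit) that caps every array at three printed entries:

package main

import (
	"log"
	"os"

	"github.com/nareix/mp4/atom"
)

// quietDumper behaves like atom.Dumper but shows at most three entries per array.
type quietDumper struct {
	*atom.Dumper
}

func (d quietDumper) FilterArrayItem(name string, field string, i int, n int) bool {
	return i < 3
}

func main() {
	f, err := os.Open("movie.mp4") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	atom.WalkFile(quietDumper{&atom.Dumper{W: os.Stdout}}, f)
}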

274
mp4/atom/frag.go Normal file

@ -0,0 +1,274 @@
package atom
import (
_ "bytes"
"fmt"
"github.com/nareix/bits"
"io"
)
const (
TFHD_BASE_DATA_OFFSET = 0x01
TFHD_STSD_ID = 0x02
TFHD_DEFAULT_DURATION = 0x08
TFHD_DEFAULT_SIZE = 0x10
TFHD_DEFAULT_FLAGS = 0x20
TFHD_DURATION_IS_EMPTY = 0x010000
TFHD_DEFAULT_BASE_IS_MOOF = 0x020000
)
type TrackFragHeader struct {
Version int
Flags int
Id int
DefaultSize int
DefaultDuration int
DefaultFlags int
BaseDataOffset int64
StsdId int
}
func WalkTrackFragHeader(w Walker, self *TrackFragHeader) {
w.StartStruct("TrackFragHeader")
w.Name("Flags")
w.HexInt(self.Flags)
w.Name("Id")
w.Int(self.Id)
w.Name("DefaultDuration")
w.Int(self.DefaultDuration)
w.Name("DefaultSize")
w.Int(self.DefaultSize)
w.Name("DefaultFlags")
w.HexInt(self.DefaultFlags)
w.EndStruct()
}
func WriteTrackFragHeader(w io.WriteSeeker, self *TrackFragHeader) (err error) {
panic("unimplmented")
return
}
func ReadTrackFragHeader(r *io.LimitedReader) (res *TrackFragHeader, err error) {
self := &TrackFragHeader{}
if self.Version, err = ReadInt(r, 1); err != nil {
return
}
if self.Flags, err = ReadInt(r, 3); err != nil {
return
}
if self.Id, err = ReadInt(r, 4); err != nil {
return
}
if self.Flags&TFHD_BASE_DATA_OFFSET != 0 {
if self.BaseDataOffset, err = bits.ReadInt64BE(r, 64); err != nil {
return
}
}
if self.Flags&TFHD_STSD_ID != 0 {
if self.StsdId, err = ReadInt(r, 4); err != nil {
return
}
}
if self.Flags&TFHD_DEFAULT_DURATION != 0 {
if self.DefaultDuration, err = ReadInt(r, 4); err != nil {
return
}
}
if self.Flags&TFHD_DEFAULT_SIZE != 0 {
if self.DefaultSize, err = ReadInt(r, 4); err != nil {
return
}
}
if self.Flags&TFHD_DEFAULT_FLAGS != 0 {
if self.DefaultFlags, err = ReadInt(r, 4); err != nil {
return
}
}
res = self
return
}
const (
TRUN_DATA_OFFSET = 0x01
TRUN_FIRST_SAMPLE_FLAGS = 0x04
TRUN_SAMPLE_DURATION = 0x100
TRUN_SAMPLE_SIZE = 0x200
TRUN_SAMPLE_FLAGS = 0x400
TRUN_SAMPLE_CTS = 0x800
)
type TrackFragRunEntry struct {
Duration int
Size int
Flags int
Cts int
}
type TrackFragRun struct {
Version int
Flags int
FirstSampleFlags int
DataOffset int
Entries []TrackFragRunEntry
}
func WalkTrackFragRun(w Walker, self *TrackFragRun) {
w.StartStruct("TrackFragRun")
w.Name("Flags")
w.HexInt(self.Flags)
w.Name("FirstSampleFlags")
w.HexInt(self.FirstSampleFlags)
w.Name("DataOffset")
w.Int(self.DataOffset)
w.Name("EntriesCount")
w.Int(len(self.Entries))
for i := 0; i < 10 && i < len(self.Entries); i++ {
entry := self.Entries[i]
w.Println(fmt.Sprintf("Entry[%d] Flags=%x Duration=%d Size=%d Cts=%d",
i, entry.Flags, entry.Duration, entry.Size, entry.Cts))
}
w.EndStruct()
}
func WriteTrackFragRun(w io.WriteSeeker, self *TrackFragRun) (err error) {
panic("unimplmented")
return
}
func ReadTrackFragRun(r *io.LimitedReader) (res *TrackFragRun, err error) {
self := &TrackFragRun{}
if self.Version, err = ReadInt(r, 1); err != nil {
return
}
if self.Flags, err = ReadInt(r, 3); err != nil {
return
}
var count int
if count, err = ReadInt(r, 4); err != nil {
return
}
if self.Flags&TRUN_DATA_OFFSET != 0 {
if self.DataOffset, err = ReadInt(r, 4); err != nil {
return
}
}
if self.Flags&TRUN_FIRST_SAMPLE_FLAGS != 0 {
if self.FirstSampleFlags, err = ReadInt(r, 4); err != nil {
return
}
}
for i := 0; i < count; i++ {
var flags int
if i > 0 {
flags = self.Flags
} else {
flags = self.FirstSampleFlags
}
entry := TrackFragRunEntry{}
if flags&TRUN_SAMPLE_DURATION != 0 {
if entry.Duration, err = ReadInt(r, 4); err != nil {
return
}
}
if flags&TRUN_SAMPLE_SIZE != 0 {
if entry.Size, err = ReadInt(r, 4); err != nil {
return
}
}
if flags&TRUN_SAMPLE_FLAGS != 0 {
if entry.Flags, err = ReadInt(r, 4); err != nil {
return
}
}
if flags&TRUN_SAMPLE_CTS != 0 {
if entry.Cts, err = ReadInt(r, 4); err != nil {
return
}
}
self.Entries = append(self.Entries, entry)
}
res = self
return
}
type TrackFragDecodeTime struct {
Version int
Flags int
Time int64
}
func ReadTrackFragDecodeTime(r *io.LimitedReader) (res *TrackFragDecodeTime, err error) {
self := &TrackFragDecodeTime{}
if self.Version, err = ReadInt(r, 1); err != nil {
return
}
if self.Flags, err = ReadInt(r, 3); err != nil {
return
}
if self.Version != 0 {
if self.Time, err = bits.ReadInt64BE(r, 64); err != nil {
return
}
} else {
if self.Time, err = bits.ReadInt64BE(r, 32); err != nil {
return
}
}
res = self
return
}
func WriteTrackFragDecodeTime(w io.WriteSeeker, self *TrackFragDecodeTime) (err error) {
var aw *Writer
if aw, err = WriteAtomHeader(w, "tfdt"); err != nil {
return
}
w = aw
if err = WriteInt(w, self.Version, 1); err != nil {
return
}
if err = WriteInt(w, self.Flags, 3); err != nil {
return
}
if self.Version != 0 {
if err = bits.WriteInt64BE(w, self.Time, 64); err != nil {
return
}
} else {
if err = bits.WriteInt64BE(w, self.Time, 32); err != nil {
return
}
}
if err = aw.Close(); err != nil {
return
}
return
}
func WalkTrackFragDecodeTime(w Walker, self *TrackFragDecodeTime) {
w.StartStruct("TrackFragDecodeTime")
w.Name("Version")
w.Int(self.Version)
w.Name("Flags")
w.Int(self.Flags)
w.Name("Time")
w.Int64(self.Time)
w.EndStruct()
return
}
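
The tfhd/trun/tfdt readers all take the *io.LimitedReader produced by ReadAtomHeader, so a fragment box can be parsed out of an in-memory buffer as well as a file. A sketch (not part of this commit), written as if it lived inside the atom package with bytes and fmt imported; trafPayload is a hypothetical slice holding the raw children of a 'traf' box:

func dumpTrackFragRun(trafPayload []byte) error {
	r := bytes.NewReader(trafPayload)
	// ReadAtomHeader skips non-matching boxes until it reaches the requested fourcc.
	lr, _, err := ReadAtomHeader(r, "trun")
	if err != nil {
		return err
	}
	trun, err := ReadTrackFragRun(lr)
	if err != nil {
		return err
	}
	fmt.Println("samples in this run:", len(trun.Entries))
	return nil
}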

875
mp4/atom/genStruct.js Normal file

@ -0,0 +1,875 @@
var uc = x => x && x.substr(0,1).toUpperCase()+x.slice(1);
Array.prototype.nonull = function () {
return this.filter(x => x);
};
var atoms = {
movie: {
cc4: 'moov',
fields: [
['$atoms', [
['header', '*movieHeader'],
['iods', '*iods'],
['tracks', '[]*track'],
['movieExtend', '*movieExtend'],
]],
],
},
iods: {
cc4: 'iods',
fields: [
['data', '[]byte'],
],
},
movieHeader: {
cc4: 'mvhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['createTime', 'TimeStamp32'],
['modifyTime', 'TimeStamp32'],
['timeScale', 'int32'],
['duration', 'int32'],
['preferredRate', 'Fixed32'],
['preferredVolume', 'Fixed16'],
['_', '[10]byte'],
['matrix', '[9]int32'],
['previewTime', 'TimeStamp32'],
['previewDuration', 'TimeStamp32'],
['posterTime', 'TimeStamp32'],
['selectionTime', 'TimeStamp32'],
['selectionDuration', 'TimeStamp32'],
['currentTime', 'TimeStamp32'],
['nextTrackId', 'int32'],
],
},
track: {
cc4: 'trak',
fields: [
['$atoms', [
['header', '*trackHeader'],
['media', '*media'],
]],
],
},
trackHeader: {
cc4: 'tkhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['createTime', 'TimeStamp32'],
['modifyTime', 'TimeStamp32'],
['trackId', 'int32'],
['_', '[4]byte'],
['duration', 'int32'],
['_', '[8]byte'],
['layer', 'int16'],
['alternateGroup', 'int16'],
['volume', 'Fixed16'],
['_', '[2]byte'],
['matrix', '[9]int32'],
['trackWidth', 'Fixed32'],
['trackHeight', 'Fixed32'],
],
},
handlerRefer: {
cc4: 'hdlr',
fields: [
['version', 'int8'],
['flags', 'int24'],
['type', '[4]char'],
['subType', '[4]char'],
['name', '[]char'],
],
},
media: {
cc4: 'mdia',
fields: [
['$atoms', [
['header', '*mediaHeader'],
['handler', '*handlerRefer'],
['info', '*mediaInfo'],
]],
],
},
mediaHeader: {
cc4: 'mdhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['createTime', 'TimeStamp32'],
['modifyTime', 'TimeStamp32'],
['timeScale', 'int32'],
['duration', 'int32'],
['language', 'int16'],
['quality', 'int16'],
],
},
mediaInfo: {
cc4: 'minf',
fields: [
['$atoms', [
['sound', '*soundMediaInfo'],
['video', '*videoMediaInfo'],
['data', '*dataInfo'],
['sample', '*sampleTable'],
]],
],
},
dataInfo: {
cc4: 'dinf',
fields: [
['$atoms', [
['refer', '*dataRefer'],
]],
],
},
dataRefer: {
cc4: 'dref',
fields: [
['version', 'int8'],
['flags', 'int24'],
['$atomsCount', 'int32'],
['$atoms', [
['url', '*dataReferUrl'],
]],
],
},
dataReferUrl: {
cc4: 'url ',
fields: [
['version', 'int8'],
['flags', 'int24'],
],
},
soundMediaInfo: {
cc4: 'smhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['balance', 'int16'],
['_', 'int16'],
],
},
videoMediaInfo: {
cc4: 'vmhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['graphicsMode', 'int16'],
['opcolor', '[3]int16'],
],
},
sampleTable: {
cc4: 'stbl',
fields: [
['$atoms', [
['sampleDesc', '*sampleDesc'],
['timeToSample', '*timeToSample'],
['compositionOffset', '*compositionOffset'],
['sampleToChunk', '*sampleToChunk'],
['syncSample', '*syncSample'],
['chunkOffset', '*chunkOffset'],
['sampleSize', '*sampleSize'],
]],
],
},
sampleDesc: {
cc4: 'stsd',
fields: [
['version', 'int8'],
['_', '[3]byte'],
['$atomsCount', 'int32'],
['$atoms', [
['avc1Desc', '*avc1Desc'],
['mp4aDesc', '*mp4aDesc'],
]],
],
},
mp4aDesc: {
cc4: 'mp4a',
fields: [
['_', '[6]byte'],
['dataRefIdx', 'int16'],
['version', 'int16'],
['revisionLevel', 'int16'],
['vendor', 'int32'],
['numberOfChannels', 'int16'],
['sampleSize', 'int16'],
['compressionId', 'int16'],
['_', 'int16'],
['sampleRate', 'Fixed32'],
['$atoms', [
['conf', '*elemStreamDesc'],
]],
],
},
elemStreamDesc: {
cc4: 'esds',
fields: [
['version', 'int32'],
['data', '[]byte'],
],
},
avc1Desc: {
cc4: 'avc1',
fields: [
['_', '[6]byte'],
['dataRefIdx', 'int16'],
['version', 'int16'],
['revision', 'int16'],
['vendor', 'int32'],
['temporalQuality', 'int32'],
['spatialQuality', 'int32'],
['width', 'int16'],
['height', 'int16'],
['horizontalResolution', 'Fixed32'],
['vorizontalResolution', 'Fixed32'],
['_', 'int32'],
['frameCount', 'int16'],
['compressorName', '[32]char'],
['depth', 'int16'],
['colorTableId', 'int16'],
['$atoms', [
['conf', '*avc1Conf'],
]],
],
},
avc1Conf: {
cc4: 'avcC',
fields: [
['data', '[]byte'],
],
},
timeToSample: {
cc4: 'stts',
fields: [
['version', 'int8'],
['flags', 'int24'],
['entries', '[int32]timeToSampleEntry'],
],
},
timeToSampleEntry: {
fields: [
['count', 'int32'],
['duration', 'int32'],
],
},
sampleToChunk: {
cc4: 'stsc',
fields: [
['version', 'int8'],
['flags', 'int24'],
['entries', '[int32]sampleToChunkEntry'],
],
},
sampleToChunkEntry: {
fields: [
['firstChunk', 'int32'],
['samplesPerChunk', 'int32'],
['sampleDescId', 'int32'],
],
},
compositionOffset: {
cc4: 'ctts',
fields: [
['version', 'int8'],
['flags', 'int24'],
['entries', '[int32]compositionOffsetEntry'],
],
},
compositionOffsetEntry: {
fields: [
['count', 'int32'],
['offset', 'int32'],
],
},
syncSample: {
cc4: 'stss',
fields: [
['version', 'int8'],
['flags', 'int24'],
['entries', '[int32]int32'],
],
},
sampleSize: {
cc4: 'stsz',
fields: [
['version', 'int8'],
['flags', 'int24'],
['sampleSize', 'int32'],
['entries', '[int32]int32'],
],
},
chunkOffset: {
cc4: 'stco',
fields: [
['version', 'int8'],
['flags', 'int24'],
['entries', '[int32]int32'],
],
},
movieFrag: {
cc4: 'moof',
fields: [
['$atoms', [
['header', '*movieFragHeader'],
['tracks', '[]*trackFrag'],
]],
],
},
trackFragDecodeTime: {
cc4: 'tfdt',
},
movieFragHeader: {
cc4: 'mfhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['seqNum', 'int32'],
],
},
trackFrag: {
cc4: 'traf',
fields: [
['$atoms', [
['header', '*trackFragHeader'],
['decodeTime', '*trackFragDecodeTime'],
['run', '*trackFragRun'],
]],
],
},
trackFragRun: {
cc4: 'trun',
},
trackFragHeader: {
cc4: 'tfhd',
},
movieExtend: {
cc4: 'mvex',
fields: [
['$atoms', [
['tracks', '[]*trackExtend'],
]],
],
},
trackExtend: {
cc4: 'trex',
fields: [
['version', 'int8'],
['flags', 'int24'],
['trackId', 'int32'],
['defaultSampleDescIdx', 'int32'],
['defaultSampleDuration', 'int32'],
['defaultSampleSize', 'int32'],
['defaultSampleFlags', 'int32'],
],
},
/*
// need hand write
trackFragRun: {
cc4: 'trun',
fields: [
['version', 'int8'],
['flags', 'int24'],
['sampleCount', 'int32'],
['dataOffset', 'int32'],
['entries', '[]int32'],
],
},
trackFragHeader: {
cc4: 'tfhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['id', 'int32'],
['sampleDescriptionIndex', 'int32'],
['_', '[12]byte'],
],
},
*/
};
var DeclReadFunc = (opts) => {
var stmts = [];
var DebugStmt = type => `// ${JSON.stringify(type)}`;
var ReadArr = (name, type) => {
return [
//StrStmt('// ReadArr'),
//DebugStmt(type),
type.varcount && [
DeclVar('count', 'int'),
CallCheckAssign('ReadInt', ['r', type.varcount], ['count']),
`${name} = make(${typeStr(type)}, count)`,
],
For(RangeN('i', type.varcount ? 'count' : type.count), [
ReadCommnType(name+'[i]', type),
]),
];
};
var elemTypeStr = type => typeStr(Object.assign({}, type, {arr: false}));
var ReadAtoms = fields => [
For(`r.N > 0`, [
DeclVar('cc4', 'string'),
DeclVar('ar', '*io.LimitedReader'),
CallCheckAssign('ReadAtomHeader', ['r', '""'], ['ar', 'cc4']),
Switch('cc4', fields.map(field => [
`"${atoms[field.type.struct].cc4}"`, [
field.type.arr ? [
DeclVar('item', elemTypeStr(field.type)),
CallCheckAssign('Read'+field.type.Struct, ['ar'], ['item']),
`self.${field.name} = append(self.${field.name}, item)`,
] : [
CallCheckAssign('Read'+field.type.Struct, ['ar'], [`self.${field.name}`]),
],
]
]), showlog && [`log.Println("skip", cc4)`]),
CallCheckAssign('ReadDummy', ['ar', 'int(ar.N)'], ['_']),
])
];
var ReadCommnType = (name, type) => {
if (type.struct)
return CallCheckAssign(
'Read'+type.Struct, ['r'], [name]);
return [
//DebugStmt(type),
CallCheckAssign(
'Read'+type.fn, ['r', type.len||'int(r.N)'], [name]),
]
};
var ReadField = (name, type) => {
if (name == '_')
return CallCheckAssign('ReadDummy', ['r', type.len], ['_']);
if (name == '$atoms')
return ReadAtoms(type.list);
if (name == '$atomsCount')
return CallCheckAssign('ReadDummy', ['r', type.len], ['_']);
if (type.arr && type.fn != 'Bytes')
return ReadArr('self.'+name, type);
return ReadCommnType('self.'+name, type);
};
var ReadFields = () => opts.fields.map(field => {
var name = field.name;
var type = field.type;
return ReadField(name, type);
}).nonull();
var ptr = opts.cc4;
return Func(
'Read'+opts.type,
[['r', '*io.LimitedReader']],
[[ptr?'res':'self', (ptr?'*':'')+opts.type], ['err', 'error']],
[
ptr && `self := &${opts.type}{}`,
ReadFields(),
ptr && `res = self`,
]
);
};
var DeclWriteFunc = (opts) => {
var SavePos = [
DeclVar('aw', '*Writer'),
CallCheckAssign('WriteAtomHeader', ['w', `"${opts.cc4}"`], ['aw']),
`w = aw`,
];
var RestorePosSetSize = [
CallCheckAssign('aw.Close', [], []),
];
var WriteAtoms = fields => fields.map(field => {
var name = 'self.'+field.name;
return [
`if ${name} != nil {`,
field.type.arr ? WriteArr(name, field.type) : WriteCommnType(name, field.type),
atomsCount && `${atomsCount.name}++`,
`}`,
];
});
var WriteArr = (name, type) => {
return [
type.varcount && CallCheckAssign('WriteInt', ['w', `len(${name})`, type.varcount], []),
For(`_, elem := range ${name}`, [
WriteCommnType('elem', type),
]),
];
};
var WriteCommnType = (name, type) => {
if (type.struct)
return CallCheckAssign(
'Write'+type.Struct, ['w', name], []);
return [
CallCheckAssign(
'Write'+type.fn, ['w', name, type.len||`len(${name})`], []),
]
};
var atomsCount;
var WriteAtomsCountStart = (type) => {
atomsCount = {
name: 'atomsCount',
namePos: 'atomsCountPos',
type: type,
}
return [
DeclVar(atomsCount.name, 'int'),
DeclVar(atomsCount.namePos, 'int64'),
CallCheckAssign('WriteEmptyInt', ['w', type.len], [atomsCount.namePos]),
];
};
var WriteAtomsCountEnd = (type) => {
return [
CallCheckAssign('RefillInt',
['w', atomsCount.namePos, atomsCount.name, atomsCount.type.len],
[]
),
];
};
var WriteField = (name, type) => {
if (name == '_')
return CallCheckAssign('WriteDummy', ['w', type.len], []);
if (name == '$atoms')
return WriteAtoms(type.list);
if (name == '$atomsCount')
return WriteAtomsCountStart(type);
if (type.arr && type.fn != 'Bytes')
return WriteArr('self.'+name, type);
return WriteCommnType('self.'+name, type);
};
var WriteFields = () => opts.fields
.map(field => WriteField(field.name, field.type))
.concat(atomsCount && WriteAtomsCountEnd())
return Func(
'Write'+opts.type,
[['w', 'io.WriteSeeker'], ['self', (opts.cc4?'*':'')+opts.type]],
[['err', 'error']],
[
opts.cc4 && SavePos,
WriteFields(),
opts.cc4 && RestorePosSetSize,
]
);
};
var DeclDumpFunc = (opts) => {
var dumpStruct = (name, type) => {
if (type.ptr)
return If(`${name} != nil`, Call('Walk'+type.Struct, ['w', name]));
return Call('Walk'+type.Struct, ['w', name]);
};
var dumpArr = (name, type, id) => {
return [
//Call('w.StartArray', [`"${id}"`, `len(${name})`]),
For(`i, item := range(${name})`, If(
`w.FilterArrayItem("${opts.type}", "${id}", i, len(${name}))`,
dumpCommonType('item', type, id),
[`w.ArrayLeft(i, len(${name}))`, 'break']
)),
//Call('w.EndArray', []),
];
};
var dumpCommonType = (name, type, id) => {
if (type.struct)
return dumpStruct(name, type);
return [
Call('w.Name', [`"${id}"`]),
Call('w.'+type.fn, [name]),
];
};
var dumpField = (name, type, noarr) => {
if (name == '_')
return;
if (name == '$atomsCount')
return;
if (name == '$atoms') {
return type.list.map(field => dumpField(field.name, field.type));
}
if (!noarr && type.arr && type.fn != 'Bytes')
return dumpArr('self.'+name, type, name);
return dumpCommonType('self.'+name, type, name);
};
var dumpFields = fields =>
[ Call('w.StartStruct', [`"${opts.type}"`]) ]
.concat(fields.map(field => dumpField(field.name, field.type)))
.concat([Call('w.EndStruct', [])]);
return Func(
'Walk'+opts.type,
[['w', 'Walker'], ['self', (opts.cc4?'*':'')+opts.type]],
[],
dumpFields(opts.fields)
)
};
var D = (cls, ...fields) => {
global[cls] = (...args) => {
var obj = {cls: cls};
fields.forEach((k, i) => obj[k] = args[i]);
return obj;
};
};
D('Func', 'name', 'args', 'rets', 'body');
D('If', 'cond', 'action', 'else');
D('Call', 'fn', 'args');
D('CallCheckAssign', 'fn', 'args', 'rets', 'action');
D('DeclVar', 'name', 'type');
D('For', 'cond', 'body');
D('RangeN', 'i', 'n');
D('DeclStruct', 'name', 'body');
D('StrStmt', 'content');
D('Switch', 'cond', 'cases', 'default');
var showlog = false;
var S = s => s && s || '';
var dumpFn = f => {
var dumpArgs = x => x.map(x => x.join(' ')).join(',');
return `func ${f.name}(${dumpArgs(f.args)}) (${dumpArgs(f.rets)}) {
${S(showlog && 'log.Println("'+f.name+'")')}
${dumpStmts(f.body)}
return
}`;
};
var dumpStmts = stmt => {
if (typeof(stmt) == 'string') {
return stmt;
} else if (stmt instanceof Array) {
return stmt.nonull().map(dumpStmts).join('\n');
} else if (stmt.cls == 'If') {
var s = `if ${stmt.cond} {
${dumpStmts(stmt.action)}
}`;
if (stmt.else) {
s += ` else {
${dumpStmts(stmt.else)}
}`;
}
return s;
} else if (stmt.cls == 'Call') {
return `${stmt.fn}(${stmt.args.join(',')})`;
} else if (stmt.cls == 'CallCheckAssign') {
return `if ${stmt.rets.concat(['err']).join(',')} = ${stmt.fn}(${stmt.args.join(',')}); err != nil {
${stmt.action ? stmt.action : 'return'}
}`;
} else if (stmt.cls == 'DeclVar') {
return `var ${stmt.name} ${stmt.type}`;
} else if (stmt.cls == 'For') {
return `for ${dumpStmts(stmt.cond)} {
${dumpStmts(stmt.body)}
}`;
} else if (stmt.cls == 'RangeN') {
return `${stmt.i} := 0; ${stmt.i} < ${stmt.n}; ${stmt.i}++`;
} else if (stmt.cls == 'DeclStruct') {
return `type ${stmt.name} struct {
${stmt.body.map(line => line.join(' ')).join('\n')}
}`;
} else if (stmt.cls == 'Func') {
return dumpFn(stmt);
} else if (stmt.cls == 'StrStmt') {
return stmt.content;
} else if (stmt.cls == 'Switch') {
var dumpCase = c => `case ${c[0]}: { ${dumpStmts(c[1])} }`;
var dumpDefault = c => `default: { ${dumpStmts(c)} }`;
return `switch ${stmt.cond} {
${stmt.cases.map(dumpCase).join('\n')}
${stmt.default && dumpDefault(stmt.default) || ''}
}`;
}
};
var parseType = s => {
var r = {};
var bracket = /^\[(.*)\]/;
var lenDiv = 8;
var types = /^(int|TimeStamp|byte|Fixed|char)/;
var number = /^[0-9]+/;
if (s.match(bracket)) {
var count = s.match(bracket)[1];
if (count.substr(0,3) == 'int') {
r.varcount = +count.substr(3)/8;
} else {
r.count = +count;
}
r.arr = true;
s = s.replace(bracket, '');
}
if (s.substr(0,1) == '*') {
r.ptr = true;
s = s.slice(1);
}
if (s.match(types)) {
r.type = s.match(types)[0];
r.fn = uc(r.type);
s = s.replace(types, '');
}
if (r.type == 'byte' && r.arr) {
r.len = r.count;
r.fn = 'Bytes';
}
if (r.type == 'char' && r.arr) {
r.len = r.count;
r.fn = 'String';
r.type = 'string';
r.arr = false;
lenDiv = 1;
}
if (s.match(number)) {
r.len = +s.match(number)[0]/lenDiv;
s = s.replace(number, '');
}
if (s != '') {
r.struct = s;
r.Struct = uc(s);
}
return r;
};
var typeStr = (t) => {
var s = '';
if (t.arr)
s += '['+(t.count||'')+']';
if (t.ptr)
s += '*';
if (t.struct)
s += t.Struct;
if (t.type)
s += t.type;
return s;
};
var nameShouldHide = (name) => name == '_'
var allStmts = () => {
var stmts = [];
var parseFields = fields => fields.map(field => {
return {
name: uc(field[0]),
type: field[0] == '$atoms' ? {list: parseFields(field[1])} : parseType(field[1]),
};
});
var genStructFields = fields => fields.map(field => {
if (field.name == '_')
return;
if (field.name == '$atomsCount')
return;
if (field.name == '$atoms')
return field.type.list;
return [field];
}).nonull().reduce((prev, cur) => prev.concat(cur)).map(field => [
field.name, typeStr(field.type)]);
for (var k in atoms) {
var atom = atoms[k];
var name = uc(k);
if (atom.fields == null)
continue;
var fields = parseFields(atom.fields);
stmts = stmts.concat([
DeclStruct(name, genStructFields(fields)),
DeclReadFunc({
type: name,
fields: fields,
cc4: atom.cc4,
}),
DeclWriteFunc({
type: name,
fields: fields,
cc4: atom.cc4,
}),
DeclDumpFunc({
type: name,
fields: fields,
cc4: atom.cc4,
}),
]);
}
return stmts;
};
console.log(`
// THIS FILE IS AUTO GENERATED
package atom
import (
"io"
${showlog && '"log"' || ''}
)
`, dumpStmts(allStmts()));
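
genStruct.js expands each entry in the atoms table into a struct plus Read/Write/Walk functions; that generated Go is what the suppressed struct.go below contains, after genStruct.sh pipes it through gofmt. For a flat atom such as movieFragHeader ('mfhd') the emitted reader has roughly this shape (a hand-written approximation, not the generator's verbatim output):

type MovieFragHeader struct {
	Version int
	Flags   int
	SeqNum  int
}

func ReadMovieFragHeader(r *io.LimitedReader) (res *MovieFragHeader, err error) {
	self := &MovieFragHeader{}
	if self.Version, err = ReadInt(r, 1); err != nil {
		return
	}
	if self.Flags, err = ReadInt(r, 3); err != nil {
		return
	}
	if self.SeqNum, err = ReadInt(r, 4); err != nil {
		return
	}
	res = self
	return
}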

7
mp4/atom/genStruct.sh Executable file

@ -0,0 +1,7 @@
#!/bin/bash
node genStruct.js > struct.go && gofmt -w struct.go && go build . || {
echo
echo "Please use node version > 6.0.0"
}

115
mp4/atom/reader.go Normal file

@ -0,0 +1,115 @@
package atom
import (
"io"
"io/ioutil"
"log"
)
func ReadBytes(r io.Reader, n int) (res []byte, err error) {
res = make([]byte, n)
if _, err = io.ReadFull(r, res); err != nil {
return
}
return
}
func ReadUInt(r io.Reader, n int) (res uint, err error) {
var b []byte
if b, err = ReadBytes(r, n); err != nil {
return
}
for i := 0; i < n; i++ {
res <<= 8
res += uint(b[i])
}
return
}
func ReadInt(r io.Reader, n int) (res int, err error) {
var uval uint
if uval, err = ReadUInt(r, n); err != nil {
return
}
if uval&(1<<uint(n*8-1)) != 0 {
res = -int((1 << uint(n*8)) - uval)
} else {
res = int(uval)
}
return
}
func ReadFixed(r io.Reader, n int) (res Fixed, err error) {
var ui uint
if ui, err = ReadUInt(r, n); err != nil {
return
}
if n == 2 {
res = Fixed(ui << 8)
} else if n == 4 {
res = Fixed(ui)
} else {
panic("only fixed32 and fixed16 is supported")
}
return
}
func ReadTimeStamp(r io.Reader, n int) (res TimeStamp, err error) {
var ui uint
if ui, err = ReadUInt(r, n); err != nil {
return
}
res = TimeStamp(ui)
return
}
func ReadString(r io.Reader, n int) (res string, err error) {
var b []byte
if b, err = ReadBytes(r, n); err != nil {
return
}
res = string(b)
return
}
func ReadDummy(r io.Reader, n int) (res int, err error) {
_, err = io.CopyN(ioutil.Discard, r, int64(n))
return
}
func ReadAtomHeader(r io.Reader, targetCC4 string) (res *io.LimitedReader, cc4 string, err error) {
for {
var size int
if size, err = ReadInt(r, 4); err != nil {
return
}
if size == 0 {
continue
}
if cc4, err = ReadString(r, 4); err != nil {
return
}
size = size - 8
if false {
log.Println(cc4, targetCC4, size, cc4 == targetCC4)
}
if targetCC4 != "" && cc4 != targetCC4 {
log.Println("ReadAtomHeader skip:", cc4)
if _, err = ReadDummy(r, size); err != nil {
return
}
continue
}
res = &io.LimitedReader{
R: r,
N: int64(size),
}
return
}
}
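
ReadUInt assembles a big-endian unsigned value byte by byte, and ReadInt reinterprets its top bit as a sign, so an n-byte field behaves like an n-byte two's-complement integer. A small check (not part of this commit, assuming it sits inside the atom package with bytes and fmt imported):

func exampleReadInt() {
	// Reading 0xFFFE as a 2-byte signed field yields -2, because the high bit
	// of the 16-bit value is taken as the sign bit.
	v, err := ReadInt(bytes.NewReader([]byte{0xff, 0xfe}), 2)
	fmt.Println(v, err) // prints: -2 <nil>
}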

2874
mp4/atom/struct.go Normal file

File diff suppressed because it is too large

12
mp4/atom/types.go Normal file

@ -0,0 +1,12 @@
package atom
type Fixed uint32
type TimeStamp uint32
func IntToFixed(val int) Fixed {
return Fixed(val << 16)
}
func FixedToInt(val Fixed) int {
return int(val >> 16)
}

29
mp4/atom/utils.go Normal file

@ -0,0 +1,29 @@
package atom
func GetAvc1ConfByTrack(stream *Track) (avc1 *Avc1Conf) {
if media := stream.Media; media != nil {
if info := media.Info; info != nil {
if sample := info.Sample; sample != nil {
if desc := sample.SampleDesc; desc != nil {
if avc1 := desc.Avc1Desc; avc1 != nil {
return avc1.Conf
}
}
}
}
}
return
}
func GetMp4aDescByTrack(stream *Track) (mp4a *Mp4aDesc) {
if media := stream.Media; media != nil {
if info := media.Info; info != nil {
if sample := info.Sample; sample != nil {
if desc := sample.SampleDesc; desc != nil {
return desc.Mp4aDesc
}
}
}
}
return
}

119
mp4/atom/writer.go Normal file

@ -0,0 +1,119 @@
package atom
import (
"io"
"log"
)
func WriteBytes(w io.Writer, b []byte, n int) (err error) {
if len(b) < n {
b = append(b, make([]byte, n-len(b))...)
}
_, err = w.Write(b[:n])
return
}
func WriteUInt(w io.Writer, val uint, n int) (err error) {
var b [8]byte
for i := n - 1; i >= 0; i-- {
b[i] = byte(val)
val >>= 8
}
return WriteBytes(w, b[:], n)
}
func WriteInt(w io.Writer, val int, n int) (err error) {
var uval uint
if val < 0 {
uval = uint((1 << uint(n*8)) + val)
} else {
uval = uint(val)
}
return WriteUInt(w, uval, n)
}
func WriteFixed(w io.Writer, val Fixed, n int) (err error) {
var uval uint
if n == 2 {
uval = uint(val) >> 8
} else if n == 4 {
uval = uint(val)
} else {
panic("only fixed32 and fixed16 is supported")
}
return WriteUInt(w, uval, n)
}
func WriteTimeStamp(w io.Writer, ts TimeStamp, n int) (err error) {
return WriteUInt(w, uint(ts), n)
}
func WriteString(w io.Writer, val string, n int) (err error) {
return WriteBytes(w, []byte(val), n)
}
func WriteDummy(w io.Writer, n int) (err error) {
return WriteBytes(w, []byte{}, n)
}
type Writer struct {
io.WriteSeeker
sizePos int64
}
func WriteEmptyInt(w io.WriteSeeker, n int) (pos int64, err error) {
if pos, err = w.Seek(0, 1); err != nil {
return
}
if err = WriteInt(w, 0, n); err != nil {
return
}
return
}
func RefillInt(w io.WriteSeeker, pos int64, val int, n int) (err error) {
var curPos int64
if curPos, err = w.Seek(0, 1); err != nil {
return
}
if _, err = w.Seek(pos, 0); err != nil {
return
}
if err = WriteInt(w, val, n); err != nil {
return
}
if _, err = w.Seek(curPos, 0); err != nil {
return
}
return
}
func (self *Writer) Close() (err error) {
var curPos int64
if curPos, err = self.Seek(0, 1); err != nil {
return
}
if err = RefillInt(self, self.sizePos, int(curPos-self.sizePos), 4); err != nil {
return
}
if false {
log.Println("writeback", self.sizePos, curPos, curPos-self.sizePos)
}
return
}
func WriteAtomHeader(w io.WriteSeeker, cc4 string) (res *Writer, err error) {
self := &Writer{WriteSeeker: w}
if self.sizePos, err = WriteEmptyInt(w, 4); err != nil {
return
}
if err = WriteString(self, cc4, 4); err != nil {
return
}
res = self
return
}
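
WriteAtomHeader reserves a 4-byte size slot, remembers its offset, and Writer.Close seeks back to fill it with the final atom length; that is why the write path needs an io.WriteSeeker rather than a plain io.Writer. A sketch (not part of this commit) that writes a 24-byte 'free' atom to a file:

package main

import (
	"log"
	"os"

	"github.com/nareix/mp4/atom"
)

func main() {
	f, err := os.Create("free.bin") // placeholder output path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	aw, err := atom.WriteAtomHeader(f, "free")
	if err != nil {
		log.Fatal(err)
	}
	if err := atom.WriteDummy(aw, 16); err != nil { // 16 zero bytes of payload
		log.Fatal(err)
	}
	// Close seeks back to the reserved slot and writes the total size: 4 + 4 + 16 = 24.
	if err := aw.Close(); err != nil {
		log.Fatal(err)
	}
}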

460
mp4/demuxer.go Normal file

@ -0,0 +1,460 @@
package mp4
import (
"bytes"
"time"
"fmt"
"github.com/nareix/av"
"github.com/nareix/codec/aacparser"
"github.com/nareix/codec/h264parser"
"github.com/nareix/mp4/atom"
"github.com/nareix/mp4/isom"
"io"
)
type Demuxer struct {
R io.ReadSeeker
streams []*Stream
movieAtom *atom.Movie
}
func (self *Demuxer) Streams() (streams []av.CodecData, err error) {
if err = self.probe(); err != nil {
return
}
for _, stream := range self.streams {
streams = append(streams, stream.CodecData)
}
return
}
func (self *Demuxer) probe() (err error) {
if self.movieAtom != nil {
return
}
var N int64
var moov *atom.Movie
if N, err = self.R.Seek(0, 2); err != nil {
return
}
if _, err = self.R.Seek(0, 0); err != nil {
return
}
lr := &io.LimitedReader{R: self.R, N: N}
for lr.N > 0 {
var ar *io.LimitedReader
var cc4 string
if ar, cc4, err = atom.ReadAtomHeader(lr, ""); err != nil {
return
}
if cc4 == "moov" {
if moov, err = atom.ReadMovie(ar); err != nil {
return
}
}
if _, err = atom.ReadDummy(lr, int(ar.N)); err != nil {
return
}
}
if moov == nil {
err = fmt.Errorf("mp4: 'moov' atom not found")
return
}
self.streams = []*Stream{}
for i, atrack := range moov.Tracks {
stream := &Stream{
trackAtom: atrack,
r: self.R,
idx: i,
}
if atrack.Media != nil && atrack.Media.Info != nil && atrack.Media.Info.Sample != nil {
stream.sample = atrack.Media.Info.Sample
stream.timeScale = int64(atrack.Media.Header.TimeScale)
} else {
err = fmt.Errorf("mp4: sample table not found")
return
}
if avc1 := atom.GetAvc1ConfByTrack(atrack); avc1 != nil {
if stream.CodecData, err = h264parser.NewCodecDataFromAVCDecoderConfRecord(avc1.Data); err != nil {
return
}
self.streams = append(self.streams, stream)
} else if mp4a := atom.GetMp4aDescByTrack(atrack); mp4a != nil && mp4a.Conf != nil {
var config []byte
if config, err = isom.ReadElemStreamDesc(bytes.NewReader(mp4a.Conf.Data)); err != nil {
return
}
if stream.CodecData, err = aacparser.NewCodecDataFromMPEG4AudioConfigBytes(config); err != nil {
return
}
self.streams = append(self.streams, stream)
}
}
self.movieAtom = moov
return
}
func (self *Stream) setSampleIndex(index int) (err error) {
found := false
start := 0
self.chunkGroupIndex = 0
for self.chunkIndex = range self.sample.ChunkOffset.Entries {
if self.chunkGroupIndex+1 < len(self.sample.SampleToChunk.Entries) &&
self.chunkIndex+1 == self.sample.SampleToChunk.Entries[self.chunkGroupIndex+1].FirstChunk {
self.chunkGroupIndex++
}
n := self.sample.SampleToChunk.Entries[self.chunkGroupIndex].SamplesPerChunk
if index >= start && index < start+n {
found = true
self.sampleIndexInChunk = index - start
break
}
start += n
}
if !found {
err = fmt.Errorf("mp4: stream[%d]: cannot locate sample index in chunk", self.idx)
return
}
if self.sample.SampleSize.SampleSize != 0 {
self.sampleOffsetInChunk = int64(self.sampleIndexInChunk * self.sample.SampleSize.SampleSize)
} else {
if index >= len(self.sample.SampleSize.Entries) {
err = fmt.Errorf("mp4: stream[%d]: sample index out of range", self.idx)
return
}
self.sampleOffsetInChunk = int64(0)
for i := index - self.sampleIndexInChunk; i < index; i++ {
self.sampleOffsetInChunk += int64(self.sample.SampleSize.Entries[i])
}
}
self.dts = int64(0)
start = 0
found = false
self.sttsEntryIndex = 0
for self.sttsEntryIndex < len(self.sample.TimeToSample.Entries) {
entry := self.sample.TimeToSample.Entries[self.sttsEntryIndex]
n := entry.Count
if index >= start && index < start+n {
self.sampleIndexInSttsEntry = index - start
self.dts += int64((index - start) * entry.Duration)
found = true
break
}
start += n
self.dts += int64(n * entry.Duration)
self.sttsEntryIndex++
}
if !found {
err = fmt.Errorf("mp4: stream[%d]: cannot locate sample index in stts entry", self.idx)
return
}
if self.sample.CompositionOffset != nil && len(self.sample.CompositionOffset.Entries) > 0 {
start = 0
found = false
self.cttsEntryIndex = 0
for self.cttsEntryIndex < len(self.sample.CompositionOffset.Entries) {
n := self.sample.CompositionOffset.Entries[self.cttsEntryIndex].Count
if index >= start && index < start+n {
self.sampleIndexInCttsEntry = index - start
found = true
break
}
start += n
self.cttsEntryIndex++
}
if !found {
err = fmt.Errorf("mp4: stream[%d]: cannot locate sample index in ctts entry", self.idx)
return
}
}
if self.sample.SyncSample != nil {
self.syncSampleIndex = 0
for self.syncSampleIndex < len(self.sample.SyncSample.Entries)-1 {
if self.sample.SyncSample.Entries[self.syncSampleIndex+1]-1 > index {
break
}
self.syncSampleIndex++
}
}
if false {
fmt.Printf("mp4: stream[%d]: setSampleIndex chunkGroupIndex=%d chunkIndex=%d sampleOffsetInChunk=%d\n",
self.idx, self.chunkGroupIndex, self.chunkIndex, self.sampleOffsetInChunk)
}
self.sampleIndex = index
return
}
func (self *Stream) isSampleValid() bool {
if self.chunkIndex >= len(self.sample.ChunkOffset.Entries) {
return false
}
if self.chunkGroupIndex >= len(self.sample.SampleToChunk.Entries) {
return false
}
if self.sttsEntryIndex >= len(self.sample.TimeToSample.Entries) {
return false
}
if self.sample.CompositionOffset != nil && len(self.sample.CompositionOffset.Entries) > 0 {
if self.cttsEntryIndex >= len(self.sample.CompositionOffset.Entries) {
return false
}
}
if self.sample.SyncSample != nil {
if self.syncSampleIndex >= len(self.sample.SyncSample.Entries) {
return false
}
}
if self.sample.SampleSize.SampleSize != 0 {
if self.sampleIndex >= len(self.sample.SampleSize.Entries) {
return false
}
}
return true
}
func (self *Stream) incSampleIndex() (duration int64) {
if false {
fmt.Printf("incSampleIndex sampleIndex=%d sampleOffsetInChunk=%d sampleIndexInChunk=%d chunkGroupIndex=%d chunkIndex=%d\n",
self.sampleIndex, self.sampleOffsetInChunk, self.sampleIndexInChunk, self.chunkGroupIndex, self.chunkIndex)
}
self.sampleIndexInChunk++
if self.sampleIndexInChunk == self.sample.SampleToChunk.Entries[self.chunkGroupIndex].SamplesPerChunk {
self.chunkIndex++
self.sampleIndexInChunk = 0
self.sampleOffsetInChunk = int64(0)
} else {
if self.sample.SampleSize.SampleSize != 0 {
self.sampleOffsetInChunk += int64(self.sample.SampleSize.SampleSize)
} else {
self.sampleOffsetInChunk += int64(self.sample.SampleSize.Entries[self.sampleIndex])
}
}
if self.chunkGroupIndex+1 < len(self.sample.SampleToChunk.Entries) &&
self.chunkIndex+1 == self.sample.SampleToChunk.Entries[self.chunkGroupIndex+1].FirstChunk {
self.chunkGroupIndex++
}
sttsEntry := self.sample.TimeToSample.Entries[self.sttsEntryIndex]
duration = int64(sttsEntry.Duration)
self.sampleIndexInSttsEntry++
self.dts += duration
if self.sampleIndexInSttsEntry == sttsEntry.Count {
self.sampleIndexInSttsEntry = 0
self.sttsEntryIndex++
}
if self.sample.CompositionOffset != nil && len(self.sample.CompositionOffset.Entries) > 0 {
self.sampleIndexInCttsEntry++
if self.sampleIndexInCttsEntry == self.sample.CompositionOffset.Entries[self.cttsEntryIndex].Count {
self.sampleIndexInCttsEntry = 0
self.cttsEntryIndex++
}
}
if self.sample.SyncSample != nil {
entries := self.sample.SyncSample.Entries
if self.syncSampleIndex+1 < len(entries) && entries[self.syncSampleIndex+1]-1 == self.sampleIndex+1 {
self.syncSampleIndex++
}
}
self.sampleIndex++
return
}
func (self *Stream) sampleCount() int {
if self.sample.SampleSize.SampleSize == 0 {
chunkGroupIndex := 0
count := 0
for chunkIndex := range self.sample.ChunkOffset.Entries {
n := self.sample.SampleToChunk.Entries[chunkGroupIndex].SamplesPerChunk
count += n
if chunkGroupIndex+1 < len(self.sample.SampleToChunk.Entries) &&
chunkIndex+1 == self.sample.SampleToChunk.Entries[chunkGroupIndex+1].FirstChunk {
chunkGroupIndex++
}
}
return count
} else {
return len(self.sample.SampleSize.Entries)
}
}
func (self *Demuxer) ReadPacket() (pkt av.Packet, err error) {
if err = self.probe(); err != nil {
return
}
var chosen *Stream
var chosenidx int
for i, stream := range self.streams {
if chosen == nil || stream.tsToTime(stream.dts) < chosen.tsToTime(chosen.dts) {
chosen = stream
chosenidx = i
}
}
if false {
fmt.Printf("ReadPacket: chosen index=%v time=%v\n", chosen.idx, chosen.tsToTime(chosen.dts))
}
tm := chosen.tsToTime(chosen.dts)
if pkt, err = chosen.readPacket(); err != nil {
return
}
pkt.Time = tm
pkt.Idx = int8(chosenidx)
return
}
func (self *Demuxer) CurrentTime() (tm time.Duration) {
if len(self.streams) > 0 {
stream := self.streams[0]
tm = stream.tsToTime(stream.dts)
}
return
}
func (self *Demuxer) SeekToTime(tm time.Duration) (err error) {
for _, stream := range self.streams {
if stream.Type().IsVideo() {
if err = stream.seekToTime(tm); err != nil {
return
}
tm = stream.tsToTime(stream.dts)
break
}
}
for _, stream := range self.streams {
if !stream.Type().IsVideo() {
if err = stream.seekToTime(tm); err != nil {
return
}
}
}
return
}
func (self *Stream) readPacket() (pkt av.Packet, err error) {
if !self.isSampleValid() {
err = io.EOF
return
}
//fmt.Println("readPacket", self.sampleIndex)
chunkOffset := self.sample.ChunkOffset.Entries[self.chunkIndex]
sampleSize := 0
if self.sample.SampleSize.SampleSize != 0 {
sampleSize = self.sample.SampleSize.SampleSize
} else {
sampleSize = self.sample.SampleSize.Entries[self.sampleIndex]
}
sampleOffset := int64(chunkOffset) + self.sampleOffsetInChunk
if _, err = self.r.Seek(sampleOffset, 0); err != nil {
return
}
pkt.Data = make([]byte, sampleSize)
if _, err = io.ReadFull(self.r, pkt.Data); err != nil {
return
}
switch self.Type() {
case av.H264:
var ok bool
if pkt.Data, ok = h264parser.FindDataNALUInAVCCNALUs(pkt.Data); !ok {
err = fmt.Errorf("mp4: input h264 format invalid")
return
}
}
if self.sample.SyncSample != nil {
if self.sample.SyncSample.Entries[self.syncSampleIndex]-1 == self.sampleIndex {
pkt.IsKeyFrame = true
}
}
//println("pts/dts", self.ptsEntryIndex, self.dtsEntryIndex)
if self.sample.CompositionOffset != nil && len(self.sample.CompositionOffset.Entries) > 0 {
cts := int64(self.sample.CompositionOffset.Entries[self.cttsEntryIndex].Offset)
pkt.CompositionTime = self.tsToTime(cts)
}
self.incSampleIndex()
return
}
func (self *Stream) seekToTime(tm time.Duration) (err error) {
index := self.timeToSampleIndex(tm)
if err = self.setSampleIndex(index); err != nil {
return
}
if false {
fmt.Printf("stream[%d]: seekToTime index=%v time=%v cur=%v\n", self.idx, index, tm, self.tsToTime(self.dts))
}
return
}
func (self *Stream) timeToSampleIndex(tm time.Duration) int {
targetTs := self.timeToTs(tm)
targetIndex := 0
startTs := int64(0)
endTs := int64(0)
startIndex := 0
endIndex := 0
found := false
for _, entry := range self.sample.TimeToSample.Entries {
endTs = startTs + int64(entry.Count*entry.Duration)
endIndex = startIndex + entry.Count
if targetTs >= startTs && targetTs < endTs {
targetIndex = startIndex + int((targetTs-startTs)/int64(entry.Duration))
found = true
}
startTs = endTs
startIndex = endIndex
}
if !found {
if targetTs < 0 {
targetIndex = 0
} else {
targetIndex = endIndex - 1
}
}
if self.sample.SyncSample != nil {
entries := self.sample.SyncSample.Entries
for i := len(entries) - 1; i >= 0; i-- {
if entries[i]-1 < targetIndex {
targetIndex = entries[i] - 1
break
}
}
}
return targetIndex
}
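
A minimal read loop (not part of this commit), assuming the package is importable as github.com/nareix/mp4 and that in.mp4 exists. ReadPacket always returns the packet from the stream with the smallest decode time, and reports io.EOF once that stream runs out of samples:

package main

import (
	"io"
	"log"
	"os"

	"github.com/nareix/mp4"
)

func main() {
	f, err := os.Open("in.mp4") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	demuxer := &mp4.Demuxer{R: f}
	streams, err := demuxer.Streams()
	if err != nil {
		log.Fatal(err)
	}
	log.Println("stream count:", len(streams))

	for {
		pkt, err := demuxer.ReadPacket()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		log.Printf("stream=%d keyframe=%v time=%v size=%d", pkt.Idx, pkt.IsKeyFrame, pkt.Time, len(pkt.Data))
	}
}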

18
mp4/handler.go Normal file

@ -0,0 +1,18 @@
package mp4
import (
"io"
"github.com/nareix/av"
"github.com/nareix/av/avutil"
)
func Handler(h *avutil.RegisterHandler) {
h.Ext = ".mp4"
h.ReaderDemuxer = func(r io.Reader) av.Demuxer {
return &Demuxer{R: r.(io.ReadSeeker)}
}
h.WriterMuxer = func(w io.Writer) av.Muxer {
return &Muxer{W: w.(io.WriteSeeker)}
}
}

262
mp4/isom/isom.go Normal file

@ -0,0 +1,262 @@
package isom
import (
"bytes"
"fmt"
"github.com/nareix/bits"
"io"
"io/ioutil"
)
// copied from libavformat/isom.h
const (
MP4ESDescrTag = 3
MP4DecConfigDescrTag = 4
MP4DecSpecificDescrTag = 5
)
var debugReader = false
var debugWriter = false
func readDesc(r io.Reader) (tag uint, data []byte, err error) {
if tag, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
var length uint
for i := 0; i < 4; i++ {
var c uint
if c, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
length = (length << 7) | (c & 0x7f)
if c&0x80 == 0 {
break
}
}
data = make([]byte, length)
if _, err = io.ReadFull(r, data); err != nil {
return
}
return
}
func writeDesc(w io.Writer, tag uint, data []byte) (err error) {
if err = bits.WriteUIntBE(w, tag, 8); err != nil {
return
}
length := uint(len(data))
for i := 3; i > 0; i-- {
if err = bits.WriteUIntBE(w, (length>>uint(7*i))&0x7f|0x80, 8); err != nil {
return
}
}
if err = bits.WriteUIntBE(w, length&0x7f, 8); err != nil {
return
}
if _, err = w.Write(data); err != nil {
return
}
return
}
func readESDesc(r io.Reader) (err error) {
var ES_ID uint
// ES_ID
if ES_ID, err = bits.ReadUIntBE(r, 16); err != nil {
return
}
var flags uint
if flags, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
//streamDependenceFlag
if flags&0x80 != 0 {
if _, err = bits.ReadUIntBE(r, 16); err != nil {
return
}
}
//URL_Flag
if flags&0x40 != 0 {
var length uint
if length, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
if _, err = io.CopyN(ioutil.Discard, r, int64(length)); err != nil {
return
}
}
//OCRstreamFlag
if flags&0x20 != 0 {
if _, err = bits.ReadUIntBE(r, 16); err != nil {
return
}
}
if debugReader {
println("readESDesc:", ES_ID, flags)
}
return
}
func writeESDesc(w io.Writer, ES_ID uint) (err error) {
// ES_ID
if err = bits.WriteUIntBE(w, ES_ID, 16); err != nil {
return
}
// flags
if err = bits.WriteUIntBE(w, 0, 8); err != nil {
return
}
return
}
func readDescByTag(r io.Reader, targetTag uint) (data []byte, err error) {
var found bool
for {
if tag, _data, err := readDesc(r); err != nil {
break
} else {
if tag == targetTag {
data = _data
found = true
}
if debugReader {
println("readDescByTag:", tag, len(_data))
}
}
}
if !found {
err = fmt.Errorf("tag not found")
return
}
return
}
// copied from libavformat/isom.c ff_mp4_read_dec_config_descr()
func readDecConfDesc(r io.Reader) (decConfig []byte, err error) {
var objectId uint
var streamType uint
var bufSize uint
var maxBitrate uint
var avgBitrate uint
// objectId
if objectId, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
// streamType
if streamType, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
// buffer size db
if bufSize, err = bits.ReadUIntBE(r, 24); err != nil {
return
}
// max bitrate
if maxBitrate, err = bits.ReadUIntBE(r, 32); err != nil {
return
}
// avg bitrate
if avgBitrate, err = bits.ReadUIntBE(r, 32); err != nil {
return
}
if debugReader {
println("readDecConfDesc:", objectId, streamType, bufSize, maxBitrate, avgBitrate)
}
if decConfig, err = readDescByTag(r, MP4DecSpecificDescrTag); err != nil {
return
}
return
}
// copied from libavformat/movenc.c mov_write_esds_tag()
func writeDecConfDesc(w io.Writer, objectId uint, streamType uint, decConfig []byte) (err error) {
// objectId
if err = bits.WriteUIntBE(w, objectId, 8); err != nil {
return
}
// streamType
if err = bits.WriteUIntBE(w, streamType, 8); err != nil {
return
}
// buffer size db
if err = bits.WriteUIntBE(w, 0, 24); err != nil {
return
}
// max bitrate
if err = bits.WriteUIntBE(w, 200000, 32); err != nil {
return
}
// avg bitrate
if err = bits.WriteUIntBE(w, 0, 32); err != nil {
return
}
if err = writeDesc(w, MP4DecSpecificDescrTag, decConfig); err != nil {
return
}
return
}
// copied from libavformat/mov.c ff_mov_read_esds()
func ReadElemStreamDesc(r io.Reader) (decConfig []byte, err error) {
if debugReader {
println("ReadElemStreamDesc: start")
}
var data []byte
if data, err = readDescByTag(r, MP4ESDescrTag); err != nil {
return
}
r = bytes.NewReader(data)
if err = readESDesc(r); err != nil {
return
}
if data, err = readDescByTag(r, MP4DecConfigDescrTag); err != nil {
return
}
r = bytes.NewReader(data)
if decConfig, err = readDecConfDesc(r); err != nil {
return
}
if debugReader {
println("ReadElemStreamDesc: end")
}
return
}
func WriteElemStreamDesc(w io.Writer, decConfig []byte, trackId uint) (err error) {
// MP4ESDescrTag(ESDesc MP4DecConfigDescrTag(objectId streamType bufSize avgBitrate MP4DecSpecificDescrTag(decConfig)))
data := decConfig
buf := &bytes.Buffer{}
// 0x40 = ObjectType AAC
// 0x15 = Audiostream
writeDecConfDesc(buf, 0x40, 0x15, data)
data = buf.Bytes()
buf = &bytes.Buffer{}
writeDesc(buf, MP4DecConfigDescrTag, data) // 4
data = buf.Bytes()
buf = &bytes.Buffer{}
writeESDesc(buf, trackId)
buf.Write(data)
writeDesc(buf, 0x06, []byte{0x02})
data = buf.Bytes()
buf = &bytes.Buffer{}
writeDesc(buf, MP4ESDescrTag, data) // 3
data = buf.Bytes()
if _, err = w.Write(data); err != nil {
return
}
return
}
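
The comment above WriteElemStreamDesc spells out the descriptor nesting; ReadElemStreamDesc walks the same nesting in reverse, so the two functions round-trip a decoder-specific config. A sketch (not part of this commit); the two config bytes are a hypothetical AAC-LC, 44.1 kHz, stereo AudioSpecificConfig:

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/nareix/mp4/isom"
)

func main() {
	asc := []byte{0x12, 0x10} // hypothetical AudioSpecificConfig: AAC-LC, 44100 Hz, 2 channels

	buf := &bytes.Buffer{}
	if err := isom.WriteElemStreamDesc(buf, asc, 1); err != nil {
		log.Fatal(err)
	}
	got, err := isom.ReadElemStreamDesc(bytes.NewReader(buf.Bytes()))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("in=%x out=%x\n", asc, got) // the decoder config survives the round trip
}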

53
mp4/isom/isom_test.go Normal file

@ -0,0 +1,53 @@
package isom
import (
"bytes"
"encoding/hex"
"testing"
)
func TestReadElemStreamDesc(t *testing.T) {
debugReader = true
debugWriter = true
var err error
data, _ := hex.DecodeString("03808080220002000480808014401500000000030d400000000005808080021210068080800102")
t.Logf("elemDesc=%x", data)
t.Logf("length=%d", len(data))
var aconfig MPEG4AudioConfig
if aconfig, err = ReadElemStreamDescAAC(bytes.NewReader(data)); err != nil {
t.Error(err)
}
aconfig = aconfig.Complete()
t.Logf("aconfig=%v", aconfig)
bw := &bytes.Buffer{}
WriteMPEG4AudioConfig(bw, aconfig)
bw = &bytes.Buffer{}
WriteElemStreamDescAAC(bw, aconfig, 2)
t.Logf("elemDesc=%x", bw.Bytes())
data = bw.Bytes()
t.Logf("length=%d", len(data))
if aconfig, err = ReadElemStreamDescAAC(bytes.NewReader(data)); err != nil {
t.Error(err)
}
t.Logf("aconfig=%v", aconfig.Complete())
//00000000 ff f1 50 80 04 3f fc de 04 00 00 6c 69 62 66 61 |..P..?.....libfa|
//00000010 61 63 20 31 2e 32 38 00 00 42 40 93 20 04 32 00 |ac 1.28..B@. .2.|
//00000020 47 ff f1 50 80 05 1f fc 21 42 fe ed b2 5c a8 00 |G..P....!B...\..|
data, _ = hex.DecodeString("fff15080043ffcde040000")
var n, framelen int
aconfig, _, n, _, _ = ReadADTSFrame(data)
t.Logf("%v n=%d", aconfig.Complete(), n)
data = MakeADTSHeader(aconfig, 1024*3, 33)
data = append(data, []byte{1, 2, 3, 4, 5}...)
t.Logf("%x", data)
aconfig, _, n, framelen, err = ReadADTSFrame(data)
t.Logf("%v n=%d framelen=%d err=%v", aconfig.Complete(), n, framelen, err)
}

280
mp4/muxer.go Normal file

@ -0,0 +1,280 @@
package mp4
import (
"bytes"
"fmt"
"time"
"github.com/nareix/av"
"github.com/nareix/pio"
"github.com/nareix/codec/aacparser"
"github.com/nareix/codec/h264parser"
"github.com/nareix/mp4/atom"
"github.com/nareix/mp4/isom"
"io"
)
type Muxer struct {
W io.WriteSeeker
streams []*Stream
mdatWriter *atom.Writer
}
func (self *Muxer) isCodecSupported(codec av.CodecData) bool {
switch codec.Type() {
case av.H264, av.AAC:
return true
default:
return false
}
}
func (self *Muxer) newStream(codec av.CodecData) (err error) {
if !self.isCodecSupported(codec) {
err = fmt.Errorf("mp4: codec type=%v is not supported", codec.Type())
return
}
stream := &Stream{CodecData: codec}
stream.sample = &atom.SampleTable{
SampleDesc: &atom.SampleDesc{},
TimeToSample: &atom.TimeToSample{},
SampleToChunk: &atom.SampleToChunk{
Entries: []atom.SampleToChunkEntry{
{
FirstChunk: 1,
SampleDescId: 1,
SamplesPerChunk: 1,
},
},
},
SampleSize: &atom.SampleSize{},
ChunkOffset: &atom.ChunkOffset{},
}
stream.trackAtom = &atom.Track{
Header: &atom.TrackHeader{
TrackId: len(self.streams) + 1,
Flags: 0x0003, // Track enabled | Track in movie
Duration: 0, // fill later
Matrix: [9]int{0x10000, 0, 0, 0, 0x10000, 0, 0, 0, 0x40000000},
},
Media: &atom.Media{
Header: &atom.MediaHeader{
TimeScale: 0, // fill later
Duration: 0, // fill later
Language: 21956,
},
Info: &atom.MediaInfo{
Sample: stream.sample,
Data: &atom.DataInfo{
Refer: &atom.DataRefer{
Url: &atom.DataReferUrl{
Flags: 0x000001, // Self reference
},
},
},
},
},
}
switch codec.Type() {
case av.H264:
stream.sample.SyncSample = &atom.SyncSample{}
}
stream.timeScale = 90000
stream.muxer = self
self.streams = append(self.streams, stream)
return
}
func (self *Stream) fillTrackAtom() (err error) {
self.trackAtom.Media.Header.TimeScale = int(self.timeScale)
self.trackAtom.Media.Header.Duration = int(self.duration)
if self.Type() == av.H264 {
codec := self.CodecData.(h264parser.CodecData)
width, height := codec.Width(), codec.Height()
self.sample.SampleDesc.Avc1Desc = &atom.Avc1Desc{
DataRefIdx: 1,
HorizontalResolution: 72,
VorizontalResolution: 72,
Width: int(width),
Height: int(height),
FrameCount: 1,
Depth: 24,
ColorTableId: -1,
Conf: &atom.Avc1Conf{Data: codec.AVCDecoderConfRecordBytes()},
}
self.trackAtom.Media.Handler = &atom.HandlerRefer{
SubType: "vide",
Name: "Video Media Handler",
}
self.trackAtom.Media.Info.Video = &atom.VideoMediaInfo{
Flags: 0x000001,
}
self.trackAtom.Header.TrackWidth = atom.IntToFixed(int(width))
self.trackAtom.Header.TrackHeight = atom.IntToFixed(int(height))
} else if self.Type() == av.AAC {
codec := self.CodecData.(aacparser.CodecData)
buf := &bytes.Buffer{}
if err = isom.WriteElemStreamDesc(buf, codec.MPEG4AudioConfigBytes(), uint(self.trackAtom.Header.TrackId)); err != nil {
return
}
self.sample.SampleDesc.Mp4aDesc = &atom.Mp4aDesc{
DataRefIdx: 1,
NumberOfChannels: codec.ChannelLayout().Count(),
SampleSize: codec.SampleFormat().BytesPerSample(),
SampleRate: atom.IntToFixed(codec.SampleRate()),
Conf: &atom.ElemStreamDesc{
Data: buf.Bytes(),
},
}
self.trackAtom.Header.Volume = atom.IntToFixed(1)
self.trackAtom.Header.AlternateGroup = 1
self.trackAtom.Media.Handler = &atom.HandlerRefer{
SubType: "soun",
Name: "Sound Handler",
}
self.trackAtom.Media.Info.Sound = &atom.SoundMediaInfo{}
} else {
err = fmt.Errorf("mp4: codec type=%d invalid", self.Type())
}
return
}
func (self *Muxer) WriteHeader(streams []av.CodecData) (err error) {
self.streams = []*Stream{}
for _, stream := range streams {
if err = self.newStream(stream); err != nil {
return
}
}
if self.mdatWriter, err = atom.WriteAtomHeader(self.W, "mdat"); err != nil {
return
}
for _, stream := range self.streams {
if stream.Type().IsVideo() {
stream.sample.CompositionOffset = &atom.CompositionOffset{}
}
}
return
}
func (self *Muxer) WritePacket(pkt av.Packet) (err error) {
stream := self.streams[pkt.Idx]
if err = stream.writePacket(pkt); err != nil {
return
}
return
}
func (self *Stream) writePacket(pkt av.Packet) (err error) {
if self.lasttime == 0 {
self.lasttime = pkt.Time
return
}
rawdur := pkt.Time - self.lasttime
if rawdur < 0 {
err = fmt.Errorf("mp4: stream#%d time=%v < lasttime=%v", pkt.Idx, pkt.Time, self.lasttime)
return
}
self.lasttime = pkt.Time
var filePos int64
var sampleSize int
if filePos, err = self.muxer.mdatWriter.Seek(0, 1); err != nil {
return
}
if self.Type() == av.H264 {
if typ := h264parser.CheckNALUsType(pkt.Data); typ != h264parser.NALU_RAW {
err = fmt.Errorf("mp4: nalu format=%d is not raw", typ)
return
}
var b [4]byte
pio.PutU32BE(b[:], uint32(len(pkt.Data)))
sampleSize += len(pkt.Data)+4
if _, err = self.muxer.mdatWriter.Write(b[:]); err != nil {
return
}
if _, err = self.muxer.mdatWriter.Write(pkt.Data); err != nil {
return
}
} else {
sampleSize = len(pkt.Data)
if _, err = self.muxer.mdatWriter.Write(pkt.Data); err != nil {
return
}
}
if pkt.IsKeyFrame && self.sample.SyncSample != nil {
self.sample.SyncSample.Entries = append(self.sample.SyncSample.Entries, self.sampleIndex+1)
}
duration := int(self.timeToTs(rawdur))
if self.sttsEntry == nil || duration != self.sttsEntry.Duration {
self.sample.TimeToSample.Entries = append(self.sample.TimeToSample.Entries, atom.TimeToSampleEntry{Duration: duration})
self.sttsEntry = &self.sample.TimeToSample.Entries[len(self.sample.TimeToSample.Entries)-1]
}
self.sttsEntry.Count++
if self.sample.CompositionOffset != nil {
offset := int(self.timeToTs(pkt.CompositionTime))
if self.cttsEntry == nil || offset != self.cttsEntry.Offset {
table := self.sample.CompositionOffset
table.Entries = append(table.Entries, atom.CompositionOffsetEntry{Offset: offset})
self.cttsEntry = &table.Entries[len(table.Entries)-1]
}
self.cttsEntry.Count++
}
self.duration += int64(duration)
self.sampleIndex++
self.sample.ChunkOffset.Entries = append(self.sample.ChunkOffset.Entries, int(filePos))
self.sample.SampleSize.Entries = append(self.sample.SampleSize.Entries, sampleSize)
return
}
func (self *Muxer) WriteTrailer() (err error) {
moov := &atom.Movie{}
moov.Header = &atom.MovieHeader{
PreferredRate: atom.IntToFixed(1),
PreferredVolume: atom.IntToFixed(1),
Matrix: [9]int{0x10000, 0, 0, 0, 0x10000, 0, 0, 0, 0x40000000},
NextTrackId: 2,
}
maxDur := time.Duration(0)
timeScale := int64(10000)
for _, stream := range self.streams {
if err = stream.fillTrackAtom(); err != nil {
return
}
dur := stream.tsToTime(stream.duration)
stream.trackAtom.Header.Duration = int(timeToTs(dur, timeScale))
if dur > maxDur {
maxDur = dur
}
moov.Tracks = append(moov.Tracks, stream.trackAtom)
}
moov.Header.TimeScale = int(timeScale)
moov.Header.Duration = int(timeToTs(maxDur, timeScale))
if err = self.mdatWriter.Close(); err != nil {
return
}
if err = atom.WriteMovie(self.W, moov); err != nil {
return
}
return
}
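
WriteHeader opens the 'mdat' atom up front, WritePacket appends each sample payload and grows the stbl tables in memory, and WriteTrailer back-patches the 'mdat' size before emitting the finished 'moov'. Wired to the demuxer above, that gives a straight remux loop. A sketch (not part of this commit), with placeholder file names:

package main

import (
	"io"
	"log"
	"os"

	"github.com/nareix/mp4"
)

func main() {
	in, err := os.Open("in.mp4")
	if err != nil {
		log.Fatal(err)
	}
	defer in.Close()

	out, err := os.Create("out.mp4")
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	demuxer := &mp4.Demuxer{R: in}
	muxer := &mp4.Muxer{W: out}

	streams, err := demuxer.Streams()
	if err != nil {
		log.Fatal(err)
	}
	if err := muxer.WriteHeader(streams); err != nil {
		log.Fatal(err)
	}
	for {
		pkt, err := demuxer.ReadPacket()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		if err := muxer.WritePacket(pkt); err != nil {
			log.Fatal(err)
		}
	}
	if err := muxer.WriteTrailer(); err != nil {
		log.Fatal(err)
	}
}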

59
mp4/stream.go Normal file

@ -0,0 +1,59 @@
package mp4
import (
"github.com/nareix/av"
"github.com/nareix/mp4/atom"
"time"
"io"
)
type Stream struct {
av.CodecData
trackAtom *atom.Track
r io.ReadSeeker
idx int
lasttime time.Duration
timeScale int64
duration int64
muxer *Muxer
sample *atom.SampleTable
sampleIndex int
sampleOffsetInChunk int64
syncSampleIndex int
dts int64
sttsEntryIndex int
sampleIndexInSttsEntry int
cttsEntryIndex int
sampleIndexInCttsEntry int
chunkGroupIndex int
chunkIndex int
sampleIndexInChunk int
sttsEntry *atom.TimeToSampleEntry
cttsEntry *atom.CompositionOffsetEntry
}
func timeToTs(tm time.Duration, timeScale int64) int64 {
return int64(tm*time.Duration(timeScale) / time.Second)
}
func tsToTime(ts int64, timeScale int64) time.Duration {
return time.Duration(ts)*time.Second / time.Duration(timeScale)
}
func (self *Stream) timeToTs(tm time.Duration) int64 {
return int64(tm*time.Duration(self.timeScale) / time.Second)
}
func (self *Stream) tsToTime(ts int64) time.Duration {
return time.Duration(ts)*time.Second / time.Duration(self.timeScale)
}