mp4: rewrite mp4io and improve atom parsing

This commit is contained in:
nareix 2016-07-28 23:06:08 +08:00
parent 3fec89ce48
commit 747c21a957
16 changed files with 130 additions and 4915 deletions

View File

@ -1,44 +0,0 @@
package atom
import (
"io"
)
// WalkFile reads top-level MP4 atoms from r and reports the ones it
// understands (moov, moof, mdat) to the Walker w.
//
// It loops until ReadAtomHeader fails; on a well-formed stream that
// failure is io.EOF, which is returned to the caller as-is.
func WalkFile(w Walker, r io.Reader) (err error) {
	var moov *Movie
	var moof *MovieFrag
	for {
		var lr *io.LimitedReader
		var cc4 string
		// BUG FIX: the atom header must be read from the underlying
		// reader r; the previous code passed the still-nil lr,
		// guaranteeing a nil-pointer dereference on the first read.
		if lr, cc4, err = ReadAtomHeader(r, ""); err != nil {
			return
		}
		switch cc4 {
		case "moov":
			if moov, err = ReadMovie(lr); err != nil {
				return
			}
			WalkMovie(w, moov)
		case "moof":
			if moof, err = ReadMovieFrag(lr); err != nil {
				return
			}
			WalkMovieFrag(w, moof)
		case "mdat":
			// Media data is not parsed; only its length is reported.
			w.StartStruct("MovieData")
			w.Name("Length")
			w.Int64(lr.N)
			w.EndStruct()
		}
		// Skip whatever the handlers above left unread in this atom
		// (lr.N is decremented by every read through lr).
		if _, err = ReadDummy(r, int(lr.N)); err != nil {
			return
		}
	}
}

View File

@ -1,95 +0,0 @@
package atom
import (
"encoding/hex"
"fmt"
"io"
"strings"
)
// Walker receives a structured, read-only traversal of parsed atoms.
// Implementations decide how each field is presented (Dumper below
// prints them as indented text).
type Walker interface {
	// FilterArrayItem(structName, fieldName, index, total) reports
	// whether array element index should be visited at all.
	FilterArrayItem(string, string, int, int) bool
	// ArrayLeft(index, total) is called once when array iteration is
	// cut short by FilterArrayItem.
	ArrayLeft(int, int)
	StartStruct(string)
	EndStruct()
	// Name records the field name consumed by the next value callback.
	Name(string)
	Int(int)
	Int64(int64)
	HexInt(int)
	Fixed(Fixed)
	String(string)
	Bytes([]byte)
	TimeStamp(TimeStamp)
	Println(msg ...interface{})
}
// Dumper is a Walker that pretty-prints the atom tree to W with
// two-space indentation per nesting level. Value receivers are used
// for pure-output methods; pointer receivers for the ones that mutate
// traversal state (depth, pending field name).
type Dumper struct {
	W     io.Writer
	depth int    // current struct nesting level
	name  string // field name set by Name, consumed by the value methods
	// NOTE(review): arrlen/arridx are not referenced by any method
	// visible here — confirm whether they are dead fields.
	arrlen int
	arridx int
}

// tab returns the indentation prefix for the current depth.
func (self Dumper) tab() string {
	return strings.Repeat(" ", self.depth*2)
}

// Println writes one indented line to the underlying writer.
func (self Dumper) Println(msg ...interface{}) {
	fmt.Fprintln(self.W, self.tab()+fmt.Sprint(msg...))
}

// ArrayLeft reports the total element count of an early-terminated array.
func (self *Dumper) ArrayLeft(i int, n int) {
	self.Println(fmt.Sprintf("... total %d elements", n))
}

// FilterArrayItem caps long arrays: past index 20 of a more-than-20
// element array, elements are skipped.
func (self *Dumper) FilterArrayItem(name string, field string, i int, n int) bool {
	if n > 20 && i > 20 {
		return false
	}
	return true
}

// EndArray is a no-op.
// NOTE(review): EndArray is not part of the Walker interface above —
// confirm whether this method is dead code.
func (self *Dumper) EndArray() {
}

// StartStruct enters a nesting level and prints the struct name
// (the name is printed at the new, deeper indentation).
func (self *Dumper) StartStruct(name string) {
	self.depth++
	self.Println(fmt.Sprintf("[%s]", name))
}

// EndStruct leaves the current nesting level.
func (self *Dumper) EndStruct() {
	self.depth--
}

// Name records the field name used by the next value method.
func (self *Dumper) Name(name string) {
	self.name = name
}

func (self Dumper) Int(val int) {
	self.Int64(int64(val))
}

func (self Dumper) Int64(val int64) {
	self.Println(fmt.Sprintf("%s: %d", self.name, val))
}

// HexInt prints the pending field in bare hex (no 0x prefix).
func (self Dumper) HexInt(val int) {
	self.Println(fmt.Sprintf("%s: %x", self.name, val))
}

func (self Dumper) String(val string) {
	self.Println(fmt.Sprintf("%s: %s", self.name, val))
}

// Fixed prints only the integer part of a 16.16 fixed-point value.
func (self Dumper) Fixed(val Fixed) {
	self.Println(fmt.Sprintf("%s: %d", self.name, FixedToInt(val)))
}

// Bytes prints the pending field as lowercase hex.
func (self Dumper) Bytes(val []byte) {
	self.Println(fmt.Sprintf("%s: %s", self.name, hex.EncodeToString(val)))
}

func (self Dumper) TimeStamp(val TimeStamp) {
	self.Println(fmt.Sprintf("%s: %d", self.name, int(val)))
}

View File

@ -1,274 +0,0 @@
package atom
import (
_ "bytes"
"fmt"
"github.com/nareix/bits"
"io"
)
const (
TFHD_BASE_DATA_OFFSET = 0x01
TFHD_STSD_ID = 0x02
TFHD_DEFAULT_DURATION = 0x08
TFHD_DEFAULT_SIZE = 0x10
TFHD_DEFAULT_FLAGS = 0x20
TFHD_DURATION_IS_EMPTY = 0x010000
TFHD_DEFAULT_BASE_IS_MOOF = 0x020000
)
// TrackFragHeader holds a parsed tfhd (track fragment header) atom.
// Fields other than Id are meaningful only when the matching TFHD_*
// bit is set in Flags.
type TrackFragHeader struct {
	Version         int
	Flags           int
	Id              int   // track id this fragment belongs to
	DefaultSize     int   // default sample size (TFHD_DEFAULT_SIZE)
	DefaultDuration int   // default sample duration (TFHD_DEFAULT_DURATION)
	DefaultFlags    int   // default sample flags (TFHD_DEFAULT_FLAGS)
	BaseDataOffset  int64 // absolute data offset (TFHD_BASE_DATA_OFFSET)
	StsdId          int   // sample description index (TFHD_STSD_ID)
}
// WalkTrackFragHeader reports the parsed tfhd fields to w.
func WalkTrackFragHeader(w Walker, self *TrackFragHeader) {
	w.StartStruct("TrackFragHeader")
	for _, field := range []struct {
		name string
		emit func()
	}{
		{"Flags", func() { w.HexInt(self.Flags) }},
		{"Id", func() { w.Int(self.Id) }},
		{"DefaultDuration", func() { w.Int(self.DefaultDuration) }},
		{"DefaultSize", func() { w.Int(self.DefaultSize) }},
		{"DefaultFlags", func() { w.HexInt(self.DefaultFlags) }},
	} {
		w.Name(field.name)
		field.emit()
	}
	w.EndStruct()
}
// WriteTrackFragHeader would serialize a tfhd atom; writing fragmented
// headers is not supported yet, so any call is a programming error.
// (Fixed the "unimplmented" typo and dropped the unreachable return.)
func WriteTrackFragHeader(w io.WriteSeeker, self *TrackFragHeader) (err error) {
	panic("unimplemented")
}
// ReadTrackFragHeader parses a tfhd (track fragment header) atom:
// version/flags, the mandatory track id, then optional defaults whose
// presence and order are governed by the TFHD_* bits in Flags.
func ReadTrackFragHeader(r *io.LimitedReader) (res *TrackFragHeader, err error) {
	self := &TrackFragHeader{}
	if self.Version, err = ReadInt(r, 1); err != nil {
		return
	}
	if self.Flags, err = ReadInt(r, 3); err != nil {
		return
	}
	if self.Id, err = ReadInt(r, 4); err != nil {
		return
	}
	// Optional fields appear in this fixed order when their bit is set.
	if self.Flags&TFHD_BASE_DATA_OFFSET != 0 {
		if self.BaseDataOffset, err = bits.ReadInt64BE(r, 64); err != nil {
			return
		}
	}
	if self.Flags&TFHD_STSD_ID != 0 {
		if self.StsdId, err = ReadInt(r, 4); err != nil {
			return
		}
	}
	if self.Flags&TFHD_DEFAULT_DURATION != 0 {
		if self.DefaultDuration, err = ReadInt(r, 4); err != nil {
			return
		}
	}
	if self.Flags&TFHD_DEFAULT_SIZE != 0 {
		if self.DefaultSize, err = ReadInt(r, 4); err != nil {
			return
		}
	}
	if self.Flags&TFHD_DEFAULT_FLAGS != 0 {
		if self.DefaultFlags, err = ReadInt(r, 4); err != nil {
			return
		}
	}
	res = self
	return
}
// trun flag bits (ISO/IEC 14496-12, track fragment run): which optional
// fields are present in the atom header and in each sample entry.
const (
	TRUN_DATA_OFFSET        = 0x01
	TRUN_FIRST_SAMPLE_FLAGS = 0x04
	TRUN_SAMPLE_DURATION    = 0x100
	TRUN_SAMPLE_SIZE        = 0x200
	TRUN_SAMPLE_FLAGS       = 0x400
	TRUN_SAMPLE_CTS         = 0x800
)

// TrackFragRunEntry is one sample record inside a trun atom; a field
// stays zero when its TRUN_SAMPLE_* bit was absent.
type TrackFragRunEntry struct {
	Duration int
	Size     int
	Flags    int
	Cts      int // composition time offset
}

// TrackFragRun holds a parsed trun (track fragment run) atom.
type TrackFragRun struct {
	Version          int
	Flags            int
	FirstSampleFlags int // override for the first sample's flags value
	DataOffset       int
	Entries          []TrackFragRunEntry
}
// WalkTrackFragRun reports the parsed trun fields and a preview of at
// most the first ten sample entries to w.
func WalkTrackFragRun(w Walker, self *TrackFragRun) {
	w.StartStruct("TrackFragRun")
	w.Name("Flags")
	w.HexInt(self.Flags)
	w.Name("FirstSampleFlags")
	w.HexInt(self.FirstSampleFlags)
	w.Name("DataOffset")
	w.Int(self.DataOffset)
	w.Name("EntriesCount")
	w.Int(len(self.Entries))
	for idx, ent := range self.Entries {
		if idx == 10 {
			break
		}
		w.Println(fmt.Sprintf("Entry[%d] Flags=%x Duration=%d Size=%d Cts=%d",
			idx, ent.Flags, ent.Duration, ent.Size, ent.Cts))
	}
	w.EndStruct()
}
// WriteTrackFragRun would serialize a trun atom; writing fragmented
// runs is not supported yet, so any call is a programming error.
// (Fixed the "unimplmented" typo and dropped the unreachable return.)
func WriteTrackFragRun(w io.WriteSeeker, self *TrackFragRun) (err error) {
	panic("unimplemented")
}
// ReadTrackFragRun parses a trun (track fragment run) atom.
//
// Per ISO/IEC 14496-12 §8.8.8 the presence of the optional per-sample
// fields (duration, size, flags, cts) is governed by the trun Flags for
// EVERY sample; first-sample-flags, when present, only overrides the
// sample-flags VALUE of the first sample.
//
// BUG FIX: the previous code used FirstSampleFlags as the presence
// mask for entry 0. In the common case (TRUN_FIRST_SAMPLE_FLAGS clear,
// so FirstSampleFlags == 0) the first entry read no fields at all and
// every later entry was desynchronized.
func ReadTrackFragRun(r *io.LimitedReader) (res *TrackFragRun, err error) {
	self := &TrackFragRun{}
	if self.Version, err = ReadInt(r, 1); err != nil {
		return
	}
	if self.Flags, err = ReadInt(r, 3); err != nil {
		return
	}
	var count int
	if count, err = ReadInt(r, 4); err != nil {
		return
	}
	if self.Flags&TRUN_DATA_OFFSET != 0 {
		if self.DataOffset, err = ReadInt(r, 4); err != nil {
			return
		}
	}
	if self.Flags&TRUN_FIRST_SAMPLE_FLAGS != 0 {
		if self.FirstSampleFlags, err = ReadInt(r, 4); err != nil {
			return
		}
	}
	for i := 0; i < count; i++ {
		entry := TrackFragRunEntry{}
		if self.Flags&TRUN_SAMPLE_DURATION != 0 {
			if entry.Duration, err = ReadInt(r, 4); err != nil {
				return
			}
		}
		if self.Flags&TRUN_SAMPLE_SIZE != 0 {
			if entry.Size, err = ReadInt(r, 4); err != nil {
				return
			}
		}
		if self.Flags&TRUN_SAMPLE_FLAGS != 0 {
			if entry.Flags, err = ReadInt(r, 4); err != nil {
				return
			}
		}
		if self.Flags&TRUN_SAMPLE_CTS != 0 {
			if entry.Cts, err = ReadInt(r, 4); err != nil {
				return
			}
		}
		// first-sample-flags replaces only the flags value of sample 0.
		if i == 0 && self.Flags&TRUN_FIRST_SAMPLE_FLAGS != 0 {
			entry.Flags = self.FirstSampleFlags
		}
		self.Entries = append(self.Entries, entry)
	}
	res = self
	return
}
// TrackFragDecodeTime holds a parsed tfdt atom: the base decode time of
// the fragment's first sample, in media timescale units.
type TrackFragDecodeTime struct {
	Version int
	Flags   int
	Time    int64
}
// ReadTrackFragDecodeTime parses a tfdt atom: version/flags followed by
// the decode time (64-bit for version 1, 32-bit otherwise).
func ReadTrackFragDecodeTime(r *io.LimitedReader) (res *TrackFragDecodeTime, err error) {
	self := &TrackFragDecodeTime{}
	if self.Version, err = ReadInt(r, 1); err != nil {
		return
	}
	if self.Flags, err = ReadInt(r, 3); err != nil {
		return
	}
	width := 32
	if self.Version != 0 {
		width = 64
	}
	if self.Time, err = bits.ReadInt64BE(r, width); err != nil {
		return
	}
	res = self
	return
}
// WriteTrackFragDecodeTime serializes a tfdt atom, using a 64-bit time
// field for version 1 and a 32-bit one otherwise.
func WriteTrackFragDecodeTime(w io.WriteSeeker, self *TrackFragDecodeTime) (err error) {
	var aw *Writer
	if aw, err = WriteAtomHeader(w, "tfdt"); err != nil {
		return
	}
	w = aw
	if err = WriteInt(w, self.Version, 1); err != nil {
		return
	}
	if err = WriteInt(w, self.Flags, 3); err != nil {
		return
	}
	width := 32
	if self.Version != 0 {
		width = 64
	}
	if err = bits.WriteInt64BE(w, self.Time, width); err != nil {
		return
	}
	// Patch the atom size placeholder before returning.
	err = aw.Close()
	return
}
// WalkTrackFragDecodeTime reports the parsed tfdt fields to w.
func WalkTrackFragDecodeTime(w Walker, self *TrackFragDecodeTime) {
	w.StartStruct("TrackFragDecodeTime")
	for _, field := range []struct {
		name string
		emit func()
	}{
		{"Version", func() { w.Int(self.Version) }},
		{"Flags", func() { w.Int(self.Flags) }},
		{"Time", func() { w.Int64(self.Time) }},
	} {
		w.Name(field.name)
		field.emit()
	}
	w.EndStruct()
}

View File

@ -1,869 +0,0 @@
// uc: upper-case the first character of a non-empty string; falsy
// inputs (empty string, undefined) are returned unchanged.
var uc = function (s) {
  if (!s) return s;
  return s.charAt(0).toUpperCase() + s.slice(1);
};
// nonull: return a copy of the array with all falsy entries removed.
Array.prototype.nonull = function () {
  var kept = [];
  this.forEach(function (x) {
    if (x) kept.push(x);
  });
  return kept;
};
var atoms = {
movie: {
cc4: 'moov',
fields: [
['$atoms', [
['header', '*movieHeader'],
['iods', '*iods'],
['tracks', '[]*track'],
['movieExtend', '*movieExtend'],
]],
],
},
iods: {
cc4: 'iods',
fields: [
['data', '[]byte'],
],
},
movieHeader: {
cc4: 'mvhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['createTime', 'TimeStamp32'],
['modifyTime', 'TimeStamp32'],
['timeScale', 'int32'],
['duration', 'int32'],
['preferredRate', 'Fixed32'],
['preferredVolume', 'Fixed16'],
['_', '[10]byte'],
['matrix', '[9]int32'],
['previewTime', 'TimeStamp32'],
['previewDuration', 'TimeStamp32'],
['posterTime', 'TimeStamp32'],
['selectionTime', 'TimeStamp32'],
['selectionDuration', 'TimeStamp32'],
['currentTime', 'TimeStamp32'],
['nextTrackId', 'int32'],
],
},
track: {
cc4: 'trak',
fields: [
['$atoms', [
['header', '*trackHeader'],
['media', '*media'],
]],
],
},
trackHeader: {
cc4: 'tkhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['createTime', 'TimeStamp32'],
['modifyTime', 'TimeStamp32'],
['trackId', 'int32'],
['_', '[4]byte'],
['duration', 'int32'],
['_', '[8]byte'],
['layer', 'int16'],
['alternateGroup', 'int16'],
['volume', 'Fixed16'],
['_', '[2]byte'],
['matrix', '[9]int32'],
['trackWidth', 'Fixed32'],
['trackHeight', 'Fixed32'],
],
},
handlerRefer: {
cc4: 'hdlr',
fields: [
['version', 'int8'],
['flags', 'int24'],
['type', '[4]char'],
['subType', '[4]char'],
['name', '[]char'],
],
},
media: {
cc4: 'mdia',
fields: [
['$atoms', [
['header', '*mediaHeader'],
['handler', '*handlerRefer'],
['info', '*mediaInfo'],
]],
],
},
mediaHeader: {
cc4: 'mdhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['createTime', 'TimeStamp32'],
['modifyTime', 'TimeStamp32'],
['timeScale', 'int32'],
['duration', 'int32'],
['language', 'int16'],
['quality', 'int16'],
],
},
mediaInfo: {
cc4: 'minf',
fields: [
['$atoms', [
['sound', '*soundMediaInfo'],
['video', '*videoMediaInfo'],
['data', '*dataInfo'],
['sample', '*sampleTable'],
]],
],
},
dataInfo: {
cc4: 'dinf',
fields: [
['$atoms', [
['refer', '*dataRefer'],
]],
],
},
dataRefer: {
cc4: 'dref',
fields: [
['version', 'int8'],
['flags', 'int24'],
['$atomsCount', 'int32'],
['$atoms', [
['url', '*dataReferUrl'],
]],
],
},
dataReferUrl: {
cc4: 'url ',
fields: [
['version', 'int8'],
['flags', 'int24'],
],
},
soundMediaInfo: {
cc4: 'smhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['balance', 'int16'],
['_', 'int16'],
],
},
videoMediaInfo: {
cc4: 'vmhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['graphicsMode', 'int16'],
['opcolor', '[3]int16'],
],
},
sampleTable: {
cc4: 'stbl',
fields: [
['$atoms', [
['sampleDesc', '*sampleDesc'],
['timeToSample', '*timeToSample'],
['compositionOffset', '*compositionOffset'],
['sampleToChunk', '*sampleToChunk'],
['syncSample', '*syncSample'],
['chunkOffset', '*chunkOffset'],
['sampleSize', '*sampleSize'],
]],
],
},
sampleDesc: {
cc4: 'stsd',
fields: [
['version', 'int8'],
['_', '[3]byte'],
['$atomsCount', 'int32'],
['$atoms', [
['avc1Desc', '*avc1Desc'],
['mp4aDesc', '*mp4aDesc'],
]],
],
},
mp4aDesc: {
cc4: 'mp4a',
fields: [
['_', '[6]byte'],
['dataRefIdx', 'int16'],
['version', 'int16'],
['revisionLevel', 'int16'],
['vendor', 'int32'],
['numberOfChannels', 'int16'],
['sampleSize', 'int16'],
['compressionId', 'int16'],
['_', 'int16'],
['sampleRate', 'Fixed32'],
['$atoms', [
['conf', '*elemStreamDesc'],
]],
],
},
elemStreamDesc: {
cc4: 'esds',
fields: [
['version', 'int32'],
['data', '[]byte'],
],
},
avc1Desc: {
cc4: 'avc1',
fields: [
['_', '[6]byte'],
['dataRefIdx', 'int16'],
['version', 'int16'],
['revision', 'int16'],
['vendor', 'int32'],
['temporalQuality', 'int32'],
['spatialQuality', 'int32'],
['width', 'int16'],
['height', 'int16'],
['horizontalResolution', 'Fixed32'],
['vorizontalResolution', 'Fixed32'],
['_', 'int32'],
['frameCount', 'int16'],
['compressorName', '[32]char'],
['depth', 'int16'],
['colorTableId', 'int16'],
['$atoms', [
['conf', '*avc1Conf'],
]],
],
},
avc1Conf: {
cc4: 'avcC',
fields: [
['data', '[]byte'],
],
},
timeToSample: {
cc4: 'stts',
fields: [
['version', 'int8'],
['flags', 'int24'],
['entries', '[int32]timeToSampleEntry'],
],
},
timeToSampleEntry: {
fields: [
['count', 'int32'],
['duration', 'int32'],
],
},
sampleToChunk: {
cc4: 'stsc',
fields: [
['version', 'int8'],
['flags', 'int24'],
['entries', '[int32]sampleToChunkEntry'],
],
},
sampleToChunkEntry: {
fields: [
['firstChunk', 'int32'],
['samplesPerChunk', 'int32'],
['sampleDescId', 'int32'],
],
},
compositionOffset: {
cc4: 'ctts',
fields: [
['version', 'int8'],
['flags', 'int24'],
['entries', '[int32]compositionOffsetEntry'],
],
},
compositionOffsetEntry: {
fields: [
['count', 'int32'],
['offset', 'int32'],
],
},
syncSample: {
cc4: 'stss',
fields: [
['version', 'int8'],
['flags', 'int24'],
['entries', '[int32]int32'],
],
},
sampleSize: {
cc4: 'stsz',
},
chunkOffset: {
cc4: 'stco',
fields: [
['version', 'int8'],
['flags', 'int24'],
['entries', '[int32]int32'],
],
},
movieFrag: {
cc4: 'moof',
fields: [
['$atoms', [
['header', '*movieFragHeader'],
['tracks', '[]*trackFrag'],
]],
],
},
trackFragDecodeTime: {
cc4: 'tfdt',
},
movieFragHeader: {
cc4: 'mfhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['seqNum', 'int32'],
],
},
trackFrag: {
cc4: 'traf',
fields: [
['$atoms', [
['header', '*trackFragHeader'],
['decodeTime', '*trackFragDecodeTime'],
['run', '*trackFragRun'],
]],
],
},
trackFragRun: {
cc4: 'trun',
},
trackFragHeader: {
cc4: 'tfhd',
},
movieExtend: {
cc4: 'mvex',
fields: [
['$atoms', [
['tracks', '[]*trackExtend'],
]],
],
},
trackExtend: {
cc4: 'trex',
fields: [
['version', 'int8'],
['flags', 'int24'],
['trackId', 'int32'],
['defaultSampleDescIdx', 'int32'],
['defaultSampleDuration', 'int32'],
['defaultSampleSize', 'int32'],
['defaultSampleFlags', 'int32'],
],
},
/*
// need hand write
trackFragRun: {
cc4: 'trun',
fields: [
['version', 'int8'],
['flags', 'int24'],
['sampleCount', 'int32'],
['dataOffset', 'int32'],
['entries', '[]int32'],
],
},
trackFragHeader: {
cc4: 'tfhd',
fields: [
['version', 'int8'],
['flags', 'int24'],
['id', 'int32'],
['sampleDescriptionIndex', 'int32'],
['_', '[12]byte'],
],
},
*/
};
var DeclReadFunc = (opts) => {
var stmts = [];
var DebugStmt = type => `// ${JSON.stringify(type)}`;
var ReadArr = (name, type) => {
return [
//StrStmt('// ReadArr'),
//DebugStmt(type),
type.varcount && [
DeclVar('count', 'int'),
CallCheckAssign('ReadInt', ['r', type.varcount], ['count']),
`${name} = make(${typeStr(type)}, count)`,
],
For(RangeN('i', type.varcount ? 'count' : type.count), [
ReadCommnType(name+'[i]', type),
]),
];
};
var elemTypeStr = type => typeStr(Object.assign({}, type, {arr: false}));
var ReadAtoms = fields => [
For(`r.N > 0`, [
DeclVar('cc4', 'string'),
DeclVar('ar', '*io.LimitedReader'),
CallCheckAssign('ReadAtomHeader', ['r', '""'], ['ar', 'cc4']),
Switch('cc4', fields.map(field => [
`"${atoms[field.type.struct].cc4}"`, [
field.type.arr ? [
DeclVar('item', elemTypeStr(field.type)),
CallCheckAssign('Read'+field.type.Struct, ['ar'], ['item']),
`self.${field.name} = append(self.${field.name}, item)`,
] : [
CallCheckAssign('Read'+field.type.Struct, ['ar'], [`self.${field.name}`]),
],
]
]), showlog && [`log.Println("skip", cc4)`]),
CallCheckAssign('ReadDummy', ['ar', 'int(ar.N)'], ['_']),
])
];
var ReadCommnType = (name, type) => {
if (type.struct)
return CallCheckAssign(
'Read'+type.Struct, ['r'], [name]);
return [
//DebugStmt(type),
CallCheckAssign(
'Read'+type.fn, ['r', type.len||'int(r.N)'], [name]),
]
};
var ReadField = (name, type) => {
if (name == '_')
return CallCheckAssign('ReadDummy', ['r', type.len], ['_']);
if (name == '$atoms')
return ReadAtoms(type.list);
if (name == '$atomsCount')
return CallCheckAssign('ReadDummy', ['r', type.len], ['_']);
if (type.arr && type.fn != 'Bytes')
return ReadArr('self.'+name, type);
return ReadCommnType('self.'+name, type);
};
var ReadFields = () => opts.fields.map(field => {
var name = field.name;
var type = field.type;
return ReadField(name, type);
}).nonull();
var ptr = opts.cc4;
return Func(
'Read'+opts.type,
[['r', '*io.LimitedReader']],
[[ptr?'res':'self', (ptr?'*':'')+opts.type], ['err', 'error']],
[
ptr && `self := &${opts.type}{}`,
ReadFields(),
ptr && `res = self`,
]
);
};
var DeclWriteFunc = (opts) => {
var SavePos = [
DeclVar('aw', '*Writer'),
CallCheckAssign('WriteAtomHeader', ['w', `"${opts.cc4}"`], ['aw']),
`w = aw`,
];
var RestorePosSetSize = [
CallCheckAssign('aw.Close', [], []),
];
var WriteAtoms = fields => fields.map(field => {
var name = 'self.'+field.name;
return [
`if ${name} != nil {`,
field.type.arr ? WriteArr(name, field.type) : WriteCommnType(name, field.type),
atomsCount && `${atomsCount.name}++`,
`}`,
];
});
var WriteArr = (name, type) => {
return [
type.varcount && CallCheckAssign('WriteInt', ['w', `len(${name})`, type.varcount], []),
For(`_, elem := range ${name}`, [
WriteCommnType('elem', type),
]),
];
};
var WriteCommnType = (name, type) => {
if (type.struct)
return CallCheckAssign(
'Write'+type.Struct, ['w', name], []);
return [
CallCheckAssign(
'Write'+type.fn, ['w', name, type.len||`len(${name})`], []),
]
};
var atomsCount;
var WriteAtomsCountStart = (type) => {
atomsCount = {
name: 'atomsCount',
namePos: 'atomsCountPos',
type: type,
}
return [
DeclVar(atomsCount.name, 'int'),
DeclVar(atomsCount.namePos, 'int64'),
CallCheckAssign('WriteEmptyInt', ['w', type.len], [atomsCount.namePos]),
];
};
var WriteAtomsCountEnd = (type) => {
return [
CallCheckAssign('RefillInt',
['w', atomsCount.namePos, atomsCount.name, atomsCount.type.len],
[]
),
];
};
var WriteField = (name, type) => {
if (name == '_')
return CallCheckAssign('WriteDummy', ['w', type.len], []);
if (name == '$atoms')
return WriteAtoms(type.list);
if (name == '$atomsCount')
return WriteAtomsCountStart(type);
if (type.arr && type.fn != 'Bytes')
return WriteArr('self.'+name, type);
return WriteCommnType('self.'+name, type);
};
var WriteFields = () => opts.fields
.map(field => WriteField(field.name, field.type))
.concat(atomsCount && WriteAtomsCountEnd())
return Func(
'Write'+opts.type,
[['w', 'io.WriteSeeker'], ['self', (opts.cc4?'*':'')+opts.type]],
[['err', 'error']],
[
opts.cc4 && SavePos,
WriteFields(),
opts.cc4 && RestorePosSetSize,
]
);
};
var DeclDumpFunc = (opts) => {
var dumpStruct = (name, type) => {
if (type.ptr)
return If(`${name} != nil`, Call('Walk'+type.Struct, ['w', name]));
return Call('Walk'+type.Struct, ['w', name]);
};
var dumpArr = (name, type, id) => {
return [
//Call('w.StartArray', [`"${id}"`, `len(${name})`]),
For(`i, item := range(${name})`, If(
`w.FilterArrayItem("${opts.type}", "${id}", i, len(${name}))`,
dumpCommonType('item', type, id),
[`w.ArrayLeft(i, len(${name}))`, 'break']
)),
//Call('w.EndArray', []),
];
};
var dumpCommonType = (name, type, id) => {
if (type.struct)
return dumpStruct(name, type);
return [
Call('w.Name', [`"${id}"`]),
Call('w.'+type.fn, [name]),
];
};
var dumpField = (name, type, noarr) => {
if (name == '_')
return;
if (name == '$atomsCount')
return;
if (name == '$atoms') {
return type.list.map(field => dumpField(field.name, field.type));
}
if (!noarr && type.arr && type.fn != 'Bytes')
return dumpArr('self.'+name, type, name);
return dumpCommonType('self.'+name, type, name);
};
var dumpFields = fields =>
[ Call('w.StartStruct', [`"${opts.type}"`]) ]
.concat(fields.map(field => dumpField(field.name, field.type)))
.concat([Call('w.EndStruct', [])]);
return Func(
'Walk'+opts.type,
[['w', 'Walker'], ['self', (opts.cc4?'*':'')+opts.type]],
[],
dumpFields(opts.fields)
)
};
// D declares a tiny AST node constructor: D('Func','name','args',...)
// installs global.Func(...) which builds {cls:'Func', name:…, args:…}
// by zipping the declared field names with the call arguments.
// Node-only: relies on the `global` object.
var D = (cls, ...fields) => {
	global[cls] = (...args) => {
		var obj = {cls: cls};
		fields.forEach((k, i) => obj[k] = args[i]);
		return obj;
	};
};
D('Func', 'name', 'args', 'rets', 'body');
D('If', 'cond', 'action', 'else');
D('Call', 'fn', 'args');
D('CallCheckAssign', 'fn', 'args', 'rets', 'action');
D('DeclVar', 'name', 'type');
D('For', 'cond', 'body');
D('RangeN', 'i', 'n');
D('DeclStruct', 'name', 'body');
D('StrStmt', 'content');
D('Switch', 'cond', 'cases', 'default');
var showlog = false;
var S = s => s && s || '';
var dumpFn = f => {
var dumpArgs = x => x.map(x => x.join(' ')).join(',');
return `func ${f.name}(${dumpArgs(f.args)}) (${dumpArgs(f.rets)}) {
${S(showlog && 'log.Println("'+f.name+'")')}
${dumpStmts(f.body)}
return
}`;
};
var dumpStmts = stmt => {
if (typeof(stmt) == 'string') {
return stmt;
} else if (stmt instanceof Array) {
return stmt.nonull().map(dumpStmts).join('\n');
} else if (stmt.cls == 'If') {
var s = `if ${stmt.cond} {
${dumpStmts(stmt.action)}
}`;
if (stmt.else) {
s += ` else {
${dumpStmts(stmt.else)}
}`;
}
return s;
} else if (stmt.cls == 'Call') {
return `${stmt.fn}(${stmt.args.join(',')})`;
} else if (stmt.cls == 'CallCheckAssign') {
return `if ${stmt.rets.concat(['err']).join(',')} = ${stmt.fn}(${stmt.args.join(',')}); err != nil {
${stmt.action ? stmt.action : 'return'}
}`;
} else if (stmt.cls == 'DeclVar') {
return `var ${stmt.name} ${stmt.type}`;
} else if (stmt.cls == 'For') {
return `for ${dumpStmts(stmt.cond)} {
${dumpStmts(stmt.body)}
}`;
} else if (stmt.cls == 'RangeN') {
return `${stmt.i} := 0; ${stmt.i} < ${stmt.n}; ${stmt.i}++`;
} else if (stmt.cls == 'DeclStruct') {
return `type ${stmt.name} struct {
${stmt.body.map(line => line.join(' ')).join('\n')}
}`;
} else if (stmt.cls == 'Func') {
return dumpFn(stmt);
} else if (stmt.cls == 'StrStmt') {
return stmt.content;
} else if (stmt.cls == 'Switch') {
var dumpCase = c => `case ${c[0]}: { ${dumpStmts(c[1])} }`;
var dumpDefault = c => `default: { ${dumpStmts(c)} }`;
return `switch ${stmt.cond} {
${stmt.cases.map(dumpCase).join('\n')}
${stmt.default && dumpDefault(stmt.default) || ''}
}`;
}
};
// parseType: parse one type string from the atom table into the
// descriptor object consumed by the Read/Write/Walk generators, e.g.
//   '[9]int32'   -> fixed-count array of 4-byte ints
//   '[int32]foo' -> array whose count is read from a 4-byte field
//   '*header'    -> pointer to another atom struct
//   '[4]char'    -> fixed-width Go string
// Result keys: arr/count/varcount, ptr, type/fn, len (bytes), struct/Struct.
var parseType = s => {
	var r = {};
	var bracket = /^\[(.*)\]/;
	var lenDiv = 8; // widths are written in bits; len is stored in bytes
	var types = /^(int|TimeStamp|byte|Fixed|char)/;
	var number = /^[0-9]+/;
	// Leading '[...]': a literal element count, or 'intNN' describing
	// the width of the on-disk count field (varcount, in bytes).
	if (s.match(bracket)) {
		var count = s.match(bracket)[1];
		if (count.substr(0,3) == 'int') {
			r.varcount = +count.substr(3)/8;
		} else {
			r.count = +count;
		}
		r.arr = true;
		s = s.replace(bracket, '');
	}
	if (s.substr(0,1) == '*') {
		r.ptr = true;
		s = s.slice(1);
	}
	// Primitive leaf types map onto Read<fn>/Write<fn>/w.<fn> helpers.
	if (s.match(types)) {
		r.type = s.match(types)[0];
		r.fn = uc(r.type);
		s = s.replace(types, '');
	}
	// '[n]byte' is read as a single Bytes blob, not element by element.
	if (r.type == 'byte' && r.arr) {
		r.len = r.count;
		r.fn = 'Bytes';
	}
	// '[n]char' becomes a fixed-width Go string (counts already in bytes).
	if (r.type == 'char' && r.arr) {
		r.len = r.count;
		r.fn = 'String';
		r.type = 'string';
		r.arr = false;
		lenDiv = 1;
	}
	// Trailing digits give the primitive width.
	if (s.match(number)) {
		r.len = +s.match(number)[0]/lenDiv;
		s = s.replace(number, '');
	}
	// Anything left over names another atom struct from the table.
	if (s != '') {
		r.struct = s;
		r.Struct = uc(s);
	}
	return r;
};
// typeStr: render a parsed type descriptor back into Go type syntax,
// e.g. {arr:true, ptr:true, Struct:'Track'} -> '[]*Track'.
var typeStr = function (t) {
  var parts = [];
  if (t.arr) parts.push('[' + (t.count || '') + ']');
  if (t.ptr) parts.push('*');
  if (t.struct) parts.push(t.Struct);
  if (t.type) parts.push(t.type);
  return parts.join('');
};
// nameShouldHide: fields named '_' are padding and are skipped in dumps.
var nameShouldHide = function (name) {
  return name === '_';
};
var allStmts = () => {
var stmts = [];
var parseFields = fields => fields.map(field => {
return {
name: uc(field[0]),
type: field[0] == '$atoms' ? {list: parseFields(field[1])} : parseType(field[1]),
};
});
var genStructFields = fields => fields.map(field => {
if (field.name == '_')
return;
if (field.name == '$atomsCount')
return;
if (field.name == '$atoms')
return field.type.list;
return [field];
}).nonull().reduce((prev, cur) => prev.concat(cur)).map(field => [
field.name, typeStr(field.type)]);
for (var k in atoms) {
var atom = atoms[k];
var name = uc(k);
if (atom.fields == null)
continue;
var fields = parseFields(atom.fields);
stmts = stmts.concat([
DeclStruct(name, genStructFields(fields)),
DeclReadFunc({
type: name,
fields: fields,
cc4: atom.cc4,
}),
DeclWriteFunc({
type: name,
fields: fields,
cc4: atom.cc4,
}),
DeclDumpFunc({
type: name,
fields: fields,
cc4: atom.cc4,
}),
]);
}
return stmts;
};
console.log(`
// THIS FILE IS AUTO GENERATED
package atom
import (
"io"
${showlog && '"log"' || ''}
)
`, dumpStmts(allStmts()));

View File

@ -1,7 +0,0 @@
#!/bin/bash
# Regenerate struct.go from the atom table in genStruct.js, format it,
# and check the package still builds. Requires node >= 6 (arrow
# functions, template literals).
node genStruct.js > struct.go && gofmt -w struct.go && go build . || {
	echo
	echo "Please use node version > 6.0.0"
	# BUG FIX: previously the script exited 0 even on failure, so
	# callers (and CI) could not detect a broken generation step.
	exit 1
}

View File

@ -1,115 +0,0 @@
package atom
import (
"io"
"io/ioutil"
"log"
)
// ReadBytes reads exactly n bytes from r.
//
// BUG FIX: the previous implementation issued a single r.Read, which
// may legally return fewer than n bytes (and it also clobbered n with
// the count actually read). io.ReadFull retries until the buffer is
// full or the stream ends early (io.EOF / io.ErrUnexpectedEOF).
func ReadBytes(r io.Reader, n int) (res []byte, err error) {
	res = make([]byte, n)
	if _, err = io.ReadFull(r, res); err != nil {
		return
	}
	return
}

// ReadUInt reads an n-byte big-endian unsigned integer.
func ReadUInt(r io.Reader, n int) (res uint, err error) {
	var b []byte
	if b, err = ReadBytes(r, n); err != nil {
		return
	}
	for i := 0; i < n; i++ {
		res <<= 8
		res += uint(b[i])
	}
	return
}

// ReadInt reads an n-byte big-endian two's-complement signed integer.
func ReadInt(r io.Reader, n int) (res int, err error) {
	var uval uint
	if uval, err = ReadUInt(r, n); err != nil {
		return
	}
	// Sign-extend when the top bit of the n*8-bit value is set.
	if uval&(1<<uint(n*8-1)) != 0 {
		res = -int((1 << uint(n*8)) - uval)
	} else {
		res = int(uval)
	}
	return
}
// ReadFixed reads an n-byte fixed-point value, normalized to the 16.16
// Fixed representation (a 2-byte 8.8 value is shifted up by 8 bits).
// Panics for any width other than 2 or 4.
func ReadFixed(r io.Reader, n int) (res Fixed, err error) {
	var ui uint
	if ui, err = ReadUInt(r, n); err != nil {
		return
	}
	if n == 2 {
		res = Fixed(ui << 8)
	} else if n == 4 {
		res = Fixed(ui)
	} else {
		panic("only fixed32 and fixed16 is supported")
	}
	return
}

// ReadTimeStamp reads an n-byte big-endian unsigned timestamp.
func ReadTimeStamp(r io.Reader, n int) (res TimeStamp, err error) {
	var ui uint
	if ui, err = ReadUInt(r, n); err != nil {
		return
	}
	res = TimeStamp(ui)
	return
}

// ReadString reads exactly n bytes and returns them as a string.
func ReadString(r io.Reader, n int) (res string, err error) {
	var b []byte
	if b, err = ReadBytes(r, n); err != nil {
		return
	}
	res = string(b)
	return
}

// ReadDummy discards n bytes from r.
// NOTE(review): the copied-byte count from io.CopyN is discarded, so
// res is always 0 rather than the number of bytes skipped.
func ReadDummy(r io.Reader, n int) (res int, err error) {
	_, err = io.CopyN(ioutil.Discard, r, int64(n))
	return
}
// ReadAtomHeader scans r for the next atom header (32-bit big-endian
// size followed by a four-character code). With targetCC4 == "" the
// first atom found is returned; otherwise non-matching atoms are
// skipped in full until one matches. Zero-size entries are ignored.
// The returned LimitedReader covers exactly the atom body (size minus
// the 8 header bytes already consumed).
//
// NOTE(review): the 64-bit extended size form (size == 1) is not
// handled here — confirm the inputs never use it.
func ReadAtomHeader(r io.Reader, targetCC4 string) (res *io.LimitedReader, cc4 string, err error) {
	for {
		var size int
		if size, err = ReadInt(r, 4); err != nil {
			return
		}
		if size == 0 {
			continue
		}
		if cc4, err = ReadString(r, 4); err != nil {
			return
		}
		// The stored size includes the 8 header bytes.
		size = size - 8
		if false { // debug trace, disabled
			log.Println(cc4, targetCC4, size, cc4 == targetCC4)
		}
		if targetCC4 != "" && cc4 != targetCC4 {
			log.Println("ReadAtomHeader skip:", cc4)
			if _, err = ReadDummy(r, size); err != nil {
				return
			}
			continue
		}
		res = &io.LimitedReader{
			R: r,
			N: int64(size),
		}
		return
	}
}

View File

@ -1,97 +0,0 @@
package atom
import (
"io"
)
// SampleSize holds a parsed stsz atom. A non-zero SampleSize means all
// samples share that constant size and Entries is empty; otherwise
// Entries carries one size per sample.
type SampleSize struct {
	Version    int
	Flags      int
	SampleSize int
	Entries    []int
}

// ReadSampleSize parses an stsz atom; the per-sample table is present
// only when the constant size field is zero.
func ReadSampleSize(r *io.LimitedReader) (res *SampleSize, err error) {
	self := &SampleSize{}
	if self.Version, err = ReadInt(r, 1); err != nil {
		return
	}
	if self.Flags, err = ReadInt(r, 3); err != nil {
		return
	}
	if self.SampleSize, err = ReadInt(r, 4); err != nil {
		return
	}
	if self.SampleSize != 0 {
		// BUG FIX: this early return previously left res nil with a
		// nil error, so constant-sample-size streams handed callers a
		// nil *SampleSize.
		res = self
		return
	}
	var count int
	if count, err = ReadInt(r, 4); err != nil {
		return
	}
	self.Entries = make([]int, count)
	for i := 0; i < count; i++ {
		if self.Entries[i], err = ReadInt(r, 4); err != nil {
			return
		}
	}
	res = self
	return
}
// WriteSampleSize serializes an stsz atom. The per-sample table is
// emitted only when the constant sample size is zero.
func WriteSampleSize(w io.WriteSeeker, self *SampleSize) (err error) {
	var aw *Writer
	if aw, err = WriteAtomHeader(w, "stsz"); err != nil {
		return
	}
	w = aw
	if err = WriteInt(w, self.Version, 1); err != nil {
		return
	}
	if err = WriteInt(w, self.Flags, 3); err != nil {
		return
	}
	if err = WriteInt(w, self.SampleSize, 4); err != nil {
		return
	}
	if self.SampleSize == 0 {
		if err = WriteInt(w, len(self.Entries), 4); err != nil {
			return
		}
		for _, elem := range self.Entries {
			if err = WriteInt(w, elem, 4); err != nil {
				return
			}
		}
	}
	// BUG FIX: the constant-size path previously returned before
	// aw.Close(), leaving the atom's 32-bit size field as the zero
	// placeholder written by WriteAtomHeader (a corrupt atom).
	if err = aw.Close(); err != nil {
		return
	}
	return
}
// WalkSampleSize reports the parsed stsz fields to w, honoring the
// walker's array filtering for the per-sample size table.
func WalkSampleSize(w Walker, self *SampleSize) {
	w.StartStruct("SampleSize")
	w.Name("Version")
	w.Int(self.Version)
	w.Name("Flags")
	w.Int(self.Flags)
	w.Name("SampleSize")
	w.Int(self.SampleSize)
	total := len(self.Entries)
	for idx, size := range self.Entries {
		if !w.FilterArrayItem("SampleSize", "Entries", idx, total) {
			w.ArrayLeft(idx, total)
			break
		}
		w.Name("Entries")
		w.Int(size)
	}
	w.EndStruct()
}

File diff suppressed because it is too large Load Diff

View File

@ -1,12 +0,0 @@
package atom
// Fixed is a 16.16 fixed-point number as used in MP4/QuickTime headers.
type Fixed uint32

// TimeStamp is an unsigned media timestamp in timescale units.
type TimeStamp uint32

// IntToFixed converts an integer to 16.16 fixed point.
func IntToFixed(val int) Fixed {
	return Fixed(val << 16)
}

// FixedToInt truncates a 16.16 fixed-point value to its integer part.
func FixedToInt(val Fixed) int {
	return int(val >> 16)
}

View File

@ -1,29 +0,0 @@
package atom
// GetAvc1ConfByTrack walks the track's atom tree and returns its avcC
// configuration atom, or nil when any link in the chain is missing.
// (Rewritten with guard clauses; the old version shadowed the named
// return avc1 with a local of a different type.)
func GetAvc1ConfByTrack(stream *Track) (avc1 *Avc1Conf) {
	media := stream.Media
	if media == nil {
		return nil
	}
	info := media.Info
	if info == nil {
		return nil
	}
	sample := info.Sample
	if sample == nil {
		return nil
	}
	desc := sample.SampleDesc
	if desc == nil {
		return nil
	}
	if desc.Avc1Desc == nil {
		return nil
	}
	return desc.Avc1Desc.Conf
}
// GetMp4aDescByTrack walks the track's atom tree and returns its mp4a
// sample description, or nil when any link in the chain is missing.
func GetMp4aDescByTrack(stream *Track) (mp4a *Mp4aDesc) {
	media := stream.Media
	if media == nil {
		return nil
	}
	info := media.Info
	if info == nil {
		return nil
	}
	sample := info.Sample
	if sample == nil {
		return nil
	}
	desc := sample.SampleDesc
	if desc == nil {
		return nil
	}
	return desc.Mp4aDesc
}

View File

@ -1,119 +0,0 @@
package atom
import (
"io"
"log"
)
// WriteBytes writes exactly n bytes to w: the contents of b, zero
// padded (or truncated) to length n.
func WriteBytes(w io.Writer, b []byte, n int) (err error) {
	if len(b) < n {
		padded := make([]byte, n)
		copy(padded, b)
		b = padded
	}
	_, err = w.Write(b[:n])
	return
}

// WriteUInt writes val as an n-byte big-endian unsigned integer
// (n must not exceed 8).
func WriteUInt(w io.Writer, val uint, n int) (err error) {
	var buf [8]byte
	for i := 0; i < n; i++ {
		buf[i] = byte(val >> uint(8*(n-1-i)))
	}
	return WriteBytes(w, buf[:], n)
}

// WriteInt writes val as an n-byte big-endian two's-complement integer.
func WriteInt(w io.Writer, val int, n int) (err error) {
	uval := uint(val)
	if val < 0 {
		uval = uint((1 << uint(n*8)) + val)
	}
	return WriteUInt(w, uval, n)
}
// WriteFixed writes a Fixed value as n bytes: 2-byte output drops the
// low 8 fractional bits (8.8), 4-byte output writes the full 16.16
// value. Panics for any other width.
func WriteFixed(w io.Writer, val Fixed, n int) (err error) {
	var uval uint
	if n == 2 {
		uval = uint(val) >> 8
	} else if n == 4 {
		uval = uint(val)
	} else {
		panic("only fixed32 and fixed16 is supported")
	}
	return WriteUInt(w, uval, n)
}

// WriteTimeStamp writes ts as an n-byte big-endian unsigned integer.
func WriteTimeStamp(w io.Writer, ts TimeStamp, n int) (err error) {
	return WriteUInt(w, uint(ts), n)
}

// WriteString writes val zero-padded (or truncated) to exactly n bytes.
func WriteString(w io.Writer, val string, n int) (err error) {
	return WriteBytes(w, []byte(val), n)
}

// WriteDummy writes n zero bytes.
func WriteDummy(w io.Writer, n int) (err error) {
	return WriteBytes(w, []byte{}, n)
}
// Writer wraps an io.WriteSeeker while an atom is being written; Close
// seeks back and patches the atom's 32-bit size field.
type Writer struct {
	io.WriteSeeker
	sizePos int64 // absolute offset of the atom's size placeholder
}

// WriteEmptyInt writes an n-byte zero placeholder and returns the
// offset where it was written, so RefillInt can patch it later.
func WriteEmptyInt(w io.WriteSeeker, n int) (pos int64, err error) {
	if pos, err = w.Seek(0, 1); err != nil { // whence 1 == current position
		return
	}
	if err = WriteInt(w, 0, n); err != nil {
		return
	}
	return
}

// RefillInt overwrites the n-byte integer at pos with val, then
// restores the current write position.
func RefillInt(w io.WriteSeeker, pos int64, val int, n int) (err error) {
	var curPos int64
	if curPos, err = w.Seek(0, 1); err != nil {
		return
	}
	if _, err = w.Seek(pos, 0); err != nil { // whence 0 == start of stream
		return
	}
	if err = WriteInt(w, val, n); err != nil {
		return
	}
	if _, err = w.Seek(curPos, 0); err != nil {
		return
	}
	return
}

// Close finalizes the atom: the size field at sizePos is patched with
// the byte count from sizePos to the current position, which includes
// the 8-byte header because sizePos is where the atom starts.
func (self *Writer) Close() (err error) {
	var curPos int64
	if curPos, err = self.Seek(0, 1); err != nil {
		return
	}
	if err = RefillInt(self, self.sizePos, int(curPos-self.sizePos), 4); err != nil {
		return
	}
	if false { // debug trace, disabled
		log.Println("writeback", self.sizePos, curPos, curPos-self.sizePos)
	}
	return
}

// WriteAtomHeader starts a new atom: a zero size placeholder plus the
// four-character code. The returned Writer must be Close()d after the
// body is written so the size gets fixed up.
func WriteAtomHeader(w io.WriteSeeker, cc4 string) (res *Writer, err error) {
	self := &Writer{WriteSeeker: w}
	if self.sizePos, err = WriteEmptyInt(w, 4); err != nil {
		return
	}
	if err = WriteString(self, cc4, 4); err != nil {
		return
	}
	res = self
	return
}

View File

@ -1,14 +1,12 @@
package mp4 package mp4
import ( import (
"bytes"
"time" "time"
"fmt" "fmt"
"github.com/nareix/joy4/av" "github.com/nareix/joy4/av"
"github.com/nareix/joy4/codec/aacparser" "github.com/nareix/joy4/codec/aacparser"
"github.com/nareix/joy4/codec/h264parser" "github.com/nareix/joy4/codec/h264parser"
"github.com/nareix/joy4/format/mp4/atom" "github.com/nareix/joy4/format/mp4/mp4io"
"github.com/nareix/joy4/format/mp4/isom"
"io" "io"
) )
@ -16,7 +14,7 @@ type Demuxer struct {
r io.ReadSeeker r io.ReadSeeker
streams []*Stream streams []*Stream
movieAtom *atom.Movie movieAtom *mp4io.Movie
} }
func NewDemuxer(r io.ReadSeeker) *Demuxer { func NewDemuxer(r io.ReadSeeker) *Demuxer {
@ -38,34 +36,15 @@ func (self *Demuxer) probe() (err error) {
return return
} }
var N int64 var moov *mp4io.Movie
var moov *atom.Movie var atoms []mp4io.Atom
if N, err = self.r.Seek(0, 2); err != nil { if atoms, err = mp4io.ReadFileAtoms(self.r); err != nil {
return return
} }
if _, err = self.r.Seek(0, 0); err != nil { for _, atom := range atoms {
return if atom.Tag() == mp4io.MOOV {
} moov = atom.(*mp4io.Movie)
lr := &io.LimitedReader{R: self.r, N: N}
for lr.N > 0 {
var ar *io.LimitedReader
var cc4 string
if ar, cc4, err = atom.ReadAtomHeader(lr, ""); err != nil {
return
}
if cc4 == "moov" {
if moov, err = atom.ReadMovie(ar); err != nil {
err = fmt.Errorf("mp4: moov invalid")
return
}
}
if _, err = atom.ReadDummy(lr, int(ar.N)); err != nil {
return
} }
} }
@ -89,22 +68,16 @@ func (self *Demuxer) probe() (err error) {
return return
} }
if avc1 := atom.GetAvc1ConfByTrack(atrack); avc1 != nil { if avc1 := atrack.GetAVC1Conf(); avc1 != nil {
if stream.CodecData, err = h264parser.NewCodecDataFromAVCDecoderConfRecord(avc1.Data); err != nil { if stream.CodecData, err = h264parser.NewCodecDataFromAVCDecoderConfRecord(avc1.Data); err != nil {
return return
} }
self.streams = append(self.streams, stream) self.streams = append(self.streams, stream)
} else if esds := atrack.GetElemStreamDesc(); esds != nil {
} else if mp4a := atom.GetMp4aDescByTrack(atrack); mp4a != nil && mp4a.Conf != nil { if stream.CodecData, err = aacparser.NewCodecDataFromMPEG4AudioConfigBytes(esds.DecConfig); err != nil {
var config []byte
if config, err = isom.ReadElemStreamDesc(bytes.NewReader(mp4a.Conf.Data)); err != nil {
return
}
if stream.CodecData, err = aacparser.NewCodecDataFromMPEG4AudioConfigBytes(config); err != nil {
return return
} }
self.streams = append(self.streams, stream) self.streams = append(self.streams, stream)
} }
} }
@ -119,10 +92,10 @@ func (self *Stream) setSampleIndex(index int) (err error) {
for self.chunkIndex = range self.sample.ChunkOffset.Entries { for self.chunkIndex = range self.sample.ChunkOffset.Entries {
if self.chunkGroupIndex+1 < len(self.sample.SampleToChunk.Entries) && if self.chunkGroupIndex+1 < len(self.sample.SampleToChunk.Entries) &&
self.chunkIndex+1 == self.sample.SampleToChunk.Entries[self.chunkGroupIndex+1].FirstChunk { uint32(self.chunkIndex+1) == self.sample.SampleToChunk.Entries[self.chunkGroupIndex+1].FirstChunk {
self.chunkGroupIndex++ self.chunkGroupIndex++
} }
n := self.sample.SampleToChunk.Entries[self.chunkGroupIndex].SamplesPerChunk n := int(self.sample.SampleToChunk.Entries[self.chunkGroupIndex].SamplesPerChunk)
if index >= start && index < start+n { if index >= start && index < start+n {
found = true found = true
self.sampleIndexInChunk = index - start self.sampleIndexInChunk = index - start
@ -136,7 +109,7 @@ func (self *Stream) setSampleIndex(index int) (err error) {
} }
if self.sample.SampleSize.SampleSize != 0 { if self.sample.SampleSize.SampleSize != 0 {
self.sampleOffsetInChunk = int64(self.sampleIndexInChunk * self.sample.SampleSize.SampleSize) self.sampleOffsetInChunk = int64(self.sampleIndexInChunk)*int64(self.sample.SampleSize.SampleSize)
} else { } else {
if index >= len(self.sample.SampleSize.Entries) { if index >= len(self.sample.SampleSize.Entries) {
err = fmt.Errorf("mp4: stream[%d]: sample index out of range", self.idx) err = fmt.Errorf("mp4: stream[%d]: sample index out of range", self.idx)
@ -154,15 +127,15 @@ func (self *Stream) setSampleIndex(index int) (err error) {
self.sttsEntryIndex = 0 self.sttsEntryIndex = 0
for self.sttsEntryIndex < len(self.sample.TimeToSample.Entries) { for self.sttsEntryIndex < len(self.sample.TimeToSample.Entries) {
entry := self.sample.TimeToSample.Entries[self.sttsEntryIndex] entry := self.sample.TimeToSample.Entries[self.sttsEntryIndex]
n := entry.Count n := int(entry.Count)
if index >= start && index < start+n { if index >= start && index < start+n {
self.sampleIndexInSttsEntry = index - start self.sampleIndexInSttsEntry = index - start
self.dts += int64((index - start) * entry.Duration) self.dts += int64(index-start)*int64(entry.Duration)
found = true found = true
break break
} }
start += n start += n
self.dts += int64(n * entry.Duration) self.dts += int64(n)*int64(entry.Duration)
self.sttsEntryIndex++ self.sttsEntryIndex++
} }
if !found { if !found {
@ -175,7 +148,7 @@ func (self *Stream) setSampleIndex(index int) (err error) {
found = false found = false
self.cttsEntryIndex = 0 self.cttsEntryIndex = 0
for self.cttsEntryIndex < len(self.sample.CompositionOffset.Entries) { for self.cttsEntryIndex < len(self.sample.CompositionOffset.Entries) {
n := self.sample.CompositionOffset.Entries[self.cttsEntryIndex].Count n := int(self.sample.CompositionOffset.Entries[self.cttsEntryIndex].Count)
if index >= start && index < start+n { if index >= start && index < start+n {
self.sampleIndexInCttsEntry = index - start self.sampleIndexInCttsEntry = index - start
found = true found = true
@ -193,7 +166,7 @@ func (self *Stream) setSampleIndex(index int) (err error) {
if self.sample.SyncSample != nil { if self.sample.SyncSample != nil {
self.syncSampleIndex = 0 self.syncSampleIndex = 0
for self.syncSampleIndex < len(self.sample.SyncSample.Entries)-1 { for self.syncSampleIndex < len(self.sample.SyncSample.Entries)-1 {
if self.sample.SyncSample.Entries[self.syncSampleIndex+1]-1 > index { if self.sample.SyncSample.Entries[self.syncSampleIndex+1]-1 > uint32(index) {
break break
} }
self.syncSampleIndex++ self.syncSampleIndex++
@ -244,7 +217,7 @@ func (self *Stream) incSampleIndex() (duration int64) {
} }
self.sampleIndexInChunk++ self.sampleIndexInChunk++
if self.sampleIndexInChunk == self.sample.SampleToChunk.Entries[self.chunkGroupIndex].SamplesPerChunk { if uint32(self.sampleIndexInChunk) == self.sample.SampleToChunk.Entries[self.chunkGroupIndex].SamplesPerChunk {
self.chunkIndex++ self.chunkIndex++
self.sampleIndexInChunk = 0 self.sampleIndexInChunk = 0
self.sampleOffsetInChunk = int64(0) self.sampleOffsetInChunk = int64(0)
@ -257,7 +230,7 @@ func (self *Stream) incSampleIndex() (duration int64) {
} }
if self.chunkGroupIndex+1 < len(self.sample.SampleToChunk.Entries) && if self.chunkGroupIndex+1 < len(self.sample.SampleToChunk.Entries) &&
self.chunkIndex+1 == self.sample.SampleToChunk.Entries[self.chunkGroupIndex+1].FirstChunk { uint32(self.chunkIndex+1) == self.sample.SampleToChunk.Entries[self.chunkGroupIndex+1].FirstChunk {
self.chunkGroupIndex++ self.chunkGroupIndex++
} }
@ -265,14 +238,14 @@ func (self *Stream) incSampleIndex() (duration int64) {
duration = int64(sttsEntry.Duration) duration = int64(sttsEntry.Duration)
self.sampleIndexInSttsEntry++ self.sampleIndexInSttsEntry++
self.dts += duration self.dts += duration
if self.sampleIndexInSttsEntry == sttsEntry.Count { if uint32(self.sampleIndexInSttsEntry) == sttsEntry.Count {
self.sampleIndexInSttsEntry = 0 self.sampleIndexInSttsEntry = 0
self.sttsEntryIndex++ self.sttsEntryIndex++
} }
if self.sample.CompositionOffset != nil && len(self.sample.CompositionOffset.Entries) > 0 { if self.sample.CompositionOffset != nil && len(self.sample.CompositionOffset.Entries) > 0 {
self.sampleIndexInCttsEntry++ self.sampleIndexInCttsEntry++
if self.sampleIndexInCttsEntry == self.sample.CompositionOffset.Entries[self.cttsEntryIndex].Count { if uint32(self.sampleIndexInCttsEntry) == self.sample.CompositionOffset.Entries[self.cttsEntryIndex].Count {
self.sampleIndexInCttsEntry = 0 self.sampleIndexInCttsEntry = 0
self.cttsEntryIndex++ self.cttsEntryIndex++
} }
@ -280,7 +253,7 @@ func (self *Stream) incSampleIndex() (duration int64) {
if self.sample.SyncSample != nil { if self.sample.SyncSample != nil {
entries := self.sample.SyncSample.Entries entries := self.sample.SyncSample.Entries
if self.syncSampleIndex+1 < len(entries) && entries[self.syncSampleIndex+1]-1 == self.sampleIndex+1 { if self.syncSampleIndex+1 < len(entries) && entries[self.syncSampleIndex+1]-1 == uint32(self.sampleIndex+1) {
self.syncSampleIndex++ self.syncSampleIndex++
} }
} }
@ -294,10 +267,10 @@ func (self *Stream) sampleCount() int {
chunkGroupIndex := 0 chunkGroupIndex := 0
count := 0 count := 0
for chunkIndex := range self.sample.ChunkOffset.Entries { for chunkIndex := range self.sample.ChunkOffset.Entries {
n := self.sample.SampleToChunk.Entries[chunkGroupIndex].SamplesPerChunk n := int(self.sample.SampleToChunk.Entries[chunkGroupIndex].SamplesPerChunk)
count += n count += n
if chunkGroupIndex+1 < len(self.sample.SampleToChunk.Entries) && if chunkGroupIndex+1 < len(self.sample.SampleToChunk.Entries) &&
chunkIndex+1 == self.sample.SampleToChunk.Entries[chunkGroupIndex+1].FirstChunk { uint32(chunkIndex+1) == self.sample.SampleToChunk.Entries[chunkGroupIndex+1].FirstChunk {
chunkGroupIndex++ chunkGroupIndex++
} }
} }
@ -370,7 +343,7 @@ func (self *Stream) readPacket() (pkt av.Packet, err error) {
//fmt.Println("readPacket", self.sampleIndex) //fmt.Println("readPacket", self.sampleIndex)
chunkOffset := self.sample.ChunkOffset.Entries[self.chunkIndex] chunkOffset := self.sample.ChunkOffset.Entries[self.chunkIndex]
sampleSize := 0 sampleSize := uint32(0)
if self.sample.SampleSize.SampleSize != 0 { if self.sample.SampleSize.SampleSize != 0 {
sampleSize = self.sample.SampleSize.SampleSize sampleSize = self.sample.SampleSize.SampleSize
} else { } else {
@ -388,7 +361,7 @@ func (self *Stream) readPacket() (pkt av.Packet, err error) {
} }
if self.sample.SyncSample != nil { if self.sample.SyncSample != nil {
if self.sample.SyncSample.Entries[self.syncSampleIndex]-1 == self.sampleIndex { if self.sample.SyncSample.Entries[self.syncSampleIndex]-1 == uint32(self.sampleIndex) {
pkt.IsKeyFrame = true pkt.IsKeyFrame = true
} }
} }
@ -426,7 +399,7 @@ func (self *Stream) timeToSampleIndex(tm time.Duration) int {
found := false found := false
for _, entry := range self.sample.TimeToSample.Entries { for _, entry := range self.sample.TimeToSample.Entries {
endTs = startTs + int64(entry.Count*entry.Duration) endTs = startTs + int64(entry.Count*entry.Duration)
endIndex = startIndex + entry.Count endIndex = startIndex + int(entry.Count)
if targetTs >= startTs && targetTs < endTs { if targetTs >= startTs && targetTs < endTs {
targetIndex = startIndex + int((targetTs-startTs)/int64(entry.Duration)) targetIndex = startIndex + int((targetTs-startTs)/int64(entry.Duration))
found = true found = true
@ -445,8 +418,8 @@ func (self *Stream) timeToSampleIndex(tm time.Duration) int {
if self.sample.SyncSample != nil { if self.sample.SyncSample != nil {
entries := self.sample.SyncSample.Entries entries := self.sample.SyncSample.Entries
for i := len(entries) - 1; i >= 0; i-- { for i := len(entries) - 1; i >= 0; i-- {
if entries[i]-1 < targetIndex { if entries[i]-1 < uint32(targetIndex) {
targetIndex = entries[i] - 1 targetIndex = int(entries[i]-1)
break break
} }
} }

View File

@ -1,262 +0,0 @@
package isom
import (
"bytes"
"fmt"
"github.com/nareix/bits"
"io"
"io/ioutil"
)
// copied from libavformat/isom.h
const (
MP4ESDescrTag = 3
MP4DecConfigDescrTag = 4
MP4DecSpecificDescrTag = 5
)
var debugReader = false
var debugWriter = false
func readDesc(r io.Reader) (tag uint, data []byte, err error) {
if tag, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
var length uint
for i := 0; i < 4; i++ {
var c uint
if c, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
length = (length << 7) | (c & 0x7f)
if c&0x80 == 0 {
break
}
}
data = make([]byte, length)
if _, err = r.Read(data); err != nil {
return
}
return
}
func writeDesc(w io.Writer, tag uint, data []byte) (err error) {
if err = bits.WriteUIntBE(w, tag, 8); err != nil {
return
}
length := uint(len(data))
for i := 3; i > 0; i-- {
if err = bits.WriteUIntBE(w, (length>>uint(7*i))&0x7f|0x80, 8); err != nil {
return
}
}
if err = bits.WriteUIntBE(w, length&0x7f, 8); err != nil {
return
}
if _, err = w.Write(data); err != nil {
return
}
return
}
func readESDesc(r io.Reader) (err error) {
var ES_ID uint
// ES_ID
if ES_ID, err = bits.ReadUIntBE(r, 16); err != nil {
return
}
var flags uint
if flags, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
//streamDependenceFlag
if flags&0x80 != 0 {
if _, err = bits.ReadUIntBE(r, 16); err != nil {
return
}
}
//URL_Flag
if flags&0x40 != 0 {
var length uint
if length, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
if _, err = io.CopyN(ioutil.Discard, r, int64(length)); err != nil {
return
}
}
//OCRstreamFlag
if flags&0x20 != 0 {
if _, err = bits.ReadUIntBE(r, 16); err != nil {
return
}
}
if debugReader {
println("readESDesc:", ES_ID, flags)
}
return
}
func writeESDesc(w io.Writer, ES_ID uint) (err error) {
// ES_ID
if err = bits.WriteUIntBE(w, ES_ID, 16); err != nil {
return
}
// flags
if err = bits.WriteUIntBE(w, 0, 8); err != nil {
return
}
return
}
func readDescByTag(r io.Reader, targetTag uint) (data []byte, err error) {
var found bool
for {
if tag, _data, err := readDesc(r); err != nil {
break
} else {
if tag == targetTag {
data = _data
found = true
}
if debugReader {
println("readDescByTag:", tag, len(_data))
}
}
}
if !found {
err = fmt.Errorf("tag not found")
return
}
return
}
// copied from libavformat/isom.c ff_mp4_read_dec_config_descr()
func readDecConfDesc(r io.Reader) (decConfig []byte, err error) {
var objectId uint
var streamType uint
var bufSize uint
var maxBitrate uint
var avgBitrate uint
// objectId
if objectId, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
// streamType
if streamType, err = bits.ReadUIntBE(r, 8); err != nil {
return
}
// buffer size db
if bufSize, err = bits.ReadUIntBE(r, 24); err != nil {
return
}
// max bitrate
if maxBitrate, err = bits.ReadUIntBE(r, 32); err != nil {
return
}
// avg bitrate
if avgBitrate, err = bits.ReadUIntBE(r, 32); err != nil {
return
}
if debugReader {
println("readDecConfDesc:", objectId, streamType, bufSize, maxBitrate, avgBitrate)
}
if decConfig, err = readDescByTag(r, MP4DecSpecificDescrTag); err != nil {
return
}
return
}
// copied from libavformat/movenc.c mov_write_esds_tag()
func writeDecConfDesc(w io.Writer, objectId uint, streamType uint, decConfig []byte) (err error) {
// objectId
if err = bits.WriteUIntBE(w, objectId, 8); err != nil {
return
}
// streamType
if err = bits.WriteUIntBE(w, streamType, 8); err != nil {
return
}
// buffer size db
if err = bits.WriteUIntBE(w, 0, 24); err != nil {
return
}
// max bitrate
if err = bits.WriteUIntBE(w, 200000, 32); err != nil {
return
}
// avg bitrate
if err = bits.WriteUIntBE(w, 0, 32); err != nil {
return
}
if err = writeDesc(w, MP4DecSpecificDescrTag, decConfig); err != nil {
return
}
return
}
// copied from libavformat/mov.c ff_mov_read_esds()
func ReadElemStreamDesc(r io.Reader) (decConfig []byte, err error) {
if debugReader {
println("ReadElemStreamDesc: start")
}
var data []byte
if data, err = readDescByTag(r, MP4ESDescrTag); err != nil {
return
}
r = bytes.NewReader(data)
if err = readESDesc(r); err != nil {
return
}
if data, err = readDescByTag(r, MP4DecConfigDescrTag); err != nil {
return
}
r = bytes.NewReader(data)
if decConfig, err = readDecConfDesc(r); err != nil {
return
}
if debugReader {
println("ReadElemStreamDesc: end")
}
return
}
func WriteElemStreamDesc(w io.Writer, decConfig []byte, trackId uint) (err error) {
// MP4ESDescrTag(ESDesc MP4DecConfigDescrTag(objectId streamType bufSize avgBitrate MP4DecSpecificDescrTag(decConfig)))
data := decConfig
buf := &bytes.Buffer{}
// 0x40 = ObjectType AAC
// 0x15 = Audiostream
writeDecConfDesc(buf, 0x40, 0x15, data)
data = buf.Bytes()
buf = &bytes.Buffer{}
writeDesc(buf, MP4DecConfigDescrTag, data) // 4
data = buf.Bytes()
buf = &bytes.Buffer{}
writeESDesc(buf, trackId)
buf.Write(data)
writeDesc(buf, 0x06, []byte{0x02})
data = buf.Bytes()
buf = &bytes.Buffer{}
writeDesc(buf, MP4ESDescrTag, data) // 3
data = buf.Bytes()
if _, err = w.Write(data); err != nil {
return
}
return
}

View File

@ -1,53 +0,0 @@
package isom
import (
"bytes"
"encoding/hex"
"testing"
)
func TestReadElemStreamDesc(t *testing.T) {
debugReader = true
debugWriter = true
var err error
data, _ := hex.DecodeString("03808080220002000480808014401500000000030d400000000005808080021210068080800102")
t.Logf("elemDesc=%x", data)
t.Logf("length=%d", len(data))
var aconfig MPEG4AudioConfig
if aconfig, err = ReadElemStreamDescAAC(bytes.NewReader(data)); err != nil {
t.Error(err)
}
aconfig = aconfig.Complete()
t.Logf("aconfig=%v", aconfig)
bw := &bytes.Buffer{}
WriteMPEG4AudioConfig(bw, aconfig)
bw = &bytes.Buffer{}
WriteElemStreamDescAAC(bw, aconfig, 2)
t.Logf("elemDesc=%x", bw.Bytes())
data = bw.Bytes()
t.Logf("length=%d", len(data))
if aconfig, err = ReadElemStreamDescAAC(bytes.NewReader(data)); err != nil {
t.Error(err)
}
t.Logf("aconfig=%v", aconfig.Complete())
//00000000 ff f1 50 80 04 3f fc de 04 00 00 6c 69 62 66 61 |..P..?.....libfa|
//00000010 61 63 20 31 2e 32 38 00 00 42 40 93 20 04 32 00 |ac 1.28..B@. .2.|
//00000020 47 ff f1 50 80 05 1f fc 21 42 fe ed b2 5c a8 00 |G..P....!B...\..|
data, _ = hex.DecodeString("fff15080043ffcde040000")
var n, framelen int
aconfig, _, n, _, _ = ReadADTSFrame(data)
t.Logf("%v n=%d", aconfig.Complete(), n)
data = MakeADTSHeader(aconfig, 1024*3, 33)
data = append(data, []byte{1, 2, 3, 4, 5}...)
t.Logf("%x", data)
aconfig, _, n, framelen, err = ReadADTSFrame(data)
t.Logf("%v n=%d framelen=%d err=%v", aconfig.Complete(), n, framelen, err)
}

View File

@ -1,25 +1,29 @@
package mp4 package mp4
import ( import (
"bytes"
"fmt" "fmt"
"time" "time"
"github.com/nareix/joy4/av" "github.com/nareix/joy4/av"
"github.com/nareix/joy4/codec/aacparser" "github.com/nareix/joy4/codec/aacparser"
"github.com/nareix/joy4/codec/h264parser" "github.com/nareix/joy4/codec/h264parser"
"github.com/nareix/joy4/format/mp4/atom" "github.com/nareix/joy4/format/mp4/mp4io"
"github.com/nareix/joy4/format/mp4/isom" "github.com/nareix/pio"
"io" "io"
"bufio"
) )
type Muxer struct { type Muxer struct {
w io.WriteSeeker w io.WriteSeeker
bufw *bufio.Writer
wpos int64
streams []*Stream streams []*Stream
mdatWriter *atom.Writer
} }
func NewMuxer(w io.WriteSeeker) *Muxer { func NewMuxer(w io.WriteSeeker) *Muxer {
return &Muxer{w: w} return &Muxer{
w: w,
bufw: bufio.NewWriterSize(w, pio.RecommendBufioSize),
}
} }
func (self *Muxer) newStream(codec av.CodecData) (err error) { func (self *Muxer) newStream(codec av.CodecData) (err error) {
@ -32,11 +36,11 @@ func (self *Muxer) newStream(codec av.CodecData) (err error) {
} }
stream := &Stream{CodecData: codec} stream := &Stream{CodecData: codec}
stream.sample = &atom.SampleTable{ stream.sample = &mp4io.SampleTable{
SampleDesc: &atom.SampleDesc{}, SampleDesc: &mp4io.SampleDesc{},
TimeToSample: &atom.TimeToSample{}, TimeToSample: &mp4io.TimeToSample{},
SampleToChunk: &atom.SampleToChunk{ SampleToChunk: &mp4io.SampleToChunk{
Entries: []atom.SampleToChunkEntry{ Entries: []mp4io.SampleToChunkEntry{
{ {
FirstChunk: 1, FirstChunk: 1,
SampleDescId: 1, SampleDescId: 1,
@ -44,28 +48,28 @@ func (self *Muxer) newStream(codec av.CodecData) (err error) {
}, },
}, },
}, },
SampleSize: &atom.SampleSize{}, SampleSize: &mp4io.SampleSize{},
ChunkOffset: &atom.ChunkOffset{}, ChunkOffset: &mp4io.ChunkOffset{},
} }
stream.trackAtom = &atom.Track{ stream.trackAtom = &mp4io.Track{
Header: &atom.TrackHeader{ Header: &mp4io.TrackHeader{
TrackId: len(self.streams) + 1, TrackId: int32(len(self.streams)+1),
Flags: 0x0003, // Track enabled | Track in movie Flags: 0x0003, // Track enabled | Track in movie
Duration: 0, // fill later Duration: 0, // fill later
Matrix: [9]int{0x10000, 0, 0, 0, 0x10000, 0, 0, 0, 0x40000000}, Matrix: [9]int32{0x10000, 0, 0, 0, 0x10000, 0, 0, 0, 0x40000000},
}, },
Media: &atom.Media{ Media: &mp4io.Media{
Header: &atom.MediaHeader{ Header: &mp4io.MediaHeader{
TimeScale: 0, // fill later TimeScale: 0, // fill later
Duration: 0, // fill later Duration: 0, // fill later
Language: 21956, Language: 21956,
}, },
Info: &atom.MediaInfo{ Info: &mp4io.MediaInfo{
Sample: stream.sample, Sample: stream.sample,
Data: &atom.DataInfo{ Data: &mp4io.DataInfo{
Refer: &atom.DataRefer{ Refer: &mp4io.DataRefer{
Url: &atom.DataReferUrl{ Url: &mp4io.DataReferUrl{
Flags: 0x000001, // Self reference Flags: 0x000001, // Self reference
}, },
}, },
@ -76,7 +80,7 @@ func (self *Muxer) newStream(codec av.CodecData) (err error) {
switch codec.Type() { switch codec.Type() {
case av.H264: case av.H264:
stream.sample.SyncSample = &atom.SyncSample{} stream.sample.SyncSample = &mp4io.SyncSample{}
} }
stream.timeScale = 90000 stream.timeScale = 90000
@ -87,55 +91,51 @@ func (self *Muxer) newStream(codec av.CodecData) (err error) {
} }
func (self *Stream) fillTrackAtom() (err error) { func (self *Stream) fillTrackAtom() (err error) {
self.trackAtom.Media.Header.TimeScale = int(self.timeScale) self.trackAtom.Media.Header.TimeScale = int32(self.timeScale)
self.trackAtom.Media.Header.Duration = int(self.duration) self.trackAtom.Media.Header.Duration = int32(self.duration)
if self.Type() == av.H264 { if self.Type() == av.H264 {
codec := self.CodecData.(h264parser.CodecData) codec := self.CodecData.(h264parser.CodecData)
width, height := codec.Width(), codec.Height() width, height := codec.Width(), codec.Height()
self.sample.SampleDesc.Avc1Desc = &atom.Avc1Desc{ self.sample.SampleDesc.AVC1Desc = &mp4io.AVC1Desc{
DataRefIdx: 1, DataRefIdx: 1,
HorizontalResolution: 72, HorizontalResolution: 72,
VorizontalResolution: 72, VorizontalResolution: 72,
Width: int(width), Width: int16(width),
Height: int(height), Height: int16(height),
FrameCount: 1, FrameCount: 1,
Depth: 24, Depth: 24,
ColorTableId: -1, ColorTableId: -1,
Conf: &atom.Avc1Conf{Data: codec.AVCDecoderConfRecordBytes()}, Conf: &mp4io.AVC1Conf{Data: codec.AVCDecoderConfRecordBytes()},
} }
self.trackAtom.Media.Handler = &atom.HandlerRefer{ self.trackAtom.Media.Handler = &mp4io.HandlerRefer{
SubType: "vide", SubType: [4]byte{'v','i','d','e'},
Name: "Video Media Handler", Name: []byte("Video Media Handler"),
} }
self.trackAtom.Media.Info.Video = &atom.VideoMediaInfo{ self.trackAtom.Media.Info.Video = &mp4io.VideoMediaInfo{
Flags: 0x000001, Flags: 0x000001,
} }
self.trackAtom.Header.TrackWidth = atom.IntToFixed(int(width)) self.trackAtom.Header.TrackWidth = float64(width)
self.trackAtom.Header.TrackHeight = atom.IntToFixed(int(height)) self.trackAtom.Header.TrackHeight = float64(height)
} else if self.Type() == av.AAC { } else if self.Type() == av.AAC {
codec := self.CodecData.(aacparser.CodecData) codec := self.CodecData.(aacparser.CodecData)
buf := &bytes.Buffer{} self.sample.SampleDesc.MP4ADesc = &mp4io.MP4ADesc{
if err = isom.WriteElemStreamDesc(buf, codec.MPEG4AudioConfigBytes(), uint(self.trackAtom.Header.TrackId)); err != nil {
return
}
self.sample.SampleDesc.Mp4aDesc = &atom.Mp4aDesc{
DataRefIdx: 1, DataRefIdx: 1,
NumberOfChannels: codec.ChannelLayout().Count(), NumberOfChannels: int16(codec.ChannelLayout().Count()),
SampleSize: codec.SampleFormat().BytesPerSample(), SampleSize: int16(codec.SampleFormat().BytesPerSample()),
SampleRate: atom.IntToFixed(codec.SampleRate()), SampleRate: float64(codec.SampleRate()),
Conf: &atom.ElemStreamDesc{ Conf: &mp4io.ElemStreamDesc{
Data: buf.Bytes(), DecConfig: codec.MPEG4AudioConfigBytes(),
}, },
} }
self.trackAtom.Header.Volume = atom.IntToFixed(1) self.trackAtom.Header.Volume = 1
self.trackAtom.Header.AlternateGroup = 1 self.trackAtom.Header.AlternateGroup = 1
self.trackAtom.Media.Handler = &atom.HandlerRefer{ self.trackAtom.Media.Handler = &mp4io.HandlerRefer{
SubType: "soun", SubType: [4]byte{'s','o','u','n'},
Name: "Sound Handler", Name: []byte("Sound Handler"),
} }
self.trackAtom.Media.Info.Sound = &atom.SoundMediaInfo{} self.trackAtom.Media.Info.Sound = &mp4io.SoundMediaInfo{}
} else { } else {
err = fmt.Errorf("mp4: codec type=%d invalid", self.Type()) err = fmt.Errorf("mp4: codec type=%d invalid", self.Type())
@ -152,12 +152,16 @@ func (self *Muxer) WriteHeader(streams []av.CodecData) (err error) {
} }
} }
if self.mdatWriter, err = atom.WriteAtomHeader(self.w, "mdat"); err != nil { taghdr := make([]byte, 8)
pio.PutU32BE(taghdr[4:], uint32(mp4io.MDAT))
if _, err = self.w.Write(taghdr); err != nil {
return return
} }
self.wpos += 8
for _, stream := range self.streams { for _, stream := range self.streams {
if stream.Type().IsVideo() { if stream.Type().IsVideo() {
stream.sample.CompositionOffset = &atom.CompositionOffset{} stream.sample.CompositionOffset = &mp4io.CompositionOffset{}
} }
} }
return return
@ -180,41 +184,26 @@ func (self *Stream) writePacket(pkt av.Packet, rawdur time.Duration) (err error)
return return
} }
var filePos int64 if _, err = self.muxer.bufw.Write(pkt.Data); err != nil {
var sampleSize int
if filePos, err = self.muxer.mdatWriter.Seek(0, 1); err != nil {
return return
} }
if self.Type() == av.H264 {
sampleSize += len(pkt.Data)
if _, err = self.muxer.mdatWriter.Write(pkt.Data); err != nil {
return
}
} else {
sampleSize = len(pkt.Data)
if _, err = self.muxer.mdatWriter.Write(pkt.Data); err != nil {
return
}
}
if pkt.IsKeyFrame && self.sample.SyncSample != nil { if pkt.IsKeyFrame && self.sample.SyncSample != nil {
self.sample.SyncSample.Entries = append(self.sample.SyncSample.Entries, self.sampleIndex+1) self.sample.SyncSample.Entries = append(self.sample.SyncSample.Entries, uint32(self.sampleIndex+1))
} }
duration := int(self.timeToTs(rawdur)) duration := uint32(self.timeToTs(rawdur))
if self.sttsEntry == nil || duration != self.sttsEntry.Duration { if self.sttsEntry == nil || duration != self.sttsEntry.Duration {
self.sample.TimeToSample.Entries = append(self.sample.TimeToSample.Entries, atom.TimeToSampleEntry{Duration: duration}) self.sample.TimeToSample.Entries = append(self.sample.TimeToSample.Entries, mp4io.TimeToSampleEntry{Duration: duration})
self.sttsEntry = &self.sample.TimeToSample.Entries[len(self.sample.TimeToSample.Entries)-1] self.sttsEntry = &self.sample.TimeToSample.Entries[len(self.sample.TimeToSample.Entries)-1]
} }
self.sttsEntry.Count++ self.sttsEntry.Count++
if self.sample.CompositionOffset != nil { if self.sample.CompositionOffset != nil {
offset := int(self.timeToTs(pkt.CompositionTime)) offset := uint32(self.timeToTs(pkt.CompositionTime))
if self.cttsEntry == nil || offset != self.cttsEntry.Offset { if self.cttsEntry == nil || offset != self.cttsEntry.Offset {
table := self.sample.CompositionOffset table := self.sample.CompositionOffset
table.Entries = append(table.Entries, atom.CompositionOffsetEntry{Offset: offset}) table.Entries = append(table.Entries, mp4io.CompositionOffsetEntry{Offset: offset})
self.cttsEntry = &table.Entries[len(table.Entries)-1] self.cttsEntry = &table.Entries[len(table.Entries)-1]
} }
self.cttsEntry.Count++ self.cttsEntry.Count++
@ -222,9 +211,10 @@ func (self *Stream) writePacket(pkt av.Packet, rawdur time.Duration) (err error)
self.duration += int64(duration) self.duration += int64(duration)
self.sampleIndex++ self.sampleIndex++
self.sample.ChunkOffset.Entries = append(self.sample.ChunkOffset.Entries, int(filePos)) self.sample.ChunkOffset.Entries = append(self.sample.ChunkOffset.Entries, uint32(self.muxer.wpos))
self.sample.SampleSize.Entries = append(self.sample.SampleSize.Entries, sampleSize) self.sample.SampleSize.Entries = append(self.sample.SampleSize.Entries, uint32(len(pkt.Data)))
self.muxer.wpos += int64(len(pkt.Data))
return return
} }
@ -238,11 +228,11 @@ func (self *Muxer) WriteTrailer() (err error) {
} }
} }
moov := &atom.Movie{} moov := &mp4io.Movie{}
moov.Header = &atom.MovieHeader{ moov.Header = &mp4io.MovieHeader{
PreferredRate: atom.IntToFixed(1), PreferredRate: 1,
PreferredVolume: atom.IntToFixed(1), PreferredVolume: 1,
Matrix: [9]int{0x10000, 0, 0, 0, 0x10000, 0, 0, 0, 0x40000000}, Matrix: [9]int32{0x10000, 0, 0, 0, 0x10000, 0, 0, 0, 0x40000000},
NextTrackId: 2, NextTrackId: 2,
} }
@ -253,19 +243,38 @@ func (self *Muxer) WriteTrailer() (err error) {
return return
} }
dur := stream.tsToTime(stream.duration) dur := stream.tsToTime(stream.duration)
stream.trackAtom.Header.Duration = int(timeToTs(dur, timeScale)) stream.trackAtom.Header.Duration = int32(timeToTs(dur, timeScale))
if dur > maxDur { if dur > maxDur {
maxDur = dur maxDur = dur
} }
moov.Tracks = append(moov.Tracks, stream.trackAtom) moov.Tracks = append(moov.Tracks, stream.trackAtom)
} }
moov.Header.TimeScale = int(timeScale) moov.Header.TimeScale = int32(timeScale)
moov.Header.Duration = int(timeToTs(maxDur, timeScale)) moov.Header.Duration = int32(timeToTs(maxDur, timeScale))
if err = self.mdatWriter.Close(); err != nil { if err = self.bufw.Flush(); err != nil {
return return
} }
if err = atom.WriteMovie(self.w, moov); err != nil {
var mdatsize int64
if mdatsize, err = self.w.Seek(0, 1); err != nil {
return
}
if _, err = self.w.Seek(0, 0); err != nil {
return
}
taghdr := make([]byte, 4)
pio.PutU32BE(taghdr, uint32(mdatsize))
if _, err = self.w.Write(taghdr); err != nil {
return
}
if _, err = self.w.Seek(0, 2); err != nil {
return
}
b := make([]byte, moov.Len())
moov.Marshal(b)
if _, err = self.w.Write(b); err != nil {
return return
} }

View File

@ -2,7 +2,7 @@ package mp4
import ( import (
"github.com/nareix/joy4/av" "github.com/nareix/joy4/av"
"github.com/nareix/joy4/format/mp4/atom" "github.com/nareix/joy4/format/mp4/mp4io"
"time" "time"
"io" "io"
) )
@ -10,7 +10,7 @@ import (
type Stream struct { type Stream struct {
av.CodecData av.CodecData
trackAtom *atom.Track trackAtom *mp4io.Track
r io.ReadSeeker r io.ReadSeeker
idx int idx int
@ -21,7 +21,7 @@ type Stream struct {
muxer *Muxer muxer *Muxer
sample *atom.SampleTable sample *mp4io.SampleTable
sampleIndex int sampleIndex int
sampleOffsetInChunk int64 sampleOffsetInChunk int64
@ -38,8 +38,8 @@ type Stream struct {
chunkIndex int chunkIndex int
sampleIndexInChunk int sampleIndexInChunk int
sttsEntry *atom.TimeToSampleEntry sttsEntry *mp4io.TimeToSampleEntry
cttsEntry *atom.CompositionOffsetEntry cttsEntry *mp4io.CompositionOffsetEntry
} }
func timeToTs(tm time.Duration, timeScale int64) int64 { func timeToTs(tm time.Duration, timeScale int64) int64 {