
decode: More type safe API and split scalar into multiple types

Preparation for making the decoder use less memory and the API more type safe.
Each scalar type now has its own struct type, so it can store different
things, and this enables a common scalar interface.
Having separate types will also make it possible to experiment with decode
DSL designs such as chained methods that are type aware.
Mattias Wadman 2022-09-30 13:58:23 +02:00
parent b43cf5775d
commit 9b81d4d3ab
144 changed files with 7476 additions and 6070 deletions
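
To make the renames in the hunks below easier to follow, here is a minimal sketch (not part of the commit) of a decode function written against the new typed API, using only names that appear in this diff; the function and field names are made up.

package example

import (
	"github.com/wader/fq/pkg/decode"
	"github.com/wader/fq/pkg/scalar"
)

// exampleDecode is a hypothetical decode function: string fields now take
// string-typed helpers (d.StrAssert replaces d.AssertStr) and unsigned
// integer fields take uint-typed symbol maps (scalar.UintMapSymStr replaces
// scalar.UToSymStr).
func exampleDecode(d *decode.D, _ any) any {
	d.FieldUTF8("magic", 4, d.StrAssert("abcd")) // assert the actual string value
	d.FieldU8("type", scalar.UintMapSymStr{      // map actual uint64 -> symbolic string
		1: "start",
		2: "stop",
	})
	return nil
}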

View File

@ -1,11 +1,6 @@
def protobuf_to_value:
.fields | map({(.name | tostring): (.enum // .value)}) | add;
# hack to parse just a box
# <binary> | mp4_box
def mp4_box:
[0, 0, 0, 16, "ftyp", "isom", 0, 0 , 2 , 0, .] | mp4.boxes;
# converted from https://github.com/FFmpeg/FFmpeg/blob/870bfe16a12bf09dca3a4ae27ef6f81a2de80c40/libavutil/display.c av_display_rotation_get
def mp4_matrix_structure_rotation:
( .a as $s0

View File

@ -98,7 +98,7 @@ d.FieldUTF8("magic", 4)
// create a new struct and add it as "headers", returns a *decode.D
d.FieldStruct("headers", func(d *decode.D) {
// read 8 bit unsigned integer, map it and add it as "type", returns a uint64
d.FieldU8("type", scalar.UToSymStr{
d.FieldU8("type", scalar.UintMapSymStr{
1: "start",
// ...
})
@ -115,7 +115,7 @@ will produce something like this:
Children: []*decode.Value{
*decode.Value{
Name: "magic",
V: scalar.S{
V: scalar.Str{
Actual: "abcd", // read and set by UTF8 reader
},
Range: ranges.Range{Start: 0, Len: 32},
@ -128,9 +128,9 @@ will produce something like this:
Children: []*decode.Value{
*decode.Value{
Name: "type",
V: scalar.S{
V: scalar.Uint{
Actual: uint64(1), // read and set by U8 reader
Sym: "start", // set by UToSymStr scalar.Mapper
Sym: "start", // set by UintMapSymStr scalar.Mapper
},
Range: ranges.Range{Start: 32, Len: 8},
},
@ -186,7 +186,7 @@ Decoder authors will probably not have to create them.
Keeps track of
- Actual value. Decoded value represented using a Go type like `uint64`, `string` etc. For example a value read by a UTF-8 or UTF-16 reader will end up as a `string`.
- Symbolic value. Optional symbolic representation of the actual value. For example a `scalar.UToSymStr` would map an actual `uint64` to a symbolic `string`.
- Symbolic value. Optional symbolic representation of the actual value. For example a `scalar.UintMapSymStr` would map an actual `uint64` to a symbolic `string`.
- String description of the value.
- Number representation
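
As a concrete illustration of the Actual/Sym split described in the list above, here is a small sketch (not from the commit) of a custom uint mapper; the MapUint signature matches the one implemented by opcodeEntries and majorTypeEntries later in this diff, while the type name is made up.

package example

import "github.com/wader/fq/pkg/scalar"

// parityMapper is a hypothetical scalar.UintMapper: it receives the decoded
// uint scalar, derives a symbolic value from the actual value and returns the
// scalar with Sym filled in.
type parityMapper struct{}

func (parityMapper) MapUint(s scalar.Uint) (scalar.Uint, error) {
	if s.Actual%2 == 0 {
		s.Sym = "even"
	} else {
		s.Sym = "odd"
	}
	return s, nil
}

Passed to a uint field reader (presumably something like d.FieldU8("flags", parityMapper{})), the resulting value would carry both the actual number and the derived symbol.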

View File

@ -1,12 +1,12 @@
func avcHdrParameters(d *decode.D) {
cpbCnt := d.FieldUFn("cpb_cnt", uEV, scalar.UAdd(1))
cpbCnt := d.FieldUintFn("cpb_cnt", uEV, scalar.UAdd(1))
d.FieldU4("bit_rate_scale")
d.FieldU4("cpb_size_scale")
d.FieldArray("sched_sels", func(d *decode.D) {
for i := uint64(0); i < cpbCnt; i++ {
d.FieldStruct("sched_sel", func(d *decode.D) {
d.FieldUFn("bit_rate_value", uEV, scalar.UAdd(1))
d.FieldUFn("cpb_size_value", uEV, scalar.UAdd(1))
d.FieldUintFn("bit_rate_value", uEV, scalar.UAdd(1))
d.FieldUintFn("cpb_size_value", uEV, scalar.UAdd(1))
d.FieldBool("cbr_flag")
})
}

View File

@ -27,7 +27,7 @@ func apev2Decode(d *decode.D, _ any) any {
headerFooterFn := func(d *decode.D, name string) uint64 {
var tagCount uint64
d.FieldStruct(name, func(d *decode.D) {
d.FieldUTF8("preamble", 8, d.AssertStr("APETAGEX"))
d.FieldUTF8("preamble", 8, d.StrAssert("APETAGEX"))
d.FieldU32("version")
d.FieldU32("tag_size")
tagCount = d.FieldU32("item_count")

View File

@ -44,7 +44,7 @@ const (
dataTypeRelativeURL = 0x0902
)
var dataTypeMap = scalar.UToScalar{
var dataTypeMap = scalar.UintMap{
dataTypeString: {Sym: "string", Description: "UTF-8 String"},
dataTypeData: {Sym: "data", Description: "Raw bytes"},
dataTypeNumber8: {Sym: "byte", Description: "(signed 8-bit) 1-byte number"},
@ -103,7 +103,7 @@ const (
elementTypeSandboxROExtension = 0xf081
)
var elementTypeMap = scalar.UToScalar{
var elementTypeMap = scalar.UintMap{
elementTypeTargetURL: {Sym: "target_url", Description: "A URL"},
elementTypeTargetPath: {Sym: "target_path", Description: "Array of individual path components"},
elementTypeTargetCNIDPath: {Sym: "target_cnid_path", Description: "Array of CNIDs"},
@ -147,8 +147,8 @@ const dataObjectLen = 24
func decodeFlagDataObject(d *decode.D, flagFn func(d *decode.D)) {
d.FieldStruct("record", func(d *decode.D) {
d.FieldU32("length", d.AssertU(dataObjectLen))
d.FieldU32("raw_type", dataTypeMap, d.AssertU(dataTypeData))
d.FieldU32("length", d.UintAssert(dataObjectLen))
d.FieldU32("raw_type", dataTypeMap, d.UintAssert(dataTypeData))
d.FieldValueStr("type", "flag_data")
d.FieldStruct("property_flags", flagFn)
d.FieldStruct("enabled_property_flags", flagFn)
@ -284,7 +284,7 @@ func decodeTOCHeader(d *decode.D) *tocHeader {
d.FieldStruct("toc_header", func(d *decode.D) {
d.FieldU32("toc_size")
d.FieldU32("magic", d.AssertU(0xfffffffe))
d.FieldU32("magic", d.UintAssert(0xfffffffe))
d.FieldU32("identifier")
hdr.nextTOCOffset = d.FieldU32("next_toc_offset")
hdr.numEntries = d.FieldU32("num_entries_in_toc")
@ -351,7 +351,7 @@ func makeDecodeRecord() func(d *decode.D) {
case dataTypeNumber64F:
d.FieldF64("data")
case dataTypeDate:
d.FieldF64BE("data", scalar.DescriptionTimeFn(scalar.S.TryActualF, cocoaTimeEpochDate, time.RFC3339))
d.FieldF64BE("data", scalar.FltActualDate(cocoaTimeEpochDate, time.RFC3339))
case dataTypeBooleanFalse:
case dataTypeBooleanTrue:
case dataTypeArray:
@ -405,10 +405,10 @@ func bookmarkDecode(d *decode.D, _ any) any {
// decode bookmarkdata header, one at the top of each "file",
// although these may be nested inside of binary plists
d.FieldStruct("header", func(d *decode.D) {
d.FieldUTF8("magic", 4, d.AssertStr("book", "alis"))
d.FieldUTF8("magic", 4, d.StrAssert("book", "alis"))
d.FieldU32("total_size")
d.FieldU32("unknown")
d.FieldU32("header_size", d.AssertU(48))
d.FieldU32("header_size", d.UintAssert(48))
d.FieldRawLen("reserved", reservedSize*8)
})

View File

@ -22,21 +22,20 @@ func init() {
}
func decodeAr(d *decode.D, _ any) any {
d.FieldUTF8("signature", 8, d.AssertStr("!<arch>\n"))
d.FieldUTF8("signature", 8, d.StrAssert("!<arch>\n"))
d.FieldArray("files", func(d *decode.D) {
for !d.End() {
d.FieldStruct("file", func(d *decode.D) {
d.FieldUTF8("identifier", 16, scalar.ActualTrimSpace)
// TODO: try scalar.DescriptionSymUUnixTime
d.FieldUTF8("modification_timestamp", 12, scalar.ActualTrimSpace, scalar.TrySymUParseUint(10))
d.FieldUTF8("owner_id", 6, scalar.ActualTrimSpace, scalar.TrySymUParseUint(10))
d.FieldUTF8("group_id", 6, scalar.ActualTrimSpace, scalar.TrySymUParseUint(10))
d.FieldUTF8("file_mode", 8, scalar.ActualTrimSpace, scalar.TrySymUParseUint(8)) // Octal
sizeS := d.FieldScalarUTF8("file_size", 10, scalar.ActualTrimSpace, scalar.TrySymUParseUint(10))
if sizeS.Sym == nil {
d.FieldUTF8("modification_timestamp", 12, scalar.ActualTrimSpace, scalar.TryStrSymParseUint(10))
d.FieldUTF8("owner_id", 6, scalar.ActualTrimSpace, scalar.TryStrSymParseUint(10))
d.FieldUTF8("group_id", 6, scalar.ActualTrimSpace, scalar.TryStrSymParseUint(10))
d.FieldUTF8("file_mode", 8, scalar.ActualTrimSpace, scalar.TryStrSymParseUint(8)) // Octal
sizeStr := d.FieldScalarUTF8("file_size", 10, scalar.ActualTrimSpace, scalar.TryStrSymParseUint(10))
if sizeStr.Sym == nil {
d.Fatalf("could not decode file_size")
}
size := int64(sizeS.SymU()) * 8
size := int64(sizeStr.SymUint()) * 8
d.FieldUTF8("ending_characters", 2)
d.FieldFormatOrRawLen("data", size, probeFormat, nil)
padding := d.AlignBits(16)

View File

@ -49,7 +49,7 @@ const (
classPrivate = 0b11
)
var tagClassMap = scalar.UToSymStr{
var tagClassMap = scalar.UintMapSymStr{
classUniversal: "universal",
classApplication: "application",
classContext: "context",
@ -61,7 +61,7 @@ const (
formConstructed = 1
)
var constructedPrimitiveMap = scalar.UToSymStr{
var constructedPrimitiveMap = scalar.UintMapSymStr{
formConstructed: "constructed",
formPrimitive: "primitive",
}
@ -95,7 +95,7 @@ const (
universalTypeUniversalString = 0x1c // not encoded?
)
var universalTypeMap = scalar.UToSymStr{
var universalTypeMap = scalar.UintMapSymStr{
universalTypeEndOfContent: "end_of_content",
universalTypeBoolean: "boolean",
universalTypeInteger: "integer",
@ -136,7 +136,7 @@ const (
decimalMinusZero = 0b00_00_11
)
var lengthMap = scalar.UToSymStr{
var lengthMap = scalar.UintMapSymStr{
0: "indefinite",
}
@ -179,12 +179,12 @@ func decodeASN1BERValue(d *decode.D, bib *bitio.Buffer, sb *strings.Builder, par
var tag uint64
switch class {
case classUniversal:
tag = d.FieldUFn("tag", decodeTagNumber, universalTypeMap, scalar.ActualHex)
tag = d.FieldUintFn("tag", decodeTagNumber, universalTypeMap, scalar.UintHex)
default:
tag = d.FieldUFn("tag", decodeTagNumber)
tag = d.FieldUintFn("tag", decodeTagNumber)
}
length := d.FieldUFn("length", decodeLength, lengthMap)
length := d.FieldUintFn("length", decodeLength, lengthMap)
var l int64
switch length {
case lengthIndefinite:
@ -261,9 +261,9 @@ func decodeASN1BERValue(d *decode.D, bib *bitio.Buffer, sb *strings.Builder, par
case class == classUniversal && tag == universalTypeEndOfContent:
// nop
case class == classUniversal && tag == universalTypeBoolean:
d.FieldU8("value", scalar.URangeToScalar{
{Range: [2]uint64{0, 0}, S: scalar.S{Sym: false}},
{Range: [2]uint64{0x01, 0xff1}, S: scalar.S{Sym: true}},
d.FieldU8("value", scalar.UintRangeToScalar{
{Range: [2]uint64{0, 0}, S: scalar.Uint{Sym: false}},
{Range: [2]uint64{0x01, 0xff1}, S: scalar.Uint{Sym: true}},
})
case class == classUniversal && tag == universalTypeInteger:
if length > 8 {
@ -295,15 +295,15 @@ func decodeASN1BERValue(d *decode.D, bib *bitio.Buffer, sb *strings.Builder, par
}
}
case class == classUniversal && tag == universalTypeNull:
d.FieldValueNil("value")
d.FieldValueAny("value", nil)
case class == classUniversal && tag == universalTypeObjectIdentifier:
d.FieldArray("value", func(d *decode.D) {
// first byte is = oid0*40 + oid1
d.FieldUFn("oid", func(d *decode.D) uint64 { return d.U8() / 40 })
d.FieldUintFn("oid", func(d *decode.D) uint64 { return d.U8() / 40 })
d.SeekRel(-8)
d.FieldUFn("oid", func(d *decode.D) uint64 { return d.U8() % 40 })
d.FieldUintFn("oid", func(d *decode.D) uint64 { return d.U8() % 40 })
for !d.End() {
d.FieldUFn("oid", func(d *decode.D) uint64 {
d.FieldUintFn("oid", func(d *decode.D) uint64 {
more := true
var n uint64
for more {
@ -322,20 +322,20 @@ func decodeASN1BERValue(d *decode.D, bib *bitio.Buffer, sb *strings.Builder, par
case class == classUniversal && tag == universalTypeReal:
switch {
case length == 0:
d.FieldValueU("value", 0)
d.FieldValueUint("value", 0)
default:
switch d.FieldBool("binary_encoding") {
case true:
s := d.FieldScalarBool("sign", scalar.BoolToSymS{
s := d.FieldScalarBool("sign", scalar.BoolMapSymSint{
true: -1,
false: 1,
}).SymS()
base := d.FieldScalarU2("base", scalar.UToSymU{
}).SymSint()
base := d.FieldScalarU2("base", scalar.UintMapSymUint{
0b00: 2,
0b01: 8,
0b10: 16,
0b11: 0,
}).SymU()
}).SymUint()
scale := d.FieldU2("scale")
format := d.FieldU2("format")
@ -354,15 +354,13 @@ func decodeASN1BERValue(d *decode.D, bib *bitio.Buffer, sb *strings.Builder, par
}
n := d.FieldU("n", int(d.BitsLeft()))
m := float64(s) * float64(n) * math.Pow(float64(base), float64(exp)) * float64(int(1)<<scale)
d.FieldValueFloat("value", m)
d.FieldValueFlt("value", m)
case false:
switch d.FieldBool("decimal_encoding") {
case true:
n := d.FieldU6("special", scalar.UToSymStr{
n := d.FieldU6("special", scalar.UintMapSymStr{
decimalPlusInfinity: "plus_infinity",
decimalMinusInfinity: "minus_infinity",
decimalNan: "nan",
@ -371,21 +369,21 @@ func decodeASN1BERValue(d *decode.D, bib *bitio.Buffer, sb *strings.Builder, par
switch n {
case decimalPlusInfinity:
d.FieldValueFloat("value", math.Inf(1))
d.FieldValueFlt("value", math.Inf(1))
case decimalMinusInfinity:
d.FieldValueFloat("value", math.Inf(-1))
d.FieldValueFlt("value", math.Inf(-1))
case decimalNan:
d.FieldValueFloat("value", math.NaN())
d.FieldValueFlt("value", math.NaN())
case decimalMinusZero:
d.FieldValueFloat("value", -0)
d.FieldValueFlt("value", -0)
}
case false:
d.FieldU6("representation", scalar.UToSymStr{
d.FieldU6("representation", scalar.UintMapSymStr{
0b00_00_01: "nr1",
0b00_00_10: "nr2",
0b00_00_11: "nr3",
})
d.FieldFFn("value", func(d *decode.D) float64 {
d.FieldFltFn("value", func(d *decode.D) float64 {
// TODO: can ParseFloat do all ISO-6093 nr?
n, _ := strconv.ParseFloat(d.UTF8(int(d.BitsLeft()/8)), 64)
return n

View File

@ -34,7 +34,7 @@ func ccrDecode(d *decode.D, _ any) any {
d.FieldU3("reserved = 0")
initalPreDelay := d.FieldBool("initial_presentation_delay_present")
if initalPreDelay {
d.FieldU4("initial_presentation_delay", scalar.ActualUAdd(1))
d.FieldU4("initial_presentation_delay", scalar.UintActualAdd(1))
} else {
d.FieldU4("reserved")
}

View File

@ -28,7 +28,7 @@ const (
OBU_PADDING = 15
)
var obuTypeNames = scalar.UToSymStr{
var obuTypeNames = scalar.UintMapSymStr{
OBU_SEQUENCE_HEADER: "OBU_SEQUENCE_HEADER",
OBU_TEMPORAL_DELIMITER: "OBU_TEMPORAL_DELIMITER",
OBU_FRAME_HEADER: "OBU_FRAME_HEADER",

View File

@ -119,10 +119,10 @@ func decodeBlockCodec(d *decode.D, dataSize int64, codec string) *bytes.Buffer {
// Check the checksum
crc32W := crc32.NewIEEE()
d.Copy(crc32W, bytes.NewReader(bb.Bytes()))
d.FieldU32("crc", d.ValidateUBytes(crc32W.Sum(nil)), scalar.ActualHex)
d.FieldU32("crc", d.UintValidateBytes(crc32W.Sum(nil)), scalar.UintHex)
} else {
// Unknown codec, just dump the compressed data.
d.FieldRawLen("compressed", dataSize*8, scalar.Description(codec+" encoded"))
d.FieldRawLen("compressed", dataSize*8, scalar.BitBufDescription(codec+" encoded"))
return nil
}
return bb
@ -137,11 +137,11 @@ func decodeAvroOCF(d *decode.D, _ any) any {
}
d.FieldStructArrayLoop("blocks", "block", func() bool { return d.NotEnd() }, func(d *decode.D) {
count := d.FieldSFn("count", decoders.VarZigZag)
count := d.FieldSintFn("count", decoders.VarZigZag)
if count <= 0 {
return
}
size := d.FieldSFn("size", decoders.VarZigZag)
size := d.FieldSintFn("size", decoders.VarZigZag)
i := int64(0)
if header.Codec != "null" {

View File

@ -34,9 +34,9 @@ func decodeArrayFn(schema schema.SimplifiedSchema) (DecodeFn, error) {
count := int64(-1)
for count != 0 {
d.FieldStruct("block", func(d *decode.D) {
count = d.FieldSFn("count", VarZigZag)
count = d.FieldSintFn("count", VarZigZag)
if count < 0 {
d.FieldSFn("size", VarZigZag)
d.FieldSintFn("size", VarZigZag)
count *= -1
}
d.FieldArray("data", func(d *decode.D) {

View File

@ -5,7 +5,7 @@ import (
"github.com/wader/fq/pkg/scalar"
)
func decodeBoolFn(sms ...scalar.Mapper) (DecodeFn, error) {
func decodeBoolFn(sms ...scalar.BoolMapper) (DecodeFn, error) {
// A boolean is written as a single byte whose value is either 0 (false) or 1 (true).
return func(name string, d *decode.D) any {
return d.FieldBoolFn(name, func(d *decode.D) bool {

View File

@ -8,13 +8,13 @@ import (
type BytesCodec struct{}
func decodeBytesFn(sms ...scalar.Mapper) (DecodeFn, error) {
func decodeBytesFn(sms ...scalar.BitBufMapper) (DecodeFn, error) {
// Bytes are encoded as a long followed by that many bytes of data.
return func(name string, d *decode.D) any {
var val []byte
d.FieldStruct(name, func(d *decode.D) {
length := d.FieldSFn("length", VarZigZag)
length := d.FieldSintFn("length", VarZigZag)
br := d.FieldRawLen("data", length*8, sms...)
val = make([]byte, length)

View File

@ -11,8 +11,8 @@ import (
type DecodeFn func(string, *decode.D) any
func DecodeFnForSchema(s schema.SimplifiedSchema) (DecodeFn, error) {
var sms []scalar.Mapper
mapper := logicalMapperForSchema(s)
var sms []scalar.SintMapper
mapper := logicalTimeMapperForSchema(s)
if mapper != nil {
sms = append(sms, mapper)
}
@ -21,27 +21,27 @@ func DecodeFnForSchema(s schema.SimplifiedSchema) (DecodeFn, error) {
case schema.ARRAY:
return decodeArrayFn(s)
case schema.BOOLEAN:
return decodeBoolFn(sms...)
return decodeBoolFn()
case schema.BYTES:
return decodeBytesFn(sms...)
return decodeBytesFn()
case schema.DOUBLE:
return decodeDoubleFn(sms...)
return decodeDoubleFn()
case schema.ENUM:
return decodeEnumFn(s, sms...)
case schema.FIXED:
return decodeFixedFn(s, sms...)
return decodeFixedFn(s)
case schema.FLOAT:
return decodeFloatFn(sms...)
return decodeFloatFn()
case schema.INT:
return decodeIntFn(sms...)
case schema.LONG:
return decodeLongFn(sms...)
case schema.NULL:
return decodeNullFn(sms...)
return decodeNullFn()
case schema.RECORD:
return decodeRecordFn(s)
case schema.STRING:
return decodeStringFn(s, sms...)
return decodeStringFn(s)
case schema.UNION:
return decodeUnionFn(s)
case schema.MAP:

View File

@ -5,7 +5,7 @@ import (
"github.com/wader/fq/pkg/scalar"
)
func decodeDoubleFn(sms ...scalar.Mapper) (DecodeFn, error) {
func decodeDoubleFn(sms ...scalar.FltMapper) (DecodeFn, error) {
// A double is written as 8 bytes. The double is converted into a 64-bit integer using a method equivalent to Java's
// doubleToLongBits and then encoded in little-endian format.
return func(name string, d *decode.D) any {

View File

@ -11,8 +11,8 @@ type EnumMapper struct {
Symbols []string
}
func (e EnumMapper) MapScalar(s scalar.S) (scalar.S, error) {
v := int(s.ActualS())
func (e EnumMapper) MapSint(s scalar.Sint) (scalar.Sint, error) {
v := int(s.Actual)
if v < 0 || v >= len(e.Symbols) {
return s, errors.New("enum value out of range")
}
@ -20,7 +20,7 @@ func (e EnumMapper) MapScalar(s scalar.S) (scalar.S, error) {
return s, nil
}
func decodeEnumFn(schema schema.SimplifiedSchema, sms ...scalar.Mapper) (DecodeFn, error) {
func decodeEnumFn(schema schema.SimplifiedSchema, sms ...scalar.SintMapper) (DecodeFn, error) {
if len(schema.Symbols) == 0 {
return nil, errors.New("enum requires symbols")
}
@ -29,6 +29,6 @@ func decodeEnumFn(schema schema.SimplifiedSchema, sms ...scalar.Mapper) (DecodeF
// For example, consider the enum:
// {"type": "enum", "name": "Foo", "symbols": ["A", "B", "C", "D"] }
// This would be encoded by an int between zero and three, with zero indicating "A", and 3 indicating "D".
sms = append([]scalar.Mapper{EnumMapper{Symbols: schema.Symbols}}, sms...)
sms = append([]scalar.SintMapper{EnumMapper{Symbols: schema.Symbols}}, sms...)
return decodeIntFn(sms...)
}

View File

@ -9,7 +9,7 @@ import (
"github.com/wader/fq/pkg/decode"
)
func decodeFixedFn(schema schema.SimplifiedSchema, sms ...scalar.Mapper) (DecodeFn, error) {
func decodeFixedFn(schema schema.SimplifiedSchema, sms ...scalar.BitBufMapper) (DecodeFn, error) {
if schema.Size < 0 {
return nil, errors.New("fixed size must be greater than or equal to zero")
}

View File

@ -5,7 +5,7 @@ import (
"github.com/wader/fq/pkg/scalar"
)
func decodeFloatFn(sms ...scalar.Mapper) (DecodeFn, error) {
func decodeFloatFn(sms ...scalar.FltMapper) (DecodeFn, error) {
// A float is written as 4 bytes. The float is converted into a 32-bit integer using a method equivalent to Java's
// floatToIntBits and then encoded in little-endian format.
return func(name string, d *decode.D) any {

View File

@ -5,9 +5,9 @@ import (
"github.com/wader/fq/pkg/scalar"
)
func decodeIntFn(sms ...scalar.Mapper) (DecodeFn, error) {
func decodeIntFn(sms ...scalar.SintMapper) (DecodeFn, error) {
// Int and long values are written using variable-length zig-zag coding.
return func(name string, d *decode.D) any {
return d.FieldSFn(name, VarZigZag, sms...)
return d.FieldSintFn(name, VarZigZag, sms...)
}, nil
}

View File

@ -17,7 +17,7 @@ const (
NANOSECOND
)
func logicalMapperForSchema(schema schema.SimplifiedSchema) scalar.Mapper {
func logicalTimeMapperForSchema(schema schema.SimplifiedSchema) scalar.SintMapper {
switch schema.LogicalType {
case "timestamp":
return TimestampMapper{Precision: SECOND}
@ -46,8 +46,8 @@ type TimestampMapper struct {
Precision Precision
}
func (t TimestampMapper) MapScalar(s scalar.S) (scalar.S, error) {
v := s.ActualS()
func (t TimestampMapper) MapSint(s scalar.Sint) (scalar.Sint, error) {
v := s.Actual
var ts time.Time
if t.Precision == SECOND {
ts = time.Unix(v, 0)
@ -68,8 +68,8 @@ type TimeMapper struct {
Precision Precision
}
func (t TimeMapper) MapScalar(s scalar.S) (scalar.S, error) {
v := s.ActualS()
func (t TimeMapper) MapSint(s scalar.Sint) (scalar.Sint, error) {
v := s.Actual
if t.Precision == SECOND {
s.Sym = time.Unix(v, 0).UTC().Format("15:04:05")
@ -88,8 +88,8 @@ func (t TimeMapper) MapScalar(s scalar.S) (scalar.S, error) {
type DateMapper struct {
}
func (d DateMapper) MapScalar(s scalar.S) (scalar.S, error) {
v := s.ActualS()
func (d DateMapper) MapSint(s scalar.Sint) (scalar.Sint, error) {
v := s.Actual
s.Sym = time.Unix(0, 0).AddDate(0, 0, int(v)).UTC().Format("2006-01-02")
return s, nil
}

View File

@ -29,9 +29,9 @@ func VarZigZag(d *decode.D) int64 {
return 0
}
func decodeLongFn(sms ...scalar.Mapper) (DecodeFn, error) {
func decodeLongFn(sms ...scalar.SintMapper) (DecodeFn, error) {
// Int and long values are written using variable-length zig-zag coding.
return func(name string, d *decode.D) any {
return d.FieldSFn(name, VarZigZag, sms...)
return d.FieldSintFn(name, VarZigZag, sms...)
}, nil
}
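
The "variable-length zig-zag coding" mentioned in the comment above is the standard Avro varint plus zig-zag scheme; as a rough standalone sketch of what VarZigZag computes (independent of the fq decode API, helper name made up):

package example

// varZigZag decodes an Avro variable-length zig-zag integer from raw bytes:
// 7 payload bits per byte, least significant group first, continuation flag in
// the high bit, then the zig-zag mapping 0,1,2,3,... -> 0,-1,1,-2,... is undone.
func varZigZag(b []byte) int64 {
	var u uint64
	var shift uint
	for _, c := range b {
		u |= uint64(c&0x7f) << shift
		if c&0x80 == 0 {
			break
		}
		shift += 7
	}
	return int64(u>>1) ^ -int64(u&1) // undo zig-zag
}

For example the single byte 0x06 decodes to 3, which is why "foo" is length-prefixed with hex 06 in the string codec comment later in this diff.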

View File

@ -5,10 +5,10 @@ import (
"github.com/wader/fq/pkg/scalar"
)
func decodeNullFn(sms ...scalar.Mapper) (DecodeFn, error) {
func decodeNullFn(sms ...scalar.AnyMapper) (DecodeFn, error) {
// null is written as zero bytes.
return func(name string, d *decode.D) any {
d.FieldValueNil(name, sms...)
d.FieldValueAny(name, nil, sms...)
return nil
}, nil
}

View File

@ -6,7 +6,7 @@ import (
"github.com/wader/fq/pkg/scalar"
)
func decodeStringFn(schema schema.SimplifiedSchema, sms ...scalar.Mapper) (DecodeFn, error) {
func decodeStringFn(schema schema.SimplifiedSchema, sms ...scalar.StrMapper) (DecodeFn, error) {
// String is encoded as a long followed by that many bytes of UTF-8 encoded character data.
// For example, the three-character string "foo" would be encoded as the long value 3 (encoded as hex 06) followed
// by the UTF-8 encoding of 'f', 'o', and 'o' (the hex bytes 66 6f 6f):
@ -14,7 +14,7 @@ func decodeStringFn(schema schema.SimplifiedSchema, sms ...scalar.Mapper) (Decod
return func(name string, d *decode.D) any {
var val string
d.FieldStruct(name, func(d *decode.D) {
length := d.FieldSFn("length", VarZigZag)
length := d.FieldSintFn("length", VarZigZag)
val = d.FieldUTF8("data", int(length))
})
return val

View File

@ -27,7 +27,7 @@ func decodeUnionFn(schema schema.SimplifiedSchema) (DecodeFn, error) {
return func(name string, d *decode.D) any {
var val any
d.FieldStruct(name, func(d *decode.D) {
v := int(d.FieldSFn("type", VarZigZag))
v := int(d.FieldSintFn("type", VarZigZag))
if v < 0 || v >= len(decoders) {
d.Fatalf("invalid union value: %d", v)
}

View File

@ -26,7 +26,7 @@ func init() {
interp.RegisterFS(bencodeFS)
}
var typeToNames = scalar.StrToSymStr{
var typeToNames = scalar.StrMapSymStr{
"d": "dictionary",
"i": "integer",
"l": "list",
@ -63,19 +63,19 @@ func decodeBencodeValue(d *decode.D) {
switch typ {
case "0", "1", "2", "3", "4", "5", "6", "7", "8", "9":
d.SeekRel(-8)
length := d.FieldSFn("length", decodeStrIntUntil(':'))
d.FieldUTF8("separator", 1, d.AssertStr(":"))
length := d.FieldSintFn("length", decodeStrIntUntil(':'))
d.FieldUTF8("separator", 1, d.StrAssert(":"))
d.FieldUTF8("value", int(length))
case "i":
d.FieldSFn("value", decodeStrIntUntil('e'))
d.FieldUTF8("end", 1, d.AssertStr("e"))
d.FieldSintFn("value", decodeStrIntUntil('e'))
d.FieldUTF8("end", 1, d.StrAssert("e"))
case "l":
d.FieldArray("values", func(d *decode.D) {
for d.PeekBits(8) != 'e' {
d.FieldStruct("value", decodeBencodeValue)
}
})
d.FieldUTF8("end", 1, d.AssertStr("e"))
d.FieldUTF8("end", 1, d.StrAssert("e"))
case "d":
d.FieldArray("pairs", func(d *decode.D) {
for d.PeekBits(8) != 'e' {
@ -85,7 +85,7 @@ func decodeBencodeValue(d *decode.D) {
})
}
})
d.FieldUTF8("end", 1, d.AssertStr("e"))
d.FieldUTF8("end", 1, d.StrAssert("e"))
default:
d.Fatalf("unknown type %v", typ)
}

View File

@ -28,7 +28,7 @@ func init() {
})
}
var rawHexReverse = scalar.Fn(func(s scalar.S) (scalar.S, error) {
var rawHexReverse = scalar.BitBufFn(func(s scalar.BitBuf) (scalar.BitBuf, error) {
return scalar.RawSym(s, -1, func(b []byte) string {
decode.ReverseBytes(b)
return fmt.Sprintf("%x", b)
@ -45,11 +45,11 @@ func decodeBitcoinBlock(d *decode.D, in interface{}) interface{} {
case 0xf9beb4d9,
0x0b110907,
0xfabfb5da:
d.FieldU32("magic", scalar.UToSymStr{
d.FieldU32("magic", scalar.UintMapSymStr{
0xf9beb4d9: "mainnet",
0x0b110907: "testnet3",
0xfabfb5da: "regtest",
}, scalar.ActualHex)
}, scalar.UintHex)
size = int64(d.FieldU32LE("size")) * 8
default:
d.Fatalf("unknown magic %x", magic)
@ -60,12 +60,12 @@ func decodeBitcoinBlock(d *decode.D, in interface{}) interface{} {
d.FramedFn(size, func(d *decode.D) {
d.FieldStruct("header", func(d *decode.D) {
d.FieldU32("version", scalar.ActualHex)
d.FieldU32("version", scalar.UintHex)
d.FieldRawLen("previous_block_hash", 32*8, rawHexReverse)
d.FieldRawLen("merkle_root", 32*8, rawHexReverse)
d.FieldU32("time", scalar.DescriptionUnixTimeFn(scalar.S.TryActualU, time.RFC3339))
d.FieldU32("bits", scalar.ActualHex)
d.FieldU32("nonce", scalar.ActualHex)
d.FieldU32("time", scalar.UintActualUnixTime(time.RFC3339))
d.FieldU32("bits", scalar.UintHex)
d.FieldU32("nonce", scalar.UintHex)
})
// TODO: remove? support header only decode this way?
@ -73,7 +73,7 @@ func decodeBitcoinBlock(d *decode.D, in interface{}) interface{} {
return
}
txCount := d.FieldUFn("tx_count", decodeVarInt)
txCount := d.FieldUintFn("tx_count", decodeVarInt)
d.FieldArray("transactions", func(d *decode.D) {
for i := uint64(0); i < txCount; i++ {
d.FieldFormat("transaction", bitcoinTranscationFormat, nil)

View File

@ -9,7 +9,7 @@ import (
type opcodeEntry struct {
r [2]byte
s scalar.S
s scalar.Uint
d func(d *decode.D, opcode byte)
}
@ -24,8 +24,8 @@ func (ops opcodeEntries) lookup(u byte) (opcodeEntry, bool) {
return opcodeEntry{}, false
}
func (ops opcodeEntries) MapScalar(s scalar.S) (scalar.S, error) {
u := s.ActualU()
func (ops opcodeEntries) MapUint(s scalar.Uint) (scalar.Uint, error) {
u := s.Actual
if fe, ok := ops.lookup(byte(u)); ok {
s = fe.s
s.Actual = u
@ -46,128 +46,128 @@ func init() {
func decodeBitcoinScript(d *decode.D, in interface{}) interface{} {
// based on https://en.bitcoin.it/wiki/Script
opcodeEntries := opcodeEntries{
{r: [2]byte{0x00, 0x00}, s: scalar.S{Sym: "false"}},
{r: [2]byte{0x00, 0x00}, s: scalar.Uint{Sym: "false"}},
// TODO: name op code?
{r: [2]byte{0x01, 0x4b}, s: scalar.S{Sym: "pushself"}, d: func(d *decode.D, opcode byte) {
{r: [2]byte{0x01, 0x4b}, s: scalar.Uint{Sym: "pushself"}, d: func(d *decode.D, opcode byte) {
d.FieldRawLen("arg", int64(opcode)*8)
}},
{r: [2]byte{0x04c, 0x4e}, s: scalar.S{Sym: "pushdata1"}, d: func(d *decode.D, opcode byte) {
{r: [2]byte{0x04c, 0x4e}, s: scalar.Uint{Sym: "pushdata1"}, d: func(d *decode.D, opcode byte) {
argLen := d.FieldU8("arg_length")
d.FieldRawLen("arg", int64(argLen)*8)
}},
{r: [2]byte{0x04c, 0x4e}, s: scalar.S{Sym: "pushdata2"}, d: func(d *decode.D, opcode byte) {
{r: [2]byte{0x04c, 0x4e}, s: scalar.Uint{Sym: "pushdata2"}, d: func(d *decode.D, opcode byte) {
argLen := d.FieldU16("arg_length")
d.FieldRawLen("arg", int64(argLen)*8)
}},
{r: [2]byte{0x04c, 0x4e}, s: scalar.S{Sym: "pushdata4"}, d: func(d *decode.D, opcode byte) {
{r: [2]byte{0x04c, 0x4e}, s: scalar.Uint{Sym: "pushdata4"}, d: func(d *decode.D, opcode byte) {
argLen := d.FieldU32("arg_length")
d.FieldRawLen("arg", int64(argLen)*8)
}},
{r: [2]byte{0x4f, 0x4f}, s: scalar.S{Sym: "1negate"}},
{r: [2]byte{0x51, 0x51}, s: scalar.S{Sym: "true"}},
{r: [2]byte{0x4f, 0x4f}, s: scalar.Uint{Sym: "1negate"}},
{r: [2]byte{0x51, 0x51}, s: scalar.Uint{Sym: "true"}},
// TODO: name
{r: [2]byte{0x52, 0x60}, s: scalar.S{Sym: "push"}, d: func(d *decode.D, opcode byte) {
d.FieldValueU("arg", uint64(opcode-0x50))
{r: [2]byte{0x52, 0x60}, s: scalar.Uint{Sym: "push"}, d: func(d *decode.D, opcode byte) {
d.FieldValueUint("arg", uint64(opcode-0x50))
}},
{r: [2]byte{0x61, 0x61}, s: scalar.S{Sym: "nop"}},
{r: [2]byte{0x62, 0x62}, s: scalar.S{Sym: "ver"}},
{r: [2]byte{0x63, 0x63}, s: scalar.S{Sym: "if"}},
{r: [2]byte{0x64, 0x64}, s: scalar.S{Sym: "notif"}},
{r: [2]byte{0x65, 0x65}, s: scalar.S{Sym: "verif"}},
{r: [2]byte{0x66, 0x66}, s: scalar.S{Sym: "vernotif"}},
{r: [2]byte{0x67, 0x67}, s: scalar.S{Sym: "else"}},
{r: [2]byte{0x68, 0x68}, s: scalar.S{Sym: "endif"}},
{r: [2]byte{0x69, 0x69}, s: scalar.S{Sym: "verify"}},
{r: [2]byte{0x6a, 0x6a}, s: scalar.S{Sym: "return"}},
{r: [2]byte{0x6b, 0x6b}, s: scalar.S{Sym: "toaltstack"}},
{r: [2]byte{0x6c, 0x6c}, s: scalar.S{Sym: "fromaltstack"}},
{r: [2]byte{0x6d, 0x6d}, s: scalar.S{Sym: "2drop"}},
{r: [2]byte{0x6e, 0x6e}, s: scalar.S{Sym: "2dup"}},
{r: [2]byte{0x6f, 0x6f}, s: scalar.S{Sym: "3dup"}},
{r: [2]byte{0x70, 0x70}, s: scalar.S{Sym: "2over"}},
{r: [2]byte{0x71, 0x71}, s: scalar.S{Sym: "2rot"}},
{r: [2]byte{0x72, 0x72}, s: scalar.S{Sym: "2swap"}},
{r: [2]byte{0x73, 0x73}, s: scalar.S{Sym: "ifdup"}},
{r: [2]byte{0x74, 0x74}, s: scalar.S{Sym: "depth"}},
{r: [2]byte{0x75, 0x75}, s: scalar.S{Sym: "drop"}},
{r: [2]byte{0x76, 0x76}, s: scalar.S{Sym: "dup"}},
{r: [2]byte{0x77, 0x77}, s: scalar.S{Sym: "nip"}},
{r: [2]byte{0x78, 0x78}, s: scalar.S{Sym: "over"}},
{r: [2]byte{0x79, 0x79}, s: scalar.S{Sym: "pick"}},
{r: [2]byte{0x7a, 0x7a}, s: scalar.S{Sym: "roll"}},
{r: [2]byte{0x7b, 0x7b}, s: scalar.S{Sym: "rot"}},
{r: [2]byte{0x7c, 0x7c}, s: scalar.S{Sym: "swap"}},
{r: [2]byte{0x7d, 0x7d}, s: scalar.S{Sym: "tuck"}},
{r: [2]byte{0x7e, 0x7e}, s: scalar.S{Sym: "cat"}},
{r: [2]byte{0x7f, 0x7f}, s: scalar.S{Sym: "split"}},
{r: [2]byte{0x80, 0x80}, s: scalar.S{Sym: "num2bin"}},
{r: [2]byte{0x81, 0x81}, s: scalar.S{Sym: "bin2num"}},
{r: [2]byte{0x82, 0x82}, s: scalar.S{Sym: "size"}},
{r: [2]byte{0x83, 0x83}, s: scalar.S{Sym: "invert"}},
{r: [2]byte{0x84, 0x84}, s: scalar.S{Sym: "and"}},
{r: [2]byte{0x85, 0x85}, s: scalar.S{Sym: "or"}},
{r: [2]byte{0x86, 0x86}, s: scalar.S{Sym: "xor"}},
{r: [2]byte{0x87, 0x87}, s: scalar.S{Sym: "equal"}},
{r: [2]byte{0x88, 0x88}, s: scalar.S{Sym: "equalverify"}},
{r: [2]byte{0x89, 0x89}, s: scalar.S{Sym: "reserved1"}},
{r: [2]byte{0x8a, 0x8a}, s: scalar.S{Sym: "reserved2"}},
{r: [2]byte{0x8b, 0x8b}, s: scalar.S{Sym: "1add"}},
{r: [2]byte{0x8c, 0x8c}, s: scalar.S{Sym: "1sub"}},
{r: [2]byte{0x8d, 0x8d}, s: scalar.S{Sym: "2mul"}},
{r: [2]byte{0x8e, 0x8e}, s: scalar.S{Sym: "2div"}},
{r: [2]byte{0x8f, 0x8f}, s: scalar.S{Sym: "negate"}},
{r: [2]byte{0x90, 0x90}, s: scalar.S{Sym: "abs"}},
{r: [2]byte{0x91, 0x91}, s: scalar.S{Sym: "not"}},
{r: [2]byte{0x92, 0x92}, s: scalar.S{Sym: "0notequal"}},
{r: [2]byte{0x93, 0x93}, s: scalar.S{Sym: "add"}},
{r: [2]byte{0x94, 0x94}, s: scalar.S{Sym: "sub"}},
{r: [2]byte{0x95, 0x95}, s: scalar.S{Sym: "mul"}},
{r: [2]byte{0x96, 0x96}, s: scalar.S{Sym: "div"}},
{r: [2]byte{0x97, 0x97}, s: scalar.S{Sym: "mod"}},
{r: [2]byte{0x98, 0x98}, s: scalar.S{Sym: "lshift"}},
{r: [2]byte{0x99, 0x99}, s: scalar.S{Sym: "rshift"}},
{r: [2]byte{0x9a, 0x9a}, s: scalar.S{Sym: "booland"}},
{r: [2]byte{0x9b, 0x9b}, s: scalar.S{Sym: "boolor"}},
{r: [2]byte{0x9c, 0x9c}, s: scalar.S{Sym: "numequal"}},
{r: [2]byte{0x9d, 0x9d}, s: scalar.S{Sym: "numequalverify"}},
{r: [2]byte{0x9e, 0x9e}, s: scalar.S{Sym: "numnotequal"}},
{r: [2]byte{0x9f, 0x9f}, s: scalar.S{Sym: "lessthan"}},
{r: [2]byte{0xa0, 0xa0}, s: scalar.S{Sym: "greaterthan"}},
{r: [2]byte{0xa1, 0xa1}, s: scalar.S{Sym: "lessthanorequal"}},
{r: [2]byte{0xa2, 0xa2}, s: scalar.S{Sym: "greaterthanorequal"}},
{r: [2]byte{0xa3, 0xa3}, s: scalar.S{Sym: "min"}},
{r: [2]byte{0xa4, 0xa4}, s: scalar.S{Sym: "max"}},
{r: [2]byte{0xa5, 0xa5}, s: scalar.S{Sym: "within"}},
{r: [2]byte{0xa6, 0xa6}, s: scalar.S{Sym: "ripemd160"}},
{r: [2]byte{0xa7, 0xa7}, s: scalar.S{Sym: "sha1"}},
{r: [2]byte{0xa8, 0xa8}, s: scalar.S{Sym: "sha256"}},
{r: [2]byte{0xa9, 0xa9}, s: scalar.S{Sym: "hash160"}},
{r: [2]byte{0xaa, 0xaa}, s: scalar.S{Sym: "hash256"}},
{r: [2]byte{0xab, 0xab}, s: scalar.S{Sym: "codeseparator"}},
{r: [2]byte{0xac, 0xac}, s: scalar.S{Sym: "checksig"}},
{r: [2]byte{0xad, 0xad}, s: scalar.S{Sym: "checksigverify"}},
{r: [2]byte{0xae, 0xae}, s: scalar.S{Sym: "checkmultisig"}},
{r: [2]byte{0xaf, 0xaf}, s: scalar.S{Sym: "checkmultisigverify"}},
{r: [2]byte{0xb0, 0xb0}, s: scalar.S{Sym: "nop1"}},
{r: [2]byte{0xb1, 0xb1}, s: scalar.S{Sym: "nop2"}},
{r: [2]byte{0xb1, 0xb1}, s: scalar.S{Sym: "checklocktimeverify"}},
{r: [2]byte{0xb2, 0xb2}, s: scalar.S{Sym: "nop3"}},
{r: [2]byte{0xb2, 0xb2}, s: scalar.S{Sym: "checksequenceverify"}},
{r: [2]byte{0xb3, 0xb3}, s: scalar.S{Sym: "nop4"}},
{r: [2]byte{0xb4, 0xb4}, s: scalar.S{Sym: "nop5"}},
{r: [2]byte{0xb5, 0xb5}, s: scalar.S{Sym: "nop6"}},
{r: [2]byte{0xb6, 0xb6}, s: scalar.S{Sym: "nop7"}},
{r: [2]byte{0xb7, 0xb7}, s: scalar.S{Sym: "nop8"}},
{r: [2]byte{0xb8, 0xb8}, s: scalar.S{Sym: "nop9"}},
{r: [2]byte{0xb9, 0xb9}, s: scalar.S{Sym: "nop10"}},
{r: [2]byte{0xba, 0xba}, s: scalar.S{Sym: "checkdatasig"}},
{r: [2]byte{0xbb, 0xbb}, s: scalar.S{Sym: "checkdatasigverif"}},
{r: [2]byte{0xfa, 0xfa}, s: scalar.S{Sym: "smallinteger"}},
{r: [2]byte{0xfb, 0xfb}, s: scalar.S{Sym: "pubkeys"}},
{r: [2]byte{0xfc, 0xfc}, s: scalar.S{Sym: "unknown252"}},
{r: [2]byte{0xfd, 0xfd}, s: scalar.S{Sym: "pubkeyhash"}},
{r: [2]byte{0xfe, 0xfe}, s: scalar.S{Sym: "pubkey"}},
{r: [2]byte{0xff, 0xff}, s: scalar.S{Sym: "invalidopcode"}},
{r: [2]byte{0x61, 0x61}, s: scalar.Uint{Sym: "nop"}},
{r: [2]byte{0x62, 0x62}, s: scalar.Uint{Sym: "ver"}},
{r: [2]byte{0x63, 0x63}, s: scalar.Uint{Sym: "if"}},
{r: [2]byte{0x64, 0x64}, s: scalar.Uint{Sym: "notif"}},
{r: [2]byte{0x65, 0x65}, s: scalar.Uint{Sym: "verif"}},
{r: [2]byte{0x66, 0x66}, s: scalar.Uint{Sym: "vernotif"}},
{r: [2]byte{0x67, 0x67}, s: scalar.Uint{Sym: "else"}},
{r: [2]byte{0x68, 0x68}, s: scalar.Uint{Sym: "endif"}},
{r: [2]byte{0x69, 0x69}, s: scalar.Uint{Sym: "verify"}},
{r: [2]byte{0x6a, 0x6a}, s: scalar.Uint{Sym: "return"}},
{r: [2]byte{0x6b, 0x6b}, s: scalar.Uint{Sym: "toaltstack"}},
{r: [2]byte{0x6c, 0x6c}, s: scalar.Uint{Sym: "fromaltstack"}},
{r: [2]byte{0x6d, 0x6d}, s: scalar.Uint{Sym: "2drop"}},
{r: [2]byte{0x6e, 0x6e}, s: scalar.Uint{Sym: "2dup"}},
{r: [2]byte{0x6f, 0x6f}, s: scalar.Uint{Sym: "3dup"}},
{r: [2]byte{0x70, 0x70}, s: scalar.Uint{Sym: "2over"}},
{r: [2]byte{0x71, 0x71}, s: scalar.Uint{Sym: "2rot"}},
{r: [2]byte{0x72, 0x72}, s: scalar.Uint{Sym: "2swap"}},
{r: [2]byte{0x73, 0x73}, s: scalar.Uint{Sym: "ifdup"}},
{r: [2]byte{0x74, 0x74}, s: scalar.Uint{Sym: "depth"}},
{r: [2]byte{0x75, 0x75}, s: scalar.Uint{Sym: "drop"}},
{r: [2]byte{0x76, 0x76}, s: scalar.Uint{Sym: "dup"}},
{r: [2]byte{0x77, 0x77}, s: scalar.Uint{Sym: "nip"}},
{r: [2]byte{0x78, 0x78}, s: scalar.Uint{Sym: "over"}},
{r: [2]byte{0x79, 0x79}, s: scalar.Uint{Sym: "pick"}},
{r: [2]byte{0x7a, 0x7a}, s: scalar.Uint{Sym: "roll"}},
{r: [2]byte{0x7b, 0x7b}, s: scalar.Uint{Sym: "rot"}},
{r: [2]byte{0x7c, 0x7c}, s: scalar.Uint{Sym: "swap"}},
{r: [2]byte{0x7d, 0x7d}, s: scalar.Uint{Sym: "tuck"}},
{r: [2]byte{0x7e, 0x7e}, s: scalar.Uint{Sym: "cat"}},
{r: [2]byte{0x7f, 0x7f}, s: scalar.Uint{Sym: "split"}},
{r: [2]byte{0x80, 0x80}, s: scalar.Uint{Sym: "num2bin"}},
{r: [2]byte{0x81, 0x81}, s: scalar.Uint{Sym: "bin2num"}},
{r: [2]byte{0x82, 0x82}, s: scalar.Uint{Sym: "size"}},
{r: [2]byte{0x83, 0x83}, s: scalar.Uint{Sym: "invert"}},
{r: [2]byte{0x84, 0x84}, s: scalar.Uint{Sym: "and"}},
{r: [2]byte{0x85, 0x85}, s: scalar.Uint{Sym: "or"}},
{r: [2]byte{0x86, 0x86}, s: scalar.Uint{Sym: "xor"}},
{r: [2]byte{0x87, 0x87}, s: scalar.Uint{Sym: "equal"}},
{r: [2]byte{0x88, 0x88}, s: scalar.Uint{Sym: "equalverify"}},
{r: [2]byte{0x89, 0x89}, s: scalar.Uint{Sym: "reserved1"}},
{r: [2]byte{0x8a, 0x8a}, s: scalar.Uint{Sym: "reserved2"}},
{r: [2]byte{0x8b, 0x8b}, s: scalar.Uint{Sym: "1add"}},
{r: [2]byte{0x8c, 0x8c}, s: scalar.Uint{Sym: "1sub"}},
{r: [2]byte{0x8d, 0x8d}, s: scalar.Uint{Sym: "2mul"}},
{r: [2]byte{0x8e, 0x8e}, s: scalar.Uint{Sym: "2div"}},
{r: [2]byte{0x8f, 0x8f}, s: scalar.Uint{Sym: "negate"}},
{r: [2]byte{0x90, 0x90}, s: scalar.Uint{Sym: "abs"}},
{r: [2]byte{0x91, 0x91}, s: scalar.Uint{Sym: "not"}},
{r: [2]byte{0x92, 0x92}, s: scalar.Uint{Sym: "0notequal"}},
{r: [2]byte{0x93, 0x93}, s: scalar.Uint{Sym: "add"}},
{r: [2]byte{0x94, 0x94}, s: scalar.Uint{Sym: "sub"}},
{r: [2]byte{0x95, 0x95}, s: scalar.Uint{Sym: "mul"}},
{r: [2]byte{0x96, 0x96}, s: scalar.Uint{Sym: "div"}},
{r: [2]byte{0x97, 0x97}, s: scalar.Uint{Sym: "mod"}},
{r: [2]byte{0x98, 0x98}, s: scalar.Uint{Sym: "lshift"}},
{r: [2]byte{0x99, 0x99}, s: scalar.Uint{Sym: "rshift"}},
{r: [2]byte{0x9a, 0x9a}, s: scalar.Uint{Sym: "booland"}},
{r: [2]byte{0x9b, 0x9b}, s: scalar.Uint{Sym: "boolor"}},
{r: [2]byte{0x9c, 0x9c}, s: scalar.Uint{Sym: "numequal"}},
{r: [2]byte{0x9d, 0x9d}, s: scalar.Uint{Sym: "numequalverify"}},
{r: [2]byte{0x9e, 0x9e}, s: scalar.Uint{Sym: "numnotequal"}},
{r: [2]byte{0x9f, 0x9f}, s: scalar.Uint{Sym: "lessthan"}},
{r: [2]byte{0xa0, 0xa0}, s: scalar.Uint{Sym: "greaterthan"}},
{r: [2]byte{0xa1, 0xa1}, s: scalar.Uint{Sym: "lessthanorequal"}},
{r: [2]byte{0xa2, 0xa2}, s: scalar.Uint{Sym: "greaterthanorequal"}},
{r: [2]byte{0xa3, 0xa3}, s: scalar.Uint{Sym: "min"}},
{r: [2]byte{0xa4, 0xa4}, s: scalar.Uint{Sym: "max"}},
{r: [2]byte{0xa5, 0xa5}, s: scalar.Uint{Sym: "within"}},
{r: [2]byte{0xa6, 0xa6}, s: scalar.Uint{Sym: "ripemd160"}},
{r: [2]byte{0xa7, 0xa7}, s: scalar.Uint{Sym: "sha1"}},
{r: [2]byte{0xa8, 0xa8}, s: scalar.Uint{Sym: "sha256"}},
{r: [2]byte{0xa9, 0xa9}, s: scalar.Uint{Sym: "hash160"}},
{r: [2]byte{0xaa, 0xaa}, s: scalar.Uint{Sym: "hash256"}},
{r: [2]byte{0xab, 0xab}, s: scalar.Uint{Sym: "codeseparator"}},
{r: [2]byte{0xac, 0xac}, s: scalar.Uint{Sym: "checksig"}},
{r: [2]byte{0xad, 0xad}, s: scalar.Uint{Sym: "checksigverify"}},
{r: [2]byte{0xae, 0xae}, s: scalar.Uint{Sym: "checkmultisig"}},
{r: [2]byte{0xaf, 0xaf}, s: scalar.Uint{Sym: "checkmultisigverify"}},
{r: [2]byte{0xb0, 0xb0}, s: scalar.Uint{Sym: "nop1"}},
{r: [2]byte{0xb1, 0xb1}, s: scalar.Uint{Sym: "nop2"}},
{r: [2]byte{0xb1, 0xb1}, s: scalar.Uint{Sym: "checklocktimeverify"}},
{r: [2]byte{0xb2, 0xb2}, s: scalar.Uint{Sym: "nop3"}},
{r: [2]byte{0xb2, 0xb2}, s: scalar.Uint{Sym: "checksequenceverify"}},
{r: [2]byte{0xb3, 0xb3}, s: scalar.Uint{Sym: "nop4"}},
{r: [2]byte{0xb4, 0xb4}, s: scalar.Uint{Sym: "nop5"}},
{r: [2]byte{0xb5, 0xb5}, s: scalar.Uint{Sym: "nop6"}},
{r: [2]byte{0xb6, 0xb6}, s: scalar.Uint{Sym: "nop7"}},
{r: [2]byte{0xb7, 0xb7}, s: scalar.Uint{Sym: "nop8"}},
{r: [2]byte{0xb8, 0xb8}, s: scalar.Uint{Sym: "nop9"}},
{r: [2]byte{0xb9, 0xb9}, s: scalar.Uint{Sym: "nop10"}},
{r: [2]byte{0xba, 0xba}, s: scalar.Uint{Sym: "checkdatasig"}},
{r: [2]byte{0xbb, 0xbb}, s: scalar.Uint{Sym: "checkdatasigverif"}},
{r: [2]byte{0xfa, 0xfa}, s: scalar.Uint{Sym: "smallinteger"}},
{r: [2]byte{0xfb, 0xfb}, s: scalar.Uint{Sym: "pubkeys"}},
{r: [2]byte{0xfc, 0xfc}, s: scalar.Uint{Sym: "unknown252"}},
{r: [2]byte{0xfd, 0xfd}, s: scalar.Uint{Sym: "pubkeyhash"}},
{r: [2]byte{0xfe, 0xfe}, s: scalar.Uint{Sym: "pubkey"}},
{r: [2]byte{0xff, 0xff}, s: scalar.Uint{Sym: "invalidopcode"}},
}
for !d.End() {

View File

@ -54,27 +54,27 @@ func decodeBitcoinTranscation(d *decode.D, in interface{}) interface{} {
d.FieldU8("marker")
d.FieldU8("flag")
}
inputCount := d.FieldUFn("input_count", decodeVarInt)
inputCount := d.FieldUintFn("input_count", decodeVarInt)
d.FieldArray("inputs", func(d *decode.D) {
for i := uint64(0); i < inputCount; i++ {
d.FieldStruct("input", func(d *decode.D) {
d.FieldRawLen("txid", 32*8, scalar.BytesToScalar{
{Bytes: txIDCoinbaseBytes[:], Scalar: scalar.S{Description: "coinbase"}},
d.FieldRawLen("txid", 32*8, scalar.RawBytesMap{
{Bytes: txIDCoinbaseBytes[:], Scalar: scalar.BitBuf{Description: "coinbase"}},
}, rawHexReverse)
d.FieldU32("vout")
scriptSigSize := d.FieldUFn("scriptsig_size", decodeVarInt)
scriptSigSize := d.FieldUintFn("scriptsig_size", decodeVarInt)
d.FieldFormatOrRawLen("scriptsig", int64(scriptSigSize)*8, bitcoinScriptFormat, nil)
// TODO: better way to know if there should be a valid script
d.FieldU32("sequence", scalar.ActualHex)
d.FieldU32("sequence", scalar.UintHex)
})
}
})
outputCount := d.FieldUFn("output_count", decodeVarInt)
outputCount := d.FieldUintFn("output_count", decodeVarInt)
d.FieldArray("outputs", func(d *decode.D) {
for i := uint64(0); i < outputCount; i++ {
d.FieldStruct("output", func(d *decode.D) {
d.FieldU64("value")
scriptSigSize := d.FieldUFn("scriptpub_size", decodeVarInt)
scriptSigSize := d.FieldUintFn("scriptpub_size", decodeVarInt)
// TODO: better way to know if there should be a valid script
d.FieldFormatOrRawLen("scriptpub", int64(scriptSigSize)*8, bitcoinScriptFormat, nil)
})
@ -85,10 +85,10 @@ func decodeBitcoinTranscation(d *decode.D, in interface{}) interface{} {
d.FieldArray("witnesses", func(d *decode.D) {
for i := uint64(0); i < inputCount; i++ {
d.FieldStruct("witness", func(d *decode.D) {
witnessSize := d.FieldUFn("witness_size", decodeVarInt)
witnessSize := d.FieldUintFn("witness_size", decodeVarInt)
d.FieldArray("items", func(d *decode.D) {
for j := uint64(0); j < witnessSize; j++ {
itemSize := d.FieldUFn("item_size", decodeVarInt)
itemSize := d.FieldUintFn("item_size", decodeVarInt)
d.FieldRawLen("item", int64(itemSize)*8)
}
})

View File

@ -15,7 +15,7 @@ var bitsFS embed.FS
func decodeBits(unit int) func(d *decode.D, _ any) any {
return func(d *decode.D, _ any) any {
var s scalar.S
var s scalar.Any
b, _ := interp.NewBinaryFromBitReader(d.BitBufRange(0, d.Len()), unit, 0)
s.Actual = b
d.Value.V = &s

View File

@ -47,7 +47,7 @@ const (
boolTrue = 0x09
)
var elementTypeMap = scalar.UToScalar{
var elementTypeMap = scalar.UintMap{
elementTypeNullOrBoolOrFill: {Sym: "singleton", Description: "Singleton value (null/bool)"},
elementTypeInt: {Sym: "int", Description: "Integer"},
elementTypeReal: {Sym: "real", Description: "Floating Point Number"},
@ -64,13 +64,13 @@ var elementTypeMap = scalar.UToScalar{
var cocoaTimeEpochDate = time.Date(2001, time.January, 1, 0, 0, 0, 0, time.UTC)
// decodes the number of bits required to store the following object
func decodeSize(d *decode.D, sms ...scalar.Mapper) uint64 {
func decodeSize(d *decode.D, sms ...scalar.UintMapper) uint64 {
n := d.FieldU4("size_bits")
if n != 0x0f {
return n
}
d.FieldU4("large_size_marker", d.AssertU(0b0001))
d.FieldU4("large_size_marker", d.UintAssert(0b0001))
// get the exponent value
n = d.FieldU4("exponent")
@ -79,11 +79,11 @@ func decodeSize(d *decode.D, sms ...scalar.Mapper) uint64 {
n = 1 << n
// decode that many bytes as big endian
n = d.FieldUFn(
n = d.FieldUintFn(
"size_bigint",
func(d *decode.D) uint64 {
v := d.UBigInt(int(n * 8))
d.AssertBigIntRange(big.NewInt(1), big.NewInt(math.MaxInt64))
d.BigIntAssertRange(big.NewInt(1), big.NewInt(math.MaxInt64))
return v.Uint64()
}, sms...)
@ -98,13 +98,13 @@ func decodeItem(d *decode.D, p *plist) bool {
m := d.FieldU4("type", elementTypeMap)
switch m {
case elementTypeNullOrBoolOrFill:
d.FieldU4("value", scalar.UToScalar{
null: scalar.S{Sym: nil},
boolTrue: scalar.S{Sym: true},
boolFalse: scalar.S{Sym: false},
d.FieldU4("value", scalar.UintMap{
null: scalar.Uint{Sym: nil},
boolTrue: scalar.Uint{Sym: true},
boolFalse: scalar.Uint{Sym: false},
})
case elementTypeInt, elementTypeUID:
n := d.FieldUFn("size", func(d *decode.D) uint64 {
n := d.FieldUintFn("size", func(d *decode.D) uint64 {
return 1 << d.U4()
})
switch n {
@ -123,29 +123,29 @@ func decodeItem(d *decode.D, p *plist) bool {
}
case elementTypeReal:
n := 1 << decodeSize(d)
d.FieldValueU("size", uint64(n))
d.FieldValueUint("size", uint64(n))
d.FieldF("value", n*8)
case elementTypeDate:
n := 1 << decodeSize(d, d.AssertU(4, 8))
d.FieldValueU("size", uint64(n))
d.FieldF("value", n*8, scalar.DescriptionTimeFn(scalar.S.TryActualF, cocoaTimeEpochDate, time.RFC3339))
n := 1 << decodeSize(d, d.UintAssert(4, 8))
d.FieldValueUint("size", uint64(n))
d.FieldF("value", n*8, scalar.FltActualDate(cocoaTimeEpochDate, time.RFC3339))
case elementTypeData:
n := decodeSize(d)
d.FieldValueU("size", n)
d.FieldValueUint("size", n)
d.FieldRawLen("value", int64(n*8))
case elementTypeASCIIString:
n := decodeSize(d)
d.FieldValueU("size", n)
d.FieldValueUint("size", n)
d.FieldUTF8("value", int(n))
return true
case elementTypeUnicodeString:
n := decodeSize(d)
d.FieldValueU("size", n)
d.FieldValueUint("size", n)
d.FieldUTF16BE("value", int(n))
return true
case elementTypeArray:
n := decodeSize(d)
d.FieldValueU("size", n)
d.FieldValueUint("size", n)
d.FieldStructNArray("entries", "entry", int64(n),
func(d *decode.D) {
idx := d.FieldU("object_index", int(p.t.objRefSize)*8)
@ -153,7 +153,7 @@ func decodeItem(d *decode.D, p *plist) bool {
})
case elementTypeSet:
n := decodeSize(d)
d.FieldValueU("size", n)
d.FieldValueUint("size", n)
d.FieldStructNArray("entries", "entry", int64(n),
func(d *decode.D) {
idx := d.FieldU("object_index", int(p.t.objRefSize)*8)
@ -161,7 +161,7 @@ func decodeItem(d *decode.D, p *plist) bool {
})
case elementTypeDict:
n := decodeSize(d)
d.FieldValueU("size", n)
d.FieldValueUint("size", n)
d.FieldStructNArray("entries", "entry", int64(n),
func(d *decode.D) {
var ki, vi uint64
@ -248,8 +248,8 @@ type plist struct {
func bplistDecode(d *decode.D, _ any) any {
d.FieldStruct("header", func(d *decode.D) {
d.FieldUTF8("magic", 6, d.AssertStr("bplist"))
d.FieldUTF8("version", 2, d.AssertStr("00"))
d.FieldUTF8("magic", 6, d.StrAssert("bplist"))
d.FieldUTF8("version", 2, d.StrAssert("00"))
})
p := new(plist)
@ -259,8 +259,8 @@ func bplistDecode(d *decode.D, _ any) any {
d.FieldStruct("trailer", func(d *decode.D) {
d.FieldU40("unused")
d.FieldS8("sort_version")
p.t.offTblOffSize = d.FieldU8("offset_table_offset_size", d.AssertURange(1, 8))
p.t.objRefSize = d.FieldU8("object_reference_size", d.AssertURange(1, 8))
p.t.offTblOffSize = d.FieldU8("offset_table_offset_size", d.UintAssertRange(1, 8))
p.t.objRefSize = d.FieldU8("object_reference_size", d.UintAssertRange(1, 8))
p.t.nObjects = d.FieldU64("object_count")
p.t.topObjectOffset = d.FieldU64("top_object_offset")
p.t.offsetTableStart = d.FieldU64("offset_table_start")

View File

@ -43,7 +43,7 @@ const (
elementTypeInt64 = 0x12
)
var elementTypeMap = scalar.UToScalar{
var elementTypeMap = scalar.UintMap{
elementTypeDouble: {Sym: "double", Description: "64-bit binary floating point"},
elementTypeString: {Sym: "string", Description: "UTF-8 string"},
elementTypeDocument: {Sym: "document", Description: "Embedded document"},
@ -91,7 +91,7 @@ func decodeBSONDocument(d *decode.D) {
case elementTypeDatatime:
d.FieldS32("value")
case elementTypeNull:
d.FieldValueNil("value")
d.FieldValueAny("value", nil)
case elementTypeRegexp:
d.FieldUTF8Null("value")
d.FieldUTF8Null("options")
@ -107,7 +107,7 @@ func decodeBSONDocument(d *decode.D) {
})
}
})
d.FieldU8("terminator", d.ValidateU(0))
d.FieldU8("terminator", d.UintValidate(0))
})
}

View File

@ -58,7 +58,7 @@ func bzip2Decode(d *decode.D, _ any) any {
var blockCRCValue *decode.Value
var streamCRCN uint32
d.FieldUTF8("magic", 2, d.AssertStr("BZ"))
d.FieldUTF8("magic", 2, d.StrAssert("BZ"))
d.FieldU8("version")
d.FieldU8("hundred_k_blocksize")
@ -67,8 +67,8 @@ func bzip2Decode(d *decode.D, _ any) any {
// moreStreams = false
// return
// }
d.FieldU48("magic", d.AssertU(blockMagic), scalar.ActualHex)
d.FieldU32("crc", scalar.ActualHex)
d.FieldU48("magic", d.UintAssert(blockMagic), scalar.UintHex)
d.FieldU32("crc", scalar.UintHex)
blockCRCValue = d.FieldGet("crc")
d.FieldU1("randomised")
d.FieldU24("origptr")
@ -91,7 +91,7 @@ func bzip2Decode(d *decode.D, _ any) any {
})
treesI := uint64(0)
d.FieldArrayLoop("trees", func() bool { return treesI < numTrees }, func(d *decode.D) {
d.FieldUFn("tree", func(d *decode.D) uint64 {
d.FieldUintFn("tree", func(d *decode.D) uint64 {
l := d.U5()
if !d.Bool() {
return l
@ -118,7 +118,7 @@ func bzip2Decode(d *decode.D, _ any) any {
blockCRC32W := crc32.NewIEEE()
d.Copy(blockCRC32W, bitFlipReader{bitio.NewIOReader(uncompressedBR)})
blockCRC32N := bits.Reverse32(binary.BigEndian.Uint32(blockCRC32W.Sum(nil)))
_ = blockCRCValue.TryScalarFn(d.ValidateU(uint64(blockCRC32N)))
_ = blockCRCValue.TryUintScalarFn(d.UintValidate(uint64(blockCRC32N)))
streamCRCN = blockCRC32N ^ ((streamCRCN << 1) | (streamCRCN >> 31))
// HACK: bzip2.NewReader will read from start of whole buffer and then we figure out compressedSize ourself
@ -137,9 +137,9 @@ func bzip2Decode(d *decode.D, _ any) any {
d.FieldRawLen("compressed", compressedSize)
d.FieldStruct("footer", func(d *decode.D) {
d.FieldU48("magic", d.AssertU(footerMagic), scalar.ActualHex)
d.FieldU48("magic", d.UintAssert(footerMagic), scalar.UintHex)
// TODO: crc of block crcs
d.FieldU32("crc", scalar.ActualHex, d.ValidateU(uint64(streamCRCN)))
d.FieldU32("crc", scalar.UintHex, d.UintValidate(uint64(streamCRCN)))
d.FieldRawLen("padding", int64(d.ByteAlignBits()))
})
}

View File

@ -35,14 +35,14 @@ func init() {
}
type majorTypeEntry struct {
s scalar.S
s scalar.Uint
d func(d *decode.D, shortCount uint64, count uint64) any
}
type majorTypeEntries map[uint64]majorTypeEntry
func (mts majorTypeEntries) MapScalar(s scalar.S) (scalar.S, error) {
u := s.ActualU()
func (mts majorTypeEntries) MapUint(s scalar.Uint) (scalar.Uint, error) {
u := s.Actual
if fe, ok := mts[u]; ok {
s = fe.s
s.Actual = u
@ -67,7 +67,7 @@ const (
shortCountSpecialFloat64Bit = 27
)
var shortCountMap = scalar.UToSymStr{
var shortCountMap = scalar.UintMapSymStr{
shortCountVariable8Bit: "8bit",
shortCountVariable16Bit: "16bit",
shortCountVariable32Bit: "32bit",
@ -75,7 +75,7 @@ var shortCountMap = scalar.UToSymStr{
shortCountIndefinite: "indefinite",
}
var tagMap = scalar.UToSymStr{
var tagMap = scalar.UintMapSymStr{
0: "date_time",
1: "epoch_date_time",
2: "unsigned_bignum",
@ -110,17 +110,17 @@ const (
func decodeCBORValue(d *decode.D) any {
majorTypeMap := majorTypeEntries{
majorTypePositiveInt: {s: scalar.S{Sym: "positive_int"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
d.FieldValueU("value", count)
majorTypePositiveInt: {s: scalar.Uint{Sym: "positive_int"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
d.FieldValueUint("value", count)
return nil
}},
majorTypeNegativeInt: {s: scalar.S{Sym: "negative_int"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
majorTypeNegativeInt: {s: scalar.Uint{Sym: "negative_int"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
n := new(big.Int)
n.SetUint64(count).Neg(n).Sub(n, mathex.BigIntOne)
d.FieldValueBigInt("value", n)
return nil
}},
majorTypeBytes: {s: scalar.S{Sym: "bytes"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
majorTypeBytes: {s: scalar.Uint{Sym: "bytes"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
if shortCount == shortCountIndefinite {
bb := &bytes.Buffer{}
d.FieldArray("items", func(d *decode.D) {
@ -145,7 +145,7 @@ func decodeCBORValue(d *decode.D) any {
return buf
}},
majorTypeUTF8: {s: scalar.S{Sym: "utf8"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
majorTypeUTF8: {s: scalar.Uint{Sym: "utf8"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
if shortCount == shortCountIndefinite {
sb := &strings.Builder{}
d.FieldArray("items", func(d *decode.D) {
@ -168,7 +168,7 @@ func decodeCBORValue(d *decode.D) any {
return d.FieldUTF8("value", int(count))
}},
majorTypeArray: {s: scalar.S{Sym: "array"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
majorTypeArray: {s: scalar.Uint{Sym: "array"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
d.FieldArray("elements", func(d *decode.D) {
for i := uint64(0); true; i++ {
if shortCount == shortCountIndefinite && d.PeekBits(8) == breakMarker {
@ -184,7 +184,7 @@ func decodeCBORValue(d *decode.D) any {
}
return nil
}},
majorTypeMap: {s: scalar.S{Sym: "map"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
majorTypeMap: {s: scalar.Uint{Sym: "map"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
d.FieldArray("pairs", func(d *decode.D) {
for i := uint64(0); true; i++ {
if shortCount == shortCountIndefinite && d.PeekBits(8) == breakMarker {
@ -203,12 +203,12 @@ func decodeCBORValue(d *decode.D) any {
}
return nil
}},
majorTypeSematic: {s: scalar.S{Sym: "semantic"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
d.FieldValueU("tag", count, tagMap)
majorTypeSematic: {s: scalar.Uint{Sym: "semantic"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
d.FieldValueUint("tag", count, tagMap)
d.FieldStruct("value", func(d *decode.D) { decodeCBORValue(d) })
return nil
}},
majorTypeSpecialFloat: {s: scalar.S{Sym: "special_float"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
majorTypeSpecialFloat: {s: scalar.Uint{Sym: "special_float"}, d: func(d *decode.D, shortCount uint64, count uint64) any {
switch shortCount {
// TODO: 0-19
case shortCountSpecialFalse:
@ -216,7 +216,7 @@ func decodeCBORValue(d *decode.D) any {
case shortCountSpecialTrue:
d.FieldValueBool("value", true)
case shortCountSpecialNull:
d.FieldValueNil("value")
d.FieldValueAny("value", nil)
case shortCountSpecialUndefined:
// TODO: undefined
case 24:

View File

@ -64,7 +64,7 @@ func decodeCSV(d *decode.D, in any) any {
rvs = append(rvs, vs)
}
d.Value.V = &scalar.S{Actual: rvs}
d.Value.V = &scalar.Any{Actual: rvs}
d.Value.Range.Len = d.Len()
return nil

View File

@ -26,18 +26,18 @@ const (
classIN = 1
)
var classNames = scalar.URangeToScalar{
{Range: [2]uint64{0x0000, 0x0000}, S: scalar.S{Sym: "reserved", Description: "Reserved"}},
{Range: [2]uint64{classIN, classIN}, S: scalar.S{Sym: "in", Description: "Internet"}},
{Range: [2]uint64{0x0002, 0x0002}, S: scalar.S{Sym: "unassigned", Description: "Unassigned"}},
{Range: [2]uint64{0x0003, 0x0003}, S: scalar.S{Sym: "chaos", Description: "Chaos"}},
{Range: [2]uint64{0x0004, 0x0004}, S: scalar.S{Sym: "hesiod", Description: "Hesiod"}},
{Range: [2]uint64{0x0005, 0x00fd}, S: scalar.S{Sym: "unassigned", Description: "Unassigned"}},
{Range: [2]uint64{0x00fe, 0x00fe}, S: scalar.S{Sym: "qclass_none", Description: "QCLASS NONE"}},
{Range: [2]uint64{0x00ff, 0x00ff}, S: scalar.S{Sym: "qclass_any", Description: "QCLASS ANY"}},
{Range: [2]uint64{0x0100, 0xfeff}, S: scalar.S{Sym: "unassigned", Description: "Unassigned"}},
{Range: [2]uint64{0xff00, 0xfffe}, S: scalar.S{Sym: "private", Description: "Reserved for Private Use"}},
{Range: [2]uint64{0xffff, 0xffff}, S: scalar.S{Sym: "reserved", Description: "Reserved"}},
var classNames = scalar.UintRangeToScalar{
{Range: [2]uint64{0x0000, 0x0000}, S: scalar.Uint{Sym: "reserved", Description: "Reserved"}},
{Range: [2]uint64{classIN, classIN}, S: scalar.Uint{Sym: "in", Description: "Internet"}},
{Range: [2]uint64{0x0002, 0x0002}, S: scalar.Uint{Sym: "unassigned", Description: "Unassigned"}},
{Range: [2]uint64{0x0003, 0x0003}, S: scalar.Uint{Sym: "chaos", Description: "Chaos"}},
{Range: [2]uint64{0x0004, 0x0004}, S: scalar.Uint{Sym: "hesiod", Description: "Hesiod"}},
{Range: [2]uint64{0x0005, 0x00fd}, S: scalar.Uint{Sym: "unassigned", Description: "Unassigned"}},
{Range: [2]uint64{0x00fe, 0x00fe}, S: scalar.Uint{Sym: "qclass_none", Description: "QCLASS NONE"}},
{Range: [2]uint64{0x00ff, 0x00ff}, S: scalar.Uint{Sym: "qclass_any", Description: "QCLASS ANY"}},
{Range: [2]uint64{0x0100, 0xfeff}, S: scalar.Uint{Sym: "unassigned", Description: "Unassigned"}},
{Range: [2]uint64{0xff00, 0xfffe}, S: scalar.Uint{Sym: "private", Description: "Reserved for Private Use"}},
{Range: [2]uint64{0xffff, 0xffff}, S: scalar.Uint{Sym: "reserved", Description: "Reserved"}},
}
const (
@ -50,7 +50,7 @@ const (
typeAAAA = 28
)
var typeNames = scalar.UToSymStr{
var typeNames = scalar.UintMapSymStr{
typeA: "a",
typeAAAA: "aaaa",
18: "afsdb",
@ -100,7 +100,7 @@ var typeNames = scalar.UToSymStr{
65: "https",
}
var rcodeNames = scalar.UToScalar{
var rcodeNames = scalar.UintMap{
0: {Sym: "no_error", Description: "No error"},
1: {Sym: "form_err", Description: "Format error"},
2: {Sym: "serv_fail", Description: "Server failure"},
@ -230,11 +230,11 @@ func dnsDecode(d *decode.D, hasLengthHeader bool) any {
d.FieldU16("length")
}
d.FieldU16("id")
d.FieldU1("qr", scalar.UToSymStr{
d.FieldU1("qr", scalar.UintMapSymStr{
0: "query",
1: "response",
})
d.FieldU4("opcode", scalar.UToSymStr{
d.FieldU4("opcode", scalar.UintMapSymStr{
0: "query",
1: "iquery",
2: "status",

View File

@ -31,12 +31,12 @@ const (
BIG_ENDIAN = 2
)
var endianNames = scalar.UToSymStr{
var endianNames = scalar.UintMapSymStr{
LITTLE_ENDIAN: "little_endian",
BIG_ENDIAN: "big_endian",
}
var classBits = scalar.UToSymU{
var classBits = scalar.UintMapSymUint{
1: 32,
2: 64,
}
@ -46,7 +46,7 @@ const (
CLASS_64 = 2
)
var osABINames = scalar.UToSymStr{
var osABINames = scalar.UintMapSymStr{
0: "sysv",
1: "hpux",
2: "netbsd",
@ -72,14 +72,14 @@ const (
ET_CORE = 4
)
var typeNames = scalar.URangeToScalar{
{Range: [2]uint64{ET_NONE, ET_NONE}, S: scalar.S{Sym: "none"}},
{Range: [2]uint64{ET_REL, ET_REL}, S: scalar.S{Sym: "rel"}},
{Range: [2]uint64{ET_EXEC, ET_EXEC}, S: scalar.S{Sym: "exec"}},
{Range: [2]uint64{ET_DYN, ET_DYN}, S: scalar.S{Sym: "dyn"}},
{Range: [2]uint64{ET_CORE, ET_CORE}, S: scalar.S{Sym: "core"}},
{Range: [2]uint64{0xfe00, 0xfeff}, S: scalar.S{Sym: "os"}},
{Range: [2]uint64{0xff00, 0xffff}, S: scalar.S{Sym: "proc"}},
var typeNames = scalar.UintRangeToScalar{
{Range: [2]uint64{ET_NONE, ET_NONE}, S: scalar.Uint{Sym: "none"}},
{Range: [2]uint64{ET_REL, ET_REL}, S: scalar.Uint{Sym: "rel"}},
{Range: [2]uint64{ET_EXEC, ET_EXEC}, S: scalar.Uint{Sym: "exec"}},
{Range: [2]uint64{ET_DYN, ET_DYN}, S: scalar.Uint{Sym: "dyn"}},
{Range: [2]uint64{ET_CORE, ET_CORE}, S: scalar.Uint{Sym: "core"}},
{Range: [2]uint64{0xfe00, 0xfeff}, S: scalar.Uint{Sym: "os"}},
{Range: [2]uint64{0xff00, 0xffff}, S: scalar.Uint{Sym: "proc"}},
}
const (
@ -87,7 +87,7 @@ const (
EM_ARM64 = 0xb7
)
var machineNames = scalar.UToScalar{
var machineNames = scalar.UintMap{
0x00: {Description: "No specific instruction set"},
0x01: {Sym: "we_32100", Description: "AT&T WE 32100"},
0x02: {Sym: "sparc", Description: "SPARC"},
@ -151,20 +151,20 @@ const (
PT_TLS = 7
)
var phTypeNames = scalar.URangeToScalar{
{Range: [2]uint64{PT_NULL, PT_NULL}, S: scalar.S{Sym: "null", Description: "Unused element"}},
{Range: [2]uint64{PT_LOAD, PT_LOAD}, S: scalar.S{Sym: "load", Description: "Loadable segment"}},
{Range: [2]uint64{PT_DYNAMIC, PT_DYNAMIC}, S: scalar.S{Sym: "dynamic", Description: "Dynamic linking information"}},
{Range: [2]uint64{PT_INTERP, PT_INTERP}, S: scalar.S{Sym: "interp", Description: "Interpreter to invoke"}},
{Range: [2]uint64{PT_NOTE, PT_NOTE}, S: scalar.S{Sym: "note", Description: "Auxiliary information"}},
{Range: [2]uint64{PT_SHLIB, PT_SHLIB}, S: scalar.S{Sym: "shlib", Description: "Reserved but has unspecified"}},
{Range: [2]uint64{PT_PHDR, PT_PHDR}, S: scalar.S{Sym: "phdr", Description: "Program header location and size"}},
{Range: [2]uint64{PT_TLS, PT_TLS}, S: scalar.S{Sym: "tls", Description: "Thread-Local Storage template"}},
{Range: [2]uint64{0x6474e550, 0x6474e550}, S: scalar.S{Sym: "gnu_eh_frame", Description: "GNU frame unwind information"}},
{Range: [2]uint64{0x6474e551, 0x6474e551}, S: scalar.S{Sym: "gnu_stack", Description: "GNU stack permission"}},
{Range: [2]uint64{0x6474e552, 0x6474e552}, S: scalar.S{Sym: "gnu_relro", Description: "GNU read-only after relocation"}},
{Range: [2]uint64{0x60000000, 0x6fffffff}, S: scalar.S{Sym: "os", Description: "Operating system-specific"}},
{Range: [2]uint64{0x70000000, 0x7fffffff}, S: scalar.S{Sym: "proc", Description: "Processor-specific"}},
var phTypeNames = scalar.UintRangeToScalar{
{Range: [2]uint64{PT_NULL, PT_NULL}, S: scalar.Uint{Sym: "null", Description: "Unused element"}},
{Range: [2]uint64{PT_LOAD, PT_LOAD}, S: scalar.Uint{Sym: "load", Description: "Loadable segment"}},
{Range: [2]uint64{PT_DYNAMIC, PT_DYNAMIC}, S: scalar.Uint{Sym: "dynamic", Description: "Dynamic linking information"}},
{Range: [2]uint64{PT_INTERP, PT_INTERP}, S: scalar.Uint{Sym: "interp", Description: "Interpreter to invoke"}},
{Range: [2]uint64{PT_NOTE, PT_NOTE}, S: scalar.Uint{Sym: "note", Description: "Auxiliary information"}},
{Range: [2]uint64{PT_SHLIB, PT_SHLIB}, S: scalar.Uint{Sym: "shlib", Description: "Reserved but has unspecified"}},
{Range: [2]uint64{PT_PHDR, PT_PHDR}, S: scalar.Uint{Sym: "phdr", Description: "Program header location and size"}},
{Range: [2]uint64{PT_TLS, PT_TLS}, S: scalar.Uint{Sym: "tls", Description: "Thread-Local Storage template"}},
{Range: [2]uint64{0x6474e550, 0x6474e550}, S: scalar.Uint{Sym: "gnu_eh_frame", Description: "GNU frame unwind information"}},
{Range: [2]uint64{0x6474e551, 0x6474e551}, S: scalar.Uint{Sym: "gnu_stack", Description: "GNU stack permission"}},
{Range: [2]uint64{0x6474e552, 0x6474e552}, S: scalar.Uint{Sym: "gnu_relro", Description: "GNU read-only after relocation"}},
{Range: [2]uint64{0x60000000, 0x6fffffff}, S: scalar.Uint{Sym: "os", Description: "Operating system-specific"}},
{Range: [2]uint64{0x70000000, 0x7fffffff}, S: scalar.Uint{Sym: "proc", Description: "Processor-specific"}},
}
const (
@ -236,7 +236,7 @@ const (
NT_LOONGARCH_LBT = 0xa04
)
var coreNoteNames = scalar.UToScalar{
var coreNoteNames = scalar.UintMap{
NT_PRSTATUS: {Sym: "prstatus"},
NT_PRFPREG: {Sym: "prfpreg"},
NT_PRPSINFO: {Sym: "prpsinfo"},
@ -325,7 +325,7 @@ const (
SHT_GNU_HASH = 0x6ffffff6
)
var sectionHeaderTypeMap = scalar.UToScalar{
var sectionHeaderTypeMap = scalar.UintMap{
SHT_NULL: {Sym: "null", Description: "Header inactive"},
SHT_PROGBITS: {Sym: "progbits", Description: "Information defined by the program"},
SHT_SYMTAB: {Sym: "symtab", Description: "Symbol table"},
@ -402,7 +402,7 @@ const (
type dtEntry struct {
r [2]uint64
dUn int
s scalar.S
s scalar.Uint
}
type dynamicTableEntries []dtEntry
@ -416,8 +416,8 @@ func (d dynamicTableEntries) lookup(u uint64) (dtEntry, bool) {
return dtEntry{}, false
}
func (d dynamicTableEntries) MapScalar(s scalar.S) (scalar.S, error) {
u := s.ActualU()
func (d dynamicTableEntries) MapUint(s scalar.Uint) (scalar.Uint, error) {
u := s.Actual
if de, ok := d.lookup(u); ok {
s = de.s
s.Actual = u
@ -426,44 +426,44 @@ func (d dynamicTableEntries) MapScalar(s scalar.S) (scalar.S, error) {
}
var dynamicTableMap = dynamicTableEntries{
{r: [2]uint64{DT_NULL, DT_NULL}, dUn: dUnIgnored, s: scalar.S{Sym: "null", Description: "Marks end of dynamic section"}},
{r: [2]uint64{DT_NEEDED, DT_NEEDED}, dUn: dUnVal, s: scalar.S{Sym: "needed", Description: "String table offset to name of a needed library"}},
{r: [2]uint64{DT_PLTRELSZ, DT_PLTRELSZ}, dUn: dUnVal, s: scalar.S{Sym: "pltrelsz", Description: "Size in bytes of PLT relocation entries"}},
{r: [2]uint64{DT_PLTGOT, DT_PLTGOT}, dUn: dUnPtr, s: scalar.S{Sym: "pltgot", Description: "Address of PLT and/or GOT"}},
{r: [2]uint64{DT_HASH, DT_HASH}, dUn: dUnPtr, s: scalar.S{Sym: "hash", Description: "Address of symbol hash table"}},
{r: [2]uint64{DT_STRTAB, DT_STRTAB}, dUn: dUnPtr, s: scalar.S{Sym: "strtab", Description: "Address of string table"}},
{r: [2]uint64{DT_SYMTAB, DT_SYMTAB}, dUn: dUnPtr, s: scalar.S{Sym: "symtab", Description: "Address of symbol table"}},
{r: [2]uint64{DT_RELA, DT_RELA}, dUn: dUnPtr, s: scalar.S{Sym: "rela", Description: "Address of Rela relocation table"}},
{r: [2]uint64{DT_RELASZ, DT_RELASZ}, dUn: dUnVal, s: scalar.S{Sym: "relasz", Description: "Size in bytes of the Rela relocation table"}},
{r: [2]uint64{DT_RELAENT, DT_RELAENT}, dUn: dUnVal, s: scalar.S{Sym: "relaent", Description: "Size in bytes of a Rela relocation table entry"}},
{r: [2]uint64{DT_STRSZ, DT_STRSZ}, dUn: dUnVal, s: scalar.S{Sym: "strsz", Description: "Size in bytes of string table"}},
{r: [2]uint64{DT_SYMENT, DT_SYMENT}, dUn: dUnVal, s: scalar.S{Sym: "syment", Description: "Size in bytes of a symbol table entry"}},
{r: [2]uint64{DT_INIT, DT_INIT}, dUn: dUnPtr, s: scalar.S{Sym: "init", Description: "Address of the initialization function"}},
{r: [2]uint64{DT_FINI, DT_FINI}, dUn: dUnPtr, s: scalar.S{Sym: "fini", Description: "Address of the termination function"}},
{r: [2]uint64{DT_SONAME, DT_SONAME}, dUn: dUnVal, s: scalar.S{Sym: "soname", Description: "String table offset to name of shared object"}},
{r: [2]uint64{DT_RPATH, DT_RPATH}, dUn: dUnVal, s: scalar.S{Sym: "rpath", Description: "String table offset to library search path (deprecated)"}},
{r: [2]uint64{DT_SYMBOLIC, DT_SYMBOLIC}, dUn: dUnIgnored, s: scalar.S{Sym: "symbolic", Description: "Alert linker to search this shared object before the executable for symbols DT_REL Address of Rel relocation table"}},
{r: [2]uint64{DT_REL, DT_REL}, dUn: dUnPtr, s: scalar.S{Sym: "rel", Description: ""}},
{r: [2]uint64{DT_RELSZ, DT_RELSZ}, dUn: dUnVal, s: scalar.S{Sym: "relsz", Description: "Size in bytes of Rel relocation table"}},
{r: [2]uint64{DT_RELENT, DT_RELENT}, dUn: dUnVal, s: scalar.S{Sym: "relent", Description: "Size in bytes of a Rel table entry"}},
{r: [2]uint64{DT_PLTREL, DT_PLTREL}, dUn: dUnVal, s: scalar.S{Sym: "pltrel", Description: "Type of relocation entry to which the PLT refers (Rela or Rel)"}},
{r: [2]uint64{DT_DEBUG, DT_DEBUG}, dUn: dUnPtr, s: scalar.S{Sym: "debug", Description: "Undefined use for debugging"}},
{r: [2]uint64{DT_TEXTREL, DT_TEXTREL}, dUn: dUnIgnored, s: scalar.S{Sym: "textrel", Description: "Absence of this entry indicates that no relocation entries should apply to a nonwritable segment"}},
{r: [2]uint64{DT_JMPREL, DT_JMPREL}, dUn: dUnPtr, s: scalar.S{Sym: "jmprel", Description: "Address of relocation entries associated solely with the PLT"}},
{r: [2]uint64{DT_BIND_NOW, DT_BIND_NOW}, dUn: dUnIgnored, s: scalar.S{Sym: "bind_now", Description: "Instruct dynamic linker to process all relocations before transferring control to the executable"}},
{r: [2]uint64{DT_INIT_ARRAY, DT_INIT_ARRAY}, dUn: dUnPtr, s: scalar.S{Sym: "init_array", Description: "Address of the array of pointers to initialization functions"}},
{r: [2]uint64{DT_FINI_ARRAY, DT_FINI_ARRAY}, dUn: dUnPtr, s: scalar.S{Sym: "fini_array", Description: "Address of the array of pointers to termination functions"}},
{r: [2]uint64{DT_INIT_ARRAYSZ, DT_INIT_ARRAYSZ}, dUn: dUnVal, s: scalar.S{Sym: "init_arraysz", Description: "Size in bytes of the array of initialization functions"}},
{r: [2]uint64{DT_FINI_ARRAYSZ, DT_FINI_ARRAYSZ}, dUn: dUnVal, s: scalar.S{Sym: "fini_arraysz", Description: "Size in bytes of the array of termination functions "}},
{r: [2]uint64{DT_RUNPATH, DT_RUNPATH}, dUn: dUnVal, s: scalar.S{Sym: "runpath", Description: "String table offset to library search path"}},
{r: [2]uint64{DT_FLAGS, DT_FLAGS}, dUn: dUnVal, s: scalar.S{Sym: "flags", Description: "Flag values specific to the object being loaded"}}, // TODO: flag ma}},
{r: [2]uint64{DT_ENCODING, DT_ENCODING}, dUn: dUnUnspecified, s: scalar.S{Sym: "encoding", Description: ""}}, // or DT_PREINIT_ARRAY }},
{r: [2]uint64{DT_PREINIT_ARRAYSZ, DT_PREINIT_ARRAYSZ}, dUn: dUnVal, s: scalar.S{Sym: "preinit_arraysz", Description: "Address of the array of pointers to pre-initialization functions"}},
{r: [2]uint64{DT_LOOS, DT_HIOS}, dUn: dUnUnspecified, s: scalar.S{Sym: "lo", Description: "Operating system-specific semantics"}},
{r: [2]uint64{DT_LOPROC, DT_HIPROC}, dUn: dUnUnspecified, s: scalar.S{Sym: "proc", Description: "Processor-specific semantics"}},
{r: [2]uint64{DT_NULL, DT_NULL}, dUn: dUnIgnored, s: scalar.Uint{Sym: "null", Description: "Marks end of dynamic section"}},
{r: [2]uint64{DT_NEEDED, DT_NEEDED}, dUn: dUnVal, s: scalar.Uint{Sym: "needed", Description: "String table offset to name of a needed library"}},
{r: [2]uint64{DT_PLTRELSZ, DT_PLTRELSZ}, dUn: dUnVal, s: scalar.Uint{Sym: "pltrelsz", Description: "Size in bytes of PLT relocation entries"}},
{r: [2]uint64{DT_PLTGOT, DT_PLTGOT}, dUn: dUnPtr, s: scalar.Uint{Sym: "pltgot", Description: "Address of PLT and/or GOT"}},
{r: [2]uint64{DT_HASH, DT_HASH}, dUn: dUnPtr, s: scalar.Uint{Sym: "hash", Description: "Address of symbol hash table"}},
{r: [2]uint64{DT_STRTAB, DT_STRTAB}, dUn: dUnPtr, s: scalar.Uint{Sym: "strtab", Description: "Address of string table"}},
{r: [2]uint64{DT_SYMTAB, DT_SYMTAB}, dUn: dUnPtr, s: scalar.Uint{Sym: "symtab", Description: "Address of symbol table"}},
{r: [2]uint64{DT_RELA, DT_RELA}, dUn: dUnPtr, s: scalar.Uint{Sym: "rela", Description: "Address of Rela relocation table"}},
{r: [2]uint64{DT_RELASZ, DT_RELASZ}, dUn: dUnVal, s: scalar.Uint{Sym: "relasz", Description: "Size in bytes of the Rela relocation table"}},
{r: [2]uint64{DT_RELAENT, DT_RELAENT}, dUn: dUnVal, s: scalar.Uint{Sym: "relaent", Description: "Size in bytes of a Rela relocation table entry"}},
{r: [2]uint64{DT_STRSZ, DT_STRSZ}, dUn: dUnVal, s: scalar.Uint{Sym: "strsz", Description: "Size in bytes of string table"}},
{r: [2]uint64{DT_SYMENT, DT_SYMENT}, dUn: dUnVal, s: scalar.Uint{Sym: "syment", Description: "Size in bytes of a symbol table entry"}},
{r: [2]uint64{DT_INIT, DT_INIT}, dUn: dUnPtr, s: scalar.Uint{Sym: "init", Description: "Address of the initialization function"}},
{r: [2]uint64{DT_FINI, DT_FINI}, dUn: dUnPtr, s: scalar.Uint{Sym: "fini", Description: "Address of the termination function"}},
{r: [2]uint64{DT_SONAME, DT_SONAME}, dUn: dUnVal, s: scalar.Uint{Sym: "soname", Description: "String table offset to name of shared object"}},
{r: [2]uint64{DT_RPATH, DT_RPATH}, dUn: dUnVal, s: scalar.Uint{Sym: "rpath", Description: "String table offset to library search path (deprecated)"}},
{r: [2]uint64{DT_SYMBOLIC, DT_SYMBOLIC}, dUn: dUnIgnored, s: scalar.Uint{Sym: "symbolic", Description: "Alert linker to search this shared object before the executable for symbols DT_REL Address of Rel relocation table"}},
{r: [2]uint64{DT_REL, DT_REL}, dUn: dUnPtr, s: scalar.Uint{Sym: "rel", Description: ""}},
{r: [2]uint64{DT_RELSZ, DT_RELSZ}, dUn: dUnVal, s: scalar.Uint{Sym: "relsz", Description: "Size in bytes of Rel relocation table"}},
{r: [2]uint64{DT_RELENT, DT_RELENT}, dUn: dUnVal, s: scalar.Uint{Sym: "relent", Description: "Size in bytes of a Rel table entry"}},
{r: [2]uint64{DT_PLTREL, DT_PLTREL}, dUn: dUnVal, s: scalar.Uint{Sym: "pltrel", Description: "Type of relocation entry to which the PLT refers (Rela or Rel)"}},
{r: [2]uint64{DT_DEBUG, DT_DEBUG}, dUn: dUnPtr, s: scalar.Uint{Sym: "debug", Description: "Undefined use for debugging"}},
{r: [2]uint64{DT_TEXTREL, DT_TEXTREL}, dUn: dUnIgnored, s: scalar.Uint{Sym: "textrel", Description: "Absence of this entry indicates that no relocation entries should apply to a nonwritable segment"}},
{r: [2]uint64{DT_JMPREL, DT_JMPREL}, dUn: dUnPtr, s: scalar.Uint{Sym: "jmprel", Description: "Address of relocation entries associated solely with the PLT"}},
{r: [2]uint64{DT_BIND_NOW, DT_BIND_NOW}, dUn: dUnIgnored, s: scalar.Uint{Sym: "bind_now", Description: "Instruct dynamic linker to process all relocations before transferring control to the executable"}},
{r: [2]uint64{DT_INIT_ARRAY, DT_INIT_ARRAY}, dUn: dUnPtr, s: scalar.Uint{Sym: "init_array", Description: "Address of the array of pointers to initialization functions"}},
{r: [2]uint64{DT_FINI_ARRAY, DT_FINI_ARRAY}, dUn: dUnPtr, s: scalar.Uint{Sym: "fini_array", Description: "Address of the array of pointers to termination functions"}},
{r: [2]uint64{DT_INIT_ARRAYSZ, DT_INIT_ARRAYSZ}, dUn: dUnVal, s: scalar.Uint{Sym: "init_arraysz", Description: "Size in bytes of the array of initialization functions"}},
{r: [2]uint64{DT_FINI_ARRAYSZ, DT_FINI_ARRAYSZ}, dUn: dUnVal, s: scalar.Uint{Sym: "fini_arraysz", Description: "Size in bytes of the array of termination functions "}},
{r: [2]uint64{DT_RUNPATH, DT_RUNPATH}, dUn: dUnVal, s: scalar.Uint{Sym: "runpath", Description: "String table offset to library search path"}},
{r: [2]uint64{DT_FLAGS, DT_FLAGS}, dUn: dUnVal, s: scalar.Uint{Sym: "flags", Description: "Flag values specific to the object being loaded"}}, // TODO: flag ma}},
{r: [2]uint64{DT_ENCODING, DT_ENCODING}, dUn: dUnUnspecified, s: scalar.Uint{Sym: "encoding", Description: ""}}, // or DT_PREINIT_ARRAY }},
{r: [2]uint64{DT_PREINIT_ARRAYSZ, DT_PREINIT_ARRAYSZ}, dUn: dUnVal, s: scalar.Uint{Sym: "preinit_arraysz", Description: "Address of the array of pointers to pre-initialization functions"}},
{r: [2]uint64{DT_LOOS, DT_HIOS}, dUn: dUnUnspecified, s: scalar.Uint{Sym: "lo", Description: "Operating system-specific semantics"}},
{r: [2]uint64{DT_LOPROC, DT_HIPROC}, dUn: dUnUnspecified, s: scalar.Uint{Sym: "proc", Description: "Processor-specific semantics"}},
}
var symbolTableBindingMap = scalar.UToSymStr{
var symbolTableBindingMap = scalar.UintMapSymStr{
0: "local",
1: "global",
2: "weak",
@ -474,7 +474,7 @@ var symbolTableBindingMap = scalar.UToSymStr{
15: "proc",
}
var symbolTableTypeMap = scalar.UToSymStr{
var symbolTableTypeMap = scalar.UintMapSymStr{
0: "notype",
1: "object",
2: "func",
@ -489,7 +489,7 @@ var symbolTableTypeMap = scalar.UToSymStr{
15: "proc",
}
var symbolTableVisibilityMap = scalar.UToSymStr{
var symbolTableVisibilityMap = scalar.UintMapSymStr{
0: "default",
1: "internal",
2: "hidden",
@ -509,8 +509,8 @@ func strIndexNull(idx int, s string) string {
type strTable string
func (m strTable) MapScalar(s scalar.S) (scalar.S, error) {
s.Sym = strIndexNull(int(s.ActualU()), string(m))
func (m strTable) MapUint(s scalar.Uint) (scalar.Uint, error) {
s.Sym = strIndexNull(int(s.Actual), string(m))
return s, nil
}
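Both dynamicTableEntries and strTable now plug into field reads through a MapUint method instead of the old MapScalar, so any small type with that method can act as a uint mapper. A hedged sketch under that assumption; the mapper below is invented for illustration and is not part of this change:

// hypothetical mapper: annotate power-of-two values
type powerOfTwo struct{}

func (powerOfTwo) MapUint(s scalar.Uint) (scalar.Uint, error) {
	if s.Actual != 0 && s.Actual&(s.Actual-1) == 0 {
		s.Description = "power of two"
	}
	return s, nil
}

// usage sketch: d.FieldU32("addralign", powerOfTwo{}, scalar.UintHex)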
@ -819,8 +819,8 @@ func elfDecodeHeader(d *decode.D, ec *elfContext) {
d.Fatalf("unknown endian %d", endian)
}
typ := d.FieldU16("type", typeNames, scalar.ActualHex)
machine := d.FieldU16("machine", machineNames, scalar.ActualHex)
typ := d.FieldU16("type", typeNames, scalar.UintHex)
machine := d.FieldU16("machine", machineNames, scalar.UintHex)
d.FieldU32("version")
d.FieldU("entry", archBits)
phOff := d.FieldU("phoff", archBits)
@ -871,9 +871,9 @@ func elfDecodeProgramHeader(d *decode.D, ec elfContext) {
switch ec.archBits {
case 32:
typ = d.FieldU32("type", phTypeNames)
offset = d.FieldU("offset", ec.archBits, scalar.ActualHex)
d.FieldU("vaddr", ec.archBits, scalar.ActualHex)
d.FieldU("paddr", ec.archBits, scalar.ActualHex)
offset = d.FieldU("offset", ec.archBits, scalar.UintHex)
d.FieldU("vaddr", ec.archBits, scalar.UintHex)
d.FieldU("paddr", ec.archBits, scalar.UintHex)
size = d.FieldU32("filesz")
d.FieldU32("memsz")
pFlags(d)
@ -881,9 +881,9 @@ func elfDecodeProgramHeader(d *decode.D, ec elfContext) {
case 64:
typ = d.FieldU32("type", phTypeNames)
pFlags(d)
offset = d.FieldU("offset", ec.archBits, scalar.ActualHex)
d.FieldU("vaddr", ec.archBits, scalar.ActualHex)
d.FieldU("paddr", ec.archBits, scalar.ActualHex)
offset = d.FieldU("offset", ec.archBits, scalar.UintHex)
d.FieldU("vaddr", ec.archBits, scalar.UintHex)
d.FieldU("paddr", ec.archBits, scalar.UintHex)
size = d.FieldU64("filesz")
d.FieldU64("memsz")
d.FieldU64("align")
@ -900,9 +900,9 @@ func elfDecodeProgramHeader(d *decode.D, ec elfContext) {
nameSz := d.FieldU32("n_namesz")
descSz := d.FieldU32("n_descsz")
if ec.typ == ET_CORE {
d.FieldU32("n_type", coreNoteNames, scalar.ActualHex)
d.FieldU32("n_type", coreNoteNames, scalar.UintHex)
} else {
d.FieldU32("n_type", scalar.ActualHex)
d.FieldU32("n_type", scalar.UintHex)
}
d.FieldUTF8NullFixedLen("name", int(nameSz))
nameAlign := d.AlignBits(4 * 8)
@ -935,14 +935,14 @@ func elfDecodeProgramHeaders(d *decode.D, ec elfContext) {
func elfDecodeDynamicTag(d *decode.D, ec elfContext, dc dynamicContext) {
dtTag := d.FieldU("tag", ec.archBits, dynamicTableMap)
name := "unspecified"
dfMapper := scalar.ActualHex
dfMapper := scalar.UintHex
if de, ok := dynamicTableMap.lookup(dtTag); ok {
switch de.dUn {
case dUnIgnored:
name = "ignored"
case dUnVal:
name = "val"
dfMapper = scalar.ActualDec
dfMapper = scalar.UintDec
case dUnPtr:
name = "ptr"
}
@ -954,7 +954,7 @@ func elfDecodeDynamicTag(d *decode.D, ec elfContext, dc dynamicContext) {
case DT_HASH:
v := d.FieldU(name, ec.archBits, dfMapper)
if i, ok := ec.sectionIndexByAddr(int64(v) * 8); ok {
d.FieldValueU("section_index", uint64(i))
d.FieldValueUint("section_index", uint64(i))
}
case DT_SYMTAB,
DT_STRTAB,
@ -964,7 +964,7 @@ func elfDecodeDynamicTag(d *decode.D, ec elfContext, dc dynamicContext) {
DT_FINI:
v := d.FieldU(name, ec.archBits, dfMapper)
if i, ok := ec.sectionIndexByAddr(int64(v) * 8); ok {
d.FieldValueU("section_index", uint64(i))
d.FieldValueUint("section_index", uint64(i))
}
default:
d.FieldU(name, ec.archBits, dfMapper)
@ -1033,21 +1033,21 @@ func elfDecodeSectionHeader(d *decode.D, ec elfContext, sh sectionHeader) {
switch ec.archBits {
case 32:
d.FieldU32("name", strTable(ec.strTabMap[STRTAB_SHSTRTAB]))
typ = d.FieldU32("type", sectionHeaderTypeMap, scalar.ActualHex)
typ = d.FieldU32("type", sectionHeaderTypeMap, scalar.UintHex)
shFlags(d, ec.archBits)
d.FieldU("addr", ec.archBits, scalar.ActualHex)
d.FieldU("addr", ec.archBits, scalar.UintHex)
offset = int64(d.FieldU("offset", ec.archBits)) * 8
size = int64(d.FieldU32("size", scalar.ActualHex) * 8)
size = int64(d.FieldU32("size", scalar.UintHex) * 8)
d.FieldU32("link")
d.FieldU32("info")
d.FieldU32("addralign")
entSize = int64(d.FieldU32("entsize") * 8)
case 64:
d.FieldU32("name", strTable(ec.strTabMap[STRTAB_SHSTRTAB]))
typ = d.FieldU32("type", sectionHeaderTypeMap, scalar.ActualHex)
typ = d.FieldU32("type", sectionHeaderTypeMap, scalar.UintHex)
shFlags(d, ec.archBits)
d.FieldU("addr", ec.archBits, scalar.ActualHex)
offset = int64(d.FieldU("offset", ec.archBits, scalar.ActualHex) * 8)
d.FieldU("addr", ec.archBits, scalar.UintHex)
offset = int64(d.FieldU("offset", ec.archBits, scalar.UintHex) * 8)
size = int64(d.FieldU64("size") * 8)
d.FieldU32("link")
d.FieldU32("info")

View File

@ -33,7 +33,7 @@ func init() {
}
func flacDecode(d *decode.D, _ any) any {
d.FieldUTF8("magic", 4, d.AssertStr("fLaC"))
d.FieldUTF8("magic", 4, d.StrAssert("fLaC"))
var streamInfo format.FlacStreamInfo
var flacFrameIn format.FlacFrameIn
@ -78,8 +78,8 @@ func flacDecode(d *decode.D, _ any) any {
})
md5CalcValue := d.FieldRootBitBuf("md5_calculated", bitio.NewBitReader(md5Samples.Sum(nil), -1))
_ = md5CalcValue.TryScalarFn(d.ValidateBitBuf(streamInfo.MD5), scalar.RawHex)
d.FieldValueU("decoded_samples", framesNDecodedSamples)
_ = md5CalcValue.TryBitBufScalarFn(d.ValidateBitBuf(streamInfo.MD5), scalar.RawHex)
d.FieldValueUint("decoded_samples", framesNDecodedSamples)
return nil
}

View File

@ -36,7 +36,7 @@ const (
BlockingStrategyVariable = 1
)
var BlockingStrategyNames = scalar.UToSymStr{
var BlockingStrategyNames = scalar.UintMapSymStr{
BlockingStrategyFixed: "fixed",
BlockingStrategyVariable: "variable",
}
@ -71,9 +71,9 @@ const (
ResidualCodingMethodRice2 = 0b01
)
var ResidualCodingMethodMap = scalar.UToScalar{
ResidualCodingMethodRice: scalar.S{Sym: uint64(4), Description: "rice"},
ResidualCodingMethodRice2: scalar.S{Sym: uint64(5), Description: "rice2"},
var ResidualCodingMethodMap = scalar.UintMap{
ResidualCodingMethodRice: scalar.Uint{Sym: uint64(4), Description: "rice"},
ResidualCodingMethodRice2: scalar.Uint{Sym: uint64(5), Description: "rice2"},
}
// TODO: generic enough?
@ -115,12 +115,12 @@ func frameDecode(d *decode.D, in any) any {
d.FieldStruct("header", func(d *decode.D) {
// <14> 11111111111110
d.FieldU14("sync", d.AssertU(0b11111111111110), scalar.ActualBin)
d.FieldU14("sync", d.UintAssert(0b11111111111110), scalar.UintBin)
// <1> Reserved
// 0 : mandatory value
// 1 : reserved for future use
d.FieldU1("reserved0", d.AssertU(0))
d.FieldU1("reserved0", d.UintAssert(0))
// <1> Blocking strategy:
// 0 : fixed-blocksize stream; frame header encodes the frame number
@ -134,7 +134,7 @@ func frameDecode(d *decode.D, in any) any {
// 0110 : get 8 bit (blocksize-1) from end of header
// 0111 : get 16 bit (blocksize-1) from end of header
// 1000-1111 : 256 * (2^(n-8)) samples, i.e. 256/512/1024/2048/4096/8192/16384/32768
var blockSizeMap = scalar.UToScalar{
var blockSizeMap = scalar.UintMap{
0b0000: {Description: "reserved"},
0b0001: {Sym: uint64(192)},
0b0010: {Sym: uint64(576)},
@ -152,9 +152,9 @@ func frameDecode(d *decode.D, in any) any {
0b1110: {Sym: uint64(16384)},
0b1111: {Sym: uint64(32768)},
}
blockSizeS := d.FieldScalarU4("block_size", blockSizeMap, scalar.ActualBin)
blockSizeS := d.FieldScalarU4("block_size", blockSizeMap, scalar.UintBin)
if blockSizeS.Sym != nil {
blockSize = int(blockSizeS.SymU())
blockSize = int(blockSizeS.SymUint())
}
// <4> Sample rate:
@ -174,7 +174,7 @@ func frameDecode(d *decode.D, in any) any {
// 1101 : get 16 bit sample rate (in Hz) from end of header
// 1110 : get 16 bit sample rate (in tens of Hz) from end of header
// 1111 : invalid, to prevent sync-fooling string of 1s
var sampleRateMap = scalar.UToScalar{
var sampleRateMap = scalar.UintMap{
0b0000: {Description: "from streaminfo"},
0b0001: {Sym: uint64(88200)},
0b0010: {Sym: uint64(176400)},
@ -192,7 +192,7 @@ func frameDecode(d *decode.D, in any) any {
0b1110: {Description: "end of header (16 bit*10)"},
0b1111: {Description: "invalid"},
}
sampleRateS := d.FieldScalarU4("sample_rate", sampleRateMap, scalar.ActualBin)
sampleRateS := d.FieldScalarU4("sample_rate", sampleRateMap, scalar.UintBin)
// <4> Channel assignment
// 0000-0111 : (number of independent channels)-1. Where defined, the channel order follows SMPTE/ITU-R recommendations. The assignments are as follows:
@ -209,7 +209,7 @@ func frameDecode(d *decode.D, in any) any {
// 1010 : mid/side stereo: channel 0 is the mid(average) channel, channel 1 is the side(difference) channel
// 1011-1111 : reserved
// TODO: extract to tables and cleanup
var channelAssignmentMap = scalar.UToScalar{
var channelAssignmentMap = scalar.UintMap{
0: {Sym: uint64(1), Description: "mono"},
1: {Sym: uint64(2), Description: "lr"},
2: {Sym: uint64(3), Description: "lrc"},
@ -226,13 +226,13 @@ func frameDecode(d *decode.D, in any) any {
0b1101: {Sym: nil, Description: "reserved"},
0b1111: {Sym: nil, Description: "reserved"},
}
channelAssignmentS := d.FieldScalarU4("channel_assignment", channelAssignmentMap)
if channelAssignmentS.Sym == nil {
channelAssignmentUint := d.FieldScalarU4("channel_assignment", channelAssignmentMap)
if channelAssignmentUint.Sym == nil {
d.Fatalf("unknown number of channels")
}
channelAssignment = channelAssignmentS.ActualU()
channels = int(channelAssignmentS.SymU())
switch channelAssignmentS.ActualU() {
channelAssignment = channelAssignmentUint.Actual
channels = int(channelAssignmentUint.SymUint())
switch channelAssignmentUint.Actual {
case ChannelLeftSide:
sideChannelIndex = 1
case ChannelSideRight:
@ -241,7 +241,7 @@ func frameDecode(d *decode.D, in any) any {
sideChannelIndex = 1
}
if sideChannelIndex != -1 {
d.FieldValueU("side_channel_index", uint64(sideChannelIndex))
d.FieldValueUint("side_channel_index", uint64(sideChannelIndex))
}
// <3> Sample size in bits:
@ -253,7 +253,7 @@ func frameDecode(d *decode.D, in any) any {
// 101 : 20 bits per sample
// 110 : 24 bits per sample
// 111 : reserved
var sampleSizeMap = scalar.UToScalar{
var sampleSizeMap = scalar.UintMap{
0b000: {Description: "from streaminfo"},
0b001: {Sym: uint64(8)},
0b010: {Sym: uint64(12)},
@ -263,20 +263,20 @@ func frameDecode(d *decode.D, in any) any {
0b110: {Sym: uint64(24)},
0b111: {Sym: uint64(32)},
}
sampleSizeS := d.FieldScalarU3("sample_size", sampleSizeMap, scalar.ActualBin)
switch sampleSizeS.ActualU() {
sampleSizeS := d.FieldScalarU3("sample_size", sampleSizeMap, scalar.UintBin)
switch sampleSizeS.Actual {
case SampleSizeStreaminfo:
sampleSize = ffi.BitsPerSample
default:
if sampleSizeS.Sym != nil {
sampleSize = int(sampleSizeS.SymU())
sampleSize = int(sampleSizeS.SymUint())
}
}
// <1> Reserved:
// 0 : mandatory value
// 1 : reserved for future use
d.FieldU1("reserved1", d.AssertU(0))
d.FieldU1("reserved1", d.UintAssert(0))
d.FieldStruct("end_of_header", func(d *decode.D) {
// if(variable blocksize)
@ -287,20 +287,20 @@ func frameDecode(d *decode.D, in any) any {
// 1 : variable-blocksize stream; frame header encodes the sample number
switch blockingStrategy {
case BlockingStrategyVariable:
d.FieldUFn("sample_number", utf8Uint)
d.FieldUintFn("sample_number", utf8Uint)
case BlockingStrategyFixed:
d.FieldUFn("frame_number", utf8Uint)
d.FieldUintFn("frame_number", utf8Uint)
}
// if(blocksize bits == 011x)
// 8/16 bit (blocksize-1)
// 0110 : get 8 bit (blocksize-1) from end of header
// 0111 : get 16 bit (blocksize-1) from end of header
switch blockSizeS.ActualU() {
switch blockSizeS.Actual {
case BlockSizeEndOfHeader8:
blockSize = int(d.FieldU8("block_size", scalar.ActualUAdd(1)))
blockSize = int(d.FieldU8("block_size", scalar.UintActualAdd(1)))
case BlockSizeEndOfHeader16:
blockSize = int(d.FieldU16("block_size", scalar.ActualUAdd(1)))
blockSize = int(d.FieldU16("block_size", scalar.UintActualAdd(1)))
}
// if(sample rate bits == 11xx)
@ -308,19 +308,19 @@ func frameDecode(d *decode.D, in any) any {
// 1100 : get 8 bit sample rate (in kHz) from end of header
// 1101 : get 16 bit sample rate (in Hz) from end of header
// 1110 : get 16 bit sample rate (in tens of Hz) from end of header
switch sampleRateS.ActualU() {
switch sampleRateS.Actual {
case SampeleRateEndOfHeader8:
d.FieldUFn("sample_rate", func(d *decode.D) uint64 { return d.U8() * 1000 })
d.FieldUintFn("sample_rate", func(d *decode.D) uint64 { return d.U8() * 1000 })
case SampeleRateEndOfHeader16:
d.FieldU16("sample_rate")
case SampeleRateEndOfHeader160:
d.FieldUFn("sample_rate", func(d *decode.D) uint64 { return d.U16() * 10 })
d.FieldUintFn("sample_rate", func(d *decode.D) uint64 { return d.U16() * 10 })
}
})
headerCRC := &checksum.CRC{Bits: 8, Table: checksum.ATM8Table}
d.CopyBits(headerCRC, d.BitBufRange(frameStart, d.Pos()-frameStart))
d.FieldU8("crc", d.ValidateUBytes(headerCRC.Sum(nil)), scalar.ActualHex)
d.FieldU8("crc", d.UintValidateBytes(headerCRC.Sum(nil)), scalar.UintHex)
})
var channelSamples [][]int64
@ -328,7 +328,7 @@ func frameDecode(d *decode.D, in any) any {
for channelIndex := 0; channelIndex < channels; channelIndex++ {
d.FieldStruct("subframe", func(d *decode.D) {
// <1> Zero bit padding, to prevent sync-fooling string of 1s
d.FieldU1("zero_bit", d.AssertU(0))
d.FieldU1("zero_bit", d.UintAssert(0))
// <6> Subframe type:
// 000000 : SUBFRAME_CONSTANT
@ -339,25 +339,25 @@ func frameDecode(d *decode.D, in any) any {
// 01xxxx : reserved
// 1xxxxx : SUBFRAME_LPC, xxxxx=order-1
lpcOrder := -1
var subframeTypeRangeMap = scalar.URangeToScalar{
{Range: [2]uint64{0b000000, 0b000000}, S: scalar.S{Sym: SubframeConstant}},
{Range: [2]uint64{0b000001, 0b000001}, S: scalar.S{Sym: SubframeVerbatim}},
{Range: [2]uint64{0b000010, 0b000011}, S: scalar.S{Sym: SubframeReserved}},
{Range: [2]uint64{0b000100, 0b000111}, S: scalar.S{Sym: SubframeReserved}},
{Range: [2]uint64{0b001000, 0b001100}, S: scalar.S{Sym: SubframeFixed}},
{Range: [2]uint64{0b001101, 0b001111}, S: scalar.S{Sym: SubframeReserved}},
{Range: [2]uint64{0b010000, 0b011111}, S: scalar.S{Sym: SubframeReserved}},
{Range: [2]uint64{0b100000, 0b111111}, S: scalar.S{Sym: SubframeLPC}},
var subframeTypeRangeMap = scalar.UintRangeToScalar{
{Range: [2]uint64{0b000000, 0b000000}, S: scalar.Uint{Sym: SubframeConstant}},
{Range: [2]uint64{0b000001, 0b000001}, S: scalar.Uint{Sym: SubframeVerbatim}},
{Range: [2]uint64{0b000010, 0b000011}, S: scalar.Uint{Sym: SubframeReserved}},
{Range: [2]uint64{0b000100, 0b000111}, S: scalar.Uint{Sym: SubframeReserved}},
{Range: [2]uint64{0b001000, 0b001100}, S: scalar.Uint{Sym: SubframeFixed}},
{Range: [2]uint64{0b001101, 0b001111}, S: scalar.Uint{Sym: SubframeReserved}},
{Range: [2]uint64{0b010000, 0b011111}, S: scalar.Uint{Sym: SubframeReserved}},
{Range: [2]uint64{0b100000, 0b111111}, S: scalar.Uint{Sym: SubframeLPC}},
}
subframeTypeS := d.FieldScalarU6("subframe_type", subframeTypeRangeMap, scalar.ActualBin)
switch subframeTypeS.SymStr() {
subframeTypeUint := d.FieldScalarU6("subframe_type", subframeTypeRangeMap, scalar.UintBin)
switch subframeTypeUint.SymStr() {
case SubframeFixed:
lpcOrder = int(subframeTypeS.ActualU() & 0b111)
lpcOrder = int(subframeTypeUint.Actual & 0b111)
case SubframeLPC:
lpcOrder = int((subframeTypeS.ActualU() & 0b11111) + 1)
lpcOrder = int((subframeTypeUint.Actual & 0b11111) + 1)
}
if lpcOrder != -1 {
d.FieldValueU("lpc_order", uint64(lpcOrder))
d.FieldValueUint("lpc_order", uint64(lpcOrder))
}
// 'Wasted bits-per-sample' flag:
@ -366,7 +366,7 @@ func frameDecode(d *decode.D, in any) any {
wastedBitsFlag := d.FieldU1("wasted_bits_flag")
var wastedBitsK int
if wastedBitsFlag != 0 {
wastedBitsK = int(d.FieldUnary("wasted_bits_k", 0, scalar.ActualUAdd(1)))
wastedBitsK = int(d.FieldUnary("wasted_bits_k", 0, scalar.UintActualAdd(1)))
}
subframeSampleSize := sampleSize - wastedBitsK
@ -378,7 +378,7 @@ func frameDecode(d *decode.D, in any) any {
if channelIndex == sideChannelIndex {
subframeSampleSize++
}
d.FieldValueU("subframe_sample_size", uint64(subframeSampleSize))
d.FieldValueUint("subframe_sample_size", uint64(subframeSampleSize))
decodeWarmupSamples := func(samples []int64, n int, sampleSize int) {
if len(samples) < n {
@ -402,9 +402,9 @@ func frameDecode(d *decode.D, in any) any {
// 10-11 : reserved
var riceEscape int
var riceBits int
residualCodingMethod := d.FieldU2("residual_coding_method", scalar.UToScalar{
0b00: scalar.S{Sym: uint64(4), Description: "rice"},
0b01: scalar.S{Sym: uint64(5), Description: "rice2"},
residualCodingMethod := d.FieldU2("residual_coding_method", scalar.UintMap{
0b00: scalar.Uint{Sym: uint64(4), Description: "rice"},
0b01: scalar.Uint{Sym: uint64(5), Description: "rice2"},
})
switch residualCodingMethod {
case ResidualCodingMethodRice:
@ -419,7 +419,7 @@ func frameDecode(d *decode.D, in any) any {
partitionOrder := int(d.FieldU4("partition_order"))
// There will be 2^order partitions.
ricePartitions := 1 << partitionOrder
d.FieldValueU("rice_partitions", uint64(ricePartitions))
d.FieldValueUint("rice_partitions", uint64(ricePartitions))
d.FieldArray("partitions", func(d *decode.D) {
for i := 0; i < ricePartitions; i++ {
@ -445,7 +445,7 @@ func frameDecode(d *decode.D, in any) any {
count = (blockSize / ricePartitions) - lpcOrder
}
d.FieldValueU("count", uint64(count))
d.FieldValueUint("count", uint64(count))
riceParameter := int(d.FieldU("rice_parameter", riceBits))
@ -504,7 +504,7 @@ func frameDecode(d *decode.D, in any) any {
}
samples := make([]int64, blockSize)
switch subframeTypeS.SymStr() {
switch subframeTypeUint.SymStr() {
case SubframeConstant:
// <n> Unencoded constant value of the subblock, n = frame's bits-per-sample.
v := d.FieldS("value", subframeSampleSize)
@ -540,7 +540,7 @@ func frameDecode(d *decode.D, in any) any {
// <n> Unencoded warm-up samples (n = frame's bits-per-sample * lpc order).
decodeWarmupSamples(samples, lpcOrder, subframeSampleSize)
// <4> (Quantized linear predictor coefficients' precision in bits)-1 (1111 = invalid).
precision := int(d.FieldU4("precision", scalar.ActualUAdd(1)))
precision := int(d.FieldU4("precision", scalar.UintActualAdd(1)))
// <5> Quantized linear predictor coefficient shift needed in bits (NOTE: this number is signed two's-complement).
shift := d.FieldS5("shift")
if shift < 0 {
@ -570,7 +570,7 @@ func frameDecode(d *decode.D, in any) any {
})
// <?> Zero-padding to byte alignment.
d.FieldU("byte_align", d.ByteAlignBits(), d.AssertU(0))
d.FieldU("byte_align", d.ByteAlignBits(), d.UintAssert(0))
// <16> CRC-16 (polynomial = x^16 + x^15 + x^2 + x^0, initialized with 0) of everything before the crc, back to and including the frame header sync code
footerCRC := &checksum.CRC{Bits: 16, Table: checksum.ANSI16Table}
d.CopyBits(footerCRC, d.BitBufRange(frameStart, d.Pos()-frameStart))

View File

@ -39,7 +39,7 @@ const (
MetadataBlockPicture = 6
)
var metadataBlockNames = scalar.UToSymStr{
var metadataBlockNames = scalar.UintMapSymStr{
MetadataBlockStreaminfo: "streaminfo",
MetadataBlockPadding: "padding",
MetadataBlockApplication: "application",
@ -74,7 +74,7 @@ func metadatablockDecode(d *decode.D, _ any) any {
d.FieldArray("seekpoints", func(d *decode.D) {
for i := uint64(0); i < seektableCount; i++ {
d.FieldStruct("seekpoint", func(d *decode.D) {
d.FieldU64("sample_number", scalar.UToScalar{
d.FieldU64("sample_number", scalar.UintMap{
0xffff_ffff_ffff_ffff: {Description: "Placeholder"},
})
d.FieldU64("offset")

View File

@ -9,7 +9,7 @@ import (
var images decode.Group
var pictureTypeNames = scalar.UToSymStr{
var pictureTypeNames = scalar.UintMapSymStr{
0: "Other",
1: "32x32_pixels",
2: "other_file_icon",

View File

@ -22,9 +22,9 @@ func streaminfoDecode(d *decode.D, _ any) any {
d.FieldU24("maximum_frame_size")
sampleRate := d.FieldU("sample_rate", 20)
// <3> (number of channels)-1. FLAC supports from 1 to 8 channels
d.FieldU3("channels", scalar.ActualUAdd(1))
d.FieldU3("channels", scalar.UintActualAdd(1))
// <5> (bits per sample)-1. FLAC supports from 4 to 32 bits per sample. Currently the reference encoder and decoders only support up to 24 bits per sample.
bitsPerSample := d.FieldU5("bits_per_sample", scalar.ActualUAdd(1))
bitsPerSample := d.FieldU5("bits_per_sample", scalar.UintActualAdd(1))
totalSamplesInStream := d.FieldU("total_samples_in_stream", 36)
md5BR := d.FieldRawLen("md5", 16*8, scalar.RawHex)
md5b := d.ReadAllBits(md5BR)

View File

@ -27,7 +27,7 @@ const (
scriptDataObject = 18
)
var tagTypeNames = scalar.UToSymStr{
var tagTypeNames = scalar.UintMapSymStr{
audioData: "audioData",
videoData: "videoData",
scriptDataObject: "scriptDataObject",
@ -49,7 +49,7 @@ const (
typeLongString = 12
)
var typeNames = scalar.UToSymStr{
var typeNames = scalar.UintMapSymStr{
typeNumber: "Number",
typeBoolean: "Boolean",
typeString: "String",
@ -146,11 +146,11 @@ func flvDecode(d *decode.D, _ any) any {
})
}
d.FieldUTF8("signature", 3, d.AssertStr("FLV"))
d.FieldUTF8("signature", 3, d.StrAssert("FLV"))
d.FieldU8("version")
d.FieldU5("type_flags_reserved", d.AssertU(0))
d.FieldU5("type_flags_reserved", d.UintAssert(0))
d.FieldU1("type_flags_audio")
d.FieldU1("type_flags_reserved", d.AssertU(0))
d.FieldU1("type_flags_reserved", d.UintAssert(0))
d.FieldU1("type_flags_video")
dataOffset := d.FieldU32("data_offset")

View File

@ -32,7 +32,7 @@ const (
extensionApplication = 0xff
)
var extensionNames = scalar.UToSymStr{
var extensionNames = scalar.UintMapSymStr{
extensionPlainText: "PlainText",
extensionGraphicalControl: "GraphicalControl",
extensionComment: "Comment",
@ -54,14 +54,14 @@ func fieldColorMap(d *decode.D, name string, bitDepth int) {
func gifDecode(d *decode.D, _ any) any {
d.Endian = decode.LittleEndian
d.FieldUTF8("header", 6, d.AssertStr("GIF87a", "GIF89a"))
d.FieldUTF8("header", 6, d.StrAssert("GIF87a", "GIF89a"))
d.FieldU16("width")
d.FieldU16("height")
gcpFollows := d.FieldBool("gcp_follows")
d.FieldUFn("color_resolution", func(d *decode.D) uint64 { return d.U3() + 1 })
d.FieldUintFn("color_resolution", func(d *decode.D) uint64 { return d.U3() + 1 })
d.FieldU1("zero")
bitDepth := d.FieldUFn("bit_depth", func(d *decode.D) uint64 { return d.U3() + 1 })
bitDepth := d.FieldUintFn("bit_depth", func(d *decode.D) uint64 { return d.U3() + 1 })
d.FieldU8("black_color")
d.FieldU8("pixel_aspect_ratio")
@ -78,7 +78,7 @@ func gifDecode(d *decode.D, _ any) any {
case '!': /* "!" */
d.FieldStruct("extension_block", func(d *decode.D) {
d.FieldU8("introducer")
functionCode := d.FieldU8("function_code", extensionNames, scalar.ActualHex)
functionCode := d.FieldU8("function_code", extensionNames, scalar.UintHex)
dataBytes := &bytes.Buffer{}
@ -121,7 +121,7 @@ func gifDecode(d *decode.D, _ any) any {
localFollows := d.FieldBool("local_color_map_follows")
d.FieldBool("image_interlaced")
d.FieldU3("zero")
d.FieldUFn("bit_depth", func(d *decode.D) uint64 { return d.U3() + 1 })
d.FieldUintFn("bit_depth", func(d *decode.D) uint64 { return d.U3() + 1 })
d.FieldU8("code_size")
if localFollows {

View File

@ -32,11 +32,11 @@ func init() {
const delfateMethod = 8
var compressionMethodNames = scalar.UToSymStr{
var compressionMethodNames = scalar.UintMapSymStr{
delfateMethod: "deflate",
}
var osNames = scalar.UToSymStr{
var osNames = scalar.UintMapSymStr{
0: "fat",
1: "amiga",
2: "vms",
@ -53,7 +53,7 @@ var osNames = scalar.UToSymStr{
13: "acorn_riscos",
}
var deflateExtraFlagsNames = scalar.UToSymStr{
var deflateExtraFlagsNames = scalar.UintMapSymStr{
2: "slow",
4: "fast",
}
@ -75,7 +75,7 @@ func gzDecode(d *decode.D, _ any) any {
hasComment = d.FieldBool("comment")
d.FieldU3("reserved")
})
d.FieldU32("mtime", scalar.DescriptionUnixTimeFn(scalar.S.TryActualU, time.RFC3339))
d.FieldU32("mtime", scalar.UintActualUnixTime(time.RFC3339))
switch compressionMethod {
case delfateMethod:
d.FieldU8("extra_flags", deflateExtraFlagsNames)
@ -117,7 +117,7 @@ func gzDecode(d *decode.D, _ any) any {
crc32W := crc32.NewIEEE()
// TODO: cleanup clone
d.CopyBits(crc32W, d.CloneReadSeeker(uncompressedBR))
d.FieldU32("crc32", d.ValidateUBytes(crc32W.Sum(nil)), scalar.ActualHex)
d.FieldU32("crc32", d.UintValidateBytes(crc32W.Sum(nil)), scalar.UintHex)
d.FieldU32("isize")
}
}

View File

@ -111,8 +111,8 @@ func iccProfileDecode(d *decode.D, _ any) any {
d.FieldStruct("header", func(d *decode.D) {
d.FieldU32("size")
d.FieldUTF8NullFixedLen("cmm_type_signature", 4)
d.FieldUFn("version_major", decodeBCDU8)
d.FieldUFn("version_minor", decodeBCDU8)
d.FieldUintFn("version_major", decodeBCDU8)
d.FieldUintFn("version_minor", decodeBCDU8)
d.FieldU16("version_reserved")
d.FieldUTF8NullFixedLen("device_class_signature", 4)
d.FieldUTF8NullFixedLen("color_space", 4)

View File

@ -20,7 +20,7 @@ func init() {
// Decode ID3v1 tag
func id3v1Decode(d *decode.D, _ any) any {
d.AssertAtLeastBitsLeft(128 * 8)
d.FieldUTF8("magic", 3, d.AssertStr("TAG"))
d.FieldUTF8("magic", 3, d.StrAssert("TAG"))
if d.PeekBits(8) == uint64('+') {
d.Errorf("looks like id3v11")
}
@ -30,7 +30,7 @@ func id3v1Decode(d *decode.D, _ any) any {
d.FieldUTF8NullFixedLen("year", 4)
d.FieldUTF8NullFixedLen("comment", 30)
// from https://en.wikipedia.org/wiki/List_of_ID3v1_Genres
d.FieldU8("genre", scalar.UToScalar{
d.FieldU8("genre", scalar.UintMap{
0: {Sym: "blues", Description: "Blues"},
1: {Sym: "classic_rock", Description: "Classic Rock"},
2: {Sym: "country", Description: "Country"},

View File

@ -17,11 +17,11 @@ func init() {
func id3v11Decode(d *decode.D, _ any) any {
d.AssertAtLeastBitsLeft(128 * 8)
d.FieldUTF8("magic", 4, d.AssertStr("TAG+"))
d.FieldUTF8("magic", 4, d.StrAssert("TAG+"))
d.FieldUTF8("title", 60)
d.FieldUTF8("artist", 60)
d.FieldUTF8("album", 60)
d.FieldU8("speed", scalar.UToSymStr{
d.FieldU8("speed", scalar.UintMapSymStr{
0: "unset",
1: "slow",
2: "medium",

View File

@ -32,7 +32,7 @@ func init() {
})
}
var idDescriptions = scalar.StrToDescription{
var idDescriptions = scalar.StrMapDescription{
"BUF": "Recommended buffer size",
"CNT": "Play counter",
"COM": "Comments",
@ -236,7 +236,7 @@ const (
// Terminated with $00 00.
//
// $03 UTF-8 [UTF-8] encoded Unicode [UNICODE]. Terminated with $00.
var encodingNames = scalar.UToSymStr{
var encodingNames = scalar.UintMapSymStr{
encodingISO8859_1: "iso_8859-1",
encodingUTF16: "utf16",
encodingUTF16BE: "utf16be",
@ -370,7 +370,7 @@ func decodeFrame(d *decode.D, version int) uint64 {
// Size 4 * %0xxxxxxx (synchsafe integer)
// Flags $xx xx
id = d.FieldUTF8("id", 4, idDescriptions)
dataSize = d.FieldUFn("size", decodeSyncSafeU32)
dataSize = d.FieldUintFn("size", decodeSyncSafeU32)
var headerLen uint64 = 10
dataLenFlag := false
@ -395,7 +395,7 @@ func decodeFrame(d *decode.D, version int) uint64 {
})
if dataLenFlag {
d.FieldUFn("data_length_indicator", decodeSyncSafeU32)
d.FieldUintFn("data_length_indicator", decodeSyncSafeU32)
dataSize -= 4
headerLen += 4
}
@ -608,7 +608,7 @@ func decodeFrames(d *decode.D, version int, size uint64) {
func id3v2Decode(d *decode.D, _ any) any {
d.AssertAtLeastBitsLeft(4 * 8)
d.FieldUTF8("magic", 3, d.AssertStr("ID3"))
d.FieldUTF8("magic", 3, d.StrAssert("ID3"))
version := int(d.FieldU8("version"))
versionValid := version == 2 || version == 3 || version == 4
if !versionValid {
@ -623,7 +623,7 @@ func id3v2Decode(d *decode.D, _ any) any {
d.FieldBool("experimental_indicator")
d.FieldU5("unused")
})
size := d.FieldUFn("size", decodeSyncSafeU32)
size := d.FieldUintFn("size", decodeSyncSafeU32)
var extHeaderSize uint64
if extendedHeader {
@ -633,7 +633,7 @@ func id3v2Decode(d *decode.D, _ any) any {
extHeaderSize = d.FieldU32("size")
d.FieldRawLen("data", int64(extHeaderSize)*8)
case 4:
extHeaderSize = d.FieldUFn("size", decodeSyncSafeU32)
extHeaderSize = d.FieldUintFn("size", decodeSyncSafeU32)
// in v4 synchsafe integer includes itself
d.FieldRawLen("data", (int64(extHeaderSize)-4)*8)
}
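decodeSyncSafeU32 above is consumed through the renamed FieldUintFn. As a hedged sketch only (not the project's implementation), a reader for the 4x7-bit synchsafe layout could be built from the byte reader used elsewhere in this change:

d.FieldUintFn("size", func(d *decode.D) uint64 {
	var n uint64
	for i := 0; i < 4; i++ {
		b := d.U8()           // each byte carries 7 payload bits; the top bit is zero
		n = n<<7 | (b & 0x7f) // accumulate into a 28-bit value
	}
	return n
})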

View File

@ -140,7 +140,7 @@ const (
LinkTypeETW = 290
)
var LinkTypeMap = scalar.UToScalar{
var LinkTypeMap = scalar.UintMap{
LinkTypeNULL: {Sym: "null", Description: `BSD loopback encapsulation`},
LinkTypeETHERNET: {Sym: "ethernet", Description: `IEEE 802.3 Ethernet`},
LinkTypeAX25: {Sym: "ax25", Description: `AX.25 packet, with nothing preceding it`},
@ -281,7 +281,7 @@ const (
// from https://en.wikipedia.org/wiki/EtherType
// TODO: cleanup
var EtherTypeMap = scalar.UToScalar{
var EtherTypeMap = scalar.UintMap{
EtherTypeIPv4: {Sym: "ipv4", Description: `Internet Protocol version 4`},
0x0806: {Sym: "arp", Description: `Address Resolution Protocol`},
0x0842: {Sym: "wake", Description: `Wake-on-LAN[9]`},
@ -350,7 +350,7 @@ const (
IPv4ProtocolICMPv6 = 58
)
var IPv4ProtocolMap = scalar.UToScalar{
var IPv4ProtocolMap = scalar.UintMap{
0: {Sym: "ip", Description: "Internet protocol, pseudo protocol number"},
IPv4ProtocolICMP: {Sym: "icmp", Description: "Internet control message protocol"},
IPv4ProtocolIGMP: {Sym: "igmp", Description: "Internet group management protocol"},
@ -498,7 +498,7 @@ const (
UDPPortMDNS = 5353
)
var UDPPortMap = scalar.UToScalar{
var UDPPortMap = scalar.UintMap{
1: {Sym: "tcpmux", Description: "TCP Port Service Multiplexer"},
2: {Sym: "compressnet", Description: "Management Utility"},
3: {Sym: "compressnet", Description: "Compression Process"},
@ -1186,7 +1186,7 @@ const (
TCPPortRTMP = 1935
)
var TCPPortMap = scalar.UToScalar{
var TCPPortMap = scalar.UintMap{
1: {Sym: "tcpmux", Description: "TCP Port Service Multiplexer"},
2: {Sym: "compressnet", Description: "Management Utility"},
3: {Sym: "compressnet", Description: "Compression Process"},

View File

@ -33,7 +33,7 @@ var bsdLoopbackFrameNetworkLayerEtherType = map[uint64]int{
bsdLoopbackNetworkLayerIPv6: format.EtherTypeIPv6,
}
var bsdLookbackNetworkLayerMap = scalar.UToScalar{
var bsdLookbackNetworkLayerMap = scalar.UintMap{
bsdLoopbackNetworkLayerIPv4: {Sym: "ipv4", Description: `Internet protocol v4`},
bsdLoopbackNetworkLayerIPv6: {Sym: "ipv6", Description: `Internet protocol v6`},
}
@ -50,7 +50,7 @@ func decodeLoopbackFrame(d *decode.D, in any) any {
}
// if no LinkFrameIn assume big endian for now
networkLayer := d.FieldU32("network_layer", bsdLookbackNetworkLayerMap, scalar.ActualHex)
networkLayer := d.FieldU32("network_layer", bsdLookbackNetworkLayerMap, scalar.UintHex)
d.FieldFormatOrRawLen(
"payload",

View File

@ -27,9 +27,9 @@ func init() {
}
// TODO: move to shared?
var mapUToEtherSym = scalar.Fn(func(s scalar.S) (scalar.S, error) {
var mapUToEtherSym = scalar.UintFn(func(s scalar.Uint) (scalar.Uint, error) {
var b [8]byte
binary.BigEndian.PutUint64(b[:], s.ActualU())
binary.BigEndian.PutUint64(b[:], s.Actual)
s.Sym = fmt.Sprintf("%.2x:%.2x:%.2x:%.2x:%.2x:%.2x", b[2], b[3], b[4], b[5], b[6], b[7])
return s, nil
})
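mapUToEtherSym is now a scalar.UintFn working directly on the typed scalar.Uint. A further hedged sketch of the same idiom; this extra mapper is illustrative only and assumes the Field* readers accept several mappers, as the two-mapper calls below do:

// hypothetical companion mapper: flag locally administered addresses
var macLocallyAdministered = scalar.UintFn(func(s scalar.Uint) (scalar.Uint, error) {
	// for a 48-bit address held in a uint64, the U/L bit of the first octet is bit 41
	if s.Actual&(1<<41) != 0 {
		s.Description = "locally administered"
	}
	return s, nil
})

// usage sketch: d.FieldU("source", 48, mapUToEtherSym, macLocallyAdministered, scalar.UintHex)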
@ -41,9 +41,9 @@ func decodeEthernetFrame(d *decode.D, in any) any {
}
}
d.FieldU("destination", 48, mapUToEtherSym, scalar.ActualHex)
d.FieldU("source", 48, mapUToEtherSym, scalar.ActualHex)
etherType := d.FieldU16("ether_type", format.EtherTypeMap, scalar.ActualHex)
d.FieldU("destination", 48, mapUToEtherSym, scalar.UintHex)
d.FieldU("source", 48, mapUToEtherSym, scalar.UintHex)
etherType := d.FieldU16("ether_type", format.EtherTypeMap, scalar.UintHex)
d.FieldFormatOrRawLen(
"payload",

View File

@ -17,7 +17,7 @@ func init() {
}
// based on https://en.wikipedia.org/wiki/Internet_Control_Message_Protocol
var icmpTypeMap = scalar.UToScalar{
var icmpTypeMap = scalar.UintMap{
0: {Sym: "echo_reply", Description: "Echo reply"},
3: {Sym: "unreachable", Description: "Destination network unreachable"},
4: {Sym: "source_quench", Description: "Source quench (congestion control)"},
@ -50,7 +50,7 @@ var icmpTypeMap = scalar.UToScalar{
43: {Sym: "extended_echo_reply", Description: "No Error"},
}
var icmpCodeMapMap = map[uint64]scalar.UToDescription{
var icmpCodeMapMap = map[uint64]scalar.UintMapDescription{
3: {
1: "Destination host unreachable",
2: "Destination protocol unreachable",

View File

@ -17,7 +17,7 @@ func init() {
}
// based on https://en.wikipedia.org/wiki/Internet_Control_Message_Protocol_for_IPv6
var icmpv6TypeMap = scalar.UToScalar{
var icmpv6TypeMap = scalar.UintMap{
1: {Sym: "unreachable", Description: "Destination unreachable"},
2: {Sym: "too_big", Description: "Packet too big"},
3: {Sym: "time_exceeded", Description: "Time exceeded"},
@ -56,7 +56,7 @@ var icmpv6TypeMap = scalar.UToScalar{
255: {Description: "Reserved for expansion of ICMPv6 informational messages"},
}
var icmpv6CodeMapMap = map[uint64]scalar.UToDescription{
var icmpv6CodeMapMap = map[uint64]scalar.UintMapDescription{
1: {
1: "Communication with destination administratively prohibited",
2: "Beyond scope of source address",

View File

@ -31,7 +31,7 @@ const (
ipv4OptionNop = 1
)
var ipv4OptionsMap = scalar.UToScalar{
var ipv4OptionsMap = scalar.UintMap{
ipv4OptionEnd: {Sym: "end", Description: "End of options list"},
ipv4OptionNop: {Sym: "nop", Description: "No operation"},
2: {Description: "Security"},
@ -42,9 +42,9 @@ var ipv4OptionsMap = scalar.UToScalar{
4: {Description: "Internet Timestamp"},
}
var mapUToIPv4Sym = scalar.Fn(func(s scalar.S) (scalar.S, error) {
var mapUToIPv4Sym = scalar.UintFn(func(s scalar.Uint) (scalar.Uint, error) {
var b [4]byte
binary.BigEndian.PutUint32(b[:], uint32(s.ActualU()))
binary.BigEndian.PutUint32(b[:], uint32(s.Actual))
s.Sym = net.IP(b[:]).String()
return s, nil
})
@ -67,10 +67,10 @@ func decodeIPv4(d *decode.D, in any) any {
d.FieldU8("ttl")
protocol := d.FieldU8("protocol", format.IPv4ProtocolMap)
checksumStart := d.Pos()
d.FieldU16("header_checksum", scalar.ActualHex)
d.FieldU16("header_checksum", scalar.UintHex)
checksumEnd := d.Pos()
d.FieldU32("source_ip", mapUToIPv4Sym, scalar.ActualHex)
d.FieldU32("destination_ip", mapUToIPv4Sym, scalar.ActualHex)
d.FieldU32("source_ip", mapUToIPv4Sym, scalar.UintHex)
d.FieldU32("destination_ip", mapUToIPv4Sym, scalar.UintHex)
optionsLen := (int64(ihl) - 5) * 8 * 4
if optionsLen > 0 {
d.FramedFn(optionsLen, func(d *decode.D) {
@ -96,7 +96,7 @@ func decodeIPv4(d *decode.D, in any) any {
ipv4Checksum := &checksum.IPv4{}
d.Copy(ipv4Checksum, bitio.NewIOReader(d.BitBufRange(0, checksumStart)))
d.Copy(ipv4Checksum, bitio.NewIOReader(d.BitBufRange(checksumEnd, headerEnd-checksumEnd)))
_ = d.FieldMustGet("header_checksum").TryScalarFn(d.ValidateUBytes(ipv4Checksum.Sum(nil)), scalar.ActualHex)
_ = d.FieldMustGet("header_checksum").TryUintScalarFn(d.UintValidateBytes(ipv4Checksum.Sum(nil)), scalar.UintHex)
dataLen := int64(totalLength-(ihl*4)) * 8
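The header checksum is read before the bytes it covers have been walked and is re-validated afterwards through the renamed helpers; compressed into a sketch (the checksum writer and the surrounding header decoding are elided):

d.FieldU16("header_checksum", scalar.UintHex) // read first, not yet checkable
// ... decode the rest of the header, copying its bits into ipv4Checksum ...
_ = d.FieldMustGet("header_checksum").
	TryUintScalarFn(d.UintValidateBytes(ipv4Checksum.Sum(nil)), scalar.UintHex)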

View File

@ -41,7 +41,7 @@ const (
// 253 Use for experimentation and testing [RFC3692][RFC4727]
// 254 Use for experimentation and testing [RFC3692][RFC4727]
var nextHeaderNames = scalar.UToSymStr{
var nextHeaderNames = scalar.UintMapSymStr{
nextHeaderHopByHop: "hop_by_hop",
nextHeaderRouting: "routing",
nextHeaderFragment: "fragment",
@ -53,11 +53,11 @@ var nextHeaderNames = scalar.UToSymStr{
nextHeaderShim6: "shim6",
}
var nextHeaderMap = scalar.Fn(func(s scalar.S) (scalar.S, error) {
if isIpv6Option(s.ActualU()) {
return nextHeaderNames.MapScalar(s)
var nextHeaderMap = scalar.UintFn(func(s scalar.Uint) (scalar.Uint, error) {
if isIpv6Option(s.Actual) {
return nextHeaderNames.MapUint(s)
}
return format.IPv4ProtocolMap.MapScalar(s)
return format.IPv4ProtocolMap.MapUint(s)
})
func isIpv6Option(n uint64) bool {
@ -78,7 +78,7 @@ func isIpv6Option(n uint64) bool {
}
// from https://www.iana.org/assignments/ipv6-parameters/ipv6-parameters.xhtml#ipv6-parameters-2
var hopByHopTypeNames = scalar.UToSymStr{
var hopByHopTypeNames = scalar.UintMapSymStr{
0x00: "pad1",
0x01: "padn",
0xc2: "jumbo_payload",
@ -99,9 +99,9 @@ var hopByHopTypeNames = scalar.UToSymStr{
0x31: "ioam",
}
var mapUToIPv6Sym = scalar.Fn(func(s scalar.S) (scalar.S, error) {
var mapUToIPv6Sym = scalar.BitBufFn(func(s scalar.BitBuf) (scalar.BitBuf, error) {
b := &bytes.Buffer{}
if _, err := bitioex.CopyBits(b, s.ActualBitBuf()); err != nil {
if _, err := bitioex.CopyBits(b, s.Actual); err != nil {
return s, err
}
s.Sym = net.IP(b.Bytes()).String()

View File

@ -31,12 +31,12 @@ func decodeSLL2(d *decode.D, in any) any {
}
}
protcolType := d.FieldU16("protocol_type", format.EtherTypeMap, scalar.ActualHex)
protcolType := d.FieldU16("protocol_type", format.EtherTypeMap, scalar.UintHex)
d.FieldU16("reserved")
d.FieldU32("interface_index")
arpHdrType := d.FieldU16("arphdr_type", arpHdrTypeMAp)
d.FieldU8("packet_type", sllPacketTypeMap)
addressLength := d.FieldU8("link_address_length", d.ValidateURange(0, 8))
addressLength := d.FieldU8("link_address_length", d.UintValidateRange(0, 8))
// "If there are more than 8 bytes, only the first 8 bytes are present"
if addressLength > 8 {
addressLength = 8
@ -51,7 +51,7 @@ func decodeSLL2(d *decode.D, in any) any {
// TODO: handle other arphdr types
switch arpHdrType {
case arpHdrTypeLoopback, arpHdrTypeEther:
_ = d.FieldMustGet("link_address").TryScalarFn(mapUToEtherSym, scalar.ActualHex)
_ = d.FieldMustGet("link_address").TryUintScalarFn(mapUToEtherSym, scalar.UintHex)
d.FieldFormatOrRawLen(
"payload",
d.BitsLeft(),

View File

@ -24,7 +24,7 @@ func init() {
})
}
var sllPacketTypeMap = scalar.UToScalar{
var sllPacketTypeMap = scalar.UintMap{
0: {Sym: "to_us", Description: "Sent to us"},
1: {Sym: "broadcast", Description: "Broadcast by somebody else"},
2: {Sym: "multicast", Description: "Multicast by somebody else"},
@ -38,7 +38,7 @@ const (
)
// based on https://github.com/torvalds/linux/blob/master/include/uapi/linux/if_arp.h
var arpHdrTypeMAp = scalar.UToScalar{
var arpHdrTypeMAp = scalar.UintMap{
0: {Sym: "netrom", Description: `from KA9Q: NET/ROM pseudo`},
arpHdrTypeEther: {Sym: "ether", Description: `Ethernet 10Mbps`},
2: {Sym: "eether", Description: `Experimental Ethernet`},
@ -127,8 +127,8 @@ func decodeSLL(d *decode.D, in any) any {
// TODO: handle other arphdr types
switch arpHdrType {
case arpHdrTypeLoopback, arpHdrTypeEther:
_ = d.FieldMustGet("link_address").TryScalarFn(mapUToEtherSym, scalar.ActualHex)
protcolType := d.FieldU16("protocol_type", format.EtherTypeMap, scalar.ActualHex)
_ = d.FieldMustGet("link_address").TryUintScalarFn(mapUToEtherSym, scalar.UintHex)
protcolType := d.FieldU16("protocol_type", format.EtherTypeMap, scalar.UintHex)
d.FieldFormatOrRawLen(
"payload",
d.BitsLeft(),

View File

@ -23,7 +23,7 @@ const (
tcpOptionNop = 1
)
var tcpOptionsMap = scalar.UToScalar{
var tcpOptionsMap = scalar.UintMap{
tcpOptionEnd: {Sym: "end", Description: "End of options list"},
tcpOptionNop: {Sym: "nop", Description: "No operation"},
2: {Sym: "maxseg", Description: "Maximum segment size"},
@ -55,7 +55,7 @@ func decodeTCP(d *decode.D, in any) any {
d.FieldBool("fin")
d.FieldU16("window_size")
// checksumStart := d.Pos()
d.FieldU16("checksum", scalar.ActualHex)
d.FieldU16("checksum", scalar.UintHex)
// checksumEnd := d.Pos()
d.FieldU16("urgent_pointer")
optionsLen := (int64(dataOffset) - 5) * 8 * 4
@ -81,7 +81,7 @@ func decodeTCP(d *decode.D, in any) any {
// tcpChecksum := &checksum.IPv4{}
// d.MustCopy(tcpChecksum, d.BitBufRange(0, checksumStart))
// d.MustCopy(tcpChecksum, d.BitBufRange(checksumEnd, d.Len()-checksumEnd))
// _ = d.FieldMustGet("checksum").TryScalarFn(d.ValidateUBytes(tcpChecksum.Sum(nil)), scalar.Hex)
// _ = d.FieldMustGet("checksum").TryScalarFn(d.UintValidateBytes(tcpChecksum.Sum(nil)), scalar.Hex)
d.FieldRawLen("payload", d.BitsLeft())

View File

@ -29,7 +29,7 @@ func decodeUDP(d *decode.D, in any) any {
sourcePort := d.FieldU16("source_port", format.UDPPortMap)
destPort := d.FieldU16("destination_port", format.UDPPortMap)
length := d.FieldU16("length")
d.FieldU16("checksum", scalar.ActualHex)
d.FieldU16("checksum", scalar.UintHex)
payloadLen := int64(length-8) * 8
d.FieldFormatOrRawLen(

View File

@ -97,7 +97,7 @@ const (
TEM = 0x01
)
var markers = scalar.UToScalar{
var markers = scalar.UintMap{
SOF0: {Sym: "sof0", Description: "Baseline DCT"},
SOF1: {Sym: "sof1", Description: "Extended sequential DCT"},
SOF2: {Sym: "sof2", Description: "Progressive DCT"},

View File

@ -3,7 +3,7 @@ package jpeg
import "github.com/wader/fq/pkg/scalar"
// based on https://www.adobe.com/devnet-apps/photoshop/fileformatashtml
var psImageResourceBlockNames = scalar.UToDescription{
var psImageResourceBlockNames = scalar.UintMapDescription{
0x03E8: `Contains five 2-byte values: number of channels, rows, columns, depth, and mode`,
0x03E9: `Macintosh print manager print info record`,
0x03EA: `Macintosh page format information. No longer read by Photoshop. (Obsolete)`,

View File

@ -66,7 +66,7 @@ func decodeJSONEx(d *decode.D, lines bool) any {
d.Fatalf("trialing data after top-level value")
}
var s scalar.S
var s scalar.Any
if lines {
if len(vs) == 0 {
d.Fatalf("not lines found")

format/json/testdata/json_color.fqtest (new vendored file, 21 lines)
View File

@ -0,0 +1,21 @@
# TODO: tests decorator with different types, move this test
$ fq -C d json.gz
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: json.gz (gzip)
0x000|1f 8b |.. | identification: raw bits (valid)
0x000| 08 | . | compression_method: "deflate" (8)
| | | flags{}:
0x000| 00 | . | text: false
0x000| 00 | . | header_crc: false
0x000| 00 | . | extra: false
0x000| 00 | . | name: false
0x000| 00 | . | comment: false
0x000| 00 | . | reserved: 0
0x000| 65 0a 08 61 | e..a | mtime: 1627916901 (2021-08-02T15:08:21Z)
0x000| 00 | . | extra_flags: 0
0x000| 03 | . | os: "unix" (3)
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|7b 22 61 22 3a 20 31 32 33 7d 0a| |{"a": 123}.| | uncompressed: {} (json)
0x000| ab 56 4a 54 b2 52| .VJT.R| compressed: raw bits
0x010|30 34 32 ae e5 02 00 |042.... |
0x010| 20 ac d2 9c |  ... | crc32: 0x9cd2ac20 (valid)
0x010| 0b 00 00 00| | ....|| isize: 11

@ -40,8 +40,8 @@ func strIndexNull(idx int, s string) string {
type strTable string
func (m strTable) MapScalar(s scalar.S) (scalar.S, error) {
s.Sym = strIndexNull(int(s.ActualU()), string(m))
func (m strTable) MapUint(s scalar.Uint) (scalar.Uint, error) {
s.Sym = strIndexNull(int(s.Actual), string(m))
return s, nil
}
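A short sketch of a custom mapper under the new interface, where any type with a MapUint method can be passed to a uint field (illustrative, names made up; assumes the same decode/scalar imports as above):

type sketchNameTable []string

func (m sketchNameTable) MapUint(s scalar.Uint) (scalar.Uint, error) {
	if s.Actual < uint64(len(m)) {
		s.Sym = m[s.Actual] // set sym from the decoded actual value
	}
	return s, nil
}

// usage: d.FieldU8("kind", sketchNameTable{"none", "file", "dir"})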
@ -53,14 +53,14 @@ const (
MH_CIGAM_64 = 0xcffa_edfe
)
var magicSymMapper = scalar.UToScalar{
MH_MAGIC: scalar.S{Sym: "32le", Description: "32-bit little endian"},
MH_CIGAM: scalar.S{Sym: "32be", Description: "32-bit big endian"},
MH_MAGIC_64: scalar.S{Sym: "64le", Description: "64-bit little endian"},
MH_CIGAM_64: scalar.S{Sym: "64be", Description: "64-bit big endian"},
var magicSymMapper = scalar.UintMap{
MH_MAGIC: scalar.Uint{Sym: "32le", Description: "32-bit little endian"},
MH_CIGAM: scalar.Uint{Sym: "32be", Description: "32-bit big endian"},
MH_MAGIC_64: scalar.Uint{Sym: "64le", Description: "64-bit little endian"},
MH_CIGAM_64: scalar.Uint{Sym: "64be", Description: "64-bit big endian"},
}
var cpuTypes = scalar.UToSymStr{
var cpuTypes = scalar.UintMapSymStr{
0xff_ff_ff_ff: "any",
1: "vax",
2: "romp",
@ -89,7 +89,7 @@ func intelSubTypeHelper(f, m uint64) uint64 {
return f + (m << 4)
}
var cpuSubTypes = map[uint64]scalar.UToSymStr{
var cpuSubTypes = map[uint64]scalar.UintMapSymStr{
0xff_ff_ff_ff: {
0xff_ff_ff_ff: "multiple",
},
@ -214,7 +214,7 @@ var cpuSubTypes = map[uint64]scalar.UToSymStr{
},
}
var fileTypes = scalar.UToSymStr{
var fileTypes = scalar.UintMapSymStr{
0x1: "object",
0x2: "execute",
0x3: "fvmlib",
@ -284,7 +284,7 @@ const (
LC_BUILD_VERSION = 0x32
)
var loadCommands = scalar.UToSymStr{
var loadCommands = scalar.UintMapSymStr{
LC_REQ_DYLD: "req_dyld",
LC_SEGMENT: "segment",
LC_SYMTAB: "symtab",
@ -339,7 +339,7 @@ var loadCommands = scalar.UToSymStr{
LC_BUILD_VERSION: "build_version",
}
var sectionTypes = scalar.UToSymStr{
var sectionTypes = scalar.UintMapSymStr{
0x0: "regular",
0x1: "zerofill",
0x2: "cstring_literals",
@ -390,11 +390,11 @@ func machoDecode(d *decode.D, _ any) any {
d.SeekRel(-4 * 8)
d.FieldStruct("header", func(d *decode.D) {
d.FieldValueS("arch_bits", int64(archBits))
d.FieldU32("magic", magicSymMapper, scalar.ActualHex)
d.FieldValueU("bits", uint64(archBits))
cpuType = d.FieldU32("cputype", cpuTypes, scalar.ActualHex)
d.FieldU32("cpusubtype", cpuSubTypes[cpuType], scalar.ActualHex)
d.FieldValueSint("arch_bits", int64(archBits))
d.FieldU32("magic", magicSymMapper, scalar.UintHex)
d.FieldValueUint("bits", uint64(archBits))
cpuType = d.FieldU32("cputype", cpuTypes, scalar.UintHex)
d.FieldU32("cpusubtype", cpuSubTypes[cpuType], scalar.UintHex)
d.FieldU32("filetype", fileTypes)
ncmds = d.FieldU32("ncdms")
d.FieldU32("sizeofncdms")
@ -409,7 +409,7 @@ func machoDecode(d *decode.D, _ any) any {
d.FieldStruct("load_command", func(d *decode.D) {
d.SeekAbs(loadCommandsNext)
cmd := d.FieldU32("cmd", loadCommands, scalar.ActualHex)
cmd := d.FieldU32("cmd", loadCommands, scalar.UintHex)
cmdSize := d.FieldU32("cmdsize")
if cmdSize == 0 {
d.Fatalf("cmdSize is zero")
@ -431,17 +431,17 @@ func machoDecode(d *decode.D, _ any) any {
var nsects uint64
d.FieldStruct("segment_command", func(d *decode.D) {
d.FieldValueS("arch_bits", int64(archBits))
d.FieldValueSint("arch_bits", int64(archBits))
d.FieldUTF8NullFixedLen("segname", 16) // OPCODE_DECODER segname==__TEXT
if archBits == 32 {
vmaddr = int64(d.FieldU32("vmaddr", scalar.ActualHex))
vmaddr = int64(d.FieldU32("vmaddr", scalar.UintHex))
d.FieldU32("vmsize")
fileoff = int64(d.FieldU32("fileoff", scalar.ActualHex))
fileoff = int64(d.FieldU32("fileoff", scalar.UintHex))
d.FieldU32("tfilesize")
} else {
vmaddr = int64(d.FieldU64("vmaddr", scalar.ActualHex))
vmaddr = int64(d.FieldU64("vmaddr", scalar.UintHex))
d.FieldU64("vmsize")
fileoff = int64(d.FieldU64("fileoff", scalar.ActualHex))
fileoff = int64(d.FieldU64("fileoff", scalar.UintHex))
d.FieldU64("tfilesize")
}
d.FieldS32("initprot")
@ -457,13 +457,13 @@ func machoDecode(d *decode.D, _ any) any {
d.FieldUTF8NullFixedLen("segname", 16)
var size uint64
if archBits == 32 {
d.FieldU32("address", scalar.ActualHex)
d.FieldU32("address", scalar.UintHex)
size = d.FieldU32("size")
} else {
d.FieldU64("address", scalar.ActualHex)
d.FieldU64("address", scalar.UintHex)
size = d.FieldU64("size")
}
offset := d.FieldU32("offset", scalar.ActualHex)
offset := d.FieldU32("offset", scalar.UintHex)
d.FieldU32("align")
d.FieldU32("reloff")
d.FieldU32("nreloc")
@ -506,11 +506,11 @@ func machoDecode(d *decode.D, _ any) any {
const flagUTF16 = 0x07d0
d.FieldU("isa_vmaddr", archBits)
flag := d.FieldU("flags", archBits, scalar.ActualHex, scalar.UToSymStr{
flag := d.FieldU("flags", archBits, scalar.UintHex, scalar.UintMapSymStr{
flagUTF8: "utf8",
flagUTF16: "utf16",
})
dataPtr := int64(d.FieldU("data_ptr", archBits, scalar.ActualHex))
dataPtr := int64(d.FieldU("data_ptr", archBits, scalar.UintHex))
length := int64(d.FieldU("length", archBits))
offset := ((dataPtr - vmaddr) + fileoff) * 8
@ -533,7 +533,7 @@ func machoDecode(d *decode.D, _ any) any {
}
})
case LC_TWOLEVEL_HINTS:
d.FieldU32("offset", scalar.ActualHex)
d.FieldU32("offset", scalar.UintHex)
d.FieldU32("nhints")
case LC_LOAD_DYLIB,
LC_ID_DYLIB,
@ -542,7 +542,7 @@ func machoDecode(d *decode.D, _ any) any {
LC_LAZY_LOAD_DYLIB,
LC_REEXPORT_DYLIB:
d.FieldStruct("dylib_command", func(d *decode.D) {
offset := d.FieldU32("offset", scalar.ActualHex)
offset := d.FieldU32("offset", scalar.UintHex)
d.FieldU32("timestamp", timestampMapper)
d.FieldU32("current_version")
d.FieldU32("compatibility_version")
@ -551,10 +551,10 @@ func machoDecode(d *decode.D, _ any) any {
case LC_LOAD_DYLINKER,
LC_ID_DYLINKER,
LC_DYLD_ENVIRONMENT:
offset := d.FieldU32("offset", scalar.ActualHex)
offset := d.FieldU32("offset", scalar.UintHex)
d.FieldUTF8NullFixedLen("name", int(cmdSize)-int(offset))
case LC_RPATH:
offset := d.FieldU32("offset", scalar.ActualHex)
offset := d.FieldU32("offset", scalar.UintHex)
d.FieldUTF8NullFixedLen("name", int(cmdSize)-int(offset))
case LC_PREBOUND_DYLIB:
// https://github.com/aidansteele/osx-abi-macho-file-format-reference#prebound_dylib_command
@ -588,7 +588,7 @@ func machoDecode(d *decode.D, _ any) any {
case LC_ROUTINES,
LC_ROUTINES_64:
if archBits == 32 {
d.FieldU32("init_address", scalar.ActualHex)
d.FieldU32("init_address", scalar.UintHex)
d.FieldU32("init_module")
d.FieldU32("reserved1")
d.FieldU32("reserved2")
@ -597,7 +597,7 @@ func machoDecode(d *decode.D, _ any) any {
d.FieldU32("reserved5")
d.FieldU32("reserved6")
} else {
d.FieldU64("init_address", scalar.ActualHex)
d.FieldU64("init_address", scalar.UintHex)
d.FieldU64("init_module")
d.FieldU64("reserved1")
d.FieldU64("reserved2")
@ -610,7 +610,7 @@ func machoDecode(d *decode.D, _ any) any {
LC_SUB_LIBRARY,
LC_SUB_CLIENT,
LC_SUB_FRAMEWORK:
offset := d.FieldU32("offset", scalar.ActualHex)
offset := d.FieldU32("offset", scalar.UintHex)
d.FieldUTF8NullFixedLen("name", int(cmdSize)-int(offset))
case LC_SYMTAB:
symOff := d.FieldU32("symoff")
@ -626,7 +626,7 @@ func machoDecode(d *decode.D, _ any) any {
d.SeekAbs(int64(symOff) * 8)
d.FieldArray("symbols", func(d *decode.D) {
for i := 0; i < int(nSyms); i++ {
symbolTypeMap := scalar.UToSymStr{
symbolTypeMap := scalar.UintMapSymStr{
0x0: "undef",
0x1: "abs",
0x5: "indr",
@ -644,7 +644,7 @@ func machoDecode(d *decode.D, _ any) any {
})
d.FieldU8("sect")
d.FieldU16("desc")
d.FieldU("value", archBits, scalar.ActualHex)
d.FieldU("value", archBits, scalar.UintHex)
})
}
})
@ -700,20 +700,20 @@ func machoDecode(d *decode.D, _ any) any {
case LC_DYLD_INFO,
LC_DYLD_INFO_ONLY:
d.FieldStruct("dyld_info", func(d *decode.D) {
d.FieldU32("rebase_off", scalar.ActualHex)
d.FieldU32("rebase_off", scalar.UintHex)
d.FieldU32("rebase_size")
d.FieldU32("bind_off", scalar.ActualHex)
d.FieldU32("bind_off", scalar.UintHex)
d.FieldU32("bind_size")
d.FieldU32("weak_bind_off", scalar.ActualHex)
d.FieldU32("weak_bind_off", scalar.UintHex)
d.FieldU32("weak_bind_size")
d.FieldU32("lazy_bind_off", scalar.ActualHex)
d.FieldU32("lazy_bind_off", scalar.UintHex)
d.FieldU32("lazy_bind_size")
d.FieldU32("export_off", scalar.ActualHex)
d.FieldU32("export_off", scalar.UintHex)
d.FieldU32("export_size")
})
case LC_MAIN:
d.FieldStruct("entrypoint", func(d *decode.D) {
d.FieldU64("entryoff", scalar.ActualHex)
d.FieldU64("entryoff", scalar.UintHex)
d.FieldU64("stacksize")
})
case LC_SOURCE_VERSION:
@ -728,7 +728,7 @@ func machoDecode(d *decode.D, _ any) any {
case LC_ENCRYPTION_INFO,
LC_ENCRYPTION_INFO_64:
d.FieldStruct("encryption_info", func(d *decode.D) {
offset := d.FieldU32("offset", scalar.ActualHex)
offset := d.FieldU32("offset", scalar.UintHex)
size := d.FieldU32("size")
d.FieldU32("id")
d.RangeFn(int64(offset)*8, int64(size)*8, func(d *decode.D) {
@ -742,9 +742,9 @@ func machoDecode(d *decode.D, _ any) any {
case LC_IDFVMLIB,
LC_LOADFVMLIB:
d.FieldStruct("fvmlib", func(d *decode.D) {
offset := d.FieldU32("offset", scalar.ActualHex)
offset := d.FieldU32("offset", scalar.UintHex)
d.FieldU32("minor_version")
d.FieldU32("header_addr", scalar.ActualHex)
d.FieldU32("header_addr", scalar.UintHex)
d.FieldUTF8NullFixedLen("name", int(cmdSize)-int(offset))
})
default:
@ -817,8 +817,8 @@ func parseSectionFlags(d *decode.D) {
d.FieldBool("attr_loc_reloc")
}
var timestampMapper = scalar.Fn(func(s scalar.S) (scalar.S, error) {
s.Sym = time.UnixMilli(int64(s.ActualU())).UTC().String()
var timestampMapper = scalar.UintFn(func(s scalar.Uint) (scalar.Uint, error) {
s.Sym = time.UnixMilli(int64(s.Actual)).UTC().String()
return s, nil
})
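Sketch of an ad-hoc mapper with scalar.UintFn, mirroring timestampMapper above (illustrative; assumes the time package is imported and sketchSecondsMapper is a made-up name):

var sketchSecondsMapper = scalar.UintFn(func(s scalar.Uint) (scalar.Uint, error) {
	// turn a raw seconds count into a readable duration sym
	s.Sym = (time.Duration(s.Actual) * time.Second).String()
	return s, nil
})

// usage: d.FieldU32("duration", sketchSecondsMapper)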

@ -34,16 +34,16 @@ func machoFatDecode(d *decode.D, _ any) any {
var ofiles []ofile
d.FieldStruct("fat_header", func(d *decode.D) {
d.FieldU32("magic", magicSymMapper, scalar.ActualHex, d.AssertU(FAT_MAGIC))
d.FieldU32("magic", magicSymMapper, scalar.UintHex, d.UintAssert(FAT_MAGIC))
narchs := d.FieldU32("narchs")
d.FieldArray("archs", func(d *decode.D) {
for i := 0; i < int(narchs); i++ {
d.FieldStruct("arch", func(d *decode.D) {
// beware cputype and cpusubtype changes from ofile header to fat header
cpuType := d.FieldU32("cputype", cpuTypes, scalar.ActualHex)
d.FieldU32("cpusubtype", cpuSubTypes[cpuType], scalar.ActualHex)
offset := d.FieldU32("offset", scalar.ActualHex)
cpuType := d.FieldU32("cputype", cpuTypes, scalar.UintHex)
d.FieldU32("cpusubtype", cpuSubTypes[cpuType], scalar.UintHex)
offset := d.FieldU32("offset", scalar.UintHex)
size := d.FieldU32("size")
d.FieldU32("align")

@ -34,7 +34,7 @@ func decodeMarkdown(d *decode.D, _ any) any {
panic(err)
}
var s scalar.S
var s scalar.Any
s.Actual = node(markdown.Parse(b, nil))
d.Value.V = &s
d.Value.Range.Len = d.Len()
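Sketch of a decoder that stores a single in-memory result via scalar.Any, as the markdown decoder above does (illustrative; the result value is made up):

func sketchDecodeValue(d *decode.D, _ any) any {
	var s scalar.Any
	s.Actual = map[string]any{"sketch": true} // hypothetical in-memory result
	d.Value.V = &s
	d.Value.Range.Len = d.Len() // the value covers the whole input
	return nil
}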

@ -33,9 +33,9 @@ type Attribute struct {
Type Type
Tag Tag
Definition string
IntegerEnums scalar.SToScalar
UintegerEnums scalar.UToScalar
StringEnums scalar.StrToScalar
IntegerEnums scalar.SintMap
UintegerEnums scalar.UintMap
StringEnums scalar.StrMap
}
type Tag map[uint64]Attribute

@ -165,11 +165,11 @@ func main() {
if len(c.Enums) > 0 {
switch c.Type {
case "integer":
fmt.Printf("\t\tIntegerEnums: scalar.SToScalar{\n")
fmt.Printf("\t\tIntegerEnums: scalar.SintMap{\n")
case "uinteger":
fmt.Printf("\t\tUintegerEnums: scalar.UToScalar{\n")
fmt.Printf("\t\tUintegerEnums: scalar.UintMap{\n")
case "string":
fmt.Printf("\t\tStringEnums: scalar.StrToScalar{\n")
fmt.Printf("\t\tStringEnums: scalar.StrMap{\n")
}
// matroska.xml has dup keys (e.g. PARTS)

@ -424,7 +424,7 @@ var ChapterTranslate = ebml.Tag{
Name: "chapter_translate_codec",
Definition: "This `ChapterTranslate` applies to this chapter codec of the given chapter edition(s); see (#chapprocesscodecid-element).",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "matroska_script",
Description: "Chapter commands using the Matroska Script codec.",
@ -639,7 +639,7 @@ var TrackEntry = ebml.Tag{
Name: "track_type",
Definition: "The `TrackType` defines the type of each frame found in the Track. The value **SHOULD** be stored on 1 octet.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
1: {
Sym: "video",
Description: "An image.",
@ -914,7 +914,7 @@ var TrackTranslate = ebml.Tag{
Name: "track_translate_codec",
Definition: "This `TrackTranslate` applies to this chapter codec of the given chapter edition(s); see (#chapprocesscodecid-element).",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "matroska_script",
Description: "Chapter commands using the Matroska Script codec.",
@ -937,7 +937,7 @@ var Video = ebml.Tag{
Name: "flag_interlaced",
Definition: "Specify whether the video frames in this track are interlaced or not.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "undetermined",
Description: "Unknown status.",
@ -956,7 +956,7 @@ var Video = ebml.Tag{
Name: "field_order",
Definition: "Specify the field ordering of video frames in this track.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "progressive",
Description: "Interlaced frames.",
@ -985,7 +985,7 @@ var Video = ebml.Tag{
Name: "stereo_mode",
Definition: "Stereo-3D video mode. There are some more details in (#multi-planar-and-3d-videos).",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "mono",
},
@ -1037,7 +1037,7 @@ var Video = ebml.Tag{
Name: "alpha_mode",
Definition: "Indicate whether the BlockAdditional Element with BlockAddID of \"1\" contains Alpha data, as defined by to the Codec Mapping for the `CodecID`. Undefined values **SHOULD NOT** be used as the behavior of known implementations is different (considered either as 0 or 1).",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "none",
Description: "The BlockAdditional Element with BlockAddID of \"1\" does not exist or **SHOULD NOT** be considered as containing such data.",
@ -1052,7 +1052,7 @@ var Video = ebml.Tag{
Name: "old_stereo_mode",
Definition: "Bogus StereoMode value used in old versions of libmatroska.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "mono",
},
@ -1111,7 +1111,7 @@ var Video = ebml.Tag{
Name: "display_unit",
Definition: "How DisplayWidth & DisplayHeight are interpreted.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "pixels",
},
@ -1133,7 +1133,7 @@ var Video = ebml.Tag{
Name: "aspect_ratio_type",
Definition: "Specify the possible modifications to the aspect ratio.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "free_resizing",
},
@ -1177,7 +1177,7 @@ var Colour = ebml.Tag{
Name: "matrix_coefficients",
Definition: "The Matrix Coefficients of the video used to derive luma and chroma values from red, green, and blue color primaries. For clarity, the value and meanings for MatrixCoefficients are adopted from Table 4 of ISO/IEC 23001-8:2016 or ITU-T H.273.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "identity",
},
@ -1254,7 +1254,7 @@ var Colour = ebml.Tag{
Name: "chroma_siting_horz",
Definition: "How chroma is subsampled horizontally.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "unspecified",
},
@ -1270,7 +1270,7 @@ var Colour = ebml.Tag{
Name: "chroma_siting_vert",
Definition: "How chroma is subsampled vertically.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "unspecified",
},
@ -1286,7 +1286,7 @@ var Colour = ebml.Tag{
Name: "range",
Definition: "Clipping of the color ranges.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "unspecified",
},
@ -1305,7 +1305,7 @@ var Colour = ebml.Tag{
Name: "transfer_characteristics",
Definition: "The transfer characteristics of the video. For clarity, the value and meanings for TransferCharacteristics are adopted from Table 3 of ISO/IEC 23091-4 or ITU-T H.273.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "reserved",
},
@ -1369,7 +1369,7 @@ var Colour = ebml.Tag{
Name: "primaries",
Definition: "The colour primaries of the video. For clarity, the value and meanings for Primaries are adopted from Table 2 of ISO/IEC 23091-4 or ITU-T H.273.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "reserved",
},
@ -1489,7 +1489,7 @@ var Projection = ebml.Tag{
Name: "projection_type",
Definition: "Describes the projection used for this video track.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "rectangular",
},
@ -1556,7 +1556,7 @@ var Audio = ebml.Tag{
Name: "emphasis",
Definition: "Audio emphasis applied on audio samples. The player **MUST** apply the inverse emphasis to get the proper audio samples.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "no_emphasis",
},
@ -1642,7 +1642,7 @@ var TrackPlane = ebml.Tag{
Name: "track_plane_type",
Definition: "The kind of plane this track corresponds to.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "left_eye",
},
@ -1682,7 +1682,7 @@ var ContentEncoding = ebml.Tag{
Name: "content_encoding_scope",
Definition: "A bit field that describes which Elements have been modified in this way. Values (big-endian) can be OR'ed.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
1: {
Sym: "block",
Description: "All frame contents, excluding lacing data.",
@ -1701,7 +1701,7 @@ var ContentEncoding = ebml.Tag{
Name: "content_encoding_type",
Definition: "A value describing what kind of transformation is applied.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "compression",
},
@ -1727,7 +1727,7 @@ var ContentCompression = ebml.Tag{
Name: "content_comp_algo",
Definition: "The compression algorithm used.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "zlib",
Description: "zlib compression [@!RFC1950].",
@ -1758,7 +1758,7 @@ var ContentEncryption = ebml.Tag{
Name: "content_enc_algo",
Definition: "The encryption algorithm used.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "not_encrypted",
Description: "The data are not encrypted.",
@ -1809,7 +1809,7 @@ var ContentEncryption = ebml.Tag{
Name: "content_sig_algo",
Definition: "The algorithm used for the signature.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "not_signed",
},
@ -1822,7 +1822,7 @@ var ContentEncryption = ebml.Tag{
Name: "content_sig_hash_algo",
Definition: "The hash algorithm used for the signature.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "not_signed",
},
@ -1841,7 +1841,7 @@ var ContentEncAESSettings = ebml.Tag{
Name: "aessettings_cipher_mode",
Definition: "The AES cipher mode used in the encryption.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
1: {
Sym: "aes_ctr",
Description: "Counter [@?SP.800-38A].",
@ -2081,7 +2081,7 @@ var ChapterAtom = ebml.Tag{
Name: "chapter_skip_type",
Definition: "Indicate what type of content the ChapterAtom contains and might be skipped. It can be used to automatically skip content based on the type. If a `ChapterAtom` is inside a `ChapterAtom` that has a `ChapterSkipType` set, it **MUST NOT** have a `ChapterSkipType` or have a `ChapterSkipType` with the same value as it's parent `ChapterAtom`. If the `ChapterAtom` doesn't contain a `ChapterTimeEnd`, the value of the `ChapterSkipType` is only valid until the next `ChapterAtom` with a `ChapterSkipType` value or the end of the file.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "no_skipping",
Description: "Content which should not be skipped.",
@ -2193,7 +2193,7 @@ var ChapProcessCommand = ebml.Tag{
Name: "chap_process_time",
Definition: "Defines when the process command **SHOULD** be handled",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
0: {
Sym: "during_the_whole_chapter",
},
@ -2238,7 +2238,7 @@ var Targets = ebml.Tag{
Name: "target_type_value",
Definition: "A number to indicate the logical level of the target.",
Type: ebml.Uinteger,
UintegerEnums: scalar.UToScalar{
UintegerEnums: scalar.UintMap{
70: {
Sym: "collection",
Description: "The highest hierarchical level that tags can describe.",
@ -2273,7 +2273,7 @@ var Targets = ebml.Tag{
Name: "target_type",
Definition: "An informational string that can be used to display the logical level of the target like \"ALBUM\", \"TRACK\", \"MOVIE\", \"CHAPTER\", etc ; see Section 6.4 of [@?MatroskaTags].",
Type: ebml.String,
StringEnums: scalar.StrToScalar{
StringEnums: scalar.StrMap{
"COLLECTION": {
Sym: "targettypevalue_70",
},

@ -106,7 +106,7 @@ const (
lacingTypeEBML = 0b11
)
var lacingTypeNames = scalar.UToSymStr{
var lacingTypeNames = scalar.UintMapSymStr{
lacingTypeNone: "none",
lacingTypeXiph: "xiph",
lacingTypeFixed: "fixed",
@ -127,7 +127,7 @@ func decodeLacingFn(d *decode.D, lacingType int, fn func(d *decode.D)) {
numLaces := int(d.FieldU8("num_laces"))
d.FieldArray("lace_sizes", func(d *decode.D) {
for i := 0; i < numLaces; i++ {
s := int64(d.FieldUFn("lace_size", decodeXiphLaceSize))
s := int64(d.FieldUintFn("lace_size", decodeXiphLaceSize))
laceSizes = append(laceSizes, s)
}
laceSizes = append(laceSizes, -1)
@ -135,10 +135,10 @@ func decodeLacingFn(d *decode.D, lacingType int, fn func(d *decode.D)) {
case lacingTypeEBML:
numLaces := int(d.FieldU8("num_laces"))
d.FieldArray("lace_sizes", func(d *decode.D) {
s := int64(d.FieldUFn("lace_size", decodeVint)) // first is unsigned, not ranged shifted
s := int64(d.FieldUintFn("lace_size", decodeVint)) // first is unsigned, not ranged shifted
laceSizes = append(laceSizes, s)
for i := 0; i < numLaces-1; i++ {
d := int64(d.FieldUFn("lace_size_delta", decodeRawVint))
d := int64(d.FieldUintFn("lace_size_delta", decodeRawVint))
// range shifting
switch {
case d&0b1111_1111_1111_1111_1111_1111_1111_1111_1111_1111_1111_1111_1000_0000 == 0b0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_1000_0000:
@ -258,8 +258,8 @@ func decodeMaster(d *decode.D, bitsLimit int64, tag ebml.Tag, dc *decodeContext)
Type: ebml.Unknown,
}
tagID := d.FieldUFn("id", decodeRawVint, scalar.Fn(func(s scalar.S) (scalar.S, error) {
n := s.ActualU()
tagID := d.FieldUintFn("id", decodeRawVint, scalar.UintFn(func(s scalar.Uint) (scalar.Uint, error) {
n := s.Actual
var ok bool
a, ok = tag[n]
if !ok {
@ -268,10 +268,10 @@ func decodeMaster(d *decode.D, bitsLimit int64, tag ebml.Tag, dc *decodeContext)
a = ebml.Attribute{
Type: ebml.Unknown,
}
return scalar.S{Actual: n, ActualDisplay: scalar.NumberHex, Description: "Unknown"}, nil
return scalar.Uint{Actual: n, DisplayFormat: scalar.NumberHex, Description: "Unknown"}, nil
}
}
return scalar.S{Actual: n, ActualDisplay: scalar.NumberHex, Sym: a.Name, Description: a.Definition}, nil
return scalar.Uint{Actual: n, DisplayFormat: scalar.NumberHex, Sym: a.Name, Description: a.Definition}, nil
}))
d.FieldValueStr("type", ebml.TypeNames[a.Type])
@ -286,7 +286,7 @@ func decodeMaster(d *decode.D, bitsLimit int64, tag ebml.Tag, dc *decodeContext)
// element that is not a valid sub-element of that Master-element
// TODO: should also handle garbage between
const maxStringTagSize = 100 * 1024 * 1024
tagSize := d.FieldUFn("size", decodeVint)
tagSize := d.FieldUintFn("size", decodeVint)
// assert sane tag size
// TODO: strings are limited for now because they are read into memory
@ -309,29 +309,32 @@ func decodeMaster(d *decode.D, bitsLimit int64, tag ebml.Tag, dc *decodeContext)
// nop
}
optionalMap := func(sm scalar.Mapper) scalar.Mapper {
return scalar.Fn(func(s scalar.S) (scalar.S, error) {
if sm != nil {
return sm.MapScalar(s)
}
return s, nil
})
}
switch a.Type {
case ebml.Unknown:
d.FieldRawLen("data", int64(tagSize)*8)
case ebml.Integer:
d.FieldS("value", int(tagSize)*8, optionalMap(a.IntegerEnums))
var sm []scalar.SintMapper
if a.IntegerEnums != nil {
sm = append(sm, a.IntegerEnums)
}
d.FieldS("value", int(tagSize)*8, sm...)
case ebml.Uinteger:
v := d.FieldU("value", int(tagSize)*8, optionalMap(a.UintegerEnums))
var sm []scalar.UintMapper
if a.UintegerEnums != nil {
sm = append(sm, a.UintegerEnums)
}
v := d.FieldU("value", int(tagSize)*8, sm...)
if dc.currentTrack != nil && tagID == ebml_matroska.TrackNumberID {
dc.currentTrack.number = int(v)
}
case ebml.Float:
d.FieldF("value", int(tagSize)*8)
case ebml.String:
v := d.FieldUTF8("value", int(tagSize), optionalMap(a.StringEnums))
var sm []scalar.StrMapper
if a.StringEnums != nil {
sm = append(sm, a.StringEnums)
}
v := d.FieldUTF8("value", int(tagSize), sm...)
if dc.currentTrack != nil && tagID == ebml_matroska.CodecIDID {
dc.currentTrack.codec = v
}
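Sketch of FieldUintFn with a reader function plus optional typed mappers, following the pattern in this hunk (illustrative; the escape logic is a made-up example):

func sketchEscapedLength(d *decode.D, extra ...scalar.UintMapper) uint64 {
	return d.FieldUintFn("length", func(d *decode.D) uint64 {
		n := d.U8()
		if n == 0xff { // hypothetical escape marker
			n += d.U16()
		}
		return n
	}, extra...) // optional mappers are now a typed slice, no generic wrapper needed
}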
@ -450,7 +453,7 @@ func matroskaDecode(d *decode.D, _ any) any {
case "A_FLAC":
t.parentD.RangeFn(t.codecPrivatePos, t.codecPrivateTagSize, func(d *decode.D) {
d.FieldStruct("value", func(d *decode.D) {
d.FieldUTF8("magic", 4, d.AssertStr("fLaC"))
d.FieldUTF8("magic", 4, d.StrAssert("fLaC"))
dv, v := d.FieldFormat("metadatablocks", flacMetadatablocksFormat, nil)
flacMetadatablockOut, ok := v.(format.FlacMetadatablocksOut)
if dv != nil && !ok {
@ -489,7 +492,7 @@ func matroskaDecode(d *decode.D, _ any) any {
for _, b := range dc.blocks {
b.d.RangeFn(b.r.Start, b.r.Len, func(d *decode.D) {
var lacing uint64
trackNumber := d.FieldUFn("track_number", decodeVint)
trackNumber := d.FieldUintFn("track_number", decodeVint)
d.FieldU16("timestamp")
if b.simple {
d.FieldStruct("flags", func(d *decode.D) {

@ -46,7 +46,7 @@ const (
MPEGObjectTypeNONE = 0
)
var MpegObjectTypeNames = scalar.UToSymStr{
var MpegObjectTypeNames = scalar.UintMapSymStr{
MPEGObjectTypeMOV_TEXT: "MPEGObjectTypeMOV_TEXT",
MPEGObjectTypeMPEG4: "MPEGObjectTypeMPEG4",
MPEGObjectTypeH264: "MPEGObjectTypeH264",
@ -142,7 +142,7 @@ const (
MPEGAudioObjectTypePS = 29
)
var MPEGAudioObjectTypeNames = scalar.UToScalar{
var MPEGAudioObjectTypeNames = scalar.UintMap{
0: {Sym: "mull", Description: "Null"},
MPEGAudioObjectTypeMain: {Sym: "aac_main", Description: "AAC Main"},
MPEGAudioObjectTypeLC: {Sym: "aac_lc", Description: "AAC Low Complexity)"},
@ -194,7 +194,7 @@ var MPEGAudioObjectTypeNames = scalar.UToScalar{
// based on ffmpeg/libavutil/pixfmt.h
//
//nolint:revive
var ISO_23091_2_ColourPrimariesMap = scalar.UToScalar{
var ISO_23091_2_ColourPrimariesMap = scalar.UintMap{
1: {Sym: "bt709", Description: "ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B"},
2: {Sym: "unspecified", Description: "Unspecified"},
3: {Sym: "reserved", Description: "Reserved"},
@ -211,7 +211,7 @@ var ISO_23091_2_ColourPrimariesMap = scalar.UToScalar{
}
//nolint:revive
var ISO_23091_2_TransferCharacteristicMap = scalar.UToScalar{
var ISO_23091_2_TransferCharacteristicMap = scalar.UintMap{
1: {Sym: "bt709", Description: "ITU-R BT1361"},
2: {Sym: "unspecified", Description: "Unspecified"},
3: {Sym: "reserved", Description: "Reserved"},
@ -233,7 +233,7 @@ var ISO_23091_2_TransferCharacteristicMap = scalar.UToScalar{
}
//nolint:revive
var ISO_23091_2_MatrixCoefficients = scalar.UToScalar{
var ISO_23091_2_MatrixCoefficients = scalar.UintMap{
0: {Sym: "rgb", Description: "GBR, IEC 61966-2-1 (sRGB), YZX and ST 428-1"},
1: {Sym: "bt709", Description: "ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B"},
2: {Sym: "unspecified", Description: "Unspecified"},
@ -329,7 +329,7 @@ const (
WAVTagVORBIS = 0x566f
)
var WAVTagNames = scalar.UToSymStr{
var WAVTagNames = scalar.UintMapSymStr{
WAVTagPCM_S16LE: "pcm_s16le",
WAVTagADPCM_MS: "adpcm_ms",
WAVTagPCM_F32LE: "pcm_f32le",

@ -19,14 +19,14 @@ func init() {
})
}
var headerDescription = scalar.StrToDescription{
var headerUintDescription = scalar.StrMapDescription{
"Xing": "XING variable bitrate",
"Info": "XING variable bitrate",
"VBRI": "Fraunhofer Encoder variable bitrate info",
}
func mp3FrameTagsDecode(d *decode.D, _ any) any {
switch d.FieldUTF8("header", 4, headerDescription) {
switch d.FieldUTF8("header", 4, headerUintDescription) {
case "Xing",
"Info":
qualityPresent := false
@ -80,12 +80,12 @@ func mp3FrameTagsDecode(d *decode.D, _ any) any {
d.FieldU16("version_id")
d.FieldU16("delay")
d.FieldU16("quality")
d.FieldU32("length", scalar.Description("Number of bytes"))
d.FieldU32("frames", scalar.Description("Number of frames"))
tocEntries := d.FieldU16("toc_entries", scalar.Description("Number of entries within TOC table"))
d.FieldU16("scale_factor", scalar.Description("Scale factor of TOC table entries"))
tocEntrySize := d.FieldU16("toc_entry_size", d.AssertU(1, 2, 3, 4), scalar.Description("Size per table entry"))
d.FieldU16("frame_per_entry", scalar.Description("Frames per table entry"))
d.FieldU32("length", scalar.UintDescription("Number of bytes"))
d.FieldU32("frames", scalar.UintDescription("Number of frames"))
tocEntries := d.FieldU16("toc_entries", scalar.UintDescription("Number of entries within TOC table"))
d.FieldU16("scale_factor", scalar.UintDescription("Scale factor of TOC table entries"))
tocEntrySize := d.FieldU16("toc_entry_size", d.UintAssert(1, 2, 3, 4), scalar.UintDescription("Size per table entry"))
d.FieldU16("frame_per_entry", scalar.UintDescription("Frames per table entry"))
d.FieldArray("toc", func(d *decode.D) {
for i := 0; i < int(tocEntries); i++ {
d.FieldU("entry", int(tocEntrySize)*8)

@ -22,16 +22,16 @@ const (
boxSizeUse64bitSize = 1
)
var boxSizeNames = scalar.UToDescription{
var boxSizeNames = scalar.UintMapDescription{
boxSizeRestOfFile: "Rest of file",
boxSizeUse64bitSize: "Use 64 bit size",
}
var mediaTimeNames = scalar.SToDescription{
var mediaTimeNames = scalar.SintMapDescription{
-1: "empty",
}
var subTypeNames = scalar.StrToDescription{
var subTypeNames = scalar.StrMapDescription{
"alis": "Alias Data",
"camm": "Camera Metadata",
"crsm": "Clock Reference",
@ -59,7 +59,7 @@ var subTypeNames = scalar.StrToDescription{
"vide": "Video Track",
}
var dataFormatNames = scalar.StrToDescription{
var dataFormatNames = scalar.StrMapDescription{
// additional codecs
"apch": "Apple ProRes 422 High Quality",
"apcn": "Apple ProRes 422 Standard Definition",
@ -185,15 +185,15 @@ var (
uuidIpodBytes = [16]byte{0x6b, 0x68, 0x40, 0xf2, 0x5f, 0x24, 0x4f, 0xc5, 0xba, 0x39, 0xa5, 0x1b, 0xcf, 0x03, 0x23, 0xf3}
)
var uuidNames = scalar.BytesToScalar{
{Bytes: uuidIsmlManifestBytes[:], Scalar: scalar.S{Sym: "isml_manifest"}},
{Bytes: uuidXmpBytes[:], Scalar: scalar.S{Sym: "xmp"}},
{Bytes: uuidSphericalBytes[:], Scalar: scalar.S{Sym: "spherical"}},
{Bytes: uuidPspUsmtBytes[:], Scalar: scalar.S{Sym: "psp_usmt"}},
{Bytes: uuidTfxdBytes[:], Scalar: scalar.S{Sym: "tfxd"}},
{Bytes: uuidTfrfBytes[:], Scalar: scalar.S{Sym: "tfrf"}},
{Bytes: uuidProfBytes[:], Scalar: scalar.S{Sym: "prof"}},
{Bytes: uuidIpodBytes[:], Scalar: scalar.S{Sym: "ipod"}},
var uuidNames = scalar.RawBytesMap{
{Bytes: uuidIsmlManifestBytes[:], Scalar: scalar.BitBuf{Sym: "isml_manifest"}},
{Bytes: uuidXmpBytes[:], Scalar: scalar.BitBuf{Sym: "xmp"}},
{Bytes: uuidSphericalBytes[:], Scalar: scalar.BitBuf{Sym: "spherical"}},
{Bytes: uuidPspUsmtBytes[:], Scalar: scalar.BitBuf{Sym: "psp_usmt"}},
{Bytes: uuidTfxdBytes[:], Scalar: scalar.BitBuf{Sym: "tfxd"}},
{Bytes: uuidTfrfBytes[:], Scalar: scalar.BitBuf{Sym: "tfrf"}},
{Bytes: uuidProfBytes[:], Scalar: scalar.BitBuf{Sym: "prof"}},
{Bytes: uuidIpodBytes[:], Scalar: scalar.BitBuf{Sym: "ipod"}},
}
// ISO 639-2/T language code 3 * 5bit packed uint + 1 zero bit
@ -208,7 +208,8 @@ func decodeLang(d *decode.D) string {
// Quicktime time seconds in January 1, 1904 UTC
var quicktimeEpochDate = time.Date(1904, time.January, 4, 0, 0, 0, 0, time.UTC)
var quicktimeEpoch = scalar.DescriptionTimeFn(scalar.S.TryActualU, quicktimeEpochDate, time.RFC3339)
var uintActualQuicktimeEpoch = scalar.UintActualDate(quicktimeEpochDate, time.RFC3339)
func decodeMvhdFieldMatrix(d *decode.D, name string) {
d.FieldStruct(name, func(d *decode.D) {
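Sketch of the raw-bytes mapper and epoch date mapper introduced in this file (illustrative; the uuid bytes, epoch and the usage line are assumptions, and time must be imported):

var sketchUUID = [16]byte{0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff} // made up

var sketchUUIDNames = scalar.RawBytesMap{
	{Bytes: sketchUUID[:], Scalar: scalar.BitBuf{Sym: "sketch"}},
}

// actual uint interpreted as seconds since the given epoch, rendered with the given layout
var sketchUnixEpochDate = scalar.UintActualDate(time.Unix(0, 0).UTC(), time.RFC3339)

// assumed usage: d.FieldRawLen("uuid", 16*8, sketchUUIDNames); d.FieldU32("mtime", sketchUnixEpochDate)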
@ -228,29 +229,29 @@ func decodeMvhdFieldMatrix(d *decode.D, name string) {
func decodeSampleFlags(d *decode.D) {
d.FieldU4("reserved0")
d.FieldU2("is_leading")
d.FieldU2("sample_depends_on", scalar.UToScalar{
0: scalar.S{Sym: "unknown"},
1: scalar.S{Sym: "other", Description: "Not I-picture"},
2: scalar.S{Sym: "none", Description: "Is I-picture"},
d.FieldU2("sample_depends_on", scalar.UintMap{
0: scalar.Uint{Sym: "unknown"},
1: scalar.Uint{Sym: "other", Description: "Not I-picture"},
2: scalar.Uint{Sym: "none", Description: "Is I-picture"},
})
d.FieldU2("sample_is_depended_on", scalar.UToScalar{
0: scalar.S{Sym: "unknown"},
1: scalar.S{Sym: "other", Description: "Not disposable"},
2: scalar.S{Sym: "none", Description: "Is disposable"},
d.FieldU2("sample_is_depended_on", scalar.UintMap{
0: scalar.Uint{Sym: "unknown"},
1: scalar.Uint{Sym: "other", Description: "Not disposable"},
2: scalar.Uint{Sym: "none", Description: "Is disposable"},
})
d.FieldU2("sample_has_redundancy", scalar.UToScalar{
0: scalar.S{Sym: "unknown"},
2: scalar.S{Sym: "none", Description: "No redundant coding"},
d.FieldU2("sample_has_redundancy", scalar.UintMap{
0: scalar.Uint{Sym: "unknown"},
2: scalar.Uint{Sym: "none", Description: "No redundant coding"},
})
d.FieldU3("sample_padding_value")
d.FieldU1("sample_is_non_sync_sample")
d.FieldU16("sample_degradation_priority")
}
func decodeBoxWithParentData(ctx *decodeContext, d *decode.D, parentData any, extraTypeMappers ...scalar.Mapper) {
func decodeBoxWithParentData(ctx *decodeContext, d *decode.D, parentData any, extraTypeMappers ...scalar.StrMapper) {
var typ string
var dataSize uint64
typeMappers := []scalar.Mapper{boxDescriptions}
typeMappers := []scalar.StrMapper{boxDescriptions}
if len(extraTypeMappers) > 0 {
typeMappers = append(typeMappers, extraTypeMappers...)
}
@ -283,11 +284,11 @@ func decodeBoxWithParentData(ctx *decodeContext, d *decode.D, parentData any, ex
ctx.path = ctx.path[0 : len(ctx.path)-1]
}
func decodeBoxes(ctx *decodeContext, d *decode.D, extraTypeMappers ...scalar.Mapper) {
func decodeBoxes(ctx *decodeContext, d *decode.D, extraTypeMappers ...scalar.StrMapper) {
decodeBoxesWithParentData(ctx, d, nil, extraTypeMappers...)
}
func decodeBoxesWithParentData(ctx *decodeContext, d *decode.D, parentData any, extraTypeMappers ...scalar.Mapper) {
func decodeBoxesWithParentData(ctx *decodeContext, d *decode.D, parentData any, extraTypeMappers ...scalar.StrMapper) {
d.FieldStructArrayLoop("boxes", "box",
func() bool { return d.BitsLeft() >= 8*8 },
func(d *decode.D) {
@ -381,13 +382,13 @@ func decodeBox(ctx *decodeContext, d *decode.D, typ string) {
d.FieldU24("flags")
switch version {
case 0:
d.FieldU32("creation_time", quicktimeEpoch)
d.FieldU32("modification_time", quicktimeEpoch)
d.FieldU32("creation_time", uintActualQuicktimeEpoch)
d.FieldU32("modification_time", uintActualQuicktimeEpoch)
d.FieldU32("time_scale")
d.FieldU32("duration")
case 1:
d.FieldU64("creation_time", quicktimeEpoch)
d.FieldU64("modification_time", quicktimeEpoch)
d.FieldU64("creation_time", uintActualQuicktimeEpoch)
d.FieldU64("modification_time", uintActualQuicktimeEpoch)
d.FieldU32("time_scale")
d.FieldU64("duration")
default:
@ -435,14 +436,14 @@ func decodeBox(ctx *decodeContext, d *decode.D, typ string) {
d.FieldU24("flags")
switch version {
case 0:
d.FieldU32("creation_time", quicktimeEpoch)
d.FieldU32("modification_time", quicktimeEpoch)
d.FieldU32("creation_time", uintActualQuicktimeEpoch)
d.FieldU32("modification_time", uintActualQuicktimeEpoch)
trackID = int(d.FieldU32("track_id"))
d.FieldU32("reserved1")
d.FieldU32("duration")
case 1:
d.FieldU64("creation_time", quicktimeEpoch)
d.FieldU64("modification_time", quicktimeEpoch)
d.FieldU64("creation_time", uintActualQuicktimeEpoch)
d.FieldU64("modification_time", uintActualQuicktimeEpoch)
trackID = int(d.FieldU32("track_id"))
d.FieldU32("reserved1")
d.FieldU64("duration")
@ -470,13 +471,13 @@ func decodeBox(ctx *decodeContext, d *decode.D, typ string) {
// TODO: timestamps
switch version {
case 0:
d.FieldU32("creation_time", quicktimeEpoch)
d.FieldU32("modification_time", quicktimeEpoch)
d.FieldU32("creation_time", uintActualQuicktimeEpoch)
d.FieldU32("modification_time", uintActualQuicktimeEpoch)
d.FieldU32("time_scale")
d.FieldU32("duration")
case 1:
d.FieldU64("creation_time", quicktimeEpoch)
d.FieldU64("modification_time", quicktimeEpoch)
d.FieldU64("creation_time", uintActualQuicktimeEpoch)
d.FieldU64("modification_time", uintActualQuicktimeEpoch)
d.FieldU32("time_scale")
d.FieldU64("duration")
default:
@ -827,7 +828,7 @@ func decodeBox(ctx *decodeContext, d *decode.D, typ string) {
for d.NotEnd() {
d.FieldStruct("entry", func(d *decode.D) {
d.FieldU2("reserved")
values := scalar.UToSymStr{
values := scalar.UintMapSymStr{
0: "unknown",
1: "yes",
2: "no",
@ -900,7 +901,7 @@ func decodeBox(ctx *decodeContext, d *decode.D, typ string) {
if mb := ctx.currentMetaBox(); mb != nil && mb.keys != nil && len(mb.keys.keys) > 0 {
// meta box had a keys box
var b [4]byte
typeSymMapper := scalar.StrToSymStr{}
typeSymMapper := scalar.StrMapSymStr{}
for k, v := range mb.keys.keys {
// type will be a uint32 be integer
// +1 as they seem to be counted from 1
@ -1206,11 +1207,11 @@ func decodeBox(ctx *decodeContext, d *decode.D, typ string) {
systemIDPlayReady = [16]byte{0x9a, 0x04, 0xf0, 0x79, 0x98, 0x40, 0x42, 0x86, 0xab, 0x92, 0xe6, 0x5b, 0xe0, 0x88, 0x5f, 0x95}
systemIDFairPlay = [16]byte{0x94, 0xce, 0x86, 0xfb, 0x07, 0xff, 0x4f, 0x43, 0xad, 0xb8, 0x93, 0xd2, 0xfa, 0x96, 0x8c, 0xa2}
)
systemIDNames := scalar.BytesToScalar{
{Bytes: systemIDCommon[:], Scalar: scalar.S{Sym: "common"}},
{Bytes: systemIDWidevine[:], Scalar: scalar.S{Sym: "widevine"}},
{Bytes: systemIDPlayReady[:], Scalar: scalar.S{Sym: "playready"}},
{Bytes: systemIDFairPlay[:], Scalar: scalar.S{Sym: "fairplay"}},
systemIDNames := scalar.RawBytesMap{
{Bytes: systemIDCommon[:], Scalar: scalar.BitBuf{Sym: "common"}},
{Bytes: systemIDWidevine[:], Scalar: scalar.BitBuf{Sym: "widevine"}},
{Bytes: systemIDPlayReady[:], Scalar: scalar.BitBuf{Sym: "playready"}},
{Bytes: systemIDFairPlay[:], Scalar: scalar.BitBuf{Sym: "fairplay"}},
}
version := d.FieldU8("version")
@ -1495,7 +1496,7 @@ func decodeBox(ctx *decodeContext, d *decode.D, typ string) {
// for i := uint64(0); i < nPresentation; i++ {
// d.FieldStruct("presentation", func(d *decode.D) {
// d.FieldU8("presentation_version")
// presBytes := d.FieldUFn("pres_bytes", func() (uint64, decode.DisplayFormat, string) {
// presBytes := d.FieldUintFn("pres_bytes", func() (uint64, decode.DisplayFormat, string) {
// n := d.U8()
// if n == 0x0ff {
// n += d.U16()
@ -1611,7 +1612,7 @@ func decodeBox(ctx *decodeContext, d *decode.D, typ string) {
case "cdsc":
decodeBoxIrefEntry(ctx, d)
case "irot":
d.FieldU8("rotation", scalar.UToSymU{
d.FieldU8("rotation", scalar.UintMapSymUint{
0: 0,
1: 90,
2: 180,

@ -5,7 +5,7 @@ import "github.com/wader/fq/pkg/scalar"
// from:
// https://cconcolato.github.io/mp4ra/filetype.html
// https://exiftool.org/TagNames/QuickTime.html
var brandDescriptions = scalar.StrToDescription{
var brandDescriptions = scalar.StrMapDescription{
"3g2a": "3GPP2",
"3g2b": "3GPP2 Media (.3G2) compliant with 3GPP2 C.S0050-A V1.0.0",
"3g2c": "3GPP2 Media (.3G2) compliant with 3GPP2 C.S0050-B v1.0",

@ -4,7 +4,7 @@ import "github.com/wader/fq/pkg/scalar"
// based on https://github.com/HexFiend/HexFiend/blob/master/templates/Media/MOV.tcl
var boxDescriptions = scalar.StrToDescription{
var boxDescriptions = scalar.StrMapDescription{
"dimg": "Derived image",
"cdsc": "Content description",
"ainf": "Asset information to identify, license and play",

@ -270,7 +270,7 @@ func mp4Tracks(d *decode.D, ctx *decodeContext) {
}
d.FieldStruct("track", func(d *decode.D) {
d.FieldValueU("id", uint64(t.id))
d.FieldValueUint("id", uint64(t.id))
trackSDDataFormat := "unknown"
if len(t.sampleDescriptions) > 0 {

@ -20,7 +20,7 @@ const (
recordTypeLicenseStore = 2
)
var recordTypeNames = scalar.UToSymStr{
var recordTypeNames = scalar.UintMapSymStr{
recordTypeRightsManagementHeader: "Rights management header",
recordTypeLicenseStore: "License store",
}

@ -37,7 +37,7 @@ const (
TERM = 0b111
)
var syntaxElementNames = scalar.UToSymStr{
var syntaxElementNames = scalar.UintMapSymStr{
SCE: "SCE",
CPE: "CPE",
CCE: "CCE",
@ -58,7 +58,7 @@ const (
EXT_SBR_DATA_CRC = 0xe
)
var extensionPayloadIDNames = scalar.UToSymStr{
var extensionPayloadIDNames = scalar.UintMapSymStr{
EXT_FILL: "EXT_FILL",
EXT_FILL_DATA: "EXT_FILL_DATA",
EXT_DATA_ELEMENT: "EXT_DATA_ELEMENT",
@ -75,7 +75,7 @@ const (
LONG_STOP_SEQUENCE = 0x3
)
var windowSequenceNames = scalar.UToSymStr{
var windowSequenceNames = scalar.UintMapSymStr{
ONLY_LONG_SEQUENCE: "ONLY_LONG_SEQUENCE",
LONG_START_SEQUENCE: "LONG_START_SEQUENCE",
EIGHT_SHORT_SEQUENCE: "EIGHT_SHORT_SEQUENCE",
@ -254,7 +254,7 @@ func aacFillElement(d *decode.D) {
cnt += escCount - 1
}
})
d.FieldValueU("payload_length", cnt)
d.FieldValueUint("payload_length", cnt)
d.FieldStruct("extension_payload", func(d *decode.D) {
d.FramedFn(int64(cnt)*8, func(d *decode.D) {

@ -24,7 +24,7 @@ func init() {
})
}
var protectionAbsentNames = scalar.BoolToDescription{
var protectionAbsentNames = scalar.BoolMapDescription{
true: "No CRC",
false: "Has CRC",
}
@ -65,13 +65,13 @@ func adtsFrameDecoder(d *decode.D, _ any) any {
// P 2 Number of AAC frames (RDBs) in ADTS frame minus 1, for maximum compatibility always use 1 AAC frame per ADTS frame
// Q 16 CRC if protection absent is 0
d.FieldU12("syncword", d.AssertU(0b1111_1111_1111), scalar.ActualBin)
d.FieldU1("mpeg_version", scalar.UToSymStr{0: "mpeg4", 1: "mpeg2_aac"})
d.FieldU2("layer", d.AssertU(0))
d.FieldU12("syncword", d.UintAssert(0b1111_1111_1111), scalar.UintBin)
d.FieldU1("mpeg_version", scalar.UintMapSymStr{0: "mpeg4", 1: "mpeg2_aac"})
d.FieldU2("layer", d.UintAssert(0))
protectionAbsent := d.FieldBool("protection_absent", protectionAbsentNames)
objectType := d.FieldU2("profile", scalar.ActualUAdd(1), format.MPEGAudioObjectTypeNames)
d.FieldUFn("sampling_frequency", decodeEscapeValueAbsFn(4, 24, 0), frequencyIndexHzMap)
objectType := d.FieldU2("profile", scalar.UintActualAdd(1), format.MPEGAudioObjectTypeNames)
d.FieldUintFn("sampling_frequency", decodeEscapeValueAbsFn(4, 24, 0), frequencyIndexHzMap)
d.FieldU1("private_bit")
d.FieldU3("channel_configuration", channelConfigurationNames)
d.FieldU1("originality")
@ -86,7 +86,7 @@ func adtsFrameDecoder(d *decode.D, _ any) any {
}
d.FieldU11("buffer_fullness")
numberOfRDBs := d.FieldU2("number_of_rdbs", scalar.ActualUAdd(1))
numberOfRDBs := d.FieldU2("number_of_rdbs", scalar.UintActualAdd(1))
if !protectionAbsent {
d.FieldU16("crc")
}
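Sketch of the bool mapper and binary display used in this frame header (illustrative; it reuses the CRC descriptions above with a made-up field layout):

var sketchCRCNames = scalar.BoolMapDescription{
	true:  "No CRC",
	false: "Has CRC",
}

func sketchFrameHeader(d *decode.D) {
	d.FieldU12("syncword", d.UintAssert(0b1111_1111_1111), scalar.UintBin) // display actual in binary
	d.FieldBool("protection_absent", sketchCRCNames)
}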

@ -25,7 +25,7 @@ func init() {
})
}
var avcProfileNames = scalar.UToSymStr{
var avcProfileNames = scalar.UintMapSymStr{
// 66: "constrained_baseline_profile", // (CBP, 66 with constraint set 1)
66: "baseline_profile",
88: "extended_profile",
@ -48,7 +48,7 @@ var avcProfileNames = scalar.UToSymStr{
}
// TODO: 1b contraint flag 1?
var avcLevelNames = scalar.UToSymStr{
var avcLevelNames = scalar.UintMapSymStr{
10: "1",
//10: "1b"
11: "1.1",
@ -122,7 +122,7 @@ func avcDcrDecode(d *decode.D, _ any) any {
d.FieldU8("profile_compatibility")
d.FieldU8("level_indication", avcLevelNames)
d.FieldU6("reserved0")
lengthSize := d.FieldU2("length_size", scalar.ActualUAdd(1))
lengthSize := d.FieldU2("length_size", scalar.UintActualAdd(1))
d.FieldU3("reserved1")
numSeqParamSets := d.FieldU5("num_of_sequence_parameter_sets")
d.FieldArray("sequence_parameter_sets", func(d *decode.D) {

@ -65,7 +65,7 @@ const (
avcNALCodedSliceExtension = 20
)
var avcNALNames = scalar.UToScalar{
var avcNALNames = scalar.UintMap{
1: {Sym: "slice", Description: "Coded slice of a non-IDR picture"},
2: {Sym: "dpa", Description: "Coded slice data partition A"},
3: {Sym: "dpb", Description: "Coded slice data partition B"},
@ -85,7 +85,7 @@ var avcNALNames = scalar.UToScalar{
20: {Sym: "exten_slice", Description: "Coded slice extension"},
}
var sliceNames = scalar.UToSymStr{
var sliceNames = scalar.UintMapSymStr{
0: "p",
1: "b",
2: "i",
@ -113,9 +113,9 @@ func avcNALUDecode(d *decode.D, _ any) any {
avcNALCodedSliceAuxWithoutPartition,
avcNALCodedSliceExtension:
d.FieldStruct("slice_header", func(d *decode.D) {
d.FieldUFn("first_mb_in_slice", uEV)
d.FieldUFn("slice_type", uEV, sliceNames)
d.FieldUFn("pic_parameter_set_id", uEV)
d.FieldUintFn("first_mb_in_slice", uEV)
d.FieldUintFn("slice_type", uEV, sliceNames)
d.FieldUintFn("pic_parameter_set_id", uEV)
// TODO: if ( separate_colour_plane_flag from SPS ) colour_plane_id; frame_num
})
case avcNALSupplementalEnhancementInformation:

@ -21,26 +21,26 @@ func moreRBSPData(d *decode.D) bool {
}
func avcPPSDecode(d *decode.D, _ any) any {
d.FieldUFn("pic_parameter_set_id", uEV)
d.FieldUFn("seq_parameter_set_id", uEV)
d.FieldUintFn("pic_parameter_set_id", uEV)
d.FieldUintFn("seq_parameter_set_id", uEV)
d.FieldBool("entropy_coding_mode_flag")
d.FieldBool("bottom_field_pic_order_in_frame_present_flag")
numSliceGroups := d.FieldUFn("num_slice_groups", uEV, scalar.ActualUAdd(1))
numSliceGroups := d.FieldUintFn("num_slice_groups", uEV, scalar.UintActualAdd(1))
if numSliceGroups > 1 {
sliceGroupMapType := d.FieldUFn("slice_group_map_type", uEV)
sliceGroupMapType := d.FieldUintFn("slice_group_map_type", uEV)
switch sliceGroupMapType {
case 0:
d.FieldArray("slice_groups", func(d *decode.D) {
for i := uint64(0); i < numSliceGroups; i++ {
d.FieldUFn("slice_group", uEV)
d.FieldUintFn("slice_group", uEV)
}
})
case 2:
d.FieldArray("slice_groups", func(d *decode.D) {
for i := uint64(0); i < numSliceGroups; i++ {
d.FieldStruct("slice_group", func(d *decode.D) {
d.FieldUFn("top_left", uEV)
d.FieldUFn("bottom_right", uEV)
d.FieldUintFn("top_left", uEV)
d.FieldUintFn("bottom_right", uEV)
})
}
})
@ -49,12 +49,12 @@ func avcPPSDecode(d *decode.D, _ any) any {
for i := uint64(0); i < numSliceGroups; i++ {
d.FieldStruct("slice_group", func(d *decode.D) {
d.FieldBool("change_direction_flag")
d.FieldUFn("change_rate", uEV, scalar.ActualUAdd(1))
d.FieldUintFn("change_rate", uEV, scalar.UintActualAdd(1))
})
}
})
case 6:
picSizeInMapUnits := d.FieldUFn("pic_size_in_map_units", uEV, scalar.ActualUAdd(1))
picSizeInMapUnits := d.FieldUintFn("pic_size_in_map_units", uEV, scalar.UintActualAdd(1))
for i := uint64(0); i < picSizeInMapUnits; i++ {
d.FieldStruct("slice_group", func(d *decode.D) {
d.FieldBool("id")
@ -63,13 +63,13 @@ func avcPPSDecode(d *decode.D, _ any) any {
}
}
d.FieldUFn("num_ref_idx_l0_default_active", uEV, scalar.ActualUAdd(1))
d.FieldUFn("num_ref_idx_l1_default_active", uEV, scalar.ActualUAdd(1))
d.FieldUintFn("num_ref_idx_l0_default_active", uEV, scalar.UintActualAdd(1))
d.FieldUintFn("num_ref_idx_l1_default_active", uEV, scalar.UintActualAdd(1))
d.FieldBool("weighted_pred_flag")
d.FieldU2("weighted_bipred_idc")
d.FieldSFn("pic_init_qp", sEV, scalar.ActualSAdd(26))
d.FieldSFn("pic_init_qs", sEV, scalar.ActualSAdd(26))
d.FieldSFn("chroma_qp_index_offset", sEV)
d.FieldSintFn("pic_init_qp", sEV, scalar.SintActualAdd(26))
d.FieldSintFn("pic_init_qs", sEV, scalar.SintActualAdd(26))
d.FieldSintFn("chroma_qp_index_offset", sEV)
d.FieldBool("deblocking_filter_control_present_flag")
d.FieldBool("constrained_intra_pred_flag")
d.FieldBool("redundant_pic_cnt_present_flag")
@ -84,7 +84,7 @@ func avcPPSDecode(d *decode.D, _ any) any {
}
})
}
d.FieldSFn("second_chroma_qp_index_offset", sEV)
d.FieldSintFn("second_chroma_qp_index_offset", sEV)
} else {
d.FieldBool("rbsp_stop_one_bit")
}
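Sketch of the renamed actual-value adjusters for exp-Golomb fields (illustrative; uEV/sEV are the readers defined in this file):

func sketchExpGolombFields(d *decode.D) {
	d.FieldUintFn("cpb_cnt", uEV, scalar.UintActualAdd(1))      // stored as value minus 1
	d.FieldSintFn("pic_init_qp", sEV, scalar.SintActualAdd(26)) // stored as qp minus 26
	d.FieldU2("length_size", scalar.UintActualAdd(1))           // fixed-width field, stored minus 1
}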

@ -19,7 +19,7 @@ const (
avcSEIUserDataUnregistered = 5
)
var seiNames = scalar.UToSymStr{
var seiNames = scalar.UintMapSymStr{
0: "buffering_period",
1: "pic_timing",
2: "pan_scan_rect",
@ -84,8 +84,8 @@ var (
x264Bytes = [16]byte{0xdc, 0x45, 0xe9, 0xbd, 0xe6, 0xd9, 0x48, 0xb7, 0x96, 0x2c, 0xd8, 0x20, 0xd9, 0x23, 0xee, 0xef}
)
var userDataUnregisteredNames = scalar.BytesToScalar{
{Bytes: x264Bytes[:], Scalar: scalar.S{Sym: "x264"}},
var userDataUnregisteredNames = scalar.RawBytesMap{
{Bytes: x264Bytes[:], Scalar: scalar.BitBuf{Sym: "x264"}},
}
// sum bytes until < 0xff
@ -102,8 +102,8 @@ func ffSum(d *decode.D) uint64 {
}
func avcSEIDecode(d *decode.D, _ any) any {
payloadType := d.FieldUFn("payload_type", func(d *decode.D) uint64 { return ffSum(d) }, seiNames)
payloadSize := d.FieldUFn("payload_size", func(d *decode.D) uint64 { return ffSum(d) })
payloadType := d.FieldUintFn("payload_type", func(d *decode.D) uint64 { return ffSum(d) }, seiNames)
payloadSize := d.FieldUintFn("payload_size", func(d *decode.D) uint64 { return ffSum(d) })
d.FramedFn(int64(payloadSize)*8, func(d *decode.D) {
switch payloadType {

@ -15,7 +15,7 @@ func init() {
})
}
var avcVideoFormatMap = scalar.UToSymStr{
var avcVideoFormatMap = scalar.UintMapSymStr{
0: "component",
1: "pal",
2: "ntsc",
@ -26,7 +26,7 @@ var avcVideoFormatMap = scalar.UToSymStr{
7: "reserved",
}
var avcAspectRatioIdcMap = scalar.UToSymStr{
var avcAspectRatioIdcMap = scalar.UintMapSymStr{
0: "unspecified",
1: "1:1",
2: "12:11",
@ -46,7 +46,7 @@ var avcAspectRatioIdcMap = scalar.UToSymStr{
16: "2:1",
}
var chromaFormatMap = scalar.UToSymStr{
var chromaFormatMap = scalar.UintMapSymStr{
0: "monochrome",
1: "4:2:0",
2: "4:2:2",
@ -80,8 +80,8 @@ func avcVuiParameters(d *decode.D) {
}
chromaLocInfoPresentFlag := d.FieldBool("chroma_loc_info_present_flag")
if chromaLocInfoPresentFlag {
d.FieldUFn("chroma_sample_loc_type_top_field", uEV)
d.FieldUFn("chroma_sample_loc_type_bottom_field", uEV)
d.FieldUintFn("chroma_sample_loc_type_top_field", uEV)
d.FieldUintFn("chroma_sample_loc_type_bottom_field", uEV)
}
timingInfoPresentFlag := d.FieldBool("timing_info_present_flag")
@ -106,31 +106,31 @@ func avcVuiParameters(d *decode.D) {
bitstreamRestrictionFlag := d.FieldBool("bitstream_restriction_flag")
if bitstreamRestrictionFlag {
d.FieldBool("motion_vectors_over_pic_boundaries_flag")
d.FieldUFn("max_bytes_per_pic_denom", uEV)
d.FieldUFn("max_bits_per_mb_denom", uEV)
d.FieldUFn("log2_max_mv_length_horizontal", uEV)
d.FieldUFn("log2_max_mv_length_vertical", uEV)
d.FieldUFn("max_num_reorder_frames", uEV)
d.FieldUFn("max_dec_frame_buffering", uEV)
d.FieldUintFn("max_bytes_per_pic_denom", uEV)
d.FieldUintFn("max_bits_per_mb_denom", uEV)
d.FieldUintFn("log2_max_mv_length_horizontal", uEV)
d.FieldUintFn("log2_max_mv_length_vertical", uEV)
d.FieldUintFn("max_num_reorder_frames", uEV)
d.FieldUintFn("max_dec_frame_buffering", uEV)
}
}
func avcHdrParameters(d *decode.D) {
cpbCnt := d.FieldUFn("cpb_cnt", uEV, scalar.ActualUAdd(1))
cpbCnt := d.FieldUintFn("cpb_cnt", uEV, scalar.UintActualAdd(1))
d.FieldU4("bit_rate_scale")
d.FieldU4("cpb_size_scale")
d.FieldArray("sched_sels", func(d *decode.D) {
for i := uint64(0); i < cpbCnt; i++ {
d.FieldStruct("sched_sel", func(d *decode.D) {
d.FieldUFn("bit_rate_value", uEV, scalar.ActualUAdd(1))
d.FieldUFn("cpb_size_value", uEV, scalar.ActualUAdd(1))
d.FieldUintFn("bit_rate_value", uEV, scalar.UintActualAdd(1))
d.FieldUintFn("cpb_size_value", uEV, scalar.UintActualAdd(1))
d.FieldBool("cbr_flag")
})
}
})
d.FieldU5("initial_cpb_removal_delay_length", scalar.ActualUAdd(1))
d.FieldU5("cpb_removal_delay_length", scalar.ActualUAdd(1))
d.FieldU5("dpb_output_delay_length", scalar.ActualUAdd(1))
d.FieldU5("initial_cpb_removal_delay_length", scalar.UintActualAdd(1))
d.FieldU5("cpb_removal_delay_length", scalar.UintActualAdd(1))
d.FieldU5("dpb_output_delay_length", scalar.UintActualAdd(1))
d.FieldU5("time_offset_length")
}
@ -144,18 +144,18 @@ func avcSPSDecode(d *decode.D, _ any) any {
d.FieldBool("constraint_set5_flag")
d.FieldU2("reserved_zero_2bits")
d.FieldU8("level_idc", avcLevelNames)
d.FieldUFn("seq_parameter_set_id", uEV)
d.FieldUintFn("seq_parameter_set_id", uEV)
switch profileIdc {
// TODO: ffmpeg has some more (legacy values?)
case 100, 110, 122, 244, 44, 83, 86, 118, 128, 138, 139, 134, 135:
chromaFormatIdc := d.FieldUFn("chroma_format_idc", uEV, chromaFormatMap)
chromaFormatIdc := d.FieldUintFn("chroma_format_idc", uEV, chromaFormatMap)
if chromaFormatIdc == 3 {
d.FieldBool("separate_colour_plane_flag")
}
d.FieldUFn("bit_depth_luma", uEV, scalar.ActualUAdd(8))
d.FieldUFn("bit_depth_chroma", uEV, scalar.ActualUAdd(8))
d.FieldUintFn("bit_depth_luma", uEV, scalar.UintActualAdd(8))
d.FieldUintFn("bit_depth_chroma", uEV, scalar.UintActualAdd(8))
d.FieldBool("qpprime_y_zero_transform_bypass_flag")
seqScalingMatrixPresentFlag := d.FieldBool("seq_scaling_matrix_present_flag")
if seqScalingMatrixPresentFlag {
@ -164,16 +164,16 @@ func avcSPSDecode(d *decode.D, _ any) any {
}
}
d.FieldUFn("log2_max_frame_num", uEV, scalar.ActualUAdd(4))
d.FieldUintFn("log2_max_frame_num", uEV, scalar.UintActualAdd(4))
picOrderCntType := d.FieldUFn("pic_order_cnt_type", uEV)
picOrderCntType := d.FieldUintFn("pic_order_cnt_type", uEV)
if picOrderCntType == 0 {
d.FieldUFn("log2_max_pic_order_cnt_lsb", uEV, scalar.ActualUAdd(4))
d.FieldUintFn("log2_max_pic_order_cnt_lsb", uEV, scalar.UintActualAdd(4))
} else if picOrderCntType == 1 {
d.FieldBool("delta_pic_order_always_zero_flag")
d.FieldSFn("offset_for_non_ref_pic", sEV)
d.FieldSFn("offset_for_top_to_bottom_field", sEV)
numRefFramesInPicOrderCntCycle := d.FieldUFn("num_ref_frames_in_pic_order_cnt_cycle", uEV)
d.FieldSintFn("offset_for_non_ref_pic", sEV)
d.FieldSintFn("offset_for_top_to_bottom_field", sEV)
numRefFramesInPicOrderCntCycle := d.FieldUintFn("num_ref_frames_in_pic_order_cnt_cycle", uEV)
d.FieldArray("offset_for_ref_frames", func(d *decode.D) {
for i := uint64(0); i < numRefFramesInPicOrderCntCycle; i++ {
sEV(d)
@ -181,10 +181,10 @@ func avcSPSDecode(d *decode.D, _ any) any {
})
}
d.FieldUFn("max_num_ref_frames", uEV)
d.FieldUintFn("max_num_ref_frames", uEV)
d.FieldBool("gaps_in_frame_num_value_allowed_flag")
d.FieldUFn("pic_width_in_mbs", uEV, scalar.ActualUAdd(1))
d.FieldUFn("pic_height_in_map_units", uEV, scalar.ActualUAdd(1))
d.FieldUintFn("pic_width_in_mbs", uEV, scalar.UintActualAdd(1))
d.FieldUintFn("pic_height_in_map_units", uEV, scalar.UintActualAdd(1))
frameMbsOnlyFlag := d.FieldBool("frame_mbs_only_flag")
if !frameMbsOnlyFlag {
d.FieldBool("mb_adaptive_frame_field_flag")
@ -192,10 +192,10 @@ func avcSPSDecode(d *decode.D, _ any) any {
d.FieldBool("direct_8x8_inference_flag")
frameCroppingFlag := d.FieldBool("frame_cropping_flag")
if frameCroppingFlag {
d.FieldUFn("frame_crop_left_offset", uEV)
d.FieldUFn("frame_crop_right_offset", uEV)
d.FieldUFn("frame_crop_top_offset", uEV)
d.FieldUFn("frame_crop_bottom_offset", uEV)
d.FieldUintFn("frame_crop_left_offset", uEV)
d.FieldUintFn("frame_crop_right_offset", uEV)
d.FieldUintFn("frame_crop_top_offset", uEV)
d.FieldUintFn("frame_crop_bottom_offset", uEV)
}
vuiParametersPresentFlag := d.FieldBool("vui_parameters_present_flag")
if vuiParametersPresentFlag {

@ -35,14 +35,14 @@ func hevcDcrDecode(d *decode.D, _ any) any {
d.FieldU6("reserved2")
d.FieldU2("chroma_format_idc")
d.FieldU5("reserved3")
d.FieldU3("bit_depth_luma", scalar.ActualUAdd(8))
d.FieldU3("bit_depth_luma", scalar.UintActualAdd(8))
d.FieldU5("reserved4")
d.FieldU3("bit_depth_chroma", scalar.ActualUAdd(8))
d.FieldU3("bit_depth_chroma", scalar.UintActualAdd(8))
d.FieldU16("avg_frame_rate")
d.FieldU2("constant_frame_rate")
d.FieldU3("num_temporal_layers")
d.FieldU1("temporal_id_nested")
lengthSize := d.FieldU2("length_size", scalar.ActualUAdd(1))
lengthSize := d.FieldU2("length_size", scalar.UintActualAdd(1))
numArrays := d.FieldU8("num_of_arrays")
d.FieldArray("arrays", func(d *decode.D) {
for i := uint64(0); i < numArrays; i++ {

@ -31,7 +31,7 @@ const (
hevcNALNUTPPS = 34
)
var hevcNALNames = scalar.UToSymStr{
var hevcNALNames = scalar.UintMapSymStr{
0: "TRAIL_N",
1: "TRAIL_R",
2: "TSA_N",

@ -18,24 +18,24 @@ func init() {
// H.265 page 36
func hevcPPSDecode(d *decode.D, _ any) any {
d.FieldUFn("pps_pic_parameter_set_id", uEV)
d.FieldUFn("pps_seq_parameter_set_id", uEV)
d.FieldUintFn("pps_pic_parameter_set_id", uEV)
d.FieldUintFn("pps_seq_parameter_set_id", uEV)
d.FieldBool("dependent_slice_segments_enabled_flag")
d.FieldBool("output_flag_present_flag")
d.FieldU3("num_extra_slice_header_bits")
d.FieldBool("sign_data_hiding_enabled_flag")
d.FieldBool("cabac_init_present_flag")
d.FieldUFn("num_ref_idx_l0_default_active_minus1", uEV)
d.FieldUFn("num_ref_idx_l1_default_active_minus1", uEV)
d.FieldSFn("init_qp_minus26", sEV)
d.FieldUintFn("num_ref_idx_l0_default_active_minus1", uEV)
d.FieldUintFn("num_ref_idx_l1_default_active_minus1", uEV)
d.FieldSintFn("init_qp_minus26", sEV)
d.FieldBool("constrained_intra_pred_flag")
d.FieldBool("transform_skip_enabled_flag")
cuQpDeltaEnabledFlag := d.FieldBool("cu_qp_delta_enabled_flag")
if cuQpDeltaEnabledFlag {
d.FieldUFn("diff_cu_qp_delta_depth", uEV)
d.FieldUintFn("diff_cu_qp_delta_depth", uEV)
}
d.FieldSFn("pps_cb_qp_offset", sEV)
d.FieldSFn("pps_cr_qp_offset", sEV)
d.FieldSintFn("pps_cb_qp_offset", sEV)
d.FieldSintFn("pps_cr_qp_offset", sEV)
d.FieldBool("pps_slice_chroma_qp_offsets_present_flag")
d.FieldBool("weighted_pred_flag")
d.FieldBool("weighted_bipred_flag")
@ -43,18 +43,18 @@ func hevcPPSDecode(d *decode.D, _ any) any {
tilesEnabledFlag := d.FieldBool("tiles_enabled_flag")
d.FieldBool("entropy_coding_sync_enabled_flag")
if tilesEnabledFlag {
numTileColumnsMinus1 := d.FieldUFn("num_tile_columns_minus1", uEV)
numTileRowsMinus1 := d.FieldUFn("num_tile_rows_minus1", uEV)
numTileColumnsMinus1 := d.FieldUintFn("num_tile_columns_minus1", uEV)
numTileRowsMinus1 := d.FieldUintFn("num_tile_rows_minus1", uEV)
uniformSpacingFlag := d.FieldBool("uniform_spacing_flag")
if !uniformSpacingFlag {
d.FieldArray("column_widths", func(d *decode.D) {
for i := uint64(0); i < numTileColumnsMinus1; i++ {
d.FieldUFn("column_width", uEV)
d.FieldUintFn("column_width", uEV)
}
})
d.FieldArray("row_heights", func(d *decode.D) {
for i := uint64(0); i < numTileRowsMinus1; i++ {
d.FieldUFn("row_height", uEV)
d.FieldUintFn("row_height", uEV)
}
})
}
@ -66,8 +66,8 @@ func hevcPPSDecode(d *decode.D, _ any) any {
d.FieldBool("deblocking_filter_override_enabled_flag")
ppsDeblockingFilterDisabledFlag := d.FieldBool("pps_deblocking_filter_disabled_flag")
if !ppsDeblockingFilterDisabledFlag {
d.FieldSFn("pps_beta_offset_div2", sEV)
d.FieldSFn("pps_tc_offset_div2", sEV)
d.FieldSintFn("pps_beta_offset_div2", sEV)
d.FieldSintFn("pps_tc_offset_div2", sEV)
}
}
ppsScalingListDataPresentFlag := d.FieldBool("pps_scaling_list_data_present_flag")
@ -76,7 +76,7 @@ func hevcPPSDecode(d *decode.D, _ any) any {
return nil
}
d.FieldBool("lists_modification_present_flag")
d.FieldUFn("log2_parallel_merge_level_minus2", uEV)
d.FieldUintFn("log2_parallel_merge_level_minus2", uEV)
d.FieldBool("slice_segment_header_extension_present_flag")
ppsExtensionPresentFlag := d.FieldBool("pps_extension_present_flag")
if ppsExtensionPresentFlag {

View File

@ -106,11 +106,11 @@ func profileTierLevelDecode(d *decode.D, profilePresentFlag bool, maxNumSubLayer
func hevcSubLayerHrdParameters(d *decode.D, subPicHrdParamsPresentFlag bool, cpbCntMinus1 int) {
for i := 0; i <= cpbCntMinus1; i++ {
d.FieldStruct("parameters", func(d *decode.D) {
d.FieldUFn("bit_rate_value_minus1", uEV)
d.FieldUFn("cpb_size_value_minus1", uEV)
d.FieldUintFn("bit_rate_value_minus1", uEV)
d.FieldUintFn("cpb_size_value_minus1", uEV)
if subPicHrdParamsPresentFlag {
d.FieldUFn("cpb_size_du_value_minus1", uEV)
d.FieldUFn("bit_rate_du_value_minus1", uEV)
d.FieldUintFn("cpb_size_du_value_minus1", uEV)
d.FieldUintFn("bit_rate_du_value_minus1", uEV)
}
d.FieldBool("cbr_flag")
})
@ -152,13 +152,13 @@ func hevcHrdParameters(d *decode.D, commonInfPresentFlag bool, maxNumSubLayersMi
}
var lowDelayHrdFlag bool
if fixedPicRateWithinCvsFlag {
d.FieldUFn("elemental_duration_in_tc_minus1", uEV)
d.FieldUintFn("elemental_duration_in_tc_minus1", uEV)
} else {
lowDelayHrdFlag = d.FieldBool("low_delay_hrd_flag")
}
var cpbCntMinus1 int
if !lowDelayHrdFlag {
cpbCntMinus1 = int(d.FieldUFn("cpb_cnt_minus1", uEV))
cpbCntMinus1 = int(d.FieldUintFn("cpb_cnt_minus1", uEV))
}
if nalHrdParametersPresentFlag {
hevcSubLayerHrdParameters(d, subPicHrdParamsPresentFlag, cpbCntMinus1)
@ -198,8 +198,8 @@ func hevcVuiParameters(d *decode.D, spsMaxSubLayersMinus1 uint64) {
}
chromaLocInfoPresentFlag := d.FieldBool("chroma_loc_info_present_flag")
if chromaLocInfoPresentFlag {
d.FieldUFn("chroma_sample_loc_type_top_field", uEV)
d.FieldUFn("chroma_sample_loc_type_bottom_field", uEV)
d.FieldUintFn("chroma_sample_loc_type_top_field", uEV)
d.FieldUintFn("chroma_sample_loc_type_bottom_field", uEV)
}
d.FieldBool("neutral_chroma_indication_flag")
@ -207,10 +207,10 @@ func hevcVuiParameters(d *decode.D, spsMaxSubLayersMinus1 uint64) {
d.FieldBool("frame_field_info_present_flag")
defaultDisplayWindowFlag := d.FieldBool("default_display_window_flag")
if defaultDisplayWindowFlag {
d.FieldUFn("def_disp_win_left_offset", uEV)
d.FieldUFn("def_disp_win_right_offset", uEV)
d.FieldUFn("def_disp_win_top_offset", uEV)
d.FieldUFn("def_disp_win_bottom_offset", uEV)
d.FieldUintFn("def_disp_win_left_offset", uEV)
d.FieldUintFn("def_disp_win_right_offset", uEV)
d.FieldUintFn("def_disp_win_top_offset", uEV)
d.FieldUintFn("def_disp_win_bottom_offset", uEV)
}
vuiTimingInfoPresentFlag := d.FieldBool("vui_timing_info_present_flag")
@ -219,7 +219,7 @@ func hevcVuiParameters(d *decode.D, spsMaxSubLayersMinus1 uint64) {
d.FieldU32("vui_time_scale")
vuiPocProportionalToTimingFlag := d.FieldBool("vui_poc_proportional_to_timing_flag")
if vuiPocProportionalToTimingFlag {
d.FieldUFn("vui_num_ticks_poc_diff_one_minus1", uEV)
d.FieldUintFn("vui_num_ticks_poc_diff_one_minus1", uEV)
}
vuiHrdParametersPresentFlag := d.FieldBool("vui_hrd_parameters_present_flag")
if vuiHrdParametersPresentFlag {
@ -232,11 +232,11 @@ func hevcVuiParameters(d *decode.D, spsMaxSubLayersMinus1 uint64) {
d.FieldBool("tiles_fixed_structure_flag")
d.FieldBool("motion_vectors_over_pic_boundaries_flag")
d.FieldBool("restricted_ref_pic_lists_flag")
d.FieldUFn("min_spatial_segmentation_idc", uEV)
d.FieldUFn("max_bytes_per_pic_denom", uEV)
d.FieldUFn("max_bits_per_min_cu_denom", uEV)
d.FieldUFn("log2_max_mv_length_horizontal", uEV)
d.FieldUFn("log2_max_mv_length_vertical", uEV)
d.FieldUintFn("min_spatial_segmentation_idc", uEV)
d.FieldUintFn("max_bytes_per_pic_denom", uEV)
d.FieldUintFn("max_bits_per_min_cu_denom", uEV)
d.FieldUintFn("log2_max_mv_length_horizontal", uEV)
d.FieldUintFn("log2_max_mv_length_vertical", uEV)
}
}
@ -246,23 +246,23 @@ func hevcSPSDecode(d *decode.D, _ any) any {
spsMaxSubLayersMinus1 := d.FieldU3("sps_max_sub_layers_minus1")
d.FieldBool("sps_temporal_id_nesting_flag")
profileTierLevelDecode(d, true, spsMaxSubLayersMinus1)
d.FieldUFn("sps_seq_parameter_set_id", uEV)
chromaFormatIdc := d.FieldUFn("chroma_format_idc", uEV, chromaFormatMap)
d.FieldUintFn("sps_seq_parameter_set_id", uEV)
chromaFormatIdc := d.FieldUintFn("chroma_format_idc", uEV, chromaFormatMap)
if chromaFormatIdc == 3 {
d.FieldBool("separate_colour_plane_flag")
}
d.FieldUFn("pic_width_in_luma_samples", uEV)
d.FieldUFn("pic_height_in_luma_samples", uEV)
d.FieldUintFn("pic_width_in_luma_samples", uEV)
d.FieldUintFn("pic_height_in_luma_samples", uEV)
conformanceWindowFlag := d.FieldBool("conformance_window_flag")
if conformanceWindowFlag {
d.FieldUFn("conf_win_left_offset", uEV)
d.FieldUFn("conf_win_right_offset", uEV)
d.FieldUFn("conf_win_top_offset", uEV)
d.FieldUFn("conf_win_bottom_offset", uEV)
d.FieldUintFn("conf_win_left_offset", uEV)
d.FieldUintFn("conf_win_right_offset", uEV)
d.FieldUintFn("conf_win_top_offset", uEV)
d.FieldUintFn("conf_win_bottom_offset", uEV)
}
d.FieldUFn("bit_depth_luma_minus8", uEV)
d.FieldUFn("bit_depth_chroma_minus8", uEV)
d.FieldUFn("log2_max_pic_order_cnt_lsb_minus4", uEV)
d.FieldUintFn("bit_depth_luma_minus8", uEV)
d.FieldUintFn("bit_depth_chroma_minus8", uEV)
d.FieldUintFn("log2_max_pic_order_cnt_lsb_minus4", uEV)
spsSubLayerOrderingInfoPresentFlag := d.FieldBool("sps_sub_layer_ordering_info_present_flag")
d.FieldArray("sps_sub_layer_ordering_infos", func(d *decode.D) {
i := spsMaxSubLayersMinus1
@ -271,18 +271,18 @@ func hevcSPSDecode(d *decode.D, _ any) any {
}
for ; i <= spsMaxSubLayersMinus1; i++ {
d.FieldStruct("sps_sub_layer_ordering_info", func(d *decode.D) {
d.FieldUFn("sps_max_dec_pic_buffering_minus1", uEV)
d.FieldUFn("sps_max_num_reorder_pics", uEV)
d.FieldUFn("sps_max_latency_increase_plus1", uEV)
d.FieldUintFn("sps_max_dec_pic_buffering_minus1", uEV)
d.FieldUintFn("sps_max_num_reorder_pics", uEV)
d.FieldUintFn("sps_max_latency_increase_plus1", uEV)
})
}
})
d.FieldUFn("log2_min_luma_coding_block_size_minus3", uEV)
d.FieldUFn("log2_diff_max_min_luma_coding_block_size", uEV)
d.FieldUFn("log2_min_luma_transform_block_size_minus2", uEV)
d.FieldUFn("log2_diff_max_min_luma_transform_block_size", uEV)
d.FieldUFn("max_transform_hierarchy_depth_inter", uEV)
d.FieldUFn("max_transform_hierarchy_depth_intra", uEV)
d.FieldUintFn("log2_min_luma_coding_block_size_minus3", uEV)
d.FieldUintFn("log2_diff_max_min_luma_coding_block_size", uEV)
d.FieldUintFn("log2_min_luma_transform_block_size_minus2", uEV)
d.FieldUintFn("log2_diff_max_min_luma_transform_block_size", uEV)
d.FieldUintFn("max_transform_hierarchy_depth_inter", uEV)
d.FieldUintFn("max_transform_hierarchy_depth_intra", uEV)
scalingListEnabledFlag := d.FieldBool("scaling_list_enabled_flag")
if scalingListEnabledFlag {
spsScalingListDataPresentFlag := d.FieldBool("sps_scaling_list_data_present_flag")
@ -297,11 +297,11 @@ func hevcSPSDecode(d *decode.D, _ any) any {
if pcmEnabledFlag {
d.FieldU4("pcm_sample_bit_depth_luma_minus1")
d.FieldU4("pcm_sample_bit_depth_chroma_minus1")
d.FieldUFn("log2_min_pcm_luma_coding_block_size_minus3", uEV)
d.FieldUFn("log2_diff_max_min_pcm_luma_coding_block_size", uEV)
d.FieldUintFn("log2_min_pcm_luma_coding_block_size_minus3", uEV)
d.FieldUintFn("log2_diff_max_min_pcm_luma_coding_block_size", uEV)
d.FieldBool("pcm_loop_filter_disabled_flag")
}
numShortTermRefPicSets := d.FieldUFn("num_short_term_ref_pic_sets", uEV)
numShortTermRefPicSets := d.FieldUintFn("num_short_term_ref_pic_sets", uEV)
if numShortTermRefPicSets > 0 {
// TODO
return nil

View File

@ -36,14 +36,14 @@ func hevcVPSDecode(d *decode.D, _ any) any {
}
for ; i <= vpsMaxSubLayersMinus1; i++ {
d.FieldStruct("sps_sub_layer_ordering_info", func(d *decode.D) {
d.FieldUFn("sps_max_dec_pic_buffering_minus1", uEV)
d.FieldUFn("sps_max_num_reorder_pics", uEV)
d.FieldUFn("sps_max_latency_increase_plus1", uEV)
d.FieldUintFn("sps_max_dec_pic_buffering_minus1", uEV)
d.FieldUintFn("sps_max_num_reorder_pics", uEV)
d.FieldUintFn("sps_max_latency_increase_plus1", uEV)
})
}
})
vpsMaxLayerID := d.FieldU6("vps_max_layer_id")
vpsNumLayerSetsMinus1 := d.FieldUFn("vps_num_layer_sets_minus1", uEV)
vpsNumLayerSetsMinus1 := d.FieldUintFn("vps_num_layer_sets_minus1", uEV)
if vpsNumLayerSetsMinus1 > maxVpsLayers {
d.Errorf("too many vps layers %d > %d", vpsNumLayerSetsMinus1, maxVpsLayers)
}
@ -62,7 +62,7 @@ func hevcVPSDecode(d *decode.D, _ any) any {
d.FieldU32("vps_time_scale")
vpsPocProportionalToTimingFlag := d.FieldBool("vps_poc_proportional_to_timing_flag")
if vpsPocProportionalToTimingFlag {
d.FieldUFn("vps_num_ticks_poc_diff_one_minus1", uEV)
d.FieldUintFn("vps_num_ticks_poc_diff_one_minus1", uEV)
}
vpsHrdParametersPresentFlag := d.FieldBool("vps_hrd_parameters_present_flag")
if vpsHrdParametersPresentFlag {

View File

@ -14,6 +14,7 @@ package mpeg
import (
"github.com/wader/fq/format"
"github.com/wader/fq/pkg/bitio"
"github.com/wader/fq/pkg/checksum"
"github.com/wader/fq/pkg/decode"
"github.com/wader/fq/pkg/interp"
@ -92,7 +93,7 @@ func init() {
// 15: {4, 3},
// }
var blockTypeNames = scalar.UToSymStr{
var blockTypeNames = scalar.UintMapSymStr{
0: "reserved",
1: "start block",
2: "3 short windows",
@ -105,7 +106,7 @@ const (
mpegVersion25 = 0b00
)
var mpegVersionNames = scalar.UToScalar{
var mpegVersionNames = scalar.UintMap{
mpegVersion1: {Sym: "1", Description: "MPEG Version 1"},
mpegVersion2: {Sym: "2", Description: "MPEG Version 2"},
mpegVersion25: {Sym: "2.5", Description: "MPEG Version 2.5"},
@ -123,7 +124,7 @@ const (
mpegLayer3 = 0b01
)
var mpegLayerNames = scalar.UToScalar{
var mpegLayerNames = scalar.UintMap{
mpegLayer1: {Sym: 1, Description: "MPEG Layer 1"},
mpegLayer2: {Sym: 2, Description: "MPEG Layer 2"},
mpegLayer3: {Sym: 3, Description: "MPEG Layer 3"},
@ -135,7 +136,7 @@ var mpegLayerN = map[uint64]uint64{
mpegLayer1: 1,
}
var protectionNames = scalar.BoolToDescription{
var protectionNames = scalar.BoolMapDescription{
true: "No CRC",
false: "Has CRC",
}
@ -158,7 +159,7 @@ func frameDecode(d *decode.D, _ any) any {
var lsf bool // low sampling frequencies, 1 granule if true
d.FieldStruct("header", func(d *decode.D) {
d.FieldU11("sync", d.AssertU(0b111_1111_1111), scalar.ActualBin)
d.FieldU11("sync", d.UintAssert(0b111_1111_1111), scalar.UintBin)
// v = 3 means version 2.5
mpegVersion := d.FieldU2("mpeg_version", mpegVersionNames)
@ -172,6 +173,11 @@ func frameDecode(d *decode.D, _ any) any {
lsf = true
}
mpegLayer := d.FieldU2("layer", mpegLayerNames)
if mpegLayer > 0 {
mpegLayer = 1
}
mpegLayerNr = mpegLayerN[mpegLayer]
if mpegLayerNr != 3 {
d.Errorf("Not layer 3")
@ -185,7 +191,7 @@ func frameDecode(d *decode.D, _ any) any {
3: [...]int{0, 1152, 576, 576},
}
sampleCount = samplesFrameIndex[uint(mpegLayerNr)][uint(mpegVersionNr)]
d.FieldValueU("sample_count", uint64(sampleCount))
d.FieldValueUint("sample_count", uint64(sampleCount))
protection := d.FieldBool("protection_absent", protectionNames)
// note false means it has protection
hasCRC := !protection
@ -206,8 +212,8 @@ func frameDecode(d *decode.D, _ any) any {
0b1101: [...]int{416, 320, 256, 224, 144, 144, 224, 144, 144},
0b1110: [...]int{448, 384, 320, 256, 160, 160, 256, 160, 160},
}
d.FieldU4("bitrate", scalar.Fn(func(s scalar.S) (scalar.S, error) {
u := s.ActualU()
d.FieldU("bitrate", 4, scalar.UintFn(func(s scalar.Uint) (scalar.Uint, error) {
u := s.Actual
switch u {
case 0b0000:
s.Description = "free"
@ -231,46 +237,46 @@ func frameDecode(d *decode.D, _ any) any {
0b01: [...]int{48000, 24000, 12000},
0b10: [...]int{32000, 16000, 8000},
}
d.FieldU2("sample_rate", scalar.Fn(func(s scalar.S) (scalar.S, error) {
u := s.ActualU()
switch u {
d.FieldU2("sample_rate", scalar.UintFn(func(s scalar.Uint) (scalar.Uint, error) {
switch s.Actual {
case 0b11:
s.Description = "reserved"
default:
sampleRate = sampleRateIndex[uint(u)][mpegVersionNr-1]
sampleRate = sampleRateIndex[uint(s.Actual)][mpegVersionNr-1]
s.Sym = sampleRate
}
return s, nil
}))
paddingBytes = int(d.FieldU1("padding", scalar.UToSymStr{
paddingBytes = int(d.FieldU1("padding", scalar.UintMapSymStr{
0: "not_padded",
1: "padded",
}, scalar.ActualBin))
}, scalar.UintBin))
d.FieldU1("private")
channelsIndex = d.FieldU2("channels", scalar.UToSymStr{
channelsIndex = d.FieldU2("channels", scalar.UintMapSymStr{
0b00: "stereo",
0b01: "joint_stereo",
0b10: "dual",
0b11: "mono",
}, scalar.ActualBin)
}, scalar.UintBin)
isStereo = channelsIndex != 0b11
channelModeIndex = d.FieldU2("channel_mode", scalar.UToSymStr{
channelModeIndex = d.FieldU2("channel_mode", scalar.UintMapSymStr{
0b00: "none",
0b01: "intensity_stereo",
0b10: "ms_stereo",
0b11: "intensity_stereo_ms_stereo",
}, scalar.ActualBin)
}, scalar.UintBin)
d.FieldU1("copyright")
d.FieldU1("original")
d.FieldU2("emphasis", scalar.UToSymStr{
d.FieldU2("emphasis", scalar.UintMapSymStr{
0b00: "none",
0b01: "50_15",
0b10: "reserved",
0b11: "ccit_j.17",
}, scalar.ActualBin)
}, scalar.UintBin)
if hasCRC {
d.FieldU16("crc", scalar.ActualHex)
d.FieldU16("crc", scalar.UintHex)
crcValue = d.FieldGet("crc")
crcBytes = 2
}
@ -379,9 +385,10 @@ func frameDecode(d *decode.D, _ any) any {
d.CopyBits(crcHash, d.BitBufRange(6*8, int64(sideInfoBytes)*8))
if crcValue != nil {
_ = crcValue.TryScalarFn(d.ValidateUBytes(crcHash.Sum(nil)))
_ = crcValue.TryUintScalarFn(d.UintValidateBytes(crcHash.Sum(nil)))
}
d.FieldValueRaw("crc_calculated", crcHash.Sum(nil), scalar.RawHex)
d.FieldValueBitBuf("crc_calculated", bitio.NewBitReader(crcHash.Sum(nil), -1), scalar.RawHex)
return format.MP3FrameOut{
MPEGVersion: int(mpegVersionNr),
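
The bitrate and sample_rate fields above show the new ad-hoc mapper style: scalar.Fn over the generic scalar.S becomes scalar.UintFn, which receives and returns the concrete scalar.Uint, so the value is read from the plain Actual field instead of through an ActualU() accessor. A small sketch of that pattern with an invented field and lookup table (not part of the commit):

package example

import (
	"github.com/wader/fq/pkg/decode"
	"github.com/wader/fq/pkg/scalar"
)

// rateHz is a hypothetical lookup table, standing in for the
// bitrate/sample rate index tables in the real decoder.
var rateHz = map[uint64]uint64{0b00: 44100, 0b01: 48000, 0b10: 32000}

// mapRate marks the reserved value and otherwise exposes the table entry as
// the symbolic value, the same shape as the sample_rate mapper above.
var mapRate = scalar.UintFn(func(s scalar.Uint) (scalar.Uint, error) {
	if hz, ok := rateHz[s.Actual]; ok {
		s.Sym = hz
	} else {
		s.Description = "reserved"
	}
	return s, nil
})

func decodeRate(d *decode.D) uint64 {
	return d.FieldU2("rate", mapRate, scalar.UintBin)
}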

View File

@ -17,7 +17,7 @@ func init() {
})
}
var frequencyIndexHzMap = scalar.UToSymU{
var frequencyIndexHzMap = scalar.UintMapSymUint{
0x0: 96000,
0x1: 88200,
0x2: 64000,
@ -33,7 +33,7 @@ var frequencyIndexHzMap = scalar.UToSymU{
0xc: 7350,
}
var channelConfigurationNames = scalar.UToDescription{
var channelConfigurationNames = scalar.UintMapDescription{
0: "defined in AOT Specific Config",
1: "front-center",
2: "front-left, front-right",
@ -45,8 +45,8 @@ var channelConfigurationNames = scalar.UToDescription{
}
func ascDecoder(d *decode.D, _ any) any {
objectType := d.FieldUFn("object_type", decodeEscapeValueCarryFn(5, 6, 0), format.MPEGAudioObjectTypeNames)
d.FieldUFn("sampling_frequency", decodeEscapeValueAbsFn(4, 24, 0), frequencyIndexHzMap)
objectType := d.FieldUintFn("object_type", decodeEscapeValueCarryFn(5, 6, 0), format.MPEGAudioObjectTypeNames)
d.FieldUintFn("sampling_frequency", decodeEscapeValueAbsFn(4, 24, 0), frequencyIndexHzMap)
d.FieldU4("channel_configuration", channelConfigurationNames)
// TODO: GASpecificConfig etc
d.FieldRawLen("var_aot_or_byte_align", d.BitsLeft())

View File

@ -73,7 +73,7 @@ const (
Forbidden1 = 0xFF
)
var odTagNames = scalar.UToSymStr{
var odTagNames = scalar.UintMapSymStr{
Forbidden0: "Forbidden",
ObjectDescrTag: "ObjectDescrTag",
InitialObjectDescrTag: "InitialObjectDescrTag",
@ -136,7 +136,7 @@ const (
IPMPToolStream = 0x0B
)
var streamTypeNames = scalar.UToSymStr{
var streamTypeNames = scalar.UintMapSymStr{
Forbidden: "Forbidden",
ObjectDescriptorStream: "ObjectDescriptorStream",
ClockReferenceStream: "ClockReferenceStream",
@ -220,7 +220,7 @@ func odDecodeTag(d *decode.D, edc *esDecodeContext, _ int, fn func(d *decode.D))
// Xiph-style lacing (similar to ogg) of n-1 packets, last is rest of block
d.FieldArray("laces", func(d *decode.D) {
for i := uint64(0); i < numPackets; i++ {
l := d.FieldUFn("lace", func(d *decode.D) uint64 {
l := d.FieldUintFn("lace", func(d *decode.D) uint64 {
var l uint64
for {
n := d.U8()
@ -266,7 +266,7 @@ func odDecodeTag(d *decode.D, edc *esDecodeContext, _ int, fn func(d *decode.D))
// TODO: expectedTagID
tagID := d.FieldU8("tag_id", odTagNames)
tagLen := d.FieldUFn("length", esLengthEncoding)
tagLen := d.FieldUintFn("length", esLengthEncoding)
if fn != nil {
d.FramedFn(int64(tagLen)*8, fn)

View File

@ -30,41 +30,41 @@ type subStreamPacket struct {
buf []byte
}
var startAndStreamNames = scalar.URangeToScalar{
{Range: [2]uint64{0x00, 0x00}, S: scalar.S{Sym: "picture"}},
{Range: [2]uint64{0x01, 0xaf}, S: scalar.S{Sym: "slice"}},
{Range: [2]uint64{0xb0, 0xb1}, S: scalar.S{Sym: "reserved"}},
{Range: [2]uint64{0xb2, 0xb2}, S: scalar.S{Sym: "user_data"}},
{Range: [2]uint64{0xb3, 0xb3}, S: scalar.S{Sym: "sequence_header"}},
{Range: [2]uint64{0xb4, 0xb4}, S: scalar.S{Sym: "sequence_error"}},
{Range: [2]uint64{0xb5, 0xb5}, S: scalar.S{Sym: "extension"}},
{Range: [2]uint64{0xb6, 0xb6}, S: scalar.S{Sym: "reserved"}},
{Range: [2]uint64{0xb7, 0xb7}, S: scalar.S{Sym: "sequence_end"}},
{Range: [2]uint64{0xb8, 0xb8}, S: scalar.S{Sym: "group_of_pictures"}},
{Range: [2]uint64{0xb9, 0xb9}, S: scalar.S{Sym: "program_end"}},
{Range: [2]uint64{0xba, 0xba}, S: scalar.S{Sym: "pack_header"}},
{Range: [2]uint64{0xbb, 0xbb}, S: scalar.S{Sym: "system_header"}},
{Range: [2]uint64{0xbc, 0xbc}, S: scalar.S{Sym: "program_stream_map"}},
{Range: [2]uint64{0xbd, 0xbd}, S: scalar.S{Sym: "private_stream1"}},
{Range: [2]uint64{0xbe, 0xbe}, S: scalar.S{Sym: "padding_stream"}},
{Range: [2]uint64{0xbf, 0xbf}, S: scalar.S{Sym: "private_stream2"}},
{Range: [2]uint64{0xc0, 0xdf}, S: scalar.S{Sym: "audio_stream"}},
{Range: [2]uint64{0xe0, 0xef}, S: scalar.S{Sym: "video_stream"}},
{Range: [2]uint64{0xf0, 0xf0}, S: scalar.S{Sym: "ecm_stream"}},
{Range: [2]uint64{0xf1, 0xf1}, S: scalar.S{Sym: "emm_stream"}},
{Range: [2]uint64{0xf2, 0xf2}, S: scalar.S{Sym: "itu_t_rec_h_222_0"}},
{Range: [2]uint64{0xf3, 0xf3}, S: scalar.S{Sym: "iso_iec_13522_stream"}},
{Range: [2]uint64{0xf4, 0xf4}, S: scalar.S{Sym: "itu_t_rec_h_222_1_type_a"}},
{Range: [2]uint64{0xf5, 0xf5}, S: scalar.S{Sym: "itu_t_rec_h_222_1_type_b"}},
{Range: [2]uint64{0xf6, 0xf6}, S: scalar.S{Sym: "itu_t_rec_h_222_1_type_c"}},
{Range: [2]uint64{0xf7, 0xf7}, S: scalar.S{Sym: "itu_t_rec_h_222_1_type_d"}},
{Range: [2]uint64{0xf8, 0xf8}, S: scalar.S{Sym: "itu_t_rec_h_222_1_type_e"}},
{Range: [2]uint64{0xf9, 0xf9}, S: scalar.S{Sym: "ancillary_stream"}},
{Range: [2]uint64{0xfa, 0xfe}, S: scalar.S{Sym: "reserved"}},
{Range: [2]uint64{0xff, 0xff}, S: scalar.S{Sym: "program_stream_directory"}},
var startAndStreamNames = scalar.UintRangeToScalar{
{Range: [2]uint64{0x00, 0x00}, S: scalar.Uint{Sym: "picture"}},
{Range: [2]uint64{0x01, 0xaf}, S: scalar.Uint{Sym: "slice"}},
{Range: [2]uint64{0xb0, 0xb1}, S: scalar.Uint{Sym: "reserved"}},
{Range: [2]uint64{0xb2, 0xb2}, S: scalar.Uint{Sym: "user_data"}},
{Range: [2]uint64{0xb3, 0xb3}, S: scalar.Uint{Sym: "sequence_header"}},
{Range: [2]uint64{0xb4, 0xb4}, S: scalar.Uint{Sym: "sequence_error"}},
{Range: [2]uint64{0xb5, 0xb5}, S: scalar.Uint{Sym: "extension"}},
{Range: [2]uint64{0xb6, 0xb6}, S: scalar.Uint{Sym: "reserved"}},
{Range: [2]uint64{0xb7, 0xb7}, S: scalar.Uint{Sym: "sequence_end"}},
{Range: [2]uint64{0xb8, 0xb8}, S: scalar.Uint{Sym: "group_of_pictures"}},
{Range: [2]uint64{0xb9, 0xb9}, S: scalar.Uint{Sym: "program_end"}},
{Range: [2]uint64{0xba, 0xba}, S: scalar.Uint{Sym: "pack_header"}},
{Range: [2]uint64{0xbb, 0xbb}, S: scalar.Uint{Sym: "system_header"}},
{Range: [2]uint64{0xbc, 0xbc}, S: scalar.Uint{Sym: "program_stream_map"}},
{Range: [2]uint64{0xbd, 0xbd}, S: scalar.Uint{Sym: "private_stream1"}},
{Range: [2]uint64{0xbe, 0xbe}, S: scalar.Uint{Sym: "padding_stream"}},
{Range: [2]uint64{0xbf, 0xbf}, S: scalar.Uint{Sym: "private_stream2"}},
{Range: [2]uint64{0xc0, 0xdf}, S: scalar.Uint{Sym: "audio_stream"}},
{Range: [2]uint64{0xe0, 0xef}, S: scalar.Uint{Sym: "video_stream"}},
{Range: [2]uint64{0xf0, 0xf0}, S: scalar.Uint{Sym: "ecm_stream"}},
{Range: [2]uint64{0xf1, 0xf1}, S: scalar.Uint{Sym: "emm_stream"}},
{Range: [2]uint64{0xf2, 0xf2}, S: scalar.Uint{Sym: "itu_t_rec_h_222_0"}},
{Range: [2]uint64{0xf3, 0xf3}, S: scalar.Uint{Sym: "iso_iec_13522_stream"}},
{Range: [2]uint64{0xf4, 0xf4}, S: scalar.Uint{Sym: "itu_t_rec_h_222_1_type_a"}},
{Range: [2]uint64{0xf5, 0xf5}, S: scalar.Uint{Sym: "itu_t_rec_h_222_1_type_b"}},
{Range: [2]uint64{0xf6, 0xf6}, S: scalar.Uint{Sym: "itu_t_rec_h_222_1_type_c"}},
{Range: [2]uint64{0xf7, 0xf7}, S: scalar.Uint{Sym: "itu_t_rec_h_222_1_type_d"}},
{Range: [2]uint64{0xf8, 0xf8}, S: scalar.Uint{Sym: "itu_t_rec_h_222_1_type_e"}},
{Range: [2]uint64{0xf9, 0xf9}, S: scalar.Uint{Sym: "ancillary_stream"}},
{Range: [2]uint64{0xfa, 0xfe}, S: scalar.Uint{Sym: "reserved"}},
{Range: [2]uint64{0xff, 0xff}, S: scalar.Uint{Sym: "program_stream_directory"}},
}
var mpegVersion = scalar.UToDescription{
var mpegVersion = scalar.UintMapDescription{
0b01: "MPEG2",
0b10: "MPEG1",
}
@ -72,8 +72,8 @@ var mpegVersion = scalar.UToDescription{
func pesPacketDecode(d *decode.D, _ any) any {
var v any
d.FieldU24("prefix", d.AssertU(0b0000_0000_0000_0000_0000_0001), scalar.ActualBin)
startCode := d.FieldU8("start_code", startAndStreamNames, scalar.ActualHex)
d.FieldU24("prefix", d.UintAssert(0b0000_0000_0000_0000_0000_0001), scalar.UintBin)
startCode := d.FieldU8("start_code", startAndStreamNames, scalar.UintHex)
switch {
case startCode == sequenceHeader:
@ -114,7 +114,7 @@ func pesPacketDecode(d *decode.D, _ any) any {
}
d.FieldU1("marker_bits4")
scr := scr0<<30 | scr1<<15 | scr2
d.FieldValueU("scr", scr)
d.FieldValueUint("scr", scr)
d.FieldU22("mux_rate")
d.FieldU1("marker_bits5")
if isMPEG2 {

View File

@ -35,7 +35,7 @@ const (
CHG_COLCON = 0x07
)
var commandNames = scalar.UToSymStr{
var commandNames = scalar.UintMapSymStr{
CMD_END: "CMD_END",
FSTA_DSP: "FSTA_DSP",
STA_DSP: "STA_DSP",

View File

@ -20,7 +20,7 @@ func init() {
// TODO: ts_packet
func tsDecode(d *decode.D, _ any) any {
d.FieldU8("sync", d.AssertU(0x47), scalar.ActualHex)
d.FieldU8("sync", d.UintAssert(0x47), scalar.UintHex)
d.FieldBool("transport_error_indicator")
d.FieldBool("payload_unit_start")
d.FieldBool("transport_priority")
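
The sync field above also shows the renamed assert helpers: d.AssertU becomes d.UintAssert and, as in the ogg page decoder further down, d.AssertStr becomes d.StrAssert. Both return mappers that fail the decode when the actual value is not one of the listed constants. A short sketch with an invented header layout (magic string, version values and field names are made up):

package example

import (
	"github.com/wader/fq/pkg/decode"
	"github.com/wader/fq/pkg/scalar"
)

// decodeFakeHeader is illustrative only: it asserts a magic string and an
// allowed version set, then reads a flags byte displayed in binary.
func decodeFakeHeader(d *decode.D) {
	d.FieldUTF8("magic", 4, d.StrAssert("EXMP"))
	d.FieldU8("version", d.UintAssert(1, 2), scalar.UintHex)
	d.FieldU8("flags", scalar.UintBin)
}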

View File

@ -29,7 +29,7 @@ func init() {
type formatEntry struct {
r [2]byte
s scalar.S
s scalar.Uint
d func(d *decode.D)
}
@ -44,8 +44,8 @@ func (fes formatEntries) lookup(u byte) (formatEntry, bool) {
return formatEntry{}, false
}
func (fes formatEntries) MapScalar(s scalar.S) (scalar.S, error) {
u := s.ActualU()
func (fes formatEntries) MapUint(s scalar.Uint) (scalar.Uint, error) {
u := s.Actual
if fe, ok := fes.lookup(byte(u)); ok {
s = fe.s
s.Actual = u
@ -89,64 +89,64 @@ func decodeMsgPackValue(d *decode.D) {
// is defined here as a global map would cause an init dependency cycle
formatMap := formatEntries{
{r: [2]byte{0x00, 0x7f}, s: scalar.S{Sym: "positive_fixint"}, d: func(d *decode.D) {
{r: [2]byte{0x00, 0x7f}, s: scalar.Uint{Sym: "positive_fixint"}, d: func(d *decode.D) {
d.SeekRel(-8)
d.FieldU8("value")
}},
{r: [2]byte{0x80, 0x8f}, s: scalar.S{Sym: "fixmap"}, d: mapFn(-4, 4)},
{r: [2]byte{0x90, 0x9f}, s: scalar.S{Sym: "fixarray"}, d: arrayFn(-4, 4)},
{r: [2]byte{0xa0, 0xbf}, s: scalar.S{Sym: "fixstr"}, d: func(d *decode.D) {
{r: [2]byte{0x80, 0x8f}, s: scalar.Uint{Sym: "fixmap"}, d: mapFn(-4, 4)},
{r: [2]byte{0x90, 0x9f}, s: scalar.Uint{Sym: "fixarray"}, d: arrayFn(-4, 4)},
{r: [2]byte{0xa0, 0xbf}, s: scalar.Uint{Sym: "fixstr"}, d: func(d *decode.D) {
d.SeekRel(-4)
length := d.FieldU4("length")
d.FieldUTF8("value", int(length))
}},
{r: [2]byte{0xc0, 0xc0}, s: scalar.S{Sym: "nil"}, d: func(d *decode.D) {
d.FieldValueNil("value")
{r: [2]byte{0xc0, 0xc0}, s: scalar.Uint{Sym: "nil"}, d: func(d *decode.D) {
d.FieldValueAny("value", nil)
}},
{r: [2]byte{0xc1, 0xc1}, s: scalar.S{Sym: "never_used"}, d: func(d *decode.D) {
{r: [2]byte{0xc1, 0xc1}, s: scalar.Uint{Sym: "never_used"}, d: func(d *decode.D) {
d.Fatalf("0xc1 never used")
}},
{r: [2]byte{0xc2, 0xc2}, s: scalar.S{Sym: "false"}, d: func(d *decode.D) {
{r: [2]byte{0xc2, 0xc2}, s: scalar.Uint{Sym: "false"}, d: func(d *decode.D) {
d.FieldValueBool("value", false)
}},
{r: [2]byte{0xc3, 0xc3}, s: scalar.S{Sym: "true"}, d: func(d *decode.D) {
{r: [2]byte{0xc3, 0xc3}, s: scalar.Uint{Sym: "true"}, d: func(d *decode.D) {
d.FieldValueBool("value", true)
}},
{r: [2]byte{0xc4, 0xc4}, s: scalar.S{Sym: "bin8"}, d: func(d *decode.D) { d.FieldRawLen("value", int64(d.FieldU8("length"))*8) }},
{r: [2]byte{0xc5, 0xc5}, s: scalar.S{Sym: "bin16"}, d: func(d *decode.D) { d.FieldRawLen("value", int64(d.FieldU16("length"))*8) }},
{r: [2]byte{0xc6, 0xc6}, s: scalar.S{Sym: "bin32"}, d: func(d *decode.D) { d.FieldRawLen("value", int64(d.FieldU32("length"))*8) }},
{r: [2]byte{0xc7, 0xc7}, s: scalar.S{Sym: "ext8"}, d: extFn(8)},
{r: [2]byte{0xc8, 0xc8}, s: scalar.S{Sym: "ext16"}, d: extFn(16)},
{r: [2]byte{0xc9, 0xc9}, s: scalar.S{Sym: "ext32"}, d: extFn(32)},
{r: [2]byte{0xca, 0xca}, s: scalar.S{Sym: "float32"}, d: func(d *decode.D) { d.FieldF32("value") }},
{r: [2]byte{0xcb, 0xcb}, s: scalar.S{Sym: "float64"}, d: func(d *decode.D) { d.FieldF64("value") }},
{r: [2]byte{0xcc, 0xcc}, s: scalar.S{Sym: "uint8"}, d: func(d *decode.D) { d.FieldU8("value") }},
{r: [2]byte{0xcd, 0xcd}, s: scalar.S{Sym: "uint16"}, d: func(d *decode.D) { d.FieldU16("value") }},
{r: [2]byte{0xce, 0xce}, s: scalar.S{Sym: "uint32"}, d: func(d *decode.D) { d.FieldU32("value") }},
{r: [2]byte{0xcf, 0xcf}, s: scalar.S{Sym: "uint64"}, d: func(d *decode.D) { d.FieldU64("value") }},
{r: [2]byte{0xd0, 0xd0}, s: scalar.S{Sym: "int8"}, d: func(d *decode.D) { d.FieldS8("value") }},
{r: [2]byte{0xd1, 0xd1}, s: scalar.S{Sym: "int16"}, d: func(d *decode.D) { d.FieldS16("value") }},
{r: [2]byte{0xd2, 0xd2}, s: scalar.S{Sym: "int32"}, d: func(d *decode.D) { d.FieldS32("value") }},
{r: [2]byte{0xd3, 0xd3}, s: scalar.S{Sym: "int64"}, d: func(d *decode.D) { d.FieldS64("value") }},
{r: [2]byte{0xd4, 0xd4}, s: scalar.S{Sym: "fixext1"}, d: func(d *decode.D) { d.FieldS8("fixtype"); d.FieldRawLen("value", 1*8) }},
{r: [2]byte{0xd5, 0xd5}, s: scalar.S{Sym: "fixext2"}, d: func(d *decode.D) { d.FieldS8("fixtype"); d.FieldRawLen("value", 2*8) }},
{r: [2]byte{0xd6, 0xd6}, s: scalar.S{Sym: "fixext4"}, d: func(d *decode.D) { d.FieldS8("fixtype"); d.FieldRawLen("value", 4*8) }},
{r: [2]byte{0xd7, 0xd7}, s: scalar.S{Sym: "fixext8"}, d: func(d *decode.D) { d.FieldS8("fixtype"); d.FieldRawLen("value", 8*8) }},
{r: [2]byte{0xd8, 0xd8}, s: scalar.S{Sym: "fixext16"}, d: func(d *decode.D) { d.FieldS8("fixtype"); d.FieldRawLen("value", 16*8) }},
{r: [2]byte{0xd9, 0xd9}, s: scalar.S{Sym: "str8"}, d: func(d *decode.D) { d.FieldUTF8("value", int(d.FieldU8("length"))) }},
{r: [2]byte{0xda, 0xda}, s: scalar.S{Sym: "str16"}, d: func(d *decode.D) { d.FieldUTF8("value", int(d.FieldU16("length"))) }},
{r: [2]byte{0xdb, 0xdb}, s: scalar.S{Sym: "str32"}, d: func(d *decode.D) { d.FieldUTF8("value", int(d.FieldU32("length"))) }},
{r: [2]byte{0xdc, 0xdc}, s: scalar.S{Sym: "array16"}, d: arrayFn(0, 16)},
{r: [2]byte{0xdd, 0xdd}, s: scalar.S{Sym: "array32"}, d: arrayFn(0, 32)},
{r: [2]byte{0xde, 0xde}, s: scalar.S{Sym: "map16"}, d: mapFn(0, 16)},
{r: [2]byte{0xdf, 0xdf}, s: scalar.S{Sym: "map32"}, d: mapFn(0, 32)},
{r: [2]byte{0xe0, 0xff}, s: scalar.S{Sym: "negative_fixint"}, d: func(d *decode.D) {
{r: [2]byte{0xc4, 0xc4}, s: scalar.Uint{Sym: "bin8"}, d: func(d *decode.D) { d.FieldRawLen("value", int64(d.FieldU8("length"))*8) }},
{r: [2]byte{0xc5, 0xc5}, s: scalar.Uint{Sym: "bin16"}, d: func(d *decode.D) { d.FieldRawLen("value", int64(d.FieldU16("length"))*8) }},
{r: [2]byte{0xc6, 0xc6}, s: scalar.Uint{Sym: "bin32"}, d: func(d *decode.D) { d.FieldRawLen("value", int64(d.FieldU32("length"))*8) }},
{r: [2]byte{0xc7, 0xc7}, s: scalar.Uint{Sym: "ext8"}, d: extFn(8)},
{r: [2]byte{0xc8, 0xc8}, s: scalar.Uint{Sym: "ext16"}, d: extFn(16)},
{r: [2]byte{0xc9, 0xc9}, s: scalar.Uint{Sym: "ext32"}, d: extFn(32)},
{r: [2]byte{0xca, 0xca}, s: scalar.Uint{Sym: "float32"}, d: func(d *decode.D) { d.FieldF32("value") }},
{r: [2]byte{0xcb, 0xcb}, s: scalar.Uint{Sym: "float64"}, d: func(d *decode.D) { d.FieldF64("value") }},
{r: [2]byte{0xcc, 0xcc}, s: scalar.Uint{Sym: "uint8"}, d: func(d *decode.D) { d.FieldU8("value") }},
{r: [2]byte{0xcd, 0xcd}, s: scalar.Uint{Sym: "uint16"}, d: func(d *decode.D) { d.FieldU16("value") }},
{r: [2]byte{0xce, 0xce}, s: scalar.Uint{Sym: "uint32"}, d: func(d *decode.D) { d.FieldU32("value") }},
{r: [2]byte{0xcf, 0xcf}, s: scalar.Uint{Sym: "uint64"}, d: func(d *decode.D) { d.FieldU64("value") }},
{r: [2]byte{0xd0, 0xd0}, s: scalar.Uint{Sym: "int8"}, d: func(d *decode.D) { d.FieldS8("value") }},
{r: [2]byte{0xd1, 0xd1}, s: scalar.Uint{Sym: "int16"}, d: func(d *decode.D) { d.FieldS16("value") }},
{r: [2]byte{0xd2, 0xd2}, s: scalar.Uint{Sym: "int32"}, d: func(d *decode.D) { d.FieldS32("value") }},
{r: [2]byte{0xd3, 0xd3}, s: scalar.Uint{Sym: "int64"}, d: func(d *decode.D) { d.FieldS64("value") }},
{r: [2]byte{0xd4, 0xd4}, s: scalar.Uint{Sym: "fixext1"}, d: func(d *decode.D) { d.FieldS8("fixtype"); d.FieldRawLen("value", 1*8) }},
{r: [2]byte{0xd5, 0xd5}, s: scalar.Uint{Sym: "fixext2"}, d: func(d *decode.D) { d.FieldS8("fixtype"); d.FieldRawLen("value", 2*8) }},
{r: [2]byte{0xd6, 0xd6}, s: scalar.Uint{Sym: "fixext4"}, d: func(d *decode.D) { d.FieldS8("fixtype"); d.FieldRawLen("value", 4*8) }},
{r: [2]byte{0xd7, 0xd7}, s: scalar.Uint{Sym: "fixext8"}, d: func(d *decode.D) { d.FieldS8("fixtype"); d.FieldRawLen("value", 8*8) }},
{r: [2]byte{0xd8, 0xd8}, s: scalar.Uint{Sym: "fixext16"}, d: func(d *decode.D) { d.FieldS8("fixtype"); d.FieldRawLen("value", 16*8) }},
{r: [2]byte{0xd9, 0xd9}, s: scalar.Uint{Sym: "str8"}, d: func(d *decode.D) { d.FieldUTF8("value", int(d.FieldU8("length"))) }},
{r: [2]byte{0xda, 0xda}, s: scalar.Uint{Sym: "str16"}, d: func(d *decode.D) { d.FieldUTF8("value", int(d.FieldU16("length"))) }},
{r: [2]byte{0xdb, 0xdb}, s: scalar.Uint{Sym: "str32"}, d: func(d *decode.D) { d.FieldUTF8("value", int(d.FieldU32("length"))) }},
{r: [2]byte{0xdc, 0xdc}, s: scalar.Uint{Sym: "array16"}, d: arrayFn(0, 16)},
{r: [2]byte{0xdd, 0xdd}, s: scalar.Uint{Sym: "array32"}, d: arrayFn(0, 32)},
{r: [2]byte{0xde, 0xde}, s: scalar.Uint{Sym: "map16"}, d: mapFn(0, 16)},
{r: [2]byte{0xdf, 0xdf}, s: scalar.Uint{Sym: "map32"}, d: mapFn(0, 32)},
{r: [2]byte{0xe0, 0xff}, s: scalar.Uint{Sym: "negative_fixint"}, d: func(d *decode.D) {
d.SeekRel(-8)
d.FieldS8("value")
}},
}
typ := d.FieldU8("type", formatMap, scalar.ActualHex)
typ := d.FieldU8("type", formatMap, scalar.UintHex)
if fe, ok := formatMap.lookup(byte(typ)); ok {
fe.d(d)
} else {
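
formatEntries above illustrates the interface side of the split: a custom mapper type now implements MapUint(scalar.Uint) (scalar.Uint, error) for unsigned fields rather than one MapScalar over the shared scalar.S. A rough, self-contained equivalent; the type, range and symbol below are invented:

package example

import "github.com/wader/fq/pkg/scalar"

// rangeSym maps any actual value in [Lo, Hi] to a symbolic string. Its
// MapUint method is what lets a value of this type be passed wherever the
// field readers accept uint mappers, just like formatEntries above.
type rangeSym struct {
	Lo, Hi uint64
	Sym    string
}

func (r rangeSym) MapUint(s scalar.Uint) (scalar.Uint, error) {
	if s.Actual >= r.Lo && s.Actual <= r.Hi {
		s.Sym = r.Sym
	}
	return s, nil
}

// Usage sketch, assuming a *decode.D in scope:
//   d.FieldU8("stream_id", rangeSym{Lo: 0xc0, Hi: 0xdf, Sym: "audio_stream"})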

View File

@ -77,7 +77,7 @@ func decodeOgg(d *decode.D, _ any) any {
if !sFound {
var packetsD *decode.D
streamsD.FieldStruct("stream", func(d *decode.D) {
d.FieldValueU("serial_number", uint64(oggPageOut.StreamSerialNumber))
d.FieldValueUint("serial_number", uint64(oggPageOut.StreamSerialNumber))
packetsD = d.FieldArrayValue("packets")
})
s = &stream{

View File

@ -25,8 +25,8 @@ func pageDecode(d *decode.D, _ any) any {
d.Endian = decode.LittleEndian
d.FieldUTF8("capture_pattern", 4, d.AssertStr("OggS"))
d.FieldU8("version", d.AssertU(0))
d.FieldUTF8("capture_pattern", 4, d.StrAssert("OggS"))
d.FieldU8("version", d.UintAssert(0))
d.FieldU5("unused_flags")
p.IsLastPage = d.FieldBool("last_page")
p.IsFirstPage = d.FieldBool("first_page")
@ -34,7 +34,7 @@ func pageDecode(d *decode.D, _ any) any {
d.FieldU64("granule_position")
p.StreamSerialNumber = uint32(d.FieldU32("bitstream_serial_number"))
p.SequenceNo = uint32(d.FieldU32("page_sequence_no"))
d.FieldU32("crc", scalar.ActualHex)
d.FieldU32("crc", scalar.UintHex)
pageSegments := d.FieldU8("page_segments")
var segmentTable []uint64
d.FieldArray("segment_table", func(d *decode.D) {
@ -55,7 +55,7 @@ func pageDecode(d *decode.D, _ any) any {
d.Copy(pageCRC, bitio.NewIOReader(d.BitBufRange(startPos, pageChecksumValue.Range.Start-startPos))) // header before checksum
d.Copy(pageCRC, bytes.NewReader([]byte{0, 0, 0, 0})) // zero checksum bits
d.Copy(pageCRC, bitio.NewIOReader(d.BitBufRange(pageChecksumValue.Range.Stop(), endPos-pageChecksumValue.Range.Stop()))) // rest of page
_ = pageChecksumValue.TryScalarFn(d.ValidateUBytes(pageCRC.Sum(nil)))
_ = pageChecksumValue.TryUintScalarFn(d.UintValidateBytes(pageCRC.Sum(nil)))
return p
}

View File

@ -98,7 +98,7 @@ func opusDecode(d *decode.D, _ any) any {
config := configurations[n]
d.FieldValueStr("mode", config.mode)
d.FieldValueStr("bandwidth", config.bandwidth)
d.FieldValueFloat("frame_size", config.frameSize)
d.FieldValueFlt("frame_size", config.frameSize)
})
d.FieldBool("stereo")
d.FieldStruct("frames_per_packet", func(d *decode.D) {
@ -113,7 +113,7 @@ func opusDecode(d *decode.D, _ any) any {
}
n := d.FieldU2("config")
config := framesPerPacketConfigs[n]
d.FieldValueU("frames", config.frames)
d.FieldValueUint("frames", config.frames)
d.FieldValueStr("mode", config.mode)
})
d.FieldRawLen("data", d.BitsLeft())

View File

@ -31,7 +31,7 @@ const (
littleEndianNS = 0x4d3cb2a1
)
var endianMap = scalar.UToSymStr{
var endianMap = scalar.UintMapSymStr{
bigEndian: "big_endian",
littleEndian: "little_endian",
bigEndianNS: "big_endian_ns",
@ -59,12 +59,12 @@ func decodePcap(d *decode.D, _ any) any {
timestampUNSStr := "ts_usec"
d.FieldStruct("header", func(d *decode.D) {
magic := d.FieldU32("magic", d.AssertU(
magic := d.FieldU32("magic", d.UintAssert(
bigEndian,
littleEndian,
bigEndianNS,
littleEndianNS,
), endianMap, scalar.ActualHex)
), endianMap, scalar.UintHex)
switch magic {
case bigEndian:

Some files were not shown because too many files have changed in this diff.