
interp,decode: Add force option to ignore asserts

Mattias Wadman 2021-11-16 13:03:45 +01:00
parent 5cd5633374
commit f9f866000e
18 changed files with 110 additions and 100 deletions


@ -108,7 +108,7 @@ Usage: fq [OPTIONS] [--] [EXPR] [FILE...]
--argjson NAME JSON Set variable $NAME to JSON
--color-output,-C Force color output
--compact-output,-c Compact output
--decode,-d NAME Force decode format (probe)
--decode,-d NAME Decode format (probe)
--decode-file NAME PATH Set variable $NAME to decode of file
--formats Show supported formats
--from-file,-f PATH Read EXPR from file
@ -153,12 +153,12 @@ notable is support for arbitrary-precision integers.
- All standard library functions from jq
- Adds a few new general functions:
- `streaks/0`, `streaks_by/1` like `group` but groups streaks based on condition.
- `count`, `count_by/1` like `group` but counts group lengths.
- `count/0`, `count_by/1` like `group` but counts group lengths.
- `debug/1` like `debug/0` but uses arg to produce debug message. `{a: 123} | debug({a}) | ...`.
- `path_to_expr` from `["key", 1]` to `".key[1]"`.
- `expr_to_path` from `".key[1]"` to `["key", 1]`.
- `path_to_expr/0` from `["key", 1]` to `".key[1]"`.
- `expr_to_path/0` from `".key[1]"` to `["key", 1]`.
- `diff/2` produce diff object between two values.
- `delta`, `delta_by/1`, array with difference between all consecutive pairs.
- `delta/0`, `delta_by/1`, array with difference between all consecutive pairs.
- `chunk/1`, split array or string into even chunks
- Adds some decode value specific functions:
- `root/0` return tree root for value
@ -181,20 +181,25 @@ notable is support for arbitrary-precision integers.
- `bgrep/1`, `bgrep/2` recursively match buffer
- `fgrep/1`, `fgrep/2` recursively match field name
- Buffers:
- `tobits` - Transform input into a bits buffer not preserving source range, will start at zero.
- `tobitsrange` - Transform input into a bits buffer preserving source range if possible.
- `tobytes` - Transform input into a bytes buffer not preserving source range, will start at zero.
- `tobytesrange` - Transform input into a byte buffer preserving source range if possible.
- `tobits/0` - Transform input into a bits buffer not preserving source range, will start at zero.
- `tobitsrange/0` - Transform input into a bits buffer preserving source range if possible.
- `tobytes/0` - Transform input into a bytes buffer not preserving source range, will start at zero.
- `tobytesrange/0` - Transform input into a byte buffer preserving source range if possible.
- `buffer[start:end]`, `buffer[:end]`, `buffer[start:]` - Create a sub buffer from start to end in buffer units preserving source range.
- `open` open file for reading
- `probe` or `decode` probe format and decode
- `mp3`, `matroska`, ..., `<name>`, `decode([name])` force decode as format
- `d`/`display` display value and truncate long arrays
- `f`/`full` display value and don't truncate arrays
- `v`/`verbose` display value verbosely and don't truncate array
- `p`/`preview` show preview of field tree
- `hd`/`hexdump` hexdump value
- `repl` nested REPL, must be last in a pipeline. `1 | repl`, can "slurp" multiple outputs `1, 2, 3 | repl`.
- All decode functions take an optional options argument. The only option currently is `force`, which ignores decoder asserts.
For example, to decode as mp3 and ignore asserts do `mp3({force: true})` or `decode("mp3"; {force: true})`; from the command line
you currently have to do `fq -d raw 'mp3({force: true})' file` (see the example after this list).
- `decode/0`, `decode/1`, `decode/2` decode format
- `probe/0`, `probe/1` probe and decode format
- `mp3/0`, `mp3/1`, ..., `<name>/0`, `<name>/1` same as `decode(<name>)/1`, `decode(<name>; <opts>)/2` decode as format
- `d/0`/`display/0` display value and truncate long arrays
- `f/0`/`full/0` display value and don't truncate arrays
- `v/0`/`verbose/0` display value verbosely and don't truncate arrays
- `p/0`/`preview/0` show preview of field tree
- `hd/0`/`hexdump/0` hexdump value
- `repl/0` nested REPL, must be last in a pipeline. `1 | repl`, can "slurp" multiple outputs `1, 2, 3 | repl`.
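
A short usage sketch of the `force` option described in the list above; the two expression forms are the ones quoted there, and `file.mp3` is a placeholder path:

```
# decode as mp3 but ignore decoder asserts
fq -d raw 'mp3({force: true})' file.mp3
# equivalent, passing the format name and options to decode/2
fq -d raw 'decode("mp3"; {force: true})' file.mp3
```

Without `force` a failed assert aborts that decoder; with it the decode keeps going and marks the offending fields as invalid, as in the `png({force: true})` test output later in this commit.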
## Decoded values (TODO: better name?)


@ -131,7 +131,7 @@ func elfDecode(d *decode.D, in interface{}) interface{} {
var endian uint64
d.FieldStruct("ident", func(d *decode.D) {
d.FieldRawLen("magic", 4*8, d.AssertRaw([]byte("\x7fELF")))
d.FieldRawLen("magic", 4*8, d.AssertBitBuf([]byte("\x7fELF")))
archBits = int(d.FieldU8("class", d.MapUToU(classBits)))
endian = d.FieldU8("data", d.MapUToStr(endianNames))
d.FieldU8("version")


@ -78,7 +78,7 @@ func flacDecode(d *decode.D, in interface{}) interface{} {
}
})
d.FieldValueRaw("md5_calculated", md5Samples.Sum(nil), d.ValidateRaw(streamInfo.MD5), d.RawHex)
d.FieldValueRaw("md5_calculated", md5Samples.Sum(nil), d.ValidateBitBuf(streamInfo.MD5), d.RawHex)
d.FieldValueU("decoded_samples", framesNDecodedSamples)
return nil


@ -339,7 +339,7 @@ func frameDecode(d *decode.D, in interface{}) interface{} {
headerCRC := &crc.CRC{Bits: 8, Table: crc.ATM8Table}
decode.MustCopy(d, headerCRC, d.BitBufRange(frameStart, d.Pos()-frameStart))
d.FieldRawLen("crc", 8, d.ValidateRaw(headerCRC.Sum(nil)), d.RawHex)
d.FieldRawLen("crc", 8, d.ValidateBitBuf(headerCRC.Sum(nil)), d.RawHex)
})
var channelSamples [][]int64
@ -589,7 +589,7 @@ func frameDecode(d *decode.D, in interface{}) interface{} {
// <16> CRC-16 (polynomial = x^16 + x^15 + x^2 + x^0, initialized with 0) of everything before the crc, back to and including the frame header sync code
footerCRC := &crc.CRC{Bits: 16, Table: crc.ANSI16Table}
decode.MustCopy(d, footerCRC, d.BitBufRange(frameStart, d.Pos()-frameStart))
d.FieldRawLen("footer_crc", 16, d.ValidateRaw(footerCRC.Sum(nil)), d.RawHex)
d.FieldRawLen("footer_crc", 16, d.ValidateBitBuf(footerCRC.Sum(nil)), d.RawHex)
streamSamples := len(channelSamples[0])
for j := 0; j < len(channelSamples); j++ {


@ -59,7 +59,7 @@ var deflateExtraFlagsNames = decode.UToStr{
}
func gzDecode(d *decode.D, in interface{}) interface{} {
d.FieldRawLen("identification", 2*8, d.AssertRaw([]byte("\x1f\x8b")))
d.FieldRawLen("identification", 2*8, d.AssertBitBuf([]byte("\x1f\x8b")))
compressionMethod := d.FieldU8("compression_method", d.MapUToStr(compressionMethodNames))
hasHeaderCRC := false
hasExtra := false
@ -117,7 +117,7 @@ func gzDecode(d *decode.D, in interface{}) interface{} {
d.FieldRawLen("compressed", compressedLen)
}
d.FieldRawLen("crc32", 32, d.ValidateRaw(bitio.ReverseBytes(crc32W.Sum(nil))), d.RawHex)
d.FieldRawLen("crc32", 32, d.ValidateBitBuf(bitio.ReverseBytes(crc32W.Sum(nil))), d.RawHex)
d.FieldU32LE("isize")
return nil


@ -192,7 +192,7 @@ func jpegDecode(d *decode.D, in interface{}) interface{} {
} else {
d.FieldStruct("marker", func(d *decode.D) {
prefixLen := d.PeekFindByte(0xff, -1) + 1
d.FieldRawLen("prefix", prefixLen*8, d.AssertRaw([]byte{0xff}))
d.FieldRawLen("prefix", prefixLen*8, d.AssertBitBuf([]byte{0xff}))
markerCode := d.FieldU8("code", d.MapUToScalar(markers))
_, markerFound := markers[markerCode]


@ -388,7 +388,7 @@ func frameDecode(d *decode.D, in interface{}) interface{} {
decode.MustCopy(d, crcHash, d.BitBufRange(6*8, sideInfoBytes*8))
if crcValue != nil {
_ = crcValue.ScalarFn(d.ValidateRaw(crcHash.Sum(nil)))
_ = crcValue.ScalarFn(d.ValidateBitBuf(crcHash.Sum(nil)))
}
d.FieldValueRaw("crc_calculated", crcHash.Sum(nil), d.RawHex)


@ -51,7 +51,7 @@ func pageDecode(d *decode.D, in interface{}) interface{} {
decode.MustCopy(d, pageCRC, d.BitBufRange(startPos, pageChecksumValue.Range.Start-startPos)) // header before checksum
decode.MustCopy(d, pageCRC, bytes.NewReader([]byte{0, 0, 0, 0})) // zero checksum bits
decode.MustCopy(d, pageCRC, d.BitBufRange(pageChecksumValue.Range.Stop(), endPos-pageChecksumValue.Range.Stop())) // rest of page
_ = pageChecksumValue.ScalarFn(d.ValidateRaw(bitio.ReverseBytes(pageCRC.Sum(nil))))
_ = pageChecksumValue.ScalarFn(d.ValidateBitBuf(bitio.ReverseBytes(pageCRC.Sum(nil))))
return p
}


@ -64,7 +64,7 @@ var blendOpNames = decode.UToStr{
func pngDecode(d *decode.D, in interface{}) interface{} {
iEndFound := false
d.FieldRawLen("signature", 8*8, d.AssertRaw([]byte("\x89PNG\r\n\x1a\n")))
d.FieldRawLen("signature", 8*8, d.AssertBitBuf([]byte("\x89PNG\r\n\x1a\n")))
d.FieldStructArrayLoop("chunks", "chunk", func() bool { return d.NotEnd() && !iEndFound }, func(d *decode.D) {
chunkLength := int(d.FieldU32("length"))
crcStartPos := d.Pos()
@ -192,7 +192,7 @@ func pngDecode(d *decode.D, in interface{}) interface{} {
chunkCRC := crc32.NewIEEE()
decode.MustCopy(d, chunkCRC, d.BitBufRange(crcStartPos, d.Pos()-crcStartPos))
d.FieldRawLen("crc", 32, d.ValidateRaw(chunkCRC.Sum(nil)), d.RawHex)
d.FieldRawLen("crc", 32, d.ValidateBitBuf(chunkCRC.Sum(nil)), d.RawHex)
})
return nil


@ -24,6 +24,7 @@ const (
type Options struct {
Name string
Description string
Force bool
FillGaps bool
IsRoot bool
Range ranges.Range // if zero use whole buffer
@ -38,20 +39,19 @@ func Decode(ctx context.Context, bb *bitio.Buffer, formats []*Format, opts Optio
}
func decode(ctx context.Context, bb *bitio.Buffer, formats []*Format, opts Options) (*Value, interface{}, error) {
if opts.Range.IsZero() {
opts.Range = ranges.Range{Len: bb.Len()}
decodeRange := opts.Range
if decodeRange.IsZero() {
decodeRange = ranges.Range{Len: bb.Len()}
}
if formats == nil {
panic("formats is nil, failed to register format?")
}
var forceOne = len(formats) == 1
decodeErr := FormatsError{}
for _, f := range formats {
cbb, err := bb.BitBufRange(opts.Range.Start, opts.Range.Len)
cbb, err := bb.BitBufRange(decodeRange.Start, decodeRange.Len)
if err != nil {
return nil, nil, err
}
@ -84,7 +84,7 @@ func decode(ctx context.Context, bb *bitio.Buffer, formats []*Format, opts Optio
d.Value.V = vv
}
if !forceOne {
if len(formats) != 1 {
continue
}
} else {
@ -94,20 +94,20 @@ func decode(ctx context.Context, bb *bitio.Buffer, formats []*Format, opts Optio
// TODO: maybe move to Format* funcs?
if opts.FillGaps {
d.FillGaps(ranges.Range{Start: 0, Len: opts.Range.Len}, "unknown")
d.FillGaps(ranges.Range{Start: 0, Len: decodeRange.Len}, "unknown")
}
var minMaxRange ranges.Range
if err := d.Value.WalkRootPreOrder(func(v *Value, rootV *Value, depth int, rootDepth int) error {
minMaxRange = ranges.MinMax(minMaxRange, v.Range)
v.Range.Start += opts.Range.Start
v.Range.Start += decodeRange.Start
v.RootBitBuf = bb
return nil
}); err != nil {
return nil, nil, err
}
d.Value.Range = ranges.Range{Start: opts.Range.Start, Len: minMaxRange.Len}
d.Value.Range = ranges.Range{Start: decodeRange.Start, Len: minMaxRange.Len}
if opts.IsRoot {
d.Value.postProcess()
@ -123,7 +123,7 @@ type D struct {
Ctx context.Context
Endian Endian
Value *Value
Options map[string]interface{}
Options Options
bitBuf *bitio.Buffer
@ -154,7 +154,7 @@ func newDecoder(ctx context.Context, format *Format, bb *bitio.Buffer, opts Opti
Range: ranges.Range{Start: 0, Len: 0},
IsRoot: opts.IsRoot,
},
Options: opts.FormatOptions,
Options: opts,
bitBuf: bb,
readBuf: opts.ReadBuf,
@ -217,7 +217,9 @@ func (d *D) FillGaps(r ranges.Range, namePrefix string) {
// Invalid stops decode with a reason
func (d *D) Invalid(reason string) {
panic(ValidateError{Reason: reason, Pos: d.Pos()})
if !d.Options.Force {
panic(ValidateError{Reason: reason, Pos: d.Pos()})
}
}
func (d *D) IOPanic(err error) {
@ -556,6 +558,9 @@ func (d *D) FieldRangeFn(name string, firstBit int64, nBits int64, fn func() *Va
}
func (d *D) AssertAtLeastBitsLeft(nBits int64) {
if d.Options.Force {
return
}
bl := d.BitsLeft()
if bl < nBits {
// TODO:
@ -564,6 +569,9 @@ func (d *D) AssertAtLeastBitsLeft(nBits int64) {
}
func (d *D) AssertLeastBytesLeft(nBytes int64) {
if d.Options.Force {
return
}
bl := d.BitsLeft()
if bl < nBytes*8 {
// TODO:
@ -641,11 +649,12 @@ func (d *D) RangeFn(firstBit int64, nBits int64, fn func(d *D)) {
func (d *D) Format(formats []*Format, inArg interface{}) interface{} {
dv, v, err := decode(d.Ctx, d.bitBuf, formats, Options{
ReadBuf: d.readBuf,
Force: d.Options.Force,
FillGaps: false,
IsRoot: false,
Range: ranges.Range{Start: d.Pos(), Len: d.BitsLeft()},
FormatInArg: inArg,
ReadBuf: d.readBuf,
})
if dv == nil || dv.Errors() != nil {
panic(err)
@ -670,11 +679,12 @@ func (d *D) Format(formats []*Format, inArg interface{}) interface{} {
func (d *D) FieldTryFormat(name string, formats []*Format, inArg interface{}) (*Value, interface{}, error) {
dv, v, err := decode(d.Ctx, d.bitBuf, formats, Options{
Name: name,
ReadBuf: d.readBuf,
Force: d.Options.Force,
FillGaps: false,
IsRoot: false,
Range: ranges.Range{Start: d.Pos(), Len: d.BitsLeft()},
FormatInArg: inArg,
ReadBuf: d.readBuf,
})
if dv == nil || dv.Errors() != nil {
return nil, nil, err
@ -699,11 +709,12 @@ func (d *D) FieldFormat(name string, formats []*Format, inArg interface{}) (*Val
func (d *D) FieldTryFormatLen(name string, nBits int64, formats []*Format, inArg interface{}) (*Value, interface{}, error) {
dv, v, err := decode(d.Ctx, d.bitBuf, formats, Options{
Name: name,
ReadBuf: d.readBuf,
Force: d.Options.Force,
FillGaps: true,
IsRoot: false,
Range: ranges.Range{Start: d.Pos(), Len: nBits},
FormatInArg: inArg,
ReadBuf: d.readBuf,
})
if dv == nil || dv.Errors() != nil {
return nil, nil, err
@ -729,11 +740,12 @@ func (d *D) FieldFormatLen(name string, nBits int64, formats []*Format, inArg in
func (d *D) FieldTryFormatRange(name string, firstBit int64, nBits int64, formats []*Format, inArg interface{}) (*Value, interface{}, error) {
dv, v, err := decode(d.Ctx, d.bitBuf, formats, Options{
Name: name,
ReadBuf: d.readBuf,
Force: d.Options.Force,
FillGaps: true,
IsRoot: false,
Range: ranges.Range{Start: firstBit, Len: nBits},
FormatInArg: inArg,
ReadBuf: d.readBuf,
})
if dv == nil || dv.Errors() != nil {
return nil, nil, err
@ -756,10 +768,11 @@ func (d *D) FieldFormatRange(name string, firstBit int64, nBits int64, formats [
func (d *D) FieldTryFormatBitBuf(name string, bb *bitio.Buffer, formats []*Format, inArg interface{}) (*Value, interface{}, error) {
dv, v, err := decode(d.Ctx, bb, formats, Options{
Name: name,
ReadBuf: d.readBuf,
Force: d.Options.Force,
FillGaps: true,
IsRoot: true,
FormatInArg: inArg,
ReadBuf: d.readBuf,
})
if dv == nil || dv.Errors() != nil {
return nil, nil, err


@ -226,10 +226,9 @@ func (d *D) BitBufValidateIsZero(s Scalar) (Scalar, error) {
}
// TODO: generate?
func (d *D) assertRaw(assert bool, bss ...[]byte) func(s Scalar) (Scalar, error) {
func (d *D) assertBitBuf(assert bool, bss ...[]byte) func(s Scalar) (Scalar, error) {
return func(s Scalar) (Scalar, error) {
// TODO: check type assert?
ab, err := s.Actual.(*bitio.Buffer).Bytes()
ab, err := s.ActualBitBuf().Bytes()
if err != nil {
return s, err
}
@ -240,18 +239,18 @@ func (d *D) assertRaw(assert bool, bss ...[]byte) func(s Scalar) (Scalar, error)
}
}
s.Description = "invalid"
if assert {
if assert && !d.Options.Force {
return s, errors.New("failed to validate raw")
}
return s, nil
}
}
func (d *D) AssertRaw(bss ...[]byte) func(s Scalar) (Scalar, error) {
return d.assertRaw(true, bss...)
func (d *D) AssertBitBuf(bss ...[]byte) func(s Scalar) (Scalar, error) {
return d.assertBitBuf(true, bss...)
}
func (d *D) ValidateRaw(bss ...[]byte) func(s Scalar) (Scalar, error) {
return d.assertRaw(false, bss...)
func (d *D) ValidateBitBuf(bss ...[]byte) func(s Scalar) (Scalar, error) {
return d.assertBitBuf(false, bss...)
}
func (d *D) TryFieldValue(name string, fn func() (*Value, error)) (*Value, error) {


@ -256,11 +256,7 @@ func (d *D) TryFieldUFn(name string, fn func(d *D) (uint64, error), sfns ...Scal
// Validate/Assert Bool
func (d *D) assertBool(assert bool, vs ...bool) func(s Scalar) (Scalar, error) {
return func(s Scalar) (Scalar, error) {
// TODO: check type assert?
a, ok := s.Actual.(bool)
if !ok {
panic(fmt.Sprintf("failed to type assert s.Actual %v as bool", s.Actual))
}
a := s.ActualBool()
for _, b := range vs {
if a == b {
s.Description = "valid"
@ -268,8 +264,8 @@ func (d *D) assertBool(assert bool, vs ...bool) func(s Scalar) (Scalar, error) {
}
}
s.Description = "invalid"
if assert {
return s, errors.New("failed to validate Bool")
if assert && !d.Options.Force {
return s, errors.New("failed to assert Bool")
}
return s, nil
}
@ -285,11 +281,7 @@ func (d *D) ValidateBool(vs ...bool) func(s Scalar) (Scalar, error) {
// Validate/Assert F
func (d *D) assertF(assert bool, vs ...float64) func(s Scalar) (Scalar, error) {
return func(s Scalar) (Scalar, error) {
// TODO: check type assert?
a, ok := s.Actual.(float64)
if !ok {
panic(fmt.Sprintf("failed to type assert s.Actual %v as float64", s.Actual))
}
a := s.ActualF()
for _, b := range vs {
if a == b {
s.Description = "valid"
@ -297,8 +289,8 @@ func (d *D) assertF(assert bool, vs ...float64) func(s Scalar) (Scalar, error) {
}
}
s.Description = "invalid"
if assert {
return s, errors.New("failed to validate F")
if assert && !d.Options.Force {
return s, errors.New("failed to assert F")
}
return s, nil
}
@ -314,11 +306,7 @@ func (d *D) ValidateF(vs ...float64) func(s Scalar) (Scalar, error) {
// Validate/Assert S
func (d *D) assertS(assert bool, vs ...int64) func(s Scalar) (Scalar, error) {
return func(s Scalar) (Scalar, error) {
// TODO: check type assert?
a, ok := s.Actual.(int64)
if !ok {
panic(fmt.Sprintf("failed to type assert s.Actual %v as int64", s.Actual))
}
a := s.ActualS()
for _, b := range vs {
if a == b {
s.Description = "valid"
@ -326,8 +314,8 @@ func (d *D) assertS(assert bool, vs ...int64) func(s Scalar) (Scalar, error) {
}
}
s.Description = "invalid"
if assert {
return s, errors.New("failed to validate S")
if assert && !d.Options.Force {
return s, errors.New("failed to assert S")
}
return s, nil
}
@ -343,11 +331,7 @@ func (d *D) ValidateS(vs ...int64) func(s Scalar) (Scalar, error) {
// Validate/Assert Str
func (d *D) assertStr(assert bool, vs ...string) func(s Scalar) (Scalar, error) {
return func(s Scalar) (Scalar, error) {
// TODO: check type assert?
a, ok := s.Actual.(string)
if !ok {
panic(fmt.Sprintf("failed to type assert s.Actual %v as string", s.Actual))
}
a := s.ActualStr()
for _, b := range vs {
if a == b {
s.Description = "valid"
@ -355,8 +339,8 @@ func (d *D) assertStr(assert bool, vs ...string) func(s Scalar) (Scalar, error)
}
}
s.Description = "invalid"
if assert {
return s, errors.New("failed to validate Str")
if assert && !d.Options.Force {
return s, errors.New("failed to assert Str")
}
return s, nil
}
@ -372,11 +356,7 @@ func (d *D) ValidateStr(vs ...string) func(s Scalar) (Scalar, error) {
// Validate/Assert U
func (d *D) assertU(assert bool, vs ...uint64) func(s Scalar) (Scalar, error) {
return func(s Scalar) (Scalar, error) {
// TODO: check type assert?
a, ok := s.Actual.(uint64)
if !ok {
panic(fmt.Sprintf("failed to type assert s.Actual %v as uint64", s.Actual))
}
a := s.ActualU()
for _, b := range vs {
if a == b {
s.Description = "valid"
@ -384,8 +364,8 @@ func (d *D) assertU(assert bool, vs ...uint64) func(s Scalar) (Scalar, error) {
}
}
s.Description = "invalid"
if assert {
return s, errors.New("failed to validate U")
if assert && !d.Options.Force {
return s, errors.New("failed to assert U")
}
return s, nil
}


@ -55,11 +55,7 @@ import (
// Validate/Assert {{$name}}
func (d *D) assert{{$name}}(assert bool, vs ...{{$t.go_type}}) func(s Scalar) (Scalar, error) {
return func(s Scalar) (Scalar, error) {
// TODO: check type assert?
a, ok := s.Actual.({{$t.go_type}})
if !ok {
panic(fmt.Sprintf("failed to type assert s.Actual %v as {{$t.go_type}}", s.Actual))
}
a := s.Actual{{$name}}()
for _, b := range vs {
if {{$t.compare}} {
s.Description = "valid"
@ -67,8 +63,8 @@ import (
}
}
s.Description = "invalid"
if assert {
return s, errors.New("failed to validate {{$name}}")
if assert && !d.Options.Force {
return s, errors.New("failed to assert {{$name}}")
}
return s, nil
}


@ -42,14 +42,15 @@ def decode($name; $decode_opts):
| _decode(
$name;
$opts +
$decode_opts + {
{
_progress: (
if $opts.decode_progress and $opts.repl and $stdout.is_terminal then
"_decode_progress"
else null
end
)
}
),
} +
$decode_opts
)
);
def decode($name): decode($name; {});
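
The reordering in the `decode` definition above matters because jq's `+` on objects is right-biased: on a key conflict the right-hand operand wins, so caller-supplied `$decode_opts` now override the built-in default object rather than the other way around. A minimal illustration with hypothetical values:

```
# right-hand keys win in jq object addition
{_progress: "_decode_progress"} + {_progress: null, force: true}
# => {"_progress": null, "force": true}
```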


@ -162,7 +162,7 @@ def _opt_cli_opts:
"decode_format": {
short: "-d",
long: "--decode",
description: "Force decode format (probe)",
description: "Decode format (probe)",
string: "NAME"
},
"decode_file": {


@ -13,7 +13,7 @@ Usage: fq [OPTIONS] [--] [EXPR] [FILE...]
--argjson NAME JSON Set variable $NAME to JSON
--color-output,-C Force color output
--compact-output,-c Compact output
--decode,-d NAME Force decode format (probe)
--decode,-d NAME Decode format (probe)
--decode-file NAME PATH Set variable $NAME to decode of file
--formats Show supported formats
--from-file,-f PATH Read EXPR from file


@ -55,3 +55,17 @@ $ fq -d raw 'tobytes[0:1] | png | d' /test.mp3
0x0|49 |I | unknown0: raw bits
$ fq -d raw 'tobytes[0:1] | try probe catch . | type' /test.mp3
"array"
$ fq -d raw 'png({force: true}) | d' /test.mp3
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.: {} (png)
| | | error: png: BitBufRange: failed at position 0 (read size 2315363 seek pos 0): outside buffer
0x000|49 44 33 04 00 00 00 00 |ID3..... | signature: raw bits (invalid)
| | | chunks: [1]
| | | [0]: {}
0x000| 00 23 54 53 | .#TS | length: 2315347
0x000| 53 45 00 00| SE..| type: "SE\x00\x00"
0x000| 53 | S | ancillary: true
0x000| 45 | E | private: false
0x000| 00 | . | reserved: false
0x000| 00| .| safe_to_copy: false
0x010|00 0f 00 00 03 4c 61 76 66 35 38 2e 34 35 2e 31|.....Lavf58.45.1| unknown0: raw bits
* |until 0x283.7 (end) (628) | |


@ -64,6 +64,7 @@ func (i *Interp) _toValue(c interface{}, a []interface{}) interface{} {
func (i *Interp) _decode(c interface{}, a []interface{}) interface{} {
var opts struct {
Filename string `mapstructure:"filename"`
Force bool `mapstructure:"force"`
Progress string `mapstructure:"_progress"`
Remain map[string]interface{} `mapstructure:",remain"`
}
@ -121,6 +122,7 @@ func (i *Interp) _decode(c interface{}, a []interface{}) interface{} {
decode.Options{
IsRoot: true,
FillGaps: true,
Force: opts.Force,
Range: bv.r,
Description: opts.Filename,
FormatOptions: opts.Remain,