
interp: Rename to/from<format> functions to to_/from_<format>

Feels less cluttered, easier to read and more consistent.

Still keep tovalue, tobytes etc as they are more basic functions; this
only renames format related functions.
Also there is an exception for to/fromjson as they come from jq.

Also fixes lots of spelling errors noticed while reading through.
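
A rough illustration of the convention (file and field names here are just placeholders):

```sh
# before this commit
fq -r '.headers[0] | toyaml' file.mp3
fq -n '"aGVsbG8=" | frombase64'

# after: format related helpers get an underscore ...
fq -r '.headers[0] | to_yaml' file.mp3
fq -n '"aGVsbG8=" | from_base64'

# ... while the more basic functions keep their names
fq '.headers[0] | tovalue' file.mp3
```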
This commit is contained in:
Mattias Wadman 2022-12-21 13:59:54 +01:00
parent d3c172c456
commit e3ae1440c9
80 changed files with 651 additions and 612 deletions

View File

@ -167,8 +167,8 @@
"torepr", "torepr",
"tosym", "tosym",
"tovalue", "tovalue",
"toxml", "to_xml",
"toxmlentities", "to_xmlentities",
"traf", "traf",
"trak", "trak",
"trex", "trex",

View File

@ -46,7 +46,7 @@ doc/%.svg: fq
(cd doc ; ../$@.sh ../fq) | go run github.com/wader/ansisvg@master > $@ (cd doc ; ../$@.sh ../fq) | go run github.com/wader/ansisvg@master > $@
doc/formats.svg: fq doc/formats.svg: fq
# ignore graphviz version as it causes diff when nothing has changed @# ignore graphviz version as it causes diff when nothing has changed
./fq -rnf doc/formats_diagram.jq | dot -Tsvg | sed 's/Generated by graphviz.*//' >doc/formats.svg ./fq -rnf doc/formats_diagram.jq | dot -Tsvg | sed 's/Generated by graphviz.*//' >doc/formats.svg
doc/file.mp3: Makefile doc/file.mp3: Makefile
@ -88,8 +88,8 @@ update-gomod: always
# try to decode crash with all formats in order to see which one paniced: # try to decode crash with all formats in order to see which one paniced:
# cat format/testdata/fuzz/FuzzFormats/... | go run dev/fuzzbytes.go | go run . -d bytes '. as $b | formats | keys[] as $f | $b | decode($f)' # cat format/testdata/fuzz/FuzzFormats/... | go run dev/fuzzbytes.go | go run . -d bytes '. as $b | formats | keys[] as $f | $b | decode($f)'
# convert crash into raw bytes: # convert crash into raw bytes:
# cat format/testdata/fuzz/FuzzFormats/... | go run dev/fuzzbytes.go | fq -d bytes tobase64 # cat format/testdata/fuzz/FuzzFormats/... | go run dev/fuzzbytes.go | fq -d bytes to_base64
# fq -n '"..." | frombase64 | ...' # fq -n '"..." | from_base64 | ...'
fuzz: always fuzz: always
# in other terminal: tail -f /tmp/repanic # in other terminal: tail -f /tmp/repanic
FUZZTEST=1 REPANIC_LOG=/tmp/repanic go test -v -run Fuzz -fuzz=Fuzz ./format/ FUZZTEST=1 REPANIC_LOG=/tmp/repanic go test -v -run Fuzz -fuzz=Fuzz ./format/

View File

@ -163,7 +163,7 @@ For details see [formats.md](doc/formats.md) and [usage.md](doc/usage.md).
Use one of the methods listed below or download a pre-built [release](https://github.com/wader/fq/releases) for macOS, Linux or Windows. Unarchive it and move the executable to `PATH` etc. Use one of the methods listed below or download a pre-built [release](https://github.com/wader/fq/releases) for macOS, Linux or Windows. Unarchive it and move the executable to `PATH` etc.
On macOS if you don't install using a method below you might have to manually allow the binary to run. This can be done by trying to run the binary, ignore the warning and then go into security preference and allow it. Same can be done with this command: On macOS if you don't install using one of the methods below you might have to manually allow the binary to run. This can be done by trying to run the binary, ignore the warning and then go into security preferences and allow it. Same can be done with this command:
```sh ```sh
xattr -d com.apple.quarantine fq && spctl --add fq xattr -d com.apple.quarantine fq && spctl --add fq
@ -241,7 +241,7 @@ cp "$(go env GOPATH)/bin/fq" /usr/local/bin
To build, run and test from source: To build, run and test from source:
```sh ```sh
# build an run # build and run
go run . go run .
go run . -d mp3 file.mp3 go run . -d mp3 file.mp3
# just build # just build

View File

@ -36,8 +36,8 @@ def urldecode:
# ex: .frames | changes(.header.sample_rate) # ex: .frames | changes(.header.sample_rate)
def changes(f): streaks_by(f)[].[0]; def changes(f): streaks_by(f)[].[0];
def toradix62sp: toradix(62; "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"); def to_radix62sp: to_radix(62; "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ");
def fromradix62sp: fromradix(62; { def from_radix62sp: from_radix(62; {
"0": 0, "1": 1, "2": 2, "3": 3,"4": 4, "5": 5, "6": 6, "7": 7, "8": 8, "9": 9, "0": 0, "1": 1, "2": 2, "3": 3,"4": 4, "5": 5, "6": 6, "7": 7, "8": 8, "9": 9,
"a": 10, "b": 11, "c": 12, "d": 13, "e": 14, "f": 15, "g": 16, "a": 10, "b": 11, "c": 12, "d": 13, "e": 14, "f": 15, "g": 16,
"h": 17, "i": 18, "j": 19, "k": 20, "l": 21, "m": 22, "n": 23, "h": 17, "i": 18, "j": 19, "k": 20, "l": 21, "m": 22, "n": 23,

View File

@ -1,4 +1,4 @@
<svg viewBox="0 0 1000 880" width="1000" height="880" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> <svg viewBox="0 0 1008 880" width="1008" height="880" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<style> <style>
rect { shape-rendering: crispEdges; } rect { shape-rendering: crispEdges; }
text { text {
@ -1504,19 +1504,20 @@
<text x="856" y="656">|</text> <text x="856" y="656">|</text>
<text x="872" y="656">t</text> <text x="872" y="656">t</text>
<text x="880" y="656">o</text> <text x="880" y="656">o</text>
<text x="888" y="656">y</text> <text x="888" y="656">_</text>
<text x="896" y="656">a</text> <text x="896" y="656">y</text>
<text x="904" y="656">m</text> <text x="904" y="656">a</text>
<text x="912" y="656">l</text> <text x="912" y="656">m</text>
<text x="920" y="656">&#39;</text> <text x="920" y="656">l</text>
<text x="936" y="656">f</text> <text x="928" y="656">&#39;</text>
<text x="944" y="656">i</text> <text x="944" y="656">f</text>
<text x="952" y="656">l</text> <text x="952" y="656">i</text>
<text x="960" y="656">e</text> <text x="960" y="656">l</text>
<text x="968" y="656">.</text> <text x="968" y="656">e</text>
<text x="976" y="656">m</text> <text x="976" y="656">.</text>
<text x="984" y="656">p</text> <text x="984" y="656">m</text>
<text x="992" y="656">3</text> <text x="992" y="656">p</text>
<text x="1000" y="656">3</text>
<text x="0" y="672">r</text> <text x="0" y="672">r</text>
<text x="8" y="672">e</text> <text x="8" y="672">e</text>
<text x="16" y="672">s</text> <text x="16" y="672">s</text>
@ -1596,31 +1597,33 @@
<text x="288" y="752">r</text> <text x="288" y="752">r</text>
<text x="296" y="752">o</text> <text x="296" y="752">o</text>
<text x="304" y="752">m</text> <text x="304" y="752">m</text>
<text x="312" y="752">u</text> <text x="312" y="752">_</text>
<text x="320" y="752">r</text> <text x="320" y="752">u</text>
<text x="328" y="752">l</text> <text x="328" y="752">r</text>
<text x="344" y="752">|</text> <text x="336" y="752">l</text>
<text x="360" y="752">.</text> <text x="352" y="752">|</text>
<text x="368" y="752">q</text> <text x="368" y="752">.</text>
<text x="376" y="752">u</text> <text x="376" y="752">q</text>
<text x="384" y="752">e</text> <text x="384" y="752">u</text>
<text x="392" y="752">r</text> <text x="392" y="752">e</text>
<text x="400" y="752">y</text> <text x="400" y="752">r</text>
<text x="408" y="752">.</text> <text x="408" y="752">y</text>
<text x="416" y="752">b</text> <text x="416" y="752">.</text>
<text x="432" y="752">=</text> <text x="424" y="752">b</text>
<text x="448" y="752">&#34;</text> <text x="440" y="752">=</text>
<text x="456" y="752">a</text> <text x="456" y="752">&#34;</text>
<text x="472" y="752">b</text> <text x="464" y="752">a</text>
<text x="488" y="752">c</text> <text x="480" y="752">b</text>
<text x="496" y="752">&#34;</text> <text x="496" y="752">c</text>
<text x="512" y="752">|</text> <text x="504" y="752">&#34;</text>
<text x="528" y="752">t</text> <text x="520" y="752">|</text>
<text x="536" y="752">o</text> <text x="536" y="752">t</text>
<text x="544" y="752">u</text> <text x="544" y="752">o</text>
<text x="552" y="752">r</text> <text x="552" y="752">_</text>
<text x="560" y="752">l</text> <text x="560" y="752">u</text>
<text x="568" y="752">&#39;</text> <text x="568" y="752">r</text>
<text x="576" y="752">l</text>
<text x="584" y="752">&#39;</text>
<text x="0" y="768">h</text> <text x="0" y="768">h</text>
<text x="8" y="768">t</text> <text x="8" y="768">t</text>
<text x="16" y="768">t</text> <text x="16" y="768">t</text>
@ -1743,38 +1746,40 @@
<text x="472" y="816">r</text> <text x="472" y="816">r</text>
<text x="480" y="816">o</text> <text x="480" y="816">o</text>
<text x="488" y="816">m</text> <text x="488" y="816">m</text>
<text x="496" y="816">u</text> <text x="496" y="816">_</text>
<text x="504" y="816">r</text> <text x="504" y="816">u</text>
<text x="512" y="816">l</text> <text x="512" y="816">r</text>
<text x="520" y="816">.</text> <text x="520" y="816">l</text>
<text x="528" y="816">q</text> <text x="528" y="816">.</text>
<text x="536" y="816">u</text> <text x="536" y="816">q</text>
<text x="544" y="816">e</text> <text x="544" y="816">u</text>
<text x="552" y="816">r</text> <text x="552" y="816">e</text>
<text x="560" y="816">y</text> <text x="560" y="816">r</text>
<text x="568" y="816">.</text> <text x="568" y="816">y</text>
<text x="576" y="816">p</text> <text x="576" y="816">.</text>
<text x="592" y="816">|</text> <text x="584" y="816">p</text>
<text x="608" y="816">f</text> <text x="600" y="816">|</text>
<text x="616" y="816">r</text> <text x="616" y="816">f</text>
<text x="624" y="816">o</text> <text x="624" y="816">r</text>
<text x="632" y="816">m</text> <text x="632" y="816">o</text>
<text x="640" y="816">b</text> <text x="640" y="816">m</text>
<text x="648" y="816">a</text> <text x="648" y="816">_</text>
<text x="656" y="816">s</text> <text x="656" y="816">b</text>
<text x="664" y="816">e</text> <text x="664" y="816">a</text>
<text x="672" y="816">6</text> <text x="672" y="816">s</text>
<text x="680" y="816">4</text> <text x="680" y="816">e</text>
<text x="696" y="816">|</text> <text x="688" y="816">6</text>
<text x="712" y="816">f</text> <text x="696" y="816">4</text>
<text x="720" y="816">r</text> <text x="712" y="816">|</text>
<text x="728" y="816">o</text> <text x="728" y="816">f</text>
<text x="736" y="816">m</text> <text x="736" y="816">r</text>
<text x="744" y="816">j</text> <text x="744" y="816">o</text>
<text x="752" y="816">s</text> <text x="752" y="816">m</text>
<text x="760" y="816">o</text> <text x="760" y="816">j</text>
<text x="768" y="816">n</text> <text x="768" y="816">s</text>
<text x="776" y="816">&#39;</text> <text x="776" y="816">o</text>
<text x="784" y="816">n</text>
<text x="792" y="816">&#39;</text>
<text x="0" y="832">{</text> <text x="0" y="832">{</text>
<text x="16" y="848" style="fill: #5555ff">&#34;</text> <text x="16" y="848" style="fill: #5555ff">&#34;</text>
<text x="24" y="848" style="fill: #5555ff">a</text> <text x="24" y="848" style="fill: #5555ff">a</text>

Before: 82 KiB, After: 82 KiB

View File

@ -29,10 +29,10 @@ s "file file.png"
rm -f file.png rm -f file.png
echo echo
c "Resolution of embedded PNG cover art as YAML" c "Resolution of embedded PNG cover art as YAML"
s "fq -r '.headers[0].frames[] | grep_by(.id == \"APIC\") | grep_by(.type == \"IHDR\") | {res: {width, height}} | toyaml' file.mp3" s "fq -r '.headers[0].frames[] | grep_by(.id == \"APIC\") | grep_by(.type == \"IHDR\") | {res: {width, height}} | to_yaml' file.mp3"
#echo #echo
c "Add query parameter to URL" c "Add query parameter to URL"
s "echo 'http://host?a=b' | fq -Rr 'fromurl | .query.b = \"a b c\" | tourl'" s "echo 'http://host?a=b' | fq -Rr 'from_url | .query.b = \"a b c\" | to_url'"
echo echo
c "Extract JSON and base64 encoded query parameter p" c "Extract JSON and base64 encoded query parameter p"
s "echo 'https://host?p=eyJhIjoiaGVsbG8ifQ%3D%3D' | fq -R 'fromurl.query.p | frombase64 | fromjson'" s "echo 'https://host?p=eyJhIjoiaGVsbG8ifQ%3D%3D' | fq -R 'from_url.query.p | from_base64 | fromjson'"

View File

@ -213,7 +213,7 @@ Supports decoding BER, CER and DER (X.690).
### Can be used to decode certificates etc ### Can be used to decode certificates etc
```sh ```sh
$ fq -d bytes 'frompem | asn1_ber | d' cert.pem $ fq -d bytes 'from_pem | asn1_ber | d' cert.pem
``` ```
### Can decode nested values ### Can decode nested values
@ -510,7 +510,7 @@ Decode value as csv
### TSV to CSV ### TSV to CSV
```sh ```sh
$ fq -d csv -o comma="\t" tocsv file.tsv $ fq -d csv -o comma="\t" to_csv file.tsv
``` ```
### Convert rows to objects based on header row ### Convert rows to objects based on header row
@ -585,7 +585,7 @@ HTML is decoded in HTML5 mode and will always include `<html>`, `<body>` and `<h
See xml format for more examples and how to preserve element order and how to encode to xml. See xml format for more examples and how to preserve element order and how to encode to xml.
There is no `tohtml` function, see `toxml` instead. There is no `to_html` function, see `to_xml` instead.
### Element as object ### Element as object
@ -751,7 +751,7 @@ $ fq 'del(.tracks) | grep_by(.type=="mdat").data = "<excluded>" | tovalue' file.
### Force decode a single box ### Force decode a single box
```sh ```sh
$ fq -n '"AAAAHGVsc3QAAAAAAAAAAQAAADIAAAQAAAEAAA==" | frombase64 | mp4({force:true}) | d' $ fq -n '"AAAAHGVsc3QAAAAAAAAAAQAAADIAAAQAAAEAAA==" | from_base64 | mp4({force:true}) | d'
``` ```
### Lookup mp4 box using a mp4 box path. ### Lookup mp4 box using a mp4 box path.
@ -887,7 +887,7 @@ Which variant to use depends a bit what you want to do. The object variant might
to query for a specific value but array might be easier to use to generate xml or to query to query for a specific value but array might be easier to use to generate xml or to query
after all elements of some kind etc. after all elements of some kind etc.
Encoding is done using the `toxml` function and it will figure what variant that is used based on the input value. Encoding is done using the `to_xml` function and it will figure what variant that is used based on the input value.
It has two optional options `indent` and `attribute_prefix`. It has two optional options `indent` and `attribute_prefix`.
### Elements as object ### Elements as object
@ -930,7 +930,7 @@ $ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq '.a.c["#text"]'
"ccc" "ccc"
# decode to object and encode to xml # decode to object and encode to xml
$ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o seq=true 'toxml({indent:2})' $ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o seq=true 'to_xml({indent:2})'
<a> <a>
<b></b> <b></b>
<b>bbb</b> <b>bbb</b>
@ -973,7 +973,7 @@ $ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -d xml -o array=true
] ]
# decode to array and encode to xml # decode to array and encode to xml
$ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o array=true -o seq=true 'toxml({indent:2})' $ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o array=true -o seq=true 'to_xml({indent:2})'
<a> <a>
<b></b> <b></b>
<b>bbb</b> <b>bbb</b>

File diff suppressed because it is too large

Before: 139 KiB, After: 141 KiB

View File

@ -1,7 +1,7 @@
#!/usr/bin/env fq -rnf #!/usr/bin/env fq -rnf
def color: def color:
tomd5 | [.[range(3)]] | map(band(.; 0x7f)+60 | toradix(16) | "0"[length:]+.) | join(""); to_md5 | [.[range(3)]] | map(band(.; 0x7f)+60 | to_radix(16) | "0"[length:]+.) | join("");
def _formats_dot: def _formats_dot:
def _record($title; $fields): def _record($title; $fields):

View File

@ -38,7 +38,7 @@ fq dv file
# same as # same as
fq 'd({verbose: true})' file fq 'd({verbose: true})' file
# JSON repersenation for whole file # JSON representation for whole file
fq tovalue file fq tovalue file
# JSON but raw bit fields truncated # JSON but raw bit fields truncated
fq -o bits_format=truncate tovalue file fq -o bits_format=truncate tovalue file
@ -51,7 +51,7 @@ fq 'grep(123)' file
# grep whole tree by condition # grep whole tree by condition
fq 'grep_by(. >= 100 and . =< 100)' file fq 'grep_by(. >= 100 and . =< 100)' file
# recursively look for values fullfilling some condition # recursively look for values fulfilling some condition
fq '.. | select(.type=="trak")?' file fq '.. | select(.type=="trak")?' file
fq 'grep_by(.type=="trak")' file fq 'grep_by(.type=="trak")' file
# grep_by(f) is alias for .. | select(f)?, that is: recurse, select and ignore errors # grep_by(f) is alias for .. | select(f)?, that is: recurse, select and ignore errors
@ -73,7 +73,7 @@ fq -o force=true -d mp4 file.mp4
### Display output ### Display output
`display` or `d` is the main function for displying values and is also the function that will be used if no other output function is explicitly used. If its input is a decode value it will output a dump and tree structure or otherwise it will output as JSON. `display` or `d` is the main function for displaying values and is also the function that will be used if no other output function is explicitly used. If its input is a decode value it will output a dump and tree structure or otherwise it will output as JSON.
Below demonstrates some usages: Below demonstrates some usages:
@ -81,17 +81,17 @@ First and second example does the same thing, inputs `"hello"` to `display`.
![fq demo](display_json.svg) ![fq demo](display_json.svg)
In the next few examples we select out the first "edit list" box in an mp4 file, it's a list of which part of media track to be included during playback, and displays it in various ways. In the next few examples we select out the first "edit list" box in an mp4 file, it's a list of which part of the media track to be included during playback, and displays it in various ways.
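
One plausible way to select that box explicitly (a sketch; any mp4 with an edit list should work):

```sh
fq 'first(grep_by(.type == "elst")) | d' file.mp4
```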
Default if not explicitly used `display` will only show the root level: Default if not explicitly used `display` will only show the root level:
![fq demo](display_decode_value.svg) ![fq demo](display_decode_value.svg)
First row shows ruler with byte offset into the line and jq path for the value. First row shows a ruler with byte offset into the line and jq path for the value.
The columns are: The columns are:
- Start address for the line. For example we see that `type` starts at `0xd60`+`0x09`. - Start address for the line. For example we see that `type` starts at `0xd60`+`0x09`.
- Hex repersenation of input bits for value. Will show the whole byte even if the value only partially uses bits from it. - Hex representation of input bits for value. Will show the whole byte even if the value only partially uses bits from it.
- ASCII representation of input bits for value. Will show the whole byte even if the value only partially uses bits from it. - ASCII representation of input bits for value. Will show the whole byte even if the value only partially uses bits from it.
- Tree structure of decoded value, symbolic value and description. - Tree structure of decoded value, symbolic value and description.
@ -110,7 +110,7 @@ Same but verbose `dv`:
In verbose mode bit ranges and array element names as shown. In verbose mode bit ranges and array element names as shown.
Bit range uses `bytes.bits` notation. For example `type` start at byte `0xd69` bit `0` (left out if zero) and ends at `0xd6c` bit `7` (inclusive) and have byte size of `4`. Bit range uses `bytes.bits` notation. For example `type` starts at byte `0xd69` bit `0` (left out if zero) and ends at `0xd6c` bit `7` (inclusive) and has a byte size of `4`.
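
Spelled out with the values above (same notation as the dumps elsewhere in this diff):

```
0xd69-0xd6c.7 (4)
  |      |     └ size: 0xd6c - 0xd69 + 1 = 4 bytes
  |      └ ends at byte 0xd6c, bit 7 (inclusive)
  └ starts at byte 0xd69, bit 0 (the bit part is omitted when zero)
```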
There are also some other `display` aliases: There are also some other `display` aliases:
- `da` same as `display({array_truncate: 0})` which will not truncate long arrays. - `da` same as `display({array_truncate: 0})` which will not truncate long arrays.
@ -195,13 +195,13 @@ fq '.frames[0:10] | map(tobytesrange.start)' file.mp3
```sh ```sh
# decode byte range 100 to end as mp3_frame # decode byte range 100 to end as mp3_frame
fq -d bytes '.[100:] | mp3_frame | d' file.mp3 fq -d bytes '.[100:] | mp3_frame | d' file.mp3
# decode byte range 10 bytes from .somefield and preseve relative position in file # decode byte range 10 bytes from .somefield and preserve relative position in file
fq '.somefield | tobytesrange[10:] | mp3_frame | d' file.mp3 fq '.somefield | tobytesrange[10:] | mp3_frame | d' file.mp3
``` ```
#### Show AVC SPS difference between two mp4 files #### Show AVC SPS difference between two mp4 files
`-n` tells fq to not have an implicit `input`, `f` is function to select out some interesting value, call `diff` with two arguments, `-n` tells fq to not have an implicit `input`, `f` is a function to select out some interesting value, call `diff` with two arguments,
decoded value for `a.mp4` and `b.mp4` filtered thru `f`. decoded value for `a.mp4` and `b.mp4` filtered thru `f`.
```sh ```sh
@ -210,7 +210,7 @@ fq -n 'def f: .. | select(format=="avc_sps"); diff(input|f; input|f)' a.mp4 b.mp
#### Extract first JPEG found in file #### Extract first JPEG found in file
Recursively look for first value that is a `jpeg` decode value root. Use `tobytes` to get bytes for value. Redirect bytes to a file. Recursively look for the first value that is a `jpeg` decode value root. Use `tobytes` to get bytes for value. Redirect bytes to a file.
```sh ```sh
fq 'first(.. | select(format=="jpeg")) | tobytes' file > file.jpeg fq 'first(.. | select(format=="jpeg")) | tobytes' file > file.jpeg
@ -218,7 +218,7 @@ fq 'first(.. | select(format=="jpeg")) | tobytes' file > file.jpeg
#### Sample size histogram #### Sample size histogram
Recursively look for a all sample size boxes "stsz" and use `?` to ignore errors when doing `.type` on arrays etc. Save reference to box, count unique values, save the max, output the path to the box and output a historgram scaled to 0-100. Recursively look for all sample size boxes "stsz" and use `?` to ignore errors when doing `.type` on arrays etc. Save reference to box, count unique values, save the max, output the path to the box and output a histogram scaled to 0-100.
```sh ```sh
fq '.. | select(.type=="stsz")? as $stsz | .entries | count | max_by(.[1])[1] as $m | ($stsz | topath | path_to_expr), (.[] | "\(.[0]): \((100*.[1]/$m)*"=") \(.[1])") | println' file.mp4 fq '.. | select(.type=="stsz")? as $stsz | .entries | count | max_by(.[1])[1] as $m | ($stsz | topath | path_to_expr), (.[] | "\(.[0]): \((100*.[1]/$m)*"=") \(.[1])") | println' file.mp4
@ -281,21 +281,21 @@ For a more convenient jq experience these might be interesting:
- [vscode-jq](https://github.com/wader/vscode-jq) - [vscode-jq](https://github.com/wader/vscode-jq)
- [jq-lsp](https://github.com/wader/jq-lsp) - [jq-lsp](https://github.com/wader/jq-lsp)
Common beginner gotcha are: Common beginner gotchas are:
- jq's use of `;` and `,`. jq uses `;` as argument separator - jq's use of `;` and `,`. jq uses `;` as argument separator
and `,` as output separator. To call a function `f` with two arguments use `f(1; 2)`. If you do `f(1, 2)` you pass a and `,` as output separator. To call a function `f` with two arguments use `f(1; 2)`. If you do `f(1, 2)` you pass a
single argument `1, 2` (a lambda expression that output `1` and then output `2`) to `f`. single argument `1, 2` (a lambda expression that outputs `1` and then outputs `2`) to `f`.
- Expressions can return or "output" zero or more values. This is how loops, foreach etc is - Expressions can return or "output" zero or more values. This is how loops, foreach etc are
achieved. achieved.
- Expressions have one implicit input and output value. This how pipelines like `1 | . * 2` work. - Expressions have one implicit input and output value. This is how pipelines like `1 | . * 2` work.
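
A minimal illustration of the `;` vs `,` gotcha above:

```jq
def add(a; b): a + b;
def double(x): x * 2;
add(1; 2),      # two arguments -> 3
double(1, 2)    # one argument "1, 2" that outputs two values -> 2, 4
```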
## Types specific to fq ## Types specific to fq
fq has two additional types compared to jq, decode value and binary. In standard jq expressions they will in most case behave as some standard jq type. fq has two additional types compared to jq, decode value and binary. In standard jq expressions they will in most cases behave as some standard jq type.
### Decode value ### Decode value
This type is returned by decoders and it used to represent parts of the decoed input. It can act as all standard jq types, object, array, number, string etc. This type is returned by decoders and it is used to represent parts of the decoded input. It can act as all standard jq types, object, array, number, string etc.
Each decode value has these properties: Each decode value has these properties:
- A bit range in the input - A bit range in the input
@ -317,18 +317,18 @@ Each non-compound decode value has these properties:
- `topath` is the jq path for the decode value - `topath` is the jq path for the decode value
- `torepr` convert decode value to its representation if possible - `torepr` convert decode value to its representation if possible
The value of a decode value is the symbolic value if available and otherwise the actual value. To explicitly access the value use `tovalue`. In most expression this is not needed as it will be done automactically. The value of a decode value is the symbolic value if available and otherwise the actual value. To explicitly access the value use `tovalue`. In most expressions this is not needed as it will be done automatically.
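
A sketch of the difference, assuming an mp3 frame where `sample_rate` is stored as a small index that is mapped to a Hz value:

```jq
.frames[0].header.sample_rate             # symbolic value used implicitly, e.g. 44100
.frames[0].header.sample_rate | toactual  # the index actually coded in the bits
.frames[0].header.sample_rate | tovalue   # plain jq value, no decode value wrapping
```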
### Binary ### Binary
Binaries are raw bits with a unit size, 1 (bits) or 8 (bytes), that can have a non-byte aligned size. Will act as byte padded strings in standard jq expressions. Binaries are raw bits with a unit size, 1 (bits) or 8 (bytes), that can have a non-byte aligned size. Will act as byte padded strings in standard jq expressions.
Use `tobits` and `tobytes` to create them from a decode values, strings, numbers or binary arrays. `tobytes` will if needed zero pad most significant bits to be byte aligned. Use `tobits` and `tobytes` to create them from decode values, strings, numbers or binary arrays. `tobytes` will, if needed, zero pad most significant bits to be byte aligned.
There is also `tobitsrange` and `tobytesrange` which does the same thing but will preserve it's source range when displayed. There is also `tobitsrange` and `tobytesrange` which does the same thing but will preserve its source range when displayed.
- `"string" | tobytes` produces a binary with UTF8 codepoint bytes. - `"string" | tobytes` produces a binary with UTF8 codepoint bytes.
- `1234 | tobits` produces a binary with the unsigned big-endian integer 1234 with enough bits to represent the number. Use `tobytes` to get the same but with enough bytes to represent the number. This is different to how numbers works inside binary arrays where they are limited to 0-255. - `1234 | tobits` produces a binary with the unsigned big-endian integer 1234 with enough bits to represent the number. Use `tobytes` to get the same but with enough bytes to represent the number. This is different to how numbers work inside binary arrays where they are limited to 0-255.
- `["abc", 123, ...] | tobytes` produce a binary from a binary array. See [binary array](#binary-array) below. - `["abc", 123, ...] | tobytes` produce a binary from a binary array. See [binary array](#binary-array) below.
- `.[index]` access bit or byte at index `index`. Index is in units. - `.[index]` access bit or byte at index `index`. Index is in units.
- `[0x12, 0x34, 0x56] | tobytes[1]` is `0x34` - `[0x12, 0x34, 0x56] | tobytes[1]` is `0x34`
@ -369,13 +369,19 @@ TODO: padding and alignment
## Functions ## Functions
All decode functions are available in two forms: just `<format>` (like `mp3`) which returns a decode value even on decode error, and `from_<format>` which throws an error on decode error.
Note that jq sometimes uses the notation `name/0`, `name/1` etc in error messages and documentation which means `<function-name>/<arity>`. Same function names with different arity are treated as separate functions, but are usually related in some way in practice.
### Functions added in fq
- All standard library functions from jq - All standard library functions from jq
- Adds a few new general functions: - Adds a few new general functions:
- `print`, `println`, `printerr`, `printerrln` prints to stdout and stderr. - `print`, `println`, `printerr`, `printerrln` prints to stdout and stderr.
- `group` group values, same as `group_by(.)`. - `group` group values, same as `group_by(.)`.
- `streaks`, `streaks_by(f)` like `group` but groups streaks based on condition. - `streaks`, `streaks_by(f)` like `group` but groups streaks based on condition.
- `count`, `count_by(f)` like `group` but counts groups lengths. - `count`, `count_by(f)` like `group` but counts groups lengths.
- `debug(f)` like `debug` but uses arg to produce debug message. `{a: 123} | debug({a}) | ...`. - `debug(f)` like `debug` but uses arg to produce a debug message. `{a: 123} | debug({a}) | ...`.
- `path_to_expr` from `["key", 1]` to `".key[1]"`. - `path_to_expr` from `["key", 1]` to `".key[1]"`.
- `expr_to_path` from `".key[1]"` to `["key", 1]`. - `expr_to_path` from `".key[1]"` to `["key", 1]`.
- `diff($a; $b)` produce diff object between two values. - `diff($a; $b)` produce diff object between two values.
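
For instance, the two path helpers above invert each other:

```jq
["key", 1] | path_to_expr   # => ".key[1]"
".key[1]" | expr_to_path    # => ["key", 1]
```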
@ -394,7 +400,7 @@ unary uses input and if more than one argument all as arguments ignoring the inp
- `toactual`, `toactual($opts)` actual value (usually the decoded value) - `toactual`, `toactual($opts)` actual value (usually the decoded value)
- `tosym`, `tosym($opts)` symbolic value (mapped etc) - `tosym`, `tosym($opts)` symbolic value (mapped etc)
- `todescription` description of value - `todescription` description of value
- `torepr` convert decode value into what it reptresents. For example convert msgpack decode value - `torepr` converts decode value into what it represents. For example convert msgpack decode value
into a value representing its JSON representation. into a value representing its JSON representation.
- All regexp functions work with binary as input and pattern argument with these differences - All regexp functions work with binary as input and pattern argument with these differences
compared to when using string input: compared to when using string input:
@ -402,14 +408,14 @@ unary uses input and if more than one argument all as arguments ignoring the inp
- For `capture` the `.string` value is a binary. - For `capture` the `.string` value is a binary.
- If pattern is a binary it will be matched literally and not as a regexp. - If pattern is a binary it will be matched literally and not as a regexp.
- If pattern is a binary or flags include "b" each input byte will be read as separate code points - If pattern is a binary or flags include "b" each input byte will be read as separate code points
- String function are not overloaded to support binary for now as some of them are bahaviours that might be confusing. - String functions are not overloaded to support binary for now as some of them have behaviors that might be confusing.
- `explode` is overloaded to work with binary. Will explode into array of the unit of the binary. - `explode` is overloaded to work with binary. Will explode into array of the unit of the binary.
end of binary. end of binary.
instead of possibly multi-byte UTF-8 codepoints. This allows to match raw bytes. Ex: `match("\u00ff"; "b")` instead of possibly multi-byte UTF-8 codepoints. This allows to match raw bytes. Ex: `match("\u00ff"; "b")`
will match the byte `0xff` and not the UTF-8 encoded codepoint for 255, `match("[^\u00ff]"; "b")` will match will match the byte `0xff` and not the UTF-8 encoded codepoint for 255, `match("[^\u00ff]"; "b")` will match
all non-`0xff` bytes. all non-`0xff` bytes.
- `grep` functions take 1 or 2 arguments. First is a scalar to match, where a string is - `grep` functions take 1 or 2 arguments. First is a scalar to match, where a string is
treated as a regexp. A binary will be matches exact bytes. Second argument are regexp treated as a regexp. A binary will match exact bytes. Second argument are regexp
flags with addition that "b" will treat each byte in the input binary as a code point, this flags with addition that "b" will treat each byte in the input binary as a code point, this
makes it possible to match exact bytes. makes it possible to match exact bytes.
- `grep($v)`, `grep($v; $flags)` recursively match value and binary - `grep($v)`, `grep($v; $flags)` recursively match value and binary
@ -418,39 +424,55 @@ unary uses input and if more than one argument all as arguments ignoring the inp
- `fgrep($v)`, `fgrep($v; $flags)` recursively match field name - `fgrep($v)`, `fgrep($v; $flags)` recursively match field name
- `grep_by(f)` recursively match using a filter. Ex: `grep_by(. > 180 and . < 200)`, `first(grep_by(format == "id3v2"))`. - `grep_by(f)` recursively match using a filter. Ex: `grep_by(. > 180 and . < 200)`, `first(grep_by(format == "id3v2"))`.
- Binary: - Binary:
- `tobits` - Transform input to binary with bit as unit, does not preserving source range, will start at zero. - `tobits` - Transform input to binary with bit as unit, does not preserve source range, will start at zero.
- `tobitsrange` - Transform input to binary with bit as unit, preserves source range if possible. - `tobitsrange` - Transform input to binary with bit as unit, preserves source range if possible.
- `tobytes` - Transform input to binary with byte as unit, does not preserving source range, will start at zero. - `tobytes` - Transform input to binary with byte as unit, does not preserve source range, will start at zero.
- `tobytesrange` - Transform input binary with byte as unit, preserves source range if possible. - `tobytesrange` - Transform input binary with byte as unit, preserves source range if possible.
- `.[start:end]`, `.[:end]`, `.[start:]` - Slice binary from start to end preserving source range. - `.[start:end]`, `.[:end]`, `.[start:]` - Slice binary from start to end preserving source range.
- `open` open file for reading - `open` open file for reading
- All decode function takes a optional option argument. The only option currently is `force` to ignore decoder asserts. - All decode functions take an optional option argument. The only option currently is `force` to ignore decoder asserts.
For example to decode as mp3 and ignore asserts do `mp3({force: true})` or `decode("mp3"; {force: true})`, from command line For example to decode as mp3 and ignore asserts do `mp3({force: true})` or `decode("mp3"; {force: true})`, from command line
you currently have to do `fq -d bytes 'mp3({force: true})' file`. you currently have to do `fq -d bytes 'mp3({force: true})' file`.
- `decode`, `decode("<format>")`, `decode("<format>"; $opts)` decode format - `decode`, `decode("<format>")`, `decode("<format>"; $opts)` decode format
- `probe`, `probe($opts)` probe and decode format - `probe`, `probe($opts)` probe and decode format
- `mp3`, `mp3($opts)`, ..., `<format>`, `<format>($opts)` same as `decode("<format>")`, `decode("<format>"; $opts)` decode as format - `mp3`, `mp3($opts)`, ..., `<format>`, `<format>($opts)` same as `decode("<format>")`, `decode("<format>"; $opts)` decode as format and return decode value even on decode error.
- `from_mp3`, `from_mp3($opts)`, ..., `from_<format>`, `from_<format>($opts)` same as `decode("<format>")`, `decode("<format>"; $opts)` decode as format but throw error on decode error.
- Display shows hexdump/ASCII/tree for decode values and jq value for other types. - Display shows hexdump/ASCII/tree for decode values and jq value for other types.
- `d`/`d($opts)` display value and truncate long arrays and binaries - `d`/`d($opts)` display value and truncate long arrays and binaries
- `da`/`da($opts)` display value and don't truncate arrays - `da`/`da($opts)` display value and don't truncate arrays
- `dd`/`dd($opts)` display value and don't truncate arrays or binaries - `dd`/`dd($opts)` display value and don't truncate arrays or binaries
- `dv`/`dv($opts)` verbosely display value and don't truncate arrays but truncate binaries - `dv`/`dv($opts)` verbosely display value and don't truncate arrays but truncate binaries
- `ddv`/`ddv($opts)` verbosely display value and don't truncate arrays or binaries - `ddv`/`ddv($opts)` verbosely display value and don't truncate arrays or binaries
- `p`/`preview` show preview of field tree
- `hd`/`hexdump` hexdump value - `hd`/`hexdump` hexdump value
- `repl`/`repl($opts)` nested REPL, must be last in a pipeline. `1 | repl`, can "slurp" outputs. Ex: `1, 2, 3 | repl`, `[1,2,3] | repl({compact: true})`. - `repl`/`repl($opts)` nested REPL, must be last in a pipeline. `1 | repl`, can "slurp" outputs. Ex: `1, 2, 3 | repl`, `[1,2,3] | repl({compact: true})`.
- `slurp("<name>")` slurp outputs and save them to `$name`, must be last in pipeline. Will be available as global array `$name`. Ex `1,2,3 | slurp("a")`, `$a[]` same as `spew("a")`. - `slurp("<name>")` slurp outputs and save them to `$name`, must be last in the pipeline. Will be available as a global array `$name`. Ex `1,2,3 | slurp("a")`, `$a[]` same as `spew("a")`.
- `spew`/`spew("<name>")` output previously slurped values. `spew` outputs all slurps as an object, `spew("<name>")` outouts one slurp. Ex: `spew("a")`. - `spew`/`spew("<name>")` output previously slurped values. `spew` outputs all slurps as an object, `spew("<name>")` outputs one slurp. Ex: `spew("a")`.
- `paste` read string from stdin until ^D. Useful for pasting text. - `paste` read string from stdin until ^D. Useful for pasting text.
- Ex: `paste | frompem | asn1_ber | repl` read from stdin then decode and start a new sub-REPL with result. - Ex: `paste | from_pem | asn1_ber | repl` read from stdin then decode and start a new sub-REPL with result.
### Naming inconsistencies
jq's naming convention is a bit inconsistent, some standard library functions are named `tojson` while others `from_entries`. fq follows this tradition a bit but tries to use snake_case unless there is a good reason.
Here are all the non-snake_case functions added by fq. Most of them deal with decode and binary values which are new "primitive" types:
- `toactual`
- `tobits`
- `tobitsrange`
- `tobytes`
- `tobytesrange`
- `todescription`
- `topath`
- `torepr`
- `tosym`
- `tovalue`
### Encodings, serializations and hashes ### Encodings, serializations and hashes
In an addition to binary formats fq also support reading to and from encodings and serialization formats. In addition to binary formats fq also supports reading to and from encodings and serialization formats.
At the moment fq does not have any dedicated argument for serialization formats but raw string input `-R` slurp `-s` and raw string output `-r` can make things easier. The combination `-Rs` will read all inputs into one string (same as jq). At the moment fq does not have any dedicated argument for serialization formats but raw string input `-R` slurp `-s` and raw string output `-r` can make things easier. The combination `-Rs` will read all inputs into one string (same as jq).
Note that `from*` functions output jq values and `to*` takes jq values as input so in some cases not all information will be properly preserved. For example for the element and attribute order might change and text and comment nodes might move or be merged. [yq](https://github.com/mikefarah/yq) might be a better tool if that is needed. Note that `from*` functions output jq values and `to*` takes jq values as input so in some cases not all information will be properly preserved. For example, element and attribute order might change and text and comment nodes might move or be merged. [yq](https://github.com/mikefarah/yq) might be a better tool if that is needed.
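
A small sketch of that lossiness (hypothetical input; attribute order and comment placement may be normalized on the way back out):

```sh
echo '<a z="1" b="2"><!--hi-->x</a>' | fq -r -d xml 'to_xml'
```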
Some example usages: Some example usages:
@ -463,19 +485,19 @@ $ fq '...' file.yml
$ fq -r 'tojson({indent:2})' file.yml $ fq -r 'tojson({indent:2})' file.yml
# add token to URL # add token to URL
$ echo -n "https://host.org" | fq -Rsr 'fromurl | .user.username="token" | tourl' $ echo -n "https://host.org" | fq -Rsr 'from_url | .user.username="token" | tourl'
https://token@host.org https://token@host.org
# top 3 hosts in src or href attributes: # top 3 hosts in src or href attributes:
# -d to decode as html, can't be probed as html5 parsers always produce some parse tree # -d to decode as html, can't be probed as html5 parsers always produce some parse tree
# [...] to start collect values into an array # [...] to start collect values into an array
# .. | ."@src"?, ."@href"? | values, recurse and try (?) to get src and href attributes and filter out nulls # .. | ."@src"?, ."@href"? | values, recurse and try (?) to get src and href attributes and filter out nulls
# fromurl.host | values, parse as url and filter out those without a host # from_url.host | values, parse as url and filter out those without a host
# count to count unique values, returns [[key, count], ...] # count to count unique values, returns [[key, count], ...]
# reverse sort by count and pick first 3 # reverse sort by count and pick first 3
# map [key, count] tuples into {key: key, values: count} # map [key, count] tuples into {key: key, values: count}
# from_entries, convert into object # from_entries, convert into object
$ curl -s https://www.discogs.com/ | fq -d html '[.. | ."@src"?, ."@href"? | values | fromurl.host | values] | count | sort_by(-.[1])[0:3] | map({key: .[0], value: .[1]}) | from_entries' $ curl -s https://www.discogs.com/ | fq -d html '[.. | ."@src"?, ."@href"? | values | from_url.host | values] | count | sort_by(-.[1])[0:3] | map({key: .[0], value: .[1]}) | from_entries'
{ {
"blog.discogs.com": 9, "blog.discogs.com": 9,
"st.discogs.com": 10, "st.discogs.com": 10,
@ -488,7 +510,7 @@ $ fq -i . <(curl -sL https://github.com/stefangabos/world_countries/archive/mas
# select from interesting xml file # select from interesting xml file
zip> .local_files[] | select(.file_name == "world_countries-master/data/countries/en/world.xml").uncompressed | repl zip> .local_files[] | select(.file_name == "world_countries-master/data/countries/en/world.xml").uncompressed | repl
# convert xml into jq value # convert xml into jq value
> .local_files[95].uncompressed string> fromxml | repl > .local_files[95].uncompressed string> from_xml | repl
# sort countries by and select the first one # sort countries by and select the first one
>> object> .countries.country | sort_by(."@name") | first | repl >> object> .countries.country | sort_by(."@name") | first | repl
# see what current input is # see what current input is
@ -500,7 +522,7 @@ zip> .local_files[] | select(.file_name == "world_countries-master/data/countrie
"@name": "Afghanistan" "@name": "Afghanistan"
} }
# remove "@" prefix from keys and convert to YAML and print it # remove "@" prefix from keys and convert to YAML and print it
>>> object> with_entries(.key |= .[1:]) | toyaml | print >>> object> with_entries(.key |= .[1:]) | to_yaml | print
alpha2: af alpha2: af
alpha3: afg alpha3: afg
id: "4" id: "4"
@ -512,21 +534,21 @@ name: Afghanistan
zip> ^D zip> ^D
``` ```
- `fromxml`/`fromxml($opts)` Parse XML into jq value.<br> - `from_xml`/`from_xml($opts)` Parse XML into jq value.<br>
`{seq: true}` preserve element ordering if more than one sibling.<br> `{seq: true}` preserve element ordering if more than one sibling.<br>
`{array: true}` use nested `[name, attributes, children]` arrays to represent elements. Attributes will be `null` if none and children will be `[]` if none, this is to make it easier to work it. `toxml` does not require this.<br> `{array: true}` use nested `[name, attributes, children]` arrays to represent elements. Attributes will be `null` if none and children will be `[]` if none, this is to make it easier to work with as the array always has 3 values. `to_xml` does not require this.<br>
- `fromhtml`/`fromhtml($opts)` Parse HTML into jq value.<br> - `from_html`/`from_html($opts)` Parse HTML into jq value.<br>
Similar to `fromxml` but parses html5 in non-script mode. Will always have a `html` root with `head` and `body` elements.<br> Similar to `from_xml` but parses html5 in non-script mode. Will always have a `html` root with `head` and `body` elements.<br>
`{array: true}` use nested arrays to represent elements.<br> `{array: true}` use nested arrays to represent elements.<br>
`{seq: true}` preserve element ordering if more than one sibling.<br> `{seq: true}` preserve element ordering if more than one sibling.<br>
- `toxml`/`toxml($opts})` Serialize jq value into XML.<br> - `to_xml`/`to_xml($opts})` Serialize jq value into XML.<br>
`{indent: number}` indent child elements.<br> `{indent: number}` indent child elements.<br>
Assumes object representation if input is an object, and nested arrays if input is an array.<br> Assumes object representation if input is an object, and nested arrays if input is an array.<br>
Will automatically add a root `doc` element if jq value has more than one root element. Will automatically add a root `doc` element if jq value has more than one root element.
If a `#seq` is found on at least one element all siblings will be sorted by sequence number. Attributes are always sorted.<br> If a `#seq` is found on at least one element all siblings will be sorted by sequence number. Attributes are always sorted.<br>
XML elements can be represented as jq value in two ways, as objects (inspired by [mxj](https://github.com/clbanning/mxj) and [xml.com's Converting Between XML and JSON XML elements can be represented as jq value in two ways, as objects (inspired by [mxj](https://github.com/clbanning/mxj) and [xml.com's Converting Between XML and JSON
](https://www.xml.com/pub/a/2006/05/31/converting-between-xml-and-json.html)) or nested arrays. Both representations are lossy and might lose ordering of elements, text nodes and comments. In object representation `fromxml`, `fromhtml` and `toxml` support `{seq:true}` option to parse/serialize `{"#seq"=<number>}` attributes to preserve element sibling ordering. ](https://www.xml.com/pub/a/2006/05/31/converting-between-xml-and-json.html)) or nested arrays. Both representations are lossy and might lose ordering of elements, text nodes and comments. In object representation `from_xml`, `from_html` and `to_xml` support `{seq:true}` option to parse/serialize `{"#seq"=<number>}` attributes to preserve element sibling ordering.
The object version is denser and convenient to query, the nested arrays version is probably easier to use when generating XML. The object version is denser and convenient to query, the nested arrays version is probably easier to use when generating XML.
@ -546,9 +568,9 @@ zip> ^D
- For explicit sibling ordering `#seq` keys with a number, can be negative, assumed zero if missing. - For explicit sibling ordering `#seq` keys with a number, can be negative, assumed zero if missing.
- Child element with only text as `<name>` key with text as value. - Child element with only text as `<name>` key with text as value.
- Child element with more than just text as `<name>` key with value an object. - Child element with more than just text as `<name>` key with value an object.
- Multiple child element sibling with same name as `name` key with value as array with strings and objects. - Multiple child element siblings with same name as `name` key with value as array with strings and objects.
```jq ```jq
> $xml | fromxml > $xml | from_xml
{ {
"doc": { "doc": {
"child": [ "child": [
@ -566,12 +588,12 @@ zip> ^D
``` ```
With nested array representation, an array with these values `["<name>", {attributes...}, [children...]]` With nested array representation, an array with these values `["<name>", {attributes...}, [children...]]`
- Index 0 is element name. - Index 0 is an element name.
- Index 1 object attributes (including `#text` and `#comment` keys). - Index 1 object attributes (including `#text` and `#comment` keys).
- Index 2 array of child elements. - Index 2 array of child elements.
# #
```jq ```jq
> $xml | fromxml({array: true}) > $xml | from_xml({array: true})
[ [
"doc", "doc",
[ [
@ -599,7 +621,7 @@ zip> ^D
``` ```
Parse and include `#seq` attributes if needed: Parse and include `#seq` attributes if needed:
```jq ```jq
> $xml | fromxml({seq:true}) > $xml | from_xml({seq:true})
{ {
"doc": { "doc": {
"child": [ "child": [
@ -622,7 +644,7 @@ zip> ^D
```` ````
Select values in `<doc>`, remove `<child>`, add a `<new>` element, serialize to xml with 2 space indent and print the string Select values in `<doc>`, remove `<child>`, add a `<new>` element, serialize to xml with 2 space indent and print the string
```jq ```jq
> $xml | fromxml.doc | del(.child) | .new = "abc" | {root: .} | toxml({indent: 2}) | println > $xml | from_xml.doc | del(.child) | .new = "abc" | {root: .} | to_xml({indent: 2}) | println
<root> <root>
<new>abc</new> <new>abc</new>
<other>text</other> <other>text</other>
@ -633,45 +655,47 @@ JSON and jq-flavoured JSON
- `fromjson` Parse JSON into jq value. - `fromjson` Parse JSON into jq value.
- `tojson`/`tojson($opt)` Serialize jq value into JSON.<br> - `tojson`/`tojson($opt)` Serialize jq value into JSON.<br>
`{indent: number}` indent array/object values.<br> `{indent: number}` indent array/object values.<br>
- `fromjq` Parse jq-flavoured JSON into jq value. - `from_jq` Parse jq-flavoured JSON into jq value.
- `tojq`/`tojq($opt)` Serialize jq value into jq-flavoured JSON<br> - `to_jq`/`to_jq($opt)` Serialize jq value into jq-flavoured JSON<br>
`{indent: number}` indent array/object values.<br> `{indent: number}` indent array/object values.<br>
jq-flavoured JSON has optional key quotes, `#` comments and can have trailing comma in arrays. jq-flavoured JSON has optional key quotes, `#` comments and can have trailing comma in arrays.
- `fromjsonl` Parse JSON lines into jq array. - `from_jsonl` Parse JSON lines into jq array.
- `tojsonl` Serialize jq array into JSONL. - `to_jsonl` Serialize jq array into JSONL.
Note that `fromjson` and `tojson` use different naming conventions as they originate from jq's standard library.
YAML YAML
- `fromyaml` Parse YAML into jq value. - `from_yaml` Parse YAML into jq value.
- `toyaml` Serialize jq value into YAML. - `to_yaml` Serialize jq value into YAML.
TOML TOML
- `fromtoml` Parse TOML into jq value. - `from_toml` Parse TOML into jq value.
- `totoml` Serialize jq value into TOML. - `to_toml` Serialize jq value into TOML.
CSV CSV
- `fromcsv`/`fromcvs($opts)` Parse CSV into jq value.<br> - `from_csv`/`from_csv($opts)` Parse CSV into jq value.<br>
`{comma: string}` field separator, default ",".<br> `{comma: string}` field separator, default ",".<br>
`{comment: string}` comment line character, default "#".<br> `{comment: string}` comment line character, default "#".<br>
To work with tab separated values you can use `fromcvs({comma: "\t"})` or `fq -d csv -o 'comma="\t"'` To work with tab separated values you can use `fromcvs({comma: "\t"})` or `fq -d csv -o 'comma="\t"'`
- `tocsv`/`tocsv($opts)` Serialize jq value into CSV.<br> - `to_csv`/`to_csv($opts)` Serialize jq value into CSV.<br>
`{comma: string}` field separator, default ",".<br> `{comma: string}` field separator, default ",".<br>
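
The same option works from the command line, as in the formats documentation earlier in this diff:

```sh
# TSV to CSV
fq -d csv -o comma="\t" to_csv file.tsv
```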
XML encoding XML encoding
- `fromxmlentities` Decode XML entities. - `from_xmlentities` Decode XML entities.
- `toxmlentities` Encode XML entities. - `to_xmlentities` Encode XML entities.
URL parts and XML encodings URL parts and XML encodings
- `fromurlpath` Decode URL path component. - `from_urlpath` Decode URL path component.
- `tourlpath` Encode URL path component. Whitespace as %20. - `to_urlpath` Encode URL path component. Whitespace as %20.
- `fromurlencode` Decode URL query encoding. - `from_urlencode` Decode URL query encoding.
- `tourlencode` Encode URL to query encoding. Whitespace as "+". - `to_urlencode` Encode URL to query encoding. Whitespace as "+".
- `fromurlquery` Decode URL query into object. For duplicates keys value will be an array. - `from_urlquery` Decode URL query into object. For duplicate keys the value will be an array.
- `tourlquery` Encode objet into query string. - `to_urlquery` Encode object into query string.
- `fromurl` Decode URL into object. - `from_url` Decode URL into object.
```jq ```jq
> "schema://user:pass@host/path?key=value#fragment" | fromurl > "schema://user:pass@host/path?key=value#fragment" | from_url
{ {
"fragment": "fragement", "fragment": "fragment",
"host": "host", "host": "host",
"path": "/path", "path": "/path",
"query": { "query": {
@ -685,50 +709,50 @@ URL parts and XML encodings
} }
} }
``` ```
- `tourl` Encode object into URL string. - `to_url` Encode object into URL string.
- `fromhex` Decode hexstring to binary.
Binary encodings like hex and base64 Binary encodings like hex and base64
- `tohex` Encode binay into hexstring. - `from_hex` Decode hex string to binary.
- `frombase64`/`frombase64($opts)` Decode base64 encodings into binary.<br> - `to_hex` Encode binary into hex string.
- `from_base64`/`from_base64($opts)` Decode base64 encodings into binary.<br>
`{encoding:string}` encoding variant: `std` (default), `url`, `rawstd` or `rawurl` `{encoding:string}` encoding variant: `std` (default), `url`, `rawstd` or `rawurl`
- `tobase64`/`tobase64($opts)` Encode binary into base64 encodings.<br> - `to_base64`/`to_base64($opts)` Encode binary into base64 encodings.<br>
`{encoding:string}` encoding variant: `std` (default), `url`, `rawstd` or `rawurl` `{encoding:string}` encoding variant: `std` (default), `url`, `rawstd` or `rawurl`
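
A quick round-trip sketch of the encoding option (the raw variants just drop the `=` padding):

```sh
# decode a padding-less base64 string, then re-encode with the default (std) variant
fq -n '"aGVsbG8" | from_base64({encoding: "rawstd"}) | to_base64'   # -> "aGVsbG8="
```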
Hash functions Hash functions
- `tomd4` Hash binary using md4. - `to_md4` Hash binary using md4.
- `tomd5` Hash binary using md5. - `to_md5` Hash binary using md5.
- `tosha1` Hash binary using sha1. - `to_sha1` Hash binary using sha1.
- `tosha256` Hash binary using sha256. - `to_sha256` Hash binary using sha256.
- `tosha512` Hash binary using sha512. - `to_sha512` Hash binary using sha512.
- `tosha3_224` Hash binary using sha3 224. - `to_sha3_224` Hash binary using sha3 224.
- `tosha3_256` Hash binary using sha3 256. - `to_sha3_256` Hash binary using sha3 256.
- `tosha3_384` Hash binary using sha3 384. - `to_sha3_384` Hash binary using sha3 384.
- `tosha3_512` Hash binary using sha3 512. - `to_sha3_512` Hash binary using sha3 512.
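
For example, hashing a file and printing the digest as a hex string (a sketch combining the helpers above):

```sh
fq -r 'tobytes | to_md5 | to_hex' file.mp3
```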
Text encodings Text encodings
- `toiso8859_1` Decode binary as ISO8859-1 into string. - `to_iso8859_1` Decode binary as ISO8859-1 into string.
- `fromiso8859_1` Encode string as ISO8859-1 into binary. - `from_iso8859_1` Encode string as ISO8859-1 into binary.
- `toutf8` Encode string as UTF8 into binary. - `to_utf8` Encode string as UTF8 into binary.
- `fromutf8` Decode binary as UTF8 into string. - `from_utf8` Decode binary as UTF8 into string.
- `toutf16` Encode string as UTF16 into binary. - `to_utf16` Encode string as UTF16 into binary.
- `fromutf16` Decode binary as UTF16 into string. - `from_utf16` Decode binary as UTF16 into string.
- `toutf16le` Encode string as UTF16 little-endian into binary. - `to_utf16le` Encode string as UTF16 little-endian into binary.
- `fromutf16le` Decode binary as UTF16 little-endian into string. - `from_utf16le` Decode binary as UTF16 little-endian into string.
- `toutf16be` Encode string as UTF16 big-endian into binary. - `to_utf16be` Encode string as UTF16 big-endian into binary.
- `fromutf16be` Decode binary as UTF16 big-endian into string. - `from_utf16be` Decode binary as UTF16 big-endian into string.
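
These also round-trip, which is an easy way to try them out:

```sh
fq -n '"hello" | to_utf16le | from_utf16le'
```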
## Options ## Options
fq has some general options in addition to decode and decoders specific options. They all use the same `-o <name>=<value>` argument. fq has some general options in addition to decode and decoders specific options. They all use the same `-o <name>=<value>` argument.
`<value>` is fuzzily parsed based on the type of the option. Ex: a string can specified as `-o name=string` or `-o name="string"`. `<value>` is fuzzily parsed based on the type of the option. Ex: a string can be specified as `-o name=string` or `-o name="string"`.
### `bits_format` ### `bits_format`
How to represent raw bits as JSON. How to represent raw bits as JSON.
- `-o bits_foramt=string` String with raw bytes (zero bit padded). The string is binary safe internally in fq but bytes not represetable as UTF-8 will be lost if turn to JSON. - `-o bits_format=string` String with raw bytes (zero bit padded). The string is binary safe internally in fq but bytes not representable as UTF-8 will be lost if turned into JSON.
- `-o bits_format=md5` MD5 hex string (zero bit padded). - `-o bits_format=md5` MD5 hex string (zero bit padded).
- `-o bits_format=base64` Base64 string. - `-o bits_format=base64` Base64 string.
- `-o bits_format=truncate` Truncated string. - `-o bits_format=truncate` Truncated string.
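
For example, to get base64 strings instead of raw-byte strings in JSON output (sketch):

```sh
fq -o bits_format=base64 tovalue file.mp3
```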
@ -737,7 +761,7 @@ How to represent raw bits as JSON.
## Color and unicode output ## Color and unicode output
fq by default tries to use colors if possible, this can be disabled with `-M`. You can also fq by default tries to use colors if possible, this can be disabled with `-M`. You can also
enable useage of unicode characters for improved output by setting the environment enable usage of unicode characters for improved output by setting the environment
variable `CLIUNICODE`. variable `CLIUNICODE`.
## Configuration ## Configuration
@ -749,7 +773,7 @@ To add own functions you can use `init.fq` that will be read from
## Use as script interpreter ## Use as script interpreter
fq can be used as a scrip interpreter: fq can be used as a script interpreter:
`mp3_duration.jq`: `mp3_duration.jq`:
```jq ```jq
@ -761,9 +785,9 @@ fq can be used as a scrip interpreter:
- [gojq's differences to jq](https://github.com/itchyny/gojq#difference-to-jq), - [gojq's differences to jq](https://github.com/itchyny/gojq#difference-to-jq),
notable is support for arbitrary-precision integers. notable is support for arbitrary-precision integers.
- Supports hexdecimal `0xab`, octal `0o77` and binary `0b101` integer literals. - Supports hexadecimal `0xab`, octal `0o77` and binary `0b101` integer literals.
- Try include `include "file?";` that don't fail if file is missing. - Try include `include "file?";` that doesn't fail if file is missing or has errors.
- Some values can act as a object with keys even when it's an array, number etc. - Some values can act as an object with keys even when it's an array, number etc.
- There can be keys hidden from `keys` and `[]`. - There can be keys hidden from `keys` and `[]`.
- Some values are readonly and can't be updated. - Some values are readonly and can't be updated.
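A quick illustration of the integer literals and arbitrary-precision support mentioned in the list above; `bsl` is the bit-shift-left helper used elsewhere in this change:

```sh
$ fq -n '0xff, 0o377, 0b11111111'
255
255
255
$ fq -n 'bsl(1; 100)'
1267650600228229401496703205376
```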

View File

@ -7,7 +7,7 @@ Supports decoding BER, CER and DER (X.690).
### Can be used to decode certificates etc ### Can be used to decode certificates etc
```sh ```sh
$ fq -d bytes 'frompem | asn1_ber | d' cert.pem $ fq -d bytes 'from_pem | asn1_ber | d' cert.pem
``` ```
### Can decode nested values ### Can decode nested values

View File

@ -1,4 +1,4 @@
$ fq -d bytes 'frompem | asn1_ber | dv' ed25519.cer $ fq -d bytes 'from_pem | asn1_ber | dv' ed25519.cer
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0x182.7 (387) |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0x182.7 (387)
0x0000|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2) 0x0000|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2)
0x0000|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1) 0x0000|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1)

View File

@ -18,7 +18,7 @@ Supports decoding BER, CER and DER (X.690).
Can be used to decode certificates etc Can be used to decode certificates etc
====================================== ======================================
$ fq -d bytes 'frompem | asn1_ber | d' cert.pem $ fq -d bytes 'from_pem | asn1_ber | d' cert.pem
Can decode nested values Can decode nested values
======================== ========================

View File

@ -1,4 +1,4 @@
$ fq -d bytes 'frompem | asn1_ber | dv' letsencrypt-x3.cer $ fq -d bytes 'from_pem | asn1_ber | dv' letsencrypt-x3.cer
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0x495.7 (1174) |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0x495.7 (1174)
0x00000|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2) 0x00000|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2)
0x00000|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1) 0x00000|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1)

View File

@ -1,4 +1,4 @@
$ fq -d bytes 'frompem | asn1_ber | dv' sig-p256-ber.p7m $ fq -d bytes 'from_pem | asn1_ber | dv' sig-p256-ber.p7m
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0x2aa1.7 (10914) |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0x2aa1.7 (10914)
0x000000|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2) 0x000000|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2)
0x000000|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1) 0x000000|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1)

View File

@ -1,4 +1,4 @@
$ fq -d bytes 'frompem | asn1_ber | dv' sig-p256-der.p7m $ fq -d bytes 'from_pem | asn1_ber | dv' sig-p256-der.p7m
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0x2a77.7 (10872) |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0x2a77.7 (10872)
0x0000|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2) 0x0000|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2)
0x0000|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1) 0x0000|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1)

View File

@ -1,4 +1,4 @@
$ fq -d bytes 'frompem | asn1_ber | dv' sig-rsa1024-sha1.p7s $ fq -d bytes 'from_pem | asn1_ber | dv' sig-rsa1024-sha1.p7s
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0x353.7 (852) |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0x353.7 (852)
0x0000|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2) 0x0000|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2)
0x0000|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1) 0x0000|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1)

View File

@ -1,4 +1,4 @@
$ fq -d bytes 'frompem | asn1_ber | dv, torepr' test.pem $ fq -d bytes 'from_pem | asn1_ber | dv, torepr' test.pem
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0xa1.7 (162) |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (asn1_ber) 0x0-0xa1.7 (162)
0x00|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2) 0x00|30 |0 | class: "universal" (0) 0x0-0x0.1 (0.2)
0x00|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1) 0x00|30 |0 | form: "constructed" (1) 0x0.2-0x0.2 (0.1)

View File

@ -3,7 +3,7 @@
$ fq -i -d json . appendix_a.json $ fq -i -d json . appendix_a.json
json> length json> length
82 82
json> map(select(.decoded) | (.cbor | frombase64 | cbor | torepr) as $a | select( .decoded != $a) | {test: ., actual: $a}) json> map(select(.decoded) | (.cbor | from_base64 | cbor | torepr) as $a | select( .decoded != $a) | {test: ., actual: $a})
[ [
{ {
"actual": { "actual": {
@ -32,7 +32,7 @@ json> map(select(.decoded) | (.cbor | frombase64 | cbor | torepr) as $a | select
} }
} }
] ]
json> .[] | select(.decoded) | .cbor | frombase64 | cbor | dv json> .[] | select(.decoded) | .cbor | from_base64 | cbor | dv
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (cbor) 0x0-0x0.7 (1) |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (cbor) 0x0-0x0.7 (1)
0x0|00| |.| | major_type: "positive_int" (0) 0x0-0x0.2 (0.3) 0x0|00| |.| | major_type: "positive_int" (0) 0x0-0x0.2 (0.3)
0x0|00| |.| | short_count: 0 0x0.3-0x0.7 (0.5) 0x0|00| |.| | short_count: 0 0x0.3-0x0.7 (0.5)

View File

@ -1,4 +1,4 @@
$ fq -n '"v2NGdW71Y0FtdCH/" | frombase64 | cbor | torepr' $ fq -n '"v2NGdW71Y0FtdCH/" | from_base64 | cbor | torepr'
{ {
"Amt": -2, "Amt": -2,
"Fun": true "Fun": true

View File

@ -23,7 +23,7 @@ import (
var hashFS embed.FS var hashFS embed.FS
func init() { func init() {
interp.RegisterFunc1("_tohash", toHash) interp.RegisterFunc1("_to_hash", toHash)
interp.RegisterFS(hashFS) interp.RegisterFS(hashFS)
} }

View File

@ -1,9 +1,9 @@
def tomd4: _tohash({name: "md4"}); def to_md4: _to_hash({name: "md4"});
def tomd5: _tohash({name: "md5"}); def to_md5: _to_hash({name: "md5"});
def tosha1: _tohash({name: "sha1"}); def to_sha1: _to_hash({name: "sha1"});
def tosha256: _tohash({name: "sha256"}); def to_sha256: _to_hash({name: "sha256"});
def tosha512: _tohash({name: "sha512"}); def to_sha512: _to_hash({name: "sha512"});
def tosha3_224: _tohash({name: "sha3_224"}); def to_sha3_224: _to_hash({name: "sha3_224"});
def tosha3_256: _tohash({name: "sha3_256"}); def to_sha3_256: _to_hash({name: "sha3_256"});
def tosha3_384: _tohash({name: "sha3_384"}); def to_sha3_384: _to_hash({name: "sha3_384"});
def tosha3_512: _tohash({name: "sha3_512"}); def to_sha3_512: _to_hash({name: "sha3_512"});

View File

@ -1,14 +1,14 @@
# https://en.wikipedia.org/wiki/Privacy-Enhanced_Mail # https://en.wikipedia.org/wiki/Privacy-Enhanced_Mail
def frompem: def from_pem:
( tobytes ( tobytes
| tostring | tostring
| capture("-----BEGIN(.*?)-----(?<s>.*?)-----END(.*?)-----"; "mg").s | capture("-----BEGIN(.*?)-----(?<s>.*?)-----END(.*?)-----"; "mg").s
| _frombase64({encoding: "std"}) | _from_base64({encoding: "std"})
) // error("no pem header or footer found"); ) // error("no pem header or footer found");
def topem($label): def to_pem($label):
( tobytes ( tobytes
| _tobase64({encoding: "std"}) | _to_base64({encoding: "std"})
| ($label | if $label != "" then " " + $label end) as $label | ($label | if $label != "" then " " + $label end) as $label
| [ "-----BEGIN\($label)-----" | [ "-----BEGIN\($label)-----"
, . , .
@ -17,4 +17,4 @@ def topem($label):
] ]
| join("\n") | join("\n")
); );
def topem: topem(""); def to_pem: to_pem("");

View File

@ -1,5 +1,5 @@
$ fq -i $ fq -i
null> "test" | tomd4, tomd5, tosha1, tosha256, tosha512, tosha3_224, tosha3_256, tosha3_384, tosha3_512 | tohex null> "test" | to_md4, to_md5, to_sha1, to_sha256, to_sha512, to_sha3_224, to_sha3_256, to_sha3_384, to_sha3_512 | to_hex
"db346d691d7acc4dc2625db19f9e3f52" "db346d691d7acc4dc2625db19f9e3f52"
"098f6bcd4621d373cade4e832627b4f6" "098f6bcd4621d373cade4e832627b4f6"
"a94a8fe5ccb19ba61c4c0873d391e987982fbbd3" "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"
@ -9,7 +9,7 @@ null> "test" | tomd4, tomd5, tosha1, tosha256, tosha512, tosha3_224, tosha3_256,
"36f028580bb02cc8272a9a020f4200e346e276ae664e45ee80745574e2f5ab80" "36f028580bb02cc8272a9a020f4200e346e276ae664e45ee80745574e2f5ab80"
"e516dabb23b6e30026863543282780a3ae0dccf05551cf0295178d7ff0f1b41eecb9db3ff219007c4e097260d58621bd" "e516dabb23b6e30026863543282780a3ae0dccf05551cf0295178d7ff0f1b41eecb9db3ff219007c4e097260d58621bd"
"9ece086e9bac491fac5c1d1046ca11d737b92a2b2ebd93f005d7b710110c0a678288166e7fbe796883a4f2e9b3ca9f484f521d0ce464345cc1aec96779149c14" "9ece086e9bac491fac5c1d1046ca11d737b92a2b2ebd93f005d7b710110c0a678288166e7fbe796883a4f2e9b3ca9f484f521d0ce464345cc1aec96779149c14"
null> 0xf08 | tobits | .[:4,5,6,7,8,9] | tomd5 | tohex null> 0xf08 | tobits | .[:4,5,6,7,8,9] | to_md5 | to_hex
"8c493a43d8c1ef798860bb02b62e8e79" "8c493a43d8c1ef798860bb02b62e8e79"
"8c493a43d8c1ef798860bb02b62e8e79" "8c493a43d8c1ef798860bb02b62e8e79"
"8c493a43d8c1ef798860bb02b62e8e79" "8c493a43d8c1ef798860bb02b62e8e79"

View File

@ -1,7 +1,7 @@
$ fq -i $ fq -i
null> "abc" | topem null> "abc" | to_pem
"-----BEGIN-----\nYWJj\n-----END-----\n" "-----BEGIN-----\nYWJj\n-----END-----\n"
null> "abc" | topem | "before" + . + "between" + . + "after" | frompem | tostring null> "abc" | to_pem | "before" + . + "between" + . + "after" | from_pem | tostring
"abc" "abc"
"abc" "abc"
null> ^D null> ^D

View File

@ -33,7 +33,7 @@ func init() {
Functions: []string{"_todisplay"}, Functions: []string{"_todisplay"},
}) })
interp.RegisterFS(csvFS) interp.RegisterFS(csvFS)
interp.RegisterFunc1("_tocsv", toCSV) interp.RegisterFunc1("_to_csv", toCSV)
} }
func decodeCSV(d *decode.D, in any) any { func decodeCSV(d *decode.D, in any) any {

View File

@ -1,3 +1,3 @@
def tocsv($opts): _tocsv($opts); def to_csv($opts): _to_csv($opts);
def tocsv: _tocsv(null); def to_csv: _to_csv(null);
def _csv__todisplay: tovalue; def _csv__todisplay: tovalue;

View File

@ -1,7 +1,7 @@
### TSV to CSV ### TSV to CSV
```sh ```sh
$ fq -d csv -o comma="\t" tocsv file.tsv $ fq -d csv -o comma="\t" to_csv file.tsv
``` ```
### Convert rows to objects based on header row ### Convert rows to objects based on header row

View File

@ -9,7 +9,7 @@ $ fq -d csv . /test
] ]
] ]
$ fq -i $ fq -i
null> "a,b,c,d" | fromcsv | ., tocsv null> "a,b,c,d" | from_csv | ., to_csv
[ [
[ [
"a", "a",
@ -19,7 +19,7 @@ null> "a,b,c,d" | fromcsv | ., tocsv
] ]
] ]
"a,b,c,d\n" "a,b,c,d\n"
null> "a,\"b, c\",d" | fromcsv | ., tocsv null> "a,\"b, c\",d" | from_csv | ., to_csv
[ [
[ [
"a", "a",
@ -28,7 +28,7 @@ null> "a,\"b, c\",d" | fromcsv | ., tocsv
] ]
] ]
"a,\"b, c\",d\n" "a,\"b, c\",d\n"
null> "a\t\"b\t c\"\td" | fromcsv({comma:"\t"}) | ., tocsv({comma: "\t"}) null> "a\t\"b\t c\"\td" | from_csv({comma:"\t"}) | ., to_csv({comma: "\t"})
[ [
[ [
"a", "a",
@ -37,7 +37,7 @@ null> "a\t\"b\t c\"\td" | fromcsv({comma:"\t"}) | ., tocsv({comma: "\t"})
] ]
] ]
"a\t\"b\t c\"\td\n" "a\t\"b\t c\"\td\n"
null> [[bsl(1;100)]] | tocsv | ., fromcsv null> [[bsl(1;100)]] | to_csv | ., from_csv
"1267650600228229401496703205376\n" "1267650600228229401496703205376\n"
[ [
[ [

View File

@ -22,7 +22,7 @@ Decode examples
TSV to CSV TSV to CSV
========== ==========
$ fq -d csv -o comma="\t" tocsv file.tsv $ fq -d csv -o comma="\t" to_csv file.tsv
Convert rows to objects based on header row Convert rows to objects based on header row
=========================================== ===========================================

View File

@ -1,5 +1,5 @@
# to jq-flavoured json # to jq-flavoured json
def _tojq($opts): def _to_jq($opts):
def _is_ident: test("^[a-zA-Z_][a-zA-Z_0-9]*$"); def _is_ident: test("^[a-zA-Z_][a-zA-Z_0-9]*$");
def _key: if _is_ident | not then tojson end; def _key: if _is_ident | not then tojson end;
def _f($opts; $indent): def _f($opts; $indent):
@ -45,8 +45,8 @@ def _tojq($opts):
( _f($opts; $opts.indent * " ") ( _f($opts; $opts.indent * " ")
| if _is_array then flatten | join("") end | if _is_array then flatten | join("") end
); );
def tojq($opts): def to_jq($opts):
_tojq( _to_jq(
( { indent: 0, ( { indent: 0,
key_sep: ":", key_sep: ":",
object_sep: ",", object_sep: ",",
@ -62,10 +62,10 @@ def tojq($opts):
end end
) )
); );
def tojq: tojq(null); def to_jq: to_jq(null);
# from jq-flavoured json # from jq-flavoured json
def fromjq: def from_jq:
def _f: def _f:
( . as $v ( . as $v
| .term.type | .term.type
@ -93,4 +93,4 @@ def fromjq:
try try
(_query_fromstring | _f) (_query_fromstring | _f)
catch catch
error("fromjq only supports constant literals"); error("from_jq only supports constant literals");

View File

@ -31,7 +31,7 @@ func init() {
Functions: []string{"_todisplay"}, Functions: []string{"_todisplay"},
}) })
interp.RegisterFS(jsonFS) interp.RegisterFS(jsonFS)
interp.RegisterFunc1("_tojson", toJSON) interp.RegisterFunc1("_to_json", toJSON)
} }
func decodeJSONEx(d *decode.D, lines bool) any { func decodeJSONEx(d *decode.D, lines bool) any {

View File

@ -1,3 +1,8 @@
def tojson($opts): _tojson($opts); # overrides jq's standard tojson
def tojson: _tojson(null); def tojson($opts): _to_json($opts);
def tojson: _to_json(null);
# overrides jq's standard fromjson
# NOTE: should be kept in sync with format_decode.jq
def fromjson: decode("json") | if ._error then error(._error.error) end;
def _json__todisplay: tovalue; def _json__todisplay: tovalue;

View File

@ -24,7 +24,7 @@ func init() {
Functions: []string{"_todisplay"}, Functions: []string{"_todisplay"},
}) })
interp.RegisterFS(jsonlFS) interp.RegisterFS(jsonlFS)
interp.RegisterFunc0("tojsonl", toJSONL) interp.RegisterFunc0("to_jsonl", toJSONL)
} }
func decodeJSONL(d *decode.D, _ any) any { func decodeJSONL(d *decode.D, _ any) any {

View File

@ -1,4 +1,4 @@
$ fq -n "{a: bsl(1;100)} | tojq | ., fromjq" $ fq -n "{a: bsl(1;100)} | to_jq | ., from_jq"
"{a:1267650600228229401496703205376}" "{a:1267650600228229401496703205376}"
{ {
"a": 1267650600228229401496703205376 "a": 1267650600228229401496703205376

View File

@ -1,4 +1,4 @@
$ fq -rRs 'fromjson[] | (tojq | ., fromjq), "----", (tojq({indent:2}) | ., fromjq), "----"' variants.json $ fq -rRs 'fromjson[] | (to_jq | ., from_jq), "----", (to_jq({indent:2}) | ., from_jq), "----"' variants.json
null null
null null
---- ----

View File

@ -35,9 +35,9 @@ $ fq -d jsonl . error.jsonl
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: error.jsonl (jsonl) |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: error.jsonl (jsonl)
| | | error: jsonl: error at position 0xf: invalid character 'a' looking for beginning of value | | | error: jsonl: error at position 0xf: invalid character 'a' looking for beginning of value
0x0|7b 22 61 22 3a 20 31 32 33 7d 20 61 73 64 0a| |{"a": 123} asd.|| gap0: raw bits 0x0|7b 22 61 22 3a 20 31 32 33 7d 20 61 73 64 0a| |{"a": 123} asd.|| gap0: raw bits
$ fq -n '[{"a":123}, [123]] | tojsonl' $ fq -n '[{"a":123}, [123]] | to_jsonl'
"{\"a\":123}\n[123]\n" "{\"a\":123}\n[123]\n"
$ fq -n '123 | tojsonl' $ fq -n '123 | to_jsonl'
exitcode: 5 exitcode: 5
stderr: stderr:
error: tojsonl cannot be applied to: number (123) error: to_jsonl cannot be applied to: number (123)

View File

@ -1,5 +1,5 @@
def fromradix($base; $table): def from_radix($base; $table):
( if _is_string | not then error("cannot fromradix convert: \(.)") end ( if _is_string | not then error("cannot from_radix convert: \(.)") end
| split("") | split("")
| reverse | reverse
| map($table[.]) | map($table[.])
@ -12,8 +12,8 @@ def fromradix($base; $table):
) )
| .[1] | .[1]
); );
def fromradix($base): def from_radix($base):
fromradix($base; { from_radix($base; {
"0": 0, "1": 1, "2": 2, "3": 3,"4": 4, "5": 5, "6": 6, "7": 7, "8": 8, "9": 9, "0": 0, "1": 1, "2": 2, "3": 3,"4": 4, "5": 5, "6": 6, "7": 7, "8": 8, "9": 9,
"a": 10, "b": 11, "c": 12, "d": 13, "e": 14, "f": 15, "g": 16, "a": 10, "b": 11, "c": 12, "d": 13, "e": 14, "f": 15, "g": 16,
"h": 17, "i": 18, "j": 19, "k": 20, "l": 21, "m": 22, "n": 23, "h": 17, "i": 18, "j": 19, "k": 20, "l": 21, "m": 22, "n": 23,
@ -26,8 +26,8 @@ def fromradix($base):
"@": 62, "_": 63, "@": 62, "_": 63,
}); });
def toradix($base; $table): def to_radix($base; $table):
( if type != "number" then error("cannot toradix convert: \(.)") end ( if type != "number" then error("cannot to_radix convert: \(.)") end
| if . == 0 then "0" | if . == 0 then "0"
else else
( [ recurse(if . > 0 then _intdiv(.; $base) else empty end) | . % $base] ( [ recurse(if . > 0 then _intdiv(.; $base) else empty end) | . % $base]
@ -41,5 +41,5 @@ def toradix($base; $table):
) )
end end
); );
def toradix($base): def to_radix($base):
toradix($base; "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ@_"); to_radix($base; "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ@_");

View File

@ -1,5 +1,5 @@
$ fq -i $ fq -i
null> (0,1,1024,99999999999999999999) as $n | (2,8,16,62,64) as $r | "\($r): \($n) \($n | toradix($r)) \($n | toradix($r) | fromradix($r))" | println null> (0,1,1024,99999999999999999999) as $n | (2,8,16,62,64) as $r | "\($r): \($n) \($n | to_radix($r)) \($n | to_radix($r) | from_radix($r))" | println
2: 0 0 0 2: 0 0 0
8: 0 0 0 8: 0 0 0
16: 0 0 0 16: 0 0 0

View File

@ -20,7 +20,7 @@ $ fq 'del(.tracks) | grep_by(.type=="mdat").data = "<excluded>" | tovalue' file.
### Force decode a single box ### Force decode a single box
```sh ```sh
$ fq -n '"AAAAHGVsc3QAAAAAAAAAAQAAADIAAAQAAAEAAA==" | frombase64 | mp4({force:true}) | d' $ fq -n '"AAAAHGVsc3QAAAAAAAAAAQAAADIAAAQAAAEAAA==" | from_base64 | mp4({force:true}) | d'
``` ```
### Lookup mp4 box using a mp4 box path. ### Lookup mp4 box using a mp4 box path.

View File

@ -38,7 +38,7 @@ Whole box tree as JSON (exclude mdat data and tracks)
Force decode a single box Force decode a single box
========================= =========================
$ fq -n '"AAAAHGVsc3QAAAAAAAAAAQAAADIAAAQAAAEAAA==" | frombase64 | mp4({force:true}) | d' $ fq -n '"AAAAHGVsc3QAAAAAAAAAAQAAADIAAAQAAAEAAA==" | from_base64 | mp4({force:true}) | d'
Lookup mp4 box using a mp4 box path. Lookup mp4 box using a mp4 box path.
==================================== ====================================

View File

@ -20,7 +20,7 @@ import (
var textFS embed.FS var textFS embed.FS
func init() { func init() {
interp.RegisterFunc0("fromhex", func(_ *interp.Interp, c string) any { interp.RegisterFunc0("from_hex", func(_ *interp.Interp, c string) any {
b, err := hex.DecodeString(c) b, err := hex.DecodeString(c)
if err != nil { if err != nil {
return err return err
@ -31,7 +31,7 @@ func init() {
} }
return bb return bb
}) })
interp.RegisterFunc0("tohex", func(_ *interp.Interp, c string) any { interp.RegisterFunc0("to_hex", func(_ *interp.Interp, c string) any {
br, err := interp.ToBitReader(c) br, err := interp.ToBitReader(c)
if err != nil { if err != nil {
return err return err
@ -59,7 +59,7 @@ func init() {
type fromBase64Opts struct { type fromBase64Opts struct {
Encoding string Encoding string
} }
interp.RegisterFunc1("_frombase64", func(_ *interp.Interp, c string, opts fromBase64Opts) any { interp.RegisterFunc1("_from_base64", func(_ *interp.Interp, c string, opts fromBase64Opts) any {
b, err := base64Encoding(opts.Encoding).DecodeString(c) b, err := base64Encoding(opts.Encoding).DecodeString(c)
if err != nil { if err != nil {
return err return err
@ -73,7 +73,7 @@ func init() {
type toBase64Opts struct { type toBase64Opts struct {
Encoding string Encoding string
} }
interp.RegisterFunc1("_tobase64", func(_ *interp.Interp, c string, opts toBase64Opts) any { interp.RegisterFunc1("_to_base64", func(_ *interp.Interp, c string, opts toBase64Opts) any {
br, err := interp.ToBitReader(c) br, err := interp.ToBitReader(c)
if err != nil { if err != nil {
return err return err
@ -197,7 +197,7 @@ func init() {
type toStrEncodingOpts struct { type toStrEncodingOpts struct {
Encoding string Encoding string
} }
interp.RegisterFunc1("_tostrencoding", func(_ *interp.Interp, c string, opts toStrEncodingOpts) any { interp.RegisterFunc1("_to_strencoding", func(_ *interp.Interp, c string, opts toStrEncodingOpts) any {
h := strEncoding(opts.Encoding) h := strEncoding(opts.Encoding)
if h == nil { if h == nil {
return fmt.Errorf("unknown string encoding %s", opts.Encoding) return fmt.Errorf("unknown string encoding %s", opts.Encoding)
@ -219,7 +219,7 @@ func init() {
type fromStrEncodingOpts struct { type fromStrEncodingOpts struct {
Encoding string Encoding string
} }
interp.RegisterFunc1("_fromstrencoding", func(_ *interp.Interp, c any, opts fromStrEncodingOpts) any { interp.RegisterFunc1("_from_strencoding", func(_ *interp.Interp, c any, opts fromStrEncodingOpts) any {
inBR, err := interp.ToBitReader(c) inBR, err := interp.ToBitReader(c)
if err != nil { if err != nil {
return err return err

View File

@ -1,19 +1,21 @@
def toiso8859_1: _tostrencoding({encoding: "ISO8859_1"}); def to_iso8859_1: _to_strencoding({encoding: "ISO8859_1"});
def fromiso8859_1: _fromstrencoding({encoding: "ISO8859_1"}); def from_iso8859_1: _from_strencoding({encoding: "ISO8859_1"});
def toutf8: _tostrencoding({encoding: "UTF8"}); def to_utf8: _to_strencoding({encoding: "UTF8"});
def fromutf8: _fromstrencoding({encoding: "UTF8"}); def from_utf8: _from_strencoding({encoding: "UTF8"});
def toutf16: _tostrencoding({encoding: "UTF16"}); def to_utf16: _to_strencoding({encoding: "UTF16"});
def fromutf16: _fromstrencoding({encoding: "UTF16"}); def from_utf16: _from_strencoding({encoding: "UTF16"});
def toutf16le: _tostrencoding({encoding: "UTF16LE"}); def to_utf16le: _to_strencoding({encoding: "UTF16LE"});
def fromutf16le: _fromstrencoding({encoding: "UTF16LE"}); def from_utf16le: _from_strencoding({encoding: "UTF16LE"});
def toutf16be: _tostrencoding({encoding: "UTF16BE"}); def to_utf16be: _to_strencoding({encoding: "UTF16BE"});
def fromutf16be: _fromstrencoding({encoding: "UTF16BE"}); def from_utf16be: _from_strencoding({encoding: "UTF16BE"});
def frombase64($opts): _frombase64({encoding: "std"} + $opts); def from_base64($opts): _from_base64({encoding: "std"} + $opts);
def frombase64: _frombase64(null); def from_base64: _from_base64(null);
def tobase64($opts): _tobase64({encoding: "std"} + $opts); def to_base64($opts): _to_base64({encoding: "std"} + $opts);
def tobase64: _tobase64(null); def to_base64: _to_base64(null);
# TODO: compat: remove at some point # TODO: compat: remove at some point
def hex: _binary_or_orig(tohex; fromhex); def hex: _binary_or_orig(to_hex; from_hex);
def base64: _binary_or_orig(tobase64; frombase64); def base64: _binary_or_orig(to_base64; from_base64);
def tohex: to_hex;
def fromhex: from_hex;

View File

@ -1,4 +1,4 @@
$ fq -n '"ff7f00ff" | fromhex | ., ("", "std", "url", "rawstd", "rawurl") as $e | tobase64({encoding: $e}) | ., frombase64({encoding: $e})' $ fq -n '"ff7f00ff" | from_hex | ., ("", "std", "url", "rawstd", "rawurl") as $e | to_base64({encoding: $e}) | ., from_base64({encoding: $e})'
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|ff 7f 00 ff| |....| |.: raw bits 0x0-0x3.7 (4) 0x0|ff 7f 00 ff| |....| |.: raw bits 0x0-0x3.7 (4)
"/38A/w==" "/38A/w=="

View File

@ -1,41 +1,41 @@
$ fq -i $ fq -i
null> "åäö" | toiso8859_1 | ., fromiso8859_1 null> "åäö" | to_iso8859_1 | ., from_iso8859_1
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|e5 e4 f6| |...| |.: raw bits 0x0-0x2.7 (3) 0x0|e5 e4 f6| |...| |.: raw bits 0x0-0x2.7 (3)
"åäö" "åäö"
null> "åäö" | toutf8 | ., fromutf8 null> "åäö" | to_utf8 | ., from_utf8
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|c3 a5 c3 a4 c3 b6| |......| |.: raw bits 0x0-0x5.7 (6) 0x0|c3 a5 c3 a4 c3 b6| |......| |.: raw bits 0x0-0x5.7 (6)
"åäö" "åäö"
null> "åäö" | toutf16 | ., fromutf16 null> "åäö" | to_utf16 | ., from_utf16
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|ff fe e5 00 e4 00 f6 00| |........| |.: raw bits 0x0-0x7.7 (8) 0x0|ff fe e5 00 e4 00 f6 00| |........| |.: raw bits 0x0-0x7.7 (8)
"åäö" "åäö"
null> "åäö" | toutf16le | ., fromutf16le null> "åäö" | to_utf16le | ., from_utf16le
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|e5 00 e4 00 f6 00| |......| |.: raw bits 0x0-0x5.7 (6) 0x0|e5 00 e4 00 f6 00| |......| |.: raw bits 0x0-0x5.7 (6)
"åäö" "åäö"
null> "åäö" | toutf16be | ., fromutf16be null> "åäö" | to_utf16be | ., from_utf16be
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|00 e5 00 e4 00 f6| |......| |.: raw bits 0x0-0x5.7 (6) 0x0|00 e5 00 e4 00 f6| |......| |.: raw bits 0x0-0x5.7 (6)
"åäö" "åäö"
null> [97,98,99] | fromiso8859_1 | ., toiso8859_1 null> [97,98,99] | from_iso8859_1 | ., to_iso8859_1
"abc" "abc"
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|61 62 63| |abc| |.: raw bits 0x0-0x2.7 (3) 0x0|61 62 63| |abc| |.: raw bits 0x0-0x2.7 (3)
null> [97,98,99] | fromutf8 | ., toutf8 null> [97,98,99] | from_utf8 | ., to_utf8
"abc" "abc"
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|61 62 63| |abc| |.: raw bits 0x0-0x2.7 (3) 0x0|61 62 63| |abc| |.: raw bits 0x0-0x2.7 (3)
null> [97,0,98,0,99,0] | fromutf16 | ., toutf16 null> [97,0,98,0,99,0] | from_utf16 | ., to_utf16
"abc" "abc"
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|ff fe 61 00 62 00 63 00| |..a.b.c.| |.: raw bits 0x0-0x7.7 (8) 0x0|ff fe 61 00 62 00 63 00| |..a.b.c.| |.: raw bits 0x0-0x7.7 (8)
null> [97,0,98,0,99,0] | fromutf16le | ., toutf16le null> [97,0,98,0,99,0] | from_utf16le | ., to_utf16le
"abc" "abc"
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|61 00 62 00 63 00| |a.b.c.| |.: raw bits 0x0-0x5.7 (6) 0x0|61 00 62 00 63 00| |a.b.c.| |.: raw bits 0x0-0x5.7 (6)
null> [0,97,0,98,0,99] | fromutf16be | ., toutf16be null> [0,97,0,98,0,99] | from_utf16be | ., to_utf16be
"abc" "abc"
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef| |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|
0x0|00 61 00 62 00 63| |.a.b.c| |.: raw bits 0x0-0x5.7 (6) 0x0|00 61 00 62 00 63| |.a.b.c| |.: raw bits 0x0-0x5.7 (6)

View File

@ -1,5 +1,5 @@
$ fq -i $ fq -i
null> "schema://user:pass@host/path/a/b?a=1+2&b=2%20%33#fragment" | fromurl | ., tourl null> "schema://user:pass@host/path/a/b?a=1+2&b=2%20%33#fragment" | from_url | ., to_url
{ {
"fragment": "fragment", "fragment": "fragment",
"host": "host", "host": "host",
@ -16,7 +16,7 @@ null> "schema://user:pass@host/path/a/b?a=1+2&b=2%20%33#fragment" | fromurl | .,
} }
} }
"schema://user:pass@host/path/a/b?a=1+2&b=2+3#fragment" "schema://user:pass@host/path/a/b?a=1+2&b=2+3#fragment"
null> "schema://host?query" | fromurl | ., tourl null> "schema://host?query" | from_url | ., to_url
{ {
"host": "host", "host": "host",
"query": { "query": {
@ -26,7 +26,7 @@ null> "schema://host?query" | fromurl | ., tourl
"scheme": "schema" "scheme": "schema"
} }
"schema://host?query=" "schema://host?query="
null> "schema://user@host" | fromurl | ., tourl null> "schema://user@host" | from_url | ., to_url
{ {
"host": "host", "host": "host",
"scheme": "schema", "scheme": "schema",
@ -35,7 +35,7 @@ null> "schema://user@host" | fromurl | ., tourl
} }
} }
"schema://user@host" "schema://user@host"
null> "schema://" | fromurl | ., tourl null> "schema://" | from_url | ., to_url
{ {
"scheme": "schema" "scheme": "schema"
} }

View File

@ -1,3 +1,3 @@
$ fq -n '"2%20%33" | fromurlencode | ., tourlencode' $ fq -n '"2%20%33" | from_urlencode | ., to_urlencode'
"2 3" "2 3"
"2+3" "2+3"

View File

@ -1,3 +1,3 @@
$ fq -n '"abc%2fdef" | fromurlpath | ., tourlpath' $ fq -n '"abc%2fdef" | from_urlpath | ., to_urlpath'
"abc/def" "abc/def"
"abc%2Fdef" "abc%2Fdef"

View File

@ -1,4 +1,4 @@
$ fq -n '"a=1+2&b=2%20%33" | fromurlquery | ., tourlquery' $ fq -n '"a=1+2&b=2%20%33" | from_urlquery | ., to_urlquery'
{ {
"a": "1 2", "a": "1 2",
"b": "2 3" "b": "2 3"

View File

@ -8,25 +8,25 @@ import (
) )
func init() { func init() {
interp.RegisterFunc0("fromurlencode", func(_ *interp.Interp, c string) any { interp.RegisterFunc0("from_urlencode", func(_ *interp.Interp, c string) any {
u, err := url.QueryUnescape(c) u, err := url.QueryUnescape(c)
if err != nil { if err != nil {
return err return err
} }
return u return u
}) })
interp.RegisterFunc0("tourlencode", func(_ *interp.Interp, c string) any { interp.RegisterFunc0("to_urlencode", func(_ *interp.Interp, c string) any {
return url.QueryEscape(c) return url.QueryEscape(c)
}) })
interp.RegisterFunc0("fromurlpath", func(_ *interp.Interp, c string) any { interp.RegisterFunc0("from_urlpath", func(_ *interp.Interp, c string) any {
u, err := url.PathUnescape(c) u, err := url.PathUnescape(c)
if err != nil { if err != nil {
return err return err
} }
return u return u
}) })
interp.RegisterFunc0("tourlpath", func(_ *interp.Interp, c string) any { interp.RegisterFunc0("to_urlpath", func(_ *interp.Interp, c string) any {
return url.PathEscape(c) return url.PathEscape(c)
}) })
@ -46,7 +46,7 @@ func init() {
return qm return qm
} }
interp.RegisterFunc0("fromurlquery", func(_ *interp.Interp, c string) any { interp.RegisterFunc0("from_urlquery", func(_ *interp.Interp, c string) any {
q, err := url.ParseQuery(c) q, err := url.ParseQuery(c)
if err != nil { if err != nil {
return err return err
@ -70,7 +70,7 @@ func init() {
} }
return qv return qv
} }
interp.RegisterFunc0("tourlquery", func(_ *interp.Interp, c map[string]any) any { interp.RegisterFunc0("to_urlquery", func(_ *interp.Interp, c map[string]any) any {
// TODO: nicer // TODO: nicer
c, ok := gojqex.NormalizeToStrings(c).(map[string]any) c, ok := gojqex.NormalizeToStrings(c).(map[string]any)
if !ok { if !ok {
@ -79,7 +79,7 @@ func init() {
return toURLValues(c).Encode() return toURLValues(c).Encode()
}) })
interp.RegisterFunc0("fromurl", func(_ *interp.Interp, c string) any { interp.RegisterFunc0("from_url", func(_ *interp.Interp, c string) any {
u, err := url.Parse(c) u, err := url.Parse(c)
if err != nil { if err != nil {
return err return err
@ -116,7 +116,7 @@ func init() {
} }
return m return m
}) })
interp.RegisterFunc0("tourl", func(_ *interp.Interp, c map[string]any) any { interp.RegisterFunc0("to_url", func(_ *interp.Interp, c map[string]any) any {
// TODO: nicer // TODO: nicer
c, ok := gojqex.NormalizeToStrings(c).(map[string]any) c, ok := gojqex.NormalizeToStrings(c).(map[string]any)
if !ok { if !ok {

View File

@ -1,4 +1,4 @@
$ fq.go -n '"SUkqAAwAAAAwMDAwAQAwMDAwMDAwMDAwMDAhAAAAMDAwAAAhAAAA" | frombase64 | tiff' $ fq.go -n '"SUkqAAwAAAAwMDAwAQAwMDAwMDAwMDAwMDAhAAAAMDAwAAAhAAAA" | from_base64 | tiff'
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (tiff) |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: (tiff)
| | | error: tiff: error at position 0x27: ifd loop detected for 33 | | | error: tiff: error at position 0x27: ifd loop detected for 33
0x00|49 49 2a 00 |II*. | endian: "little-endian" (0x49492a00) 0x00|49 49 2a 00 |II*. | endian: "little-endian" (0x49492a00)

View File

@ -1,4 +1,4 @@
$ fq -n "{a: bsl(1;100)} | totoml | ., fromtoml" $ fq -n "{a: bsl(1;100)} | to_toml | ., from_toml"
"a = \"1267650600228229401496703205376\"\n" "a = \"1267650600228229401496703205376\"\n"
{ {
"a": "1267650600228229401496703205376" "a": "1267650600228229401496703205376"

View File

@ -9,8 +9,8 @@ $ fq . probe.toml
} }
# toml does not support null in arrays # toml does not support null in arrays
# TODO: add uint64 norm test # TODO: add uint64 norm test
$ fq -rRs 'fromjson[] | (walk(if type == "array" then map(select(. != null)) end) | try (totoml | ., fromtoml) catch .), "----"' variants.json $ fq -rRs 'fromjson[] | (walk(if type == "array" then map(select(. != null)) end) | try (to_toml | ., from_toml) catch .), "----"' variants.json
totoml cannot be applied to: null to_toml cannot be applied to: null
---- ----
toml: top-level values must be Go maps or structs toml: top-level values must be Go maps or structs
---- ----
@ -65,7 +65,7 @@ toml: top-level values must be Go maps or structs
error at position 0x0: root object has no values error at position 0x0: root object has no values
---- ----
$ fq -n '"" | fromtoml' $ fq -n '"" | from_toml'
exitcode: 5 exitcode: 5
stderr: stderr:
error: error at position 0x0: root object has no values error: error at position 0x0: root object has no values

View File

@ -1,4 +1,4 @@
$ fq -n '"[a] trailing" | fromtoml._error.error' $ fq -n '"[a] trailing" | from_toml._error.error'
exitcode: 5 exitcode: 5
stderr: stderr:
error: error at position 0xc: toml: line 1 (last key "a"): expected a top-level item to end with a newline, comment, or EOF, but got 't' instead error: error at position 0xc: toml: line 1 (last key "a"): expected a top-level item to end with a newline, comment, or EOF, but got 't' instead

View File

@ -26,7 +26,7 @@ func init() {
Functions: []string{"_todisplay"}, Functions: []string{"_todisplay"},
}) })
interp.RegisterFS(tomlFS) interp.RegisterFS(tomlFS)
interp.RegisterFunc0("totoml", toTOML) interp.RegisterFunc0("to_toml", toTOML)
} }
func decodeTOML(d *decode.D, _ any) any { func decodeTOML(d *decode.D, _ any) any {
@ -58,7 +58,7 @@ func decodeTOML(d *decode.D, _ any) any {
func toTOML(_ *interp.Interp, c any) any { func toTOML(_ *interp.Interp, c any) any {
if c == nil { if c == nil {
return gojqex.FuncTypeError{Name: "totoml", V: c} return gojqex.FuncTypeError{Name: "to_toml", V: c}
} }
b := &bytes.Buffer{} b := &bytes.Buffer{}

View File

@ -1,6 +1,6 @@
# ffmpeg -f lavfi -i sine -f lavfi -i testsrc=s=4x4:r=1:d=1 -t 10ms -disposition:v attached_pic -f flac test.flac # ffmpeg -f lavfi -i sine -f lavfi -i testsrc=s=4x4:r=1:d=1 -t 10ms -disposition:v attached_pic -f flac test.flac
# ffmpeg -f lavfi -i sine -t 10ms -f ogg test.ogg # ffmpeg -f lavfi -i sine -t 10ms -f ogg test.ogg
# vorbiscomment -a test.ogg -t METADATA_BLOCK_PICTURE=$(fq -r '.. | select(format=="flac_picture") | tobytes | frombase64' test.flac) # vorbiscomment -a test.ogg -t METADATA_BLOCK_PICTURE=$(fq -r '.. | select(format=="flac_picture") | tobytes | from_base64' test.flac)
# fq '.. | select(format=="vorbis_comment") | tobytes' test.ogg > vorbis-comment-picture # fq '.. | select(format=="vorbis_comment") | tobytes' test.ogg > vorbis-comment-picture
$ fq -d vorbis_comment dv vorbis-comment-picture $ fq -d vorbis_comment dv vorbis-comment-picture
|00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: vorbis-comment-picture (vorbis_comment) 0x0-0x11f.7 (288) |00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f|0123456789abcdef|.{}: vorbis-comment-picture (vorbis_comment) 0x0-0x11f.7 (288)

View File

@ -2,7 +2,7 @@ HTML is decoded in HTML5 mode and will always include `<html>`, `<body>` and `<h
See xml format for more examples and how to preserve element order and how to encode to xml. See xml format for more examples and how to preserve element order and how to encode to xml.
There is no `tohtml` function, see `toxml` instead. There is no `to_html` function, see `to_xml` instead.
### Element as object ### Element as object

View File

@ -1,4 +1,4 @@
$ fq -n "{a: bsl(1;100)} | toxml | ., fromxml" $ fq -n "{a: bsl(1;100)} | to_xml | ., from_xml"
"<a>1267650600228229401496703205376</a>" "<a>1267650600228229401496703205376</a>"
{ {
"a": "1267650600228229401496703205376" "a": "1267650600228229401496703205376"

View File

@ -24,7 +24,7 @@ HTML is decoded in HTML5 mode and will always include <html>, <body> and <head>
See xml format for more examples and how to preserve element order and how to encode to xml. See xml format for more examples and how to preserve element order and how to encode to xml.
There is no tohtml function, see toxml instead. There is no to_html function, see to_xml instead.
Element as object Element as object
================= =================

View File

@ -24,7 +24,7 @@ XML can be decoded and encoded into jq values in two ways, elements as object or
want to do. The object variant might be easier to query for a specific value but array might be easier to use to generate xml or to want to do. The object variant might be easier to query for a specific value but array might be easier to use to generate xml or to
query after all elements of some kind etc. query after all elements of some kind etc.
Encoding is done using the toxml function and it will figure what variant that is used based on the input value. Is has two optional Encoding is done using the to_xml function and it will figure out which variant is used based on the input value. It has two optional
options indent and attribute_prefix. options indent and attribute_prefix.
Elements as object Elements as object
@ -67,7 +67,7 @@ order might be lost.
"ccc" "ccc"
# decode to object and encode to xml # decode to object and encode to xml
$ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o seq=true 'toxml({indent:2})' $ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o seq=true 'to_xml({indent:2})'
<a> <a>
<b></b> <b></b>
<b>bbb</b> <b>bbb</b>
@ -109,7 +109,7 @@ Elements are arrays of the shape ["#text": "body text", "attr_name", {key: "attr
] ]
# decode to array and encode to xml # decode to array and encode to xml
$ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o array=true -o seq=true 'toxml({indent:2})' $ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o array=true -o seq=true 'to_xml({indent:2})'
<a> <a>
<b></b> <b></b>
<b>bbb</b> <b>bbb</b>

View File

@ -9,7 +9,7 @@ $ fq -d html . /test
} }
$ fq -d bytes -ni . all.xml multi_diff.xml multi_same.xml ns.xml simple.xml escape.xml noscript.html $ fq -d bytes -ni . all.xml multi_diff.xml multi_same.xml ns.xml simple.xml escape.xml noscript.html
null> inputs | {name: input_filename, str: tostring} | slurp("files") null> inputs | {name: input_filename, str: tostring} | slurp("files")
null> spew("files") | .name, (.str | fromhtml | ., (toxml({indent: 2}) | println)) null> spew("files") | .name, (.str | from_html | ., (to_xml({indent: 2}) | println))
"all.xml" "all.xml"
{ {
"html": { "html": {
@ -174,7 +174,7 @@ null> spew("files") | .name, (.str | fromhtml | ., (toxml({indent: 2}) | println
<noscript></noscript> <noscript></noscript>
</head> </head>
</html> </html>
null> spew("files") | .name, (.str | fromhtml({seq: true}) | ., (toxml({indent: 2}) | println)) null> spew("files") | .name, (.str | from_html({seq: true}) | ., (to_xml({indent: 2}) | println))
"all.xml" "all.xml"
{ {
"html": { "html": {
@ -380,7 +380,7 @@ null> spew("files") | .name, (.str | fromhtml({seq: true}) | ., (toxml({indent:
<a>text</a> <a>text</a>
</body> </body>
</html> </html>
null> spew("files") | .name, (.str | fromhtml({array: true}) | ., (toxml({indent: 2}) | println)) null> spew("files") | .name, (.str | from_html({array: true}) | ., (to_xml({indent: 2}) | println))
"all.xml" "all.xml"
[ [
"html", "html",

View File

@ -1,2 +1,2 @@
$ fq -n '{r: {c: {"#seq":1}, b: {"#seq":0}, a: {"#seq":-1}}} | toxml' $ fq -n '{r: {c: {"#seq":1}, b: {"#seq":0}, a: {"#seq":-1}}} | to_xml'
"<r><a></a><b></b><c></c></r>" "<r><a></a><b></b><c></c></r>"

View File

@ -1,4 +1,4 @@
$ fq -r '. as $a | ., (toxml({indent: 2}) | ., (fromxml | ., (diff($a; .) // "no diff")))' test.svg $ fq -r '. as $a | ., (to_xml({indent: 2}) | ., (from_xml | ., (diff($a; .) // "no diff")))' test.svg
{ {
"svg": { "svg": {
"@height": "2500", "@height": "2500",
@ -50,7 +50,7 @@ $ fq -r '. as $a | ., (toxml({indent: 2}) | ., (fromxml | ., (diff($a; .) // "no
} }
} }
no diff no diff
$ fq -r -o array=true '. as $a | ., (toxml({indent: 2}) | ., (fromxml | ., (diff($a; .) // "no diff")))' test.svg $ fq -r -o array=true '. as $a | ., (to_xml({indent: 2}) | ., (from_xml | ., (diff($a; .) // "no diff")))' test.svg
[ [
"svg", "svg",
{ {

View File

@ -1,12 +1,12 @@
$ fq -n '"<a></a> trailing" | fromxml' $ fq -n '"<a></a> trailing" | from_xml'
exitcode: 5 exitcode: 5
stderr: stderr:
error: error at position 0x7: root element has trailing non-whitespace " trailing" error: error at position 0x7: root element has trailing non-whitespace " trailing"
$ fq -n '"<a></a> <trailingelm>" | fromxml' $ fq -n '"<a></a> <trailingelm>" | from_xml'
exitcode: 5 exitcode: 5
stderr: stderr:
error: error at position 0x8: root element has trailing element <trailingelm> error: error at position 0x8: root element has trailing element <trailingelm>
$ fq -n '"<a></a> <?procinstr?>" | fromxml' $ fq -n '"<a></a> <?procinstr?>" | from_xml'
{ {
"a": "" "a": ""
} }

View File

@ -6,7 +6,7 @@ $ fq . probe.xml
} }
$ fq -d bytes -ni . all.xml decl.xml multi_diff.xml multi_same.xml ns.xml simple.xml escape.xml $ fq -d bytes -ni . all.xml decl.xml multi_diff.xml multi_same.xml ns.xml simple.xml escape.xml
null> inputs | {name: input_filename, str: tostring} | slurp("files") null> inputs | {name: input_filename, str: tostring} | slurp("files")
null> spew("files") | .name, try (.str | fromxml | ., (toxml({indent: 2}) | println)) catch . null> spew("files") | .name, try (.str | from_xml | ., (to_xml({indent: 2}) | println)) catch .
"all.xml" "all.xml"
{ {
"elm": { "elm": {
@ -85,7 +85,7 @@ null> spew("files") | .name, try (.str | fromxml | ., (toxml({indent: 2}) | prin
} }
} }
<a attr="&amp;&lt;&gt;">&amp;&lt;&gt;</a> <a attr="&amp;&lt;&gt;">&amp;&lt;&gt;</a>
null> spew("files") | .name, try (.str | fromxml({seq: true}) | ., (toxml({indent: 2}) | println)) catch . null> spew("files") | .name, try (.str | from_xml({seq: true}) | ., (to_xml({indent: 2}) | println)) catch .
"all.xml" "all.xml"
{ {
"elm": { "elm": {
@ -177,7 +177,7 @@ null> spew("files") | .name, try (.str | fromxml({seq: true}) | ., (toxml({inden
} }
} }
<a attr="&amp;&lt;&gt;">&amp;&lt;&gt;</a> <a attr="&amp;&lt;&gt;">&amp;&lt;&gt;</a>
null> spew("files") | .name, try (.str | fromxml({array: true}) | ., (toxml({indent: 2}) | println)) catch . null> spew("files") | .name, try (.str | from_xml({array: true}) | ., (to_xml({indent: 2}) | println)) catch .
"all.xml" "all.xml"
[ [
"elm", "elm",
@ -308,7 +308,7 @@ null> spew("files") | .name, try (.str | fromxml({array: true}) | ., (toxml({ind
[] []
] ]
<a attr="&amp;&lt;&gt;">&amp;&lt;&gt;</a> <a attr="&amp;&lt;&gt;">&amp;&lt;&gt;</a>
null> {doc: {a: "", "#text": "text", "#comment": "comment", "@attr": "value"}} | toxml, toxml({indent: 1}), toxml({indent: 8}) | println null> {doc: {a: "", "#text": "text", "#comment": "comment", "@attr": "value"}} | to_xml, to_xml({indent: 1}), to_xml({indent: 8}) | println
<doc attr="value">text<!--comment--><a></a></doc> <doc attr="value">text<!--comment--><a></a></doc>
<doc attr="value">text <doc attr="value">text
<!--comment--> <!--comment-->
@ -318,7 +318,7 @@ null> {doc: {a: "", "#text": "text", "#comment": "comment", "@attr": "value"}} |
<!--comment--> <!--comment-->
<a></a> <a></a>
</doc> </doc>
null> ["doc", {"#text": "text", "#comment": "comment", attr: "value"}, [["a"]]] | toxml, toxml({indent: 1}), toxml({indent: 8}) | println null> ["doc", {"#text": "text", "#comment": "comment", attr: "value"}, [["a"]]] | to_xml, to_xml({indent: 1}), to_xml({indent: 8}) | println
<doc attr="value">text<!--comment--><a></a></doc> <doc attr="value">text<!--comment--><a></a></doc>
<doc attr="value">text <doc attr="value">text
<!--comment--> <!--comment-->
@ -328,16 +328,16 @@ null> ["doc", {"#text": "text", "#comment": "comment", attr: "value"}, [["a"]]]
<!--comment--> <!--comment-->
<a></a> <a></a>
</doc> </doc>
null> {a: ""} | toxml null> {a: ""} | to_xml
"<a></a>" "<a></a>"
null> {a: [""]} | toxml null> {a: [""]} | to_xml
"<a></a>" "<a></a>"
null> {a: {"@attr": "value", "#text": "text", "#comment": "comment"}} | toxml null> {a: {"@attr": "value", "#text": "text", "#comment": "comment"}} | to_xml
"<a attr=\"value\">text<!--comment--></a>" "<a attr=\"value\">text<!--comment--></a>"
null> {a: ["b", "c"]} | toxml null> {a: ["b", "c"]} | to_xml
"<doc><a>b</a><a>c</a></doc>" "<doc><a>b</a><a>c</a></doc>"
null> {a: [123, null, true, false]} | toxml null> {a: [123, null, true, false]} | to_xml
"<doc><a>123</a><a></a><a>true</a><a>false</a></doc>" "<doc><a>123</a><a></a><a>true</a><a>false</a></doc>"
null> 123 | toxml null> 123 | to_xml
error: toxml cannot be applied to: number (123) error: to_xml cannot be applied to: number (123)
null> ^D null> ^D

View File

@ -1,3 +1,3 @@
$ fq -n '"abc&amp;&lt;&gt;" | fromxmlentities | ., toxmlentities' $ fq -n '"abc&amp;&lt;&gt;" | from_xmlentities | ., to_xmlentities'
"abc&<>" "abc&<>"
"abc&amp;&lt;&gt;" "abc&amp;&lt;&gt;"

View File

@ -47,11 +47,11 @@ func init() {
Functions: []string{"_todisplay"}, Functions: []string{"_todisplay"},
}) })
interp.RegisterFS(xmlFS) interp.RegisterFS(xmlFS)
interp.RegisterFunc1("toxml", toXML) interp.RegisterFunc1("to_xml", toXML)
interp.RegisterFunc0("fromxmlentities", func(_ *interp.Interp, c string) any { interp.RegisterFunc0("from_xmlentities", func(_ *interp.Interp, c string) any {
return html.UnescapeString(c) return html.UnescapeString(c)
}) })
interp.RegisterFunc0("toxmlentities", func(_ *interp.Interp, c string) any { interp.RegisterFunc0("to_xmlentities", func(_ *interp.Interp, c string) any {
return html.EscapeString(c) return html.EscapeString(c)
}) })
} }
@ -488,12 +488,12 @@ func toXMLFromArray(c any, opts ToXMLOpts) any {
ca, ok := c.([]any) ca, ok := c.([]any)
if !ok { if !ok {
return gojqex.FuncTypeError{Name: "toxml", V: c} return gojqex.FuncTypeError{Name: "to_xml", V: c}
} }
n, ok := f(ca) n, ok := f(ca)
if !ok { if !ok {
// TODO: better error // TODO: better error
return gojqex.FuncTypeError{Name: "toxml", V: c} return gojqex.FuncTypeError{Name: "to_xml", V: c}
} }
bb := &bytes.Buffer{} bb := &bytes.Buffer{}
e := xml.NewEncoder(bb) e := xml.NewEncoder(bb)
@ -514,5 +514,5 @@ func toXML(_ *interp.Interp, c any, opts ToXMLOpts) any {
} else if v, ok := gojqex.Cast[[]any](c); ok { } else if v, ok := gojqex.Cast[[]any](c); ok {
return toXMLFromArray(gojqex.NormalizeToStrings(v), opts) return toXMLFromArray(gojqex.NormalizeToStrings(v), opts)
} }
return gojqex.FuncTypeError{Name: "toxml", V: c} return gojqex.FuncTypeError{Name: "to_xml", V: c}
} }

View File

@ -1,2 +1,2 @@
def toxml: toxml(null); def to_xml: to_xml(null);
def _xml__todisplay: tovalue; def _xml__todisplay: tovalue;

View File

@ -3,7 +3,7 @@ Which variant to use depends a bit what you want to do. The object variant might
to query for a specific value but array might be easier to use to generate xml or to query to query for a specific value but array might be easier to use to generate xml or to query
after all elements of some kind etc. after all elements of some kind etc.
Encoding is done using the `toxml` function and it will figure what variant that is used based on the input value. Encoding is done using the `to_xml` function and it will figure out which variant is used based on the input value.
It has two optional options `indent` and `attribute_prefix`. It has two optional options `indent` and `attribute_prefix`.
### Elements as object ### Elements as object
@ -46,7 +46,7 @@ $ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq '.a.c["#text"]'
"ccc" "ccc"
# decode to object and encode to xml # decode to object and encode to xml
$ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o seq=true 'toxml({indent:2})' $ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o seq=true 'to_xml({indent:2})'
<a> <a>
<b></b> <b></b>
<b>bbb</b> <b>bbb</b>
@ -89,7 +89,7 @@ $ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -d xml -o array=true
] ]
# decode to array and encode to xml # decode to array and encode to xml
$ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o array=true -o seq=true 'toxml({indent:2})' $ echo '<a><b/><b>bbb</b><c attr="value">ccc</c></a>' | fq -r -d xml -o array=true -o seq=true 'to_xml({indent:2})'
<a> <a>
<b></b> <b></b>
<b>bbb</b> <b>bbb</b>

View File

@ -1,4 +1,4 @@
$ fq -n "{a: bsl(1;100)} | toyaml | ., fromyaml" $ fq -n "{a: bsl(1;100)} | to_yaml | ., from_yaml"
"a: \"1267650600228229401496703205376\"\n" "a: \"1267650600228229401496703205376\"\n"
{ {
"a": "1267650600228229401496703205376" "a": "1267650600228229401496703205376"

View File

@ -1,8 +1,8 @@
$ fq -n '"- a\ntrailing" | fromyaml._error.error' $ fq -n '"- a\ntrailing" | from_yaml._error.error'
exitcode: 5 exitcode: 5
stderr: stderr:
error: error at position 0xc: yaml: line 2: could not find expected ':' error: error at position 0xc: yaml: line 2: could not find expected ':'
$ fq -n '`{"a":123}{"b":444}` | fromyaml' $ fq -n '`{"a":123}{"b":444}` | from_yaml'
exitcode: 5 exitcode: 5
stderr: stderr:
error: error at position 0x12: trialing data after top-level value error: error at position 0x12: trialing data after top-level value

View File

@ -8,7 +8,7 @@ $ fq . probe.yaml
} }
} }
# TODO: add uint64 norm test # TODO: add uint64 norm test
$ fq -rRs 'fromjson[] | (try (toyaml | ., fromyaml) catch .), "----"' variants.json $ fq -rRs 'fromjson[] | (try (to_yaml | ., from_yaml) catch .), "----"' variants.json
null null
error at position 0x5: root not object or array error at position 0x5: root not object or array

View File

@ -29,7 +29,7 @@ func init() {
Functions: []string{"_todisplay"}, Functions: []string{"_todisplay"},
}) })
interp.RegisterFS(yamlFS) interp.RegisterFS(yamlFS)
interp.RegisterFunc0("toyaml", toYAML) interp.RegisterFunc0("to_yaml", toYAML)
} }
func decodeYAML(d *decode.D, _ any) any { func decodeYAML(d *decode.D, _ any) any {

View File

@ -26,6 +26,6 @@ def open: empty;
def scope: empty; def scope: empty;
# used by help.jq # used by help.jq
def tojq: empty; def to_jq: empty;
# used by funcs.jq iprint # used by funcs.jq iprint
def toradix($base): empty; def to_radix($base): empty;

View File

@ -1,15 +1,18 @@
# note this is a "dynamic" include, output string will be used as source # note this is a "dynamic" include, output string will be used as source
# generates decode functions # generates decode functions, ex:
# frommp3 and mp3 calls decode("mp3") # mp3/0 calls decode("mp3"; {})
# mp3/1 calls decode("mp3"; $opts)
# from_mp3/* same but throws error on decode error
[ _registry as $r [ _registry as $r
| $r.groups | $r.groups
| to_entries[] | to_entries[]
# TODO: nicer way to skip "all" which also would override builtin all/* # TODO: nicer way to skip "all" which also would override builtin all/*
# skip_decode_function is used to skip bits/bytes as they are special tobits/tobytes
| select(.key != "all" and ($r.formats[.key].skip_decode_function | not)) | select(.key != "all" and ($r.formats[.key].skip_decode_function | not))
| "def \(.key)($opts): decode(\(.key | tojson); $opts);" | "def \(.key)($opts): decode(\(.key | tojson); $opts);"
, "def \(.key): decode(\(.key | tojson); {});" , "def \(.key): decode(\(.key | tojson); {});"
, "def from\(.key)($opts): decode(\(.key | tojson); $opts) | if ._error then error(._error.error) end;" , "def from_\(.key)($opts): decode(\(.key | tojson); $opts) | if ._error then error(._error.error) end;"
, "def from\(.key): from\(.key)({});" , "def from_\(.key): from_\(.key)({});"
] | join("\n") ] | join("\n")
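For a single format group, say `mp3` (name picked only for illustration), the generated source would look roughly like this:

```jq
def mp3($opts): decode("mp3"; $opts);
def mp3: decode("mp3"; {});
def from_mp3($opts): decode("mp3"; $opts) | if ._error then error(._error.error) end;
def from_mp3: from_mp3({});
```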

View File

@ -109,10 +109,10 @@ def table(colmap; render):
# TODO: move? # TODO: move?
def iprint: def iprint:
{ {
bin: "0b\(toradix(2))", bin: "0b\(to_radix(2))",
oct: "0o\(toradix(8))", oct: "0o\(to_radix(8))",
dec: "\(.)", dec: "\(.)",
hex: "0x\(toradix(16))", hex: "0x\(to_radix(16))",
str: (try ([.] | implode) catch null), str: (try ([.] | implode) catch null),
}; };
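As a rough sketch, applying `iprint` to a small integer should give something like the following (exact REPL formatting may differ):

```sh
$ fq -n '255 | iprint'
{
  "bin": "0b11111111",
  "oct": "0o377",
  "dec": "255",
  "hex": "0xff",
  "str": "ÿ"
}
```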

View File

@ -70,7 +70,7 @@ def _help_format_enrich($arg0; $f; $include_basic):
, shell: "\($arg0) -d \($f.name)\($f.decode_in_arg | to_entries | map(" -o ", .key, "=", (.value | tojson)) | join("")) . file" , shell: "\($arg0) -d \($f.name)\($f.decode_in_arg | to_entries | map(" -o ", .key, "=", (.value | tojson)) | join("")) . file"
} }
, { comment: "Decode value as \($f.name)" , { comment: "Decode value as \($f.name)"
, expr: "\($f.name)(\($f.decode_in_arg | tojq))" , expr: "\($f.name)(\($f.decode_in_arg | to_jq))"
} }
] ]
end end
@ -85,7 +85,7 @@ def _help($arg0; $topic):
, " fq . file" , " fq . file"
, " fq d file" , " fq d file"
, " fq tovalue file" , " fq tovalue file"
, " fq -r totoml file.yml" , " fq -r to_toml file.yml"
, " fq -s -d html 'map(.html.head.title?)' *.html" , " fq -s -d html 'map(.html.head.title?)' *.html"
, " cat file.cbor | fq -d cbor torepr" , " cat file.cbor | fq -d cbor torepr"
, " fq 'grep(\"^main$\") | parent' /bin/ls" , " fq 'grep(\"^main$\") | parent' /bin/ls"

View File

@ -6,7 +6,7 @@
# <blank>+ # <blank>+
# ... # ...
# <next test> # <next test>
def fromjqtest: def from_jqtest:
[ foreach (split("\n")[], "") as $l ( [ foreach (split("\n")[], "") as $l (
{ current_line: 0 { current_line: 0
, nr: 1 , nr: 1
@ -60,7 +60,7 @@ def fromjqtest:
def run_tests: def run_tests:
def _f: def _f:
( fromjqtest[] ( from_jqtest[]
| . as $c | . as $c
| try | try
if .error | not then if .error | not then

View File

@ -17,7 +17,7 @@ Example usages:
fq . file fq . file
fq d file fq d file
fq tovalue file fq tovalue file
fq -r totoml file.yml fq -r to_toml file.yml
fq -s -d html 'map(.html.head.title?)' *.html fq -s -d html 'map(.html.head.title?)' *.html
cat file.cbor | fq -d cbor torepr cat file.cbor | fq -d cbor torepr
fq 'grep("^main$") | parent' /bin/ls fq 'grep("^main$") | parent' /bin/ls

View File

@ -1,5 +1,5 @@
# TODO: various gojq fq fork regression tests, should probably be moved to fork code instead # TODO: various gojq fq fork regression tests, should probably be moved to fork code instead
# 0xf_ffff_ffff_fffff_fffff-1 | toradix(2,8,16) # 0xf_ffff_ffff_fffff_fffff-1 | to_radix(2,8,16)
$ fq -n '0b1111111111111111111111111111111111111111111111111111111111111111111111111110, 0o17777777777777777777777776, 0xffffffffffffffffffe' $ fq -n '0b1111111111111111111111111111111111111111111111111111111111111111111111111110, 0o17777777777777777777777776, 0xffffffffffffffffffe'
75557863725914323419134 75557863725914323419134
75557863725914323419134 75557863725914323419134
@ -255,7 +255,7 @@ $ fq -d mp3 '.frames[0] | to_entries[].key' test.mp3
"crc_calculated" "crc_calculated"
# xml, csv does string normalization, make sure it works with nested JQValue:s # xml, csv does string normalization, make sure it works with nested JQValue:s
# TODO: move this test as it depends on xml # TODO: move this test as it depends on xml
$ fq -r '.headers[0] | toxml({indent: 2})' test.mp3 $ fq -r '.headers[0] | to_xml({indent: 2})' test.mp3
<doc> <doc>
<flags> <flags>
<experimental_indicator>false</experimental_indicator> <experimental_indicator>false</experimental_indicator>