Linux:Scripting:JQ-Ivo
This page was written and contributed by Ivo Palli.
General
jq is a program to get information from JSON files on the command line
Usage
cat stuff.json | jq . # Prettify
jq -s 'add,min,max,add/length' file_with_numbers # Total, min, max, avg - see the worked example below
echo "[[1,2,3,4],[5,6,7,8],[9,10,11,12]]" | jq -r '(.[]) | @tsv' # Turns this into a TSV
echo '[{"name":"joe"}, {"name":"john"}, {"name":"jose"}]' | jq '[.[] | .name] | .[1]' # Change an object into an array from which you can then get item 1
# Take a JSON array of objects and select a subset of fields. Turn it into an array of arrays so we can convert it with @tsv
# https://e.printstacktrace.blog/how-to-convert-json-to-csv-from-the-command-line/
jq -r 'map({MICROSECTIENUMMER_COD, VOORNAAM, VOLLEDIGE_NAAM, NUMMER, EMAIL_ADRES}) | (first | keys_unsorted) as $keys | map([to_entries[] | .value]) as $rows | $keys,$rows[] | @tsv' input.txt
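# What the add/min/max/avg one-liner above expects: a file or stream with one number per line. A quick sketch with made-up input, not from the original page:
printf '4\n8\n15\n16\n' | jq -s 'add,min,max,add/length' # 43, 4, 16, 10.75 - each on its own line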
Compiling
Get the static version of jq 1.6 here: File:Jq.static
wget https://github.com/stedolan/jq/releases/download/jq-1.6/jq-1.6.tar.gz
tar xzf jq-*.tar.gz
cd jq-*/
cd modules
rm -rf oniguruma
wget https://github.com/kkos/oniguruma/releases/download/v6.9.7.1/onig-6.9.7.1.tar.gz
tar xzf onig-*.tar.gz
mv onig-*/ oniguruma
cd oniguruma
CC=musl-gcc ./configure --disable-shared
make
ls ./src/.libs/libonig.a
cd ../..
CC=musl-gcc ./configure --disable-shared --enable-all-static
make
file jq
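A quick sanity check afterwards (not part of the original recipe) to confirm the binary is really static:
ldd ./jq       # should say "not a dynamic executable"
./jq --version # jq-1.6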
JSONPath
echo '{"name":"Ivo","sex":"male","age":99}' | jq -r .name # Print name without quotes
... | jq -r '.result[] | "\(.hostid)\t\(.host)"' # Prints the hostid and host values of the result array with a TAB separator
Examples
# jq will print whatever data it has at the end of the program
jq -n '"blip"' # "blip" - The '-n' means jq shouldn't try to read STDIN for input
# You can pass data from phase to phase via the '|' operator, where '.' is the data from the previous phase
jq -n '"blip" | .,"blop"' # "blip" \n "blop" - You can concatenate things together
jq -n '"blip" | "blop"' # "blop" - Or just ignore them completely
# Let's pass in some JSON data
echo '["data"]' | jq -n '.' # null - because the '-n' switch tells jq to ignore STDIN and jq has no data from a previous phase so '.' is empty
echo '["data"]' | jq '.' # [ \n "data" \n ] - by default jq prints JSON output 'pretty'
echo '["data"]' | jq '.[]' # "data" - You can strip away arrays by putting '[]' behind the '.'
echo '["data"]' | jq -r '.[]' # data - With the '-r' raw option, when the output is a string, jq will no longer quote it. If your output is json '-r' will not do anything
echo '[{"name":"Ivo","age":99,"male":true,"occupation":"IT"},{"name":"Sarah","age":66,"male":false,"occupation":"Life sciences"},{"name":"Rene","age":14,"male":true}]' > input.txt
# Pretty print the JSON
jq . input.txt
# Print JSON compact
jq -c . input.txt
# Loop over the objects in the array (and print them by default)
cat input.txt | jq '.[]'
# Loop over the objects in the array and put it in an array
cat input.txt | jq '[.[]]'
# Print the first object in the array
cat input.txt | jq '.[0]'
# Print the names, each on its own line. We use '-r' to avoid putting quotes around the name. Note this is not JSON output.
cat input.txt | jq -r '.[] | .name'
# Make it into an array
cat input.txt | jq -c '[.[] | .name]' # ["Ivo","Sarah","Rene"]
# And sort it
cat input.txt | jq -c '[.[] | .name] | sort' # ["Ivo","Rene","Sarah"]
# Print names and ages in columns
cat input.txt | jq -r '.[] | [.name,.age] | @tsv'
# Print the names of all the men
cat input.txt | jq -r '.[] | select(.male == true) | .name'
# Only print Ivo
cat input.txt | jq '.[] | select(.name | contains("Ivo"))'
# Deleting
echo '{"a":1,"b":2,"c":3}' | jq -c -r 'del(.b)' # {"a":1,"c":3}
echo '{"a":1,"b":2,"c":3}' | jq -s -c -r 'map(del(.b))' # [{"a":1,"c":3}]
# Flattening multidimensional arrays
echo '[{"a":1,"b":{"c":3,"d":4}},{"e":5,"f":{"g":7,"h":8}}]' | \
jq -c '[.[] | [paths(scalars) as $path | { ($path | map(tostring) | join("_")): getpath($path) } ] | add]'
# [{"a":1,"b_c":3,"b_d":4},{"e":5,"f_g":7,"f_h":8}]
# Flattening, only keeping the key and value
echo '[{"a":1,"b":{"c":3,"d":4}},{"e":5,"f":{"g":7,"h":8}}]' | \
jq -c '[paths(scalars) as $path | { ($path | map(tostring)[-1] ): getpath($path) }] | add'
# {"a":1,"c":3,"d":4,"e":5,"g":7,"h":8}
# List of values to TSV
echo '{"a":1,"b":2,"c":3}' | jq -r 'to_entries | .[] | [.key,.value] | @tsv'
# Concatenating values
echo '{"name":"Ivo", "age":43}' | jq -r '.name + " is " + (.age|tostring) + " years old"'
# Concatenating them with TABs
echo '{"A":"z1","B":"z2","C":"z3"}' | jq -r '[.A,.B,.C] | join("\t")' # z1 z2 z3
# Reverse the array, then join the names as a path
./object.sh 676660251 | jq -sr '[reverse | .[] | .O_NAME] | join("/")'
# Implode an array
echo '[{"a":"aa","b":{"c":"cc","d":"dd"}},{"a":"aaa","b":{"c":"ccc","d":"ddd"}}]' | jq -r '.[] | [.a,(.b | join("_"))] | @tsv'
openstack volume list --all-projects -f json | jq -r '.[] | [.ID,.Size,([."Attached to"[].server_id] | join(","))] | @tsv'
# Convert JSON to TSV with a header line
# The map filters the JSON to what you want
# The 2nd part gives you the keys on the first line and then lines with values. This notation avoids errors
cat input.txt | jq -r 'map({name,age,occupation}) | (.[0] | keys_unsorted), (.[] | [.[]]) | @tsv'
# Printing items that sometimes don't exist
echo '[{"name":"Ivo","z":{"age":46}},{"name":"Sarah","z":[]}]' | jq -r '.[] | try(.z.age)catch(0)'
# Printing a header and dash lines beneath them
cat input.txt | jq -r '(["Naam","Leeftijd"] | (., map(length*"-"))), (.[] | [.name, .age]) | @tsv'
# Read in the whole file, then build an array of all keys, print the keys as a header line and print all values of those keys
# 'add' builds the combined key array, but the resulting list of keys ends up sorted
# For an unsorted version check: https://stackoverflow.com/questions/68184175/how-to-use-add-and-preserve-duplicate-keys-with-different-values-adding-them
# and https://stedolan.github.io/jq/manual/#Assignment
jq --slurp --raw-output '.[] | (map(keys) | add | unique) as $keys | $keys, map([.[ $keys[] ] | if . == null or . == "" then "-" else . end])[] | @tsv' input.txt
# Two equivalent ways to list all keys (unsorted, duplicates included)
cat input.txt | jq '[.[] | to_entries | .[] | .key]'
cat input.txt | jq '[.[] | keys_unsorted | .[]]'
# External data
jq -n --arg var1 "Hello" '$var1' # "Hello" - Passing data from the commandline
jq -n 'env.HOME' # "/home/user1" - jq > 1.4, getting data straight from the environment
jq -n --argjson user1 '{"name":"Pete","male":true}' '$user1.name' # "Pete"
jq -n --slurpfile ex1 input.txt '$ex1' # Pretty prints the JSON content of file input.txt
# You can store data in a variable with 'as'
cat input.txt | jq '.[] | keys_unsorted as $k | $k[]'
# foreach EXP as $var (INIT; UPDATE; EXTRACT)
# INIT is evaluated once to produce a state value
# Each output of EXP is bound to $var
# UPDATE is evaluated for each output of EXP with the current state and with $var visible
# Each value output by UPDATE replaces the previous state
# EXTRACT is evaluated for each new state to extract an output of foreach
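# A minimal foreach sketch (not from the original page; assumes jq 1.5 or later): running totals over an array.
# The state starts at 0, UPDATE adds each element, and EXTRACT ('.') emits the state after every step.
jq -n '[1,2,3,4] | foreach .[] as $x (0; . + $x; .)' # 1 3 6 10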
jq -n '["Hello", "world"] as $k | $k' # [ "Hello", "world" ]
jq -n '["Hello", "world"] as $k | $k | .[0] = "Bye" | $k' # [ "Hello", "world" ] - Altering an array doesn't affect the source array
jq -n '["Hello", "world"] as $k | $k | .[0] = "Bye"' # [ "Bye", "world" ]
jq -n '["Hello", "world"] as $k | $k | .[0] = "Bye" as $k | $k' # [ "Bye", "world" ] - You can always reassign $k again
jq -n '["Hello", "world"] | .[0] = "Bye"' # [ "Bye", "world" ] - In this case you of course don't need the variables
# Deduplicating without sorting
cat input.txt | jq '[.[] | keys_unsorted[]]' > y1
cat y1 | jq 'reduce .[] as $r ([]; . += ["-" + $r])' # Same list but with a - prefix
cat y1 | jq 'reduce .[] as $r ([]; if index($r)|not then . += [$r] else . end)' # Deduplicated unsorted!
# THIS WORKS!
jq -s -r '.[] |
([.[] | keys_unsorted[]] |
reduce .[] as $item ([]; if any(.[]; . == $item) then . else . + [$item] end)) as $keys |
$keys, (.[] | [.[$keys[]] | if . == null or . == "" then "-" else . end ]) | @tsv'
# Turn separate JSON fields into an array
echo '[1][2][3]' | jq -s .
cat log.json | jq -r 'select (.test == "NFS") | [.time,.user] | @tsv' # Filter a stream of log objects and print selected fields as TSV
echo '[1][2][3]' | jq -sc . # Put loose JSONs in an array [[1],[2],[3]]
Convert text lines to JSON array
echo -e "a\nb\nc d\n1" | jq --raw-input . | jq --slurp -c # ["a","b","c d","1"]
URI encode data
echo -n 'input ( & ! ~ : # text' | jq -sRr '@uri' # input%20(%20%26%20!%20~%20%3A%20%23%20text
Ranges
echo "[1,2,3,4,5]" | jq -c '.[0:1]' # [1] echo "[1,2,3,4,5]" | jq -c '.[1:1]' # [] echo "[1,2,3,4,5]" | jq -c '.[1]' # 2 echo "[1,2,3,4,5]" | jq -c '.[2:4]' # [3,4] echo "[1,2,3,4,5]" | jq -c '.[2:]' # [3,4,5] echo "[1,2,3,4,5]" | jq -c '.[-2:]' # [4,5]
json2tsv.sh
#!/bin/bash
# Input JSON must be 2D!
jq -s -r '.[] | ([.[] | keys_unsorted[]] | reduce .[] as $item ([]; if any(.[]; . == $item) then . else . + [$item] end)) as $keys | $keys, (.[] | [.[$keys[]] | if . == null or . == "" then "-" else . end ]) | @tsv' "$1"
# EOF
gron - Make JSON grep-able
It's written in Go and mostly portable across servers. Also see Awk_json_pivot
wget https://github.com/tomnomnom/gron/releases/download/v0.7.1/gron-linux-amd64-0.7.1.tgz
tar xzf gron-linux-*-*.tgz
mv gron /usr/local/bin/
gron myfile.json | grep "^json.names.father" | gron --ungron
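To give an idea of what gron's output looks like (an illustrative sketch, not taken from the original page):
echo '{"names":{"father":"Ivo"}}' | gron
# json = {};
# json.names = {};
# json.names.father = "Ivo";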