我正在尝试使用 jq 解析 Elasticsearch 的聚合搜索结果,以生成 CSV。但我很难得到想要的结果——希望有人能帮忙。我有以下 JSON:
[
{
"key_as_string": "2017-09-01T00:00:00.000+02:00",
"key": 1506808800000,
"doc_count": 5628,
"agg1": {
"doc_count_error_upper_bound": 5,
"sum_other_doc_count": 1193,
"buckets": [
{
"key": "value3",
"doc_count": 3469,
"agg2": {
"doc_count_error_upper_bound": 1,
"sum_other_doc_count": 3459,
"buckets": [
{
"key": "10367.xxx",
"doc_count": 1
},
{
"key": "10997.xxx",
"doc_count": 1
},
{
"key": "12055.xxx",
"doc_count": 1
},
{
"key": "12157.xxx",
"doc_count": 1
},
{
"key": "12435.xxx",
"doc_count": 1
},
{
"key": "12volt.xxx",
"doc_count": 1
},
{
"key": "13158.xxx",
"doc_count": 1
},
{
"key": "13507.xxx",
"doc_count": 1
},
{
"key": "13597.xxx",
"doc_count": 1
},
{
"key": "137.xxx",
"doc_count": 1
}
]
}
},
{
"key": "value2",
"doc_count": 608,
"agg2": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 577,
"buckets": [
{
"key": "saasf.xxx",
"doc_count": 7
},
{
"key": "asfasf.xxx",
"doc_count": 5
},
{
"key": "sasfsd.xxx",
"doc_count": 3
},
{
"key": "werwer.xxx",
"doc_count": 3
},
{
"key": "werwre.xxx",
"doc_count": 3
},
{
"key": "a-werwr.xxx",
"doc_count": 2
},
{
"key": "aef.xxx",
"doc_count": 2
},
{
"key": "sadhdhh.xxx",
"doc_count": 2
},
{
"key": "dhsdfsdg.xxx",
"doc_count": 2
},
{
"key": "ertetrt.xxx",
"doc_count": 2
}
]
}
},
{
"key": "value1",
"doc_count": 358,
"agg2": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 336,
"buckets": [
{
"key": "fhshfg.xxx",
"doc_count": 3
},
{
"key": "sgh.xxx",
"doc_count": 3
},
{
"key": "12.xxx",
"doc_count": 2
},
{
"key": "sbgs.xxx",
"doc_count": 2
},
{
"key": "dp-eca.xxx",
"doc_count": 2
},
{
"key": "ztuhfb.xxx",
"doc_count": 2
},
{
"key": "javascript.xxx",
"doc_count": 2
},
{
"key": "koi-fdhfh.xxx",
"doc_count": 2
},
{
"key": "sdfh.xxx",
"doc_count": 2
},
{
"key": "etz5.xxx",
"doc_count": 2
}
]
}
}
]
}
}
]
这只是一个小片段,实际上我每天都有这样的结果(时间戳位于 'key_as_string' 字段中)。我需要的 CSV 应当是如下形式:
2017-09-01T00:00:00.000+02:00,value3,10367.xxx,1
2017-09-01T00:00:00.000+02:00,value3,10997.xxx,1
...
2017-09-01T00:00:00.000+02:00,value2,saasf.xxx,7
2017-09-01T00:00:00.000+02:00,value2,asfasf.xxx,5
...
2017-09-01T00:00:00.000+02:00,value1,fhshfg.xxx,3
2017-09-01T00:00:00.000+02:00,value1,sgh.xxx,3
...
答案 0(得分:3)
jq 解决方案:
jq -r '.[] | .key_as_string as $ks | .agg1.buckets[] | .key as $key
| .agg2.buckets[] | [$ks,$key,.key,.doc_count] | @csv' jsonfile
输出(用于当前输入):
"2017-09-01T00:00:00.000+02:00","value3","10367.xxx",1
"2017-09-01T00:00:00.000+02:00","value3","10997.xxx",1
"2017-09-01T00:00:00.000+02:00","value3","12055.xxx",1
"2017-09-01T00:00:00.000+02:00","value3","12157.xxx",1
"2017-09-01T00:00:00.000+02:00","value3","12435.xxx",1
"2017-09-01T00:00:00.000+02:00","value3","12volt.xxx",1
"2017-09-01T00:00:00.000+02:00","value3","13158.xxx",1
"2017-09-01T00:00:00.000+02:00","value3","13507.xxx",1
"2017-09-01T00:00:00.000+02:00","value3","13597.xxx",1
"2017-09-01T00:00:00.000+02:00","value3","137.xxx",1
"2017-09-01T00:00:00.000+02:00","value2","saasf.xxx",7
"2017-09-01T00:00:00.000+02:00","value2","asfasf.xxx",5
"2017-09-01T00:00:00.000+02:00","value2","sasfsd.xxx",3
"2017-09-01T00:00:00.000+02:00","value2","werwer.xxx",3
"2017-09-01T00:00:00.000+02:00","value2","werwre.xxx",3
"2017-09-01T00:00:00.000+02:00","value2","a-werwr.xxx",2
"2017-09-01T00:00:00.000+02:00","value2","aef.xxx",2
"2017-09-01T00:00:00.000+02:00","value2","sadhdhh.xxx",2
"2017-09-01T00:00:00.000+02:00","value2","dhsdfsdg.xxx",2
"2017-09-01T00:00:00.000+02:00","value2","ertetrt.xxx",2
"2017-09-01T00:00:00.000+02:00","value1","fhshfg.xxx",3
"2017-09-01T00:00:00.000+02:00","value1","sgh.xxx",3
"2017-09-01T00:00:00.000+02:00","value1","12.xxx",2
"2017-09-01T00:00:00.000+02:00","value1","sbgs.xxx",2
"2017-09-01T00:00:00.000+02:00","value1","dp-eca.xxx",2
"2017-09-01T00:00:00.000+02:00","value1","ztuhfb.xxx",2
"2017-09-01T00:00:00.000+02:00","value1","javascript.xxx",2
"2017-09-01T00:00:00.000+02:00","value1","koi-fdhfh.xxx",2
"2017-09-01T00:00:00.000+02:00","value1","sdfh.xxx",2
"2017-09-01T00:00:00.000+02:00","value1","etz5.xxx",2