我有如下的json文件。我正在尝试使用jq
{
"href" : "http://localhost:8080/api/v1/clusters/ambarihbase/configurations?type=hbase-site&tag=TOPOLOGY_RESOLVED",
"items" : [
{
"href" : "http://localhost:8080/api/v1/clusters/ambarihbase/configurations?type=hbase-site&tag=TOPOLOGY_RESOLVED",
"tag" : "TOPOLOGY_RESOLVED",
"type" : "hbase-site",
"version" : 2,
"Config" : {
"cluster_name" : "ambarihbase",
"stack_id" : "HDP-2.5"
},
"properties" : {
"dfs.domain.socket.path" : "/var/lib/hadoop-hdfs/dn_socket",
"dfs.support.append" : "false",
"hbase.bucketcache.combinedcache.enabled" : "true",
"hbase.bucketcache.ioengine" : "file:/mnt/hbase/cache.data",
"hbase.bucketcache.percentage.in.combinedcache" : "",
"hbase.bucketcache.size" : "81920",
"hbase.bulkload.staging.dir" : "/apps/hbase/staging",
"hbase.client.keyvalue.maxsize" : "1048576",
"hbase.client.retries.number" : "35",
"hbase.client.scanner.caching" : "100",
"hbase.cluster.distributed" : "true",
"hbase.coprocessor.master.classes" : "",
"hbase.coprocessor.region.classes" : "org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint",
"hbase.defaults.for.version.skip" : "true",
"hbase.fs.shutdown.hook.wait" : "600000",
"hbase.hregion.majorcompaction" : "0",
"hbase.hregion.majorcompaction.jitter" : "0.50",
"hbase.hregion.max.filesize" : "3221225472",
"hbase.hregion.memstore.block.multiplier" : "4",
"hbase.hregion.memstore.flush.size" : "134217728",
"hbase.hregion.memstore.mslab.enabled" : "true",
"hbase.hstore.blockingStoreFiles" : "100",
"hbase.hstore.compaction.max" : "10",
"hbase.hstore.compaction.max.size" : "10737418240",
"hbase.hstore.compactionThreshold" : "3",
"hbase.local.dir" : "${hbase.tmp.dir}/local",
"hbase.master.distributed.log.splitting" : "true",
"hbase.master.info.bindAddress" : "0.0.0.0",
"hbase.master.info.port" : "16010",
"hbase.master.port" : "16000",
"hbase.master.ui.readonly" : "false",
"hbase.region.server.rpc.scheduler.factory.class" : "org.apache.hadoop.hbase.ipc.PhoenixRpcSchedulerFactory",
"hbase.regionserver.global.memstore.size" : "0.4",
"hbase.regionserver.handler.count" : "100",
"hbase.regionserver.hlog.blocksize" : "134217728",
"hbase.regionserver.info.port" : "16030",
"hbase.regionserver.optionalcacheflushinterval" : "0",
"hbase.regionserver.port" : "16020",
"hbase.regionserver.wal.codec" : "org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec",
"hbase.rest.port" : "8090",
"hbase.rootdir" : "/hbase",
"hbase.rpc.protection" : "authentication",
"hbase.rpc.timeout" : "90000",
"hbase.rs.cacheblocksonwrite" : "true",
"hbase.security.authentication" : "simple",
"hbase.security.authorization" : "false",
"hbase.shutdown.hook" : "true",
"hbase.superuser" : "hbase",
"hbase.tmp.dir" : "/tmp/hbase-${user.name}",
"hbase.zookeeper.property.clientPort" : "2181",
"hbase.zookeeper.useMulti" : "true",
"hfile.block.cache.size" : "0.40",
"hfile.index.block.max.size" : "131072",
"io.storefile.bloom.block.size" : "131072",
"phoenix.functions.allowUserDefinedFunctions" : "true",
"phoenix.query.timeoutMs" : "60000",
"zookeeper.recovery.retry" : "6",
"zookeeper.session.timeout" : "120000",
"zookeeper.znode.parent" : "/hbase-unsecure"
}
}
]
}
我想更新hbase.hregion.max.filesize，所以我尝试了以下命令：
cat initial.json |
jq 'to_entries |
map(if .items[0].properties.key == "hfile.block.cache.size"
then . + {"value":"0.20"}
else .
end
) |
from_entries'
它输出了原始文件，但没有更新hfile.block.cache.size。
答案 0 :(得分:1)
原始问题询问如何更改多个“属性”。要做到这一点而不重复代码，关键是使用 |= 更新赋值运算符。这是一个例子：
.items[0].properties |=
( .["hfile.block.cache.size"] = "newvalue1"
| .["hbase.hregion.max.filesize"] = "newvalue2" )
原始问题还提到要就地更新文件中的属性。避免手动创建显式临时文件的一种方法是使用 sponge（可通过 brew install moreutils 安装），如下所示：
$ jq .... input.json | sponge input.json
答案 1 :(得分:0)
我设法用以下命令做到了：
jq '.items[0].properties."hfile.block.cache.size"="0.2"' initial.json > 1.json