How do I fix the Hive/Hadoop "java.lang.RuntimeException: Error caching map.xml"?

Asked: 2019-05-15 03:49:27

Tags: hadoop hive

I am trying to insert some values into a Hive table (apache-hive-3.1.1) and I get an "Error caching map.xml" error. "show tables" works fine, and I can see the Hadoop cluster at localhost:9870. Can anyone tell me how to fix this error?

I have already tried formatting the NameNode and running the query again. The other options I found were changing permissions and deleting the HDFS data, but I could not get either of those to work.
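For reference, this is roughly the sequence I followed (a sketch; the data directory below is the example default, the real path comes from dfs.datanode.data.dir in hdfs-site.xml):

stop-dfs.sh                      # stop the NameNode and DataNode daemons
hdfs namenode -format            # re-format the NameNode (wipes HDFS metadata)
rm -rf /tmp/hadoop-*/dfs/data    # example default DataNode data dir; adjust to your setup
start-dfs.sh                     # restart HDFS
jps                              # check which daemons are actually running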

Creating the table works fine:

create table employees 
(empid int, 
firstname varchar(30),
lastname varchar(30),
tenure int,
address struct<street:string,city:string>,
subordinates array<string>);

The following does not work:

insert into employees 
select 1, "Vitthal","Srinivasan",1, named_struct("street","Bellandur","city","Bangalore"),array("Anuradha","Arun","Swetha")
union all select 2, "Swetha","Kolalapudi",4, named_struct("street","Bellandur","city","Bangalore"),array("Pradeep")
union all select 3, "Janani","Ravi",2, named_struct("street","Bellandur","city","Bangalore"),array("Navdeep")
union all select 4, "Navdeep","Singh",3, named_struct("street","Bellandur","city","Bangalore"),array("Shreya","Jitu");

Below is the error message I receive:

hive>  insert into employees 
    > select 1, "Vitthal","Srinivasan",1, named_struct("street","Bellandur","city","Bangalore"),array("Anuradha","Arun","Swetha")
    > union all select 2, "Swetha","Kolalapudi",4, named_struct("street","Bellandur","city","Bangalore"),array("Pradeep")
    > union all select 3, "Janani","Ravi",2, named_struct("street","Bellandur","city","Bangalore"),array("Navdeep")
    > union all select 4, "Navdeep","Singh",3, named_struct("street","Bellandur","city","Bangalore"),array("Shreya","Jitu");
Query ID = saurabhsomani_20190514204737_a73ab528-757c-4a7a-953a-4c5f1ce5ebcf
Total jobs = 3
Launching Job 1 out of 3
Number of reduce tasks is set to 0 since there's no reduce operator
java.lang.RuntimeException: Error caching map.xml
    at org.apache.hadoop.hive.ql.exec.Utilities.setBaseWork(Utilities.java:641)
    at org.apache.hadoop.hive.ql.exec.Utilities.setMapWork(Utilities.java:566)
    at org.apache.hadoop.hive.ql.exec.Utilities.setMapRedWork(Utilities.java:558)
    at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:362)
    at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:149)
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205)
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97)
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2664)
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2335)
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2011)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1709)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1703)
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157)
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218)
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239)
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188)
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402)
    at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:821)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:759)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:683)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.util.RunJar.run(RunJar.java:318)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:232)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /tmp/hive/saurabhsomani/255fb86f-47de-4f31-ad0d-a64b8d864d37/hive_2019-05-14_20-47-37_654_6424247597470144451-1/-mr-10004/2fe7dee9-efc9-43a9-80c7-84f922243f7d/map.xml could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running and no node(s) are excluded in this operation.
    at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2117)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:287)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2691)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)

    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1497)
    at org.apache.hadoop.ipc.Client.call(Client.java:1443)
    at org.apache.hadoop.ipc.Client.call(Client.java:1353)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
    at com.sun.proxy.$Proxy29.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:510)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
    at com.sun.proxy.$Proxy30.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1078)
    at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
    at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
Job Submission failed with exception 'java.lang.RuntimeException(Error caching map.xml)'
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Error caching map.xml
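From the Caused by line above ("could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running"), it looks like no DataNode is actually registered with the NameNode, even though the UI at localhost:9870 loads. I assume something like the following standard commands would confirm that:

jps                            # should list NameNode, DataNode and SecondaryNameNode
hdfs dfsadmin -report          # "Live datanodes (0)" would confirm the trace
hdfs --daemon start datanode   # Hadoop 3.x command to start the DataNode by hand

One common cause I have read about after re-formatting a NameNode is a clusterID mismatch between the NameNode and the VERSION file under the DataNode's data directory; if that is the case here, the DataNode log should say so explicitly.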

0 Answers:

There are no answers yet.