hive常见错误

来源:互联网 发布:gta5 日式女孩捏脸数据 编辑:程序博客网 时间:2024/05/15 15:11

1. 报错 “could only be replicated to 0 nodes instead of minReplication (=1)”:DataNode 与 NameNode 状态不一致导致无可用节点写入。解决方法:删除各节点的数据存放目录后重新格式化 Hadoop(hdfs namenode -format),再重启集群。

java.lang.RuntimeException: Error caching map.xml: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /tmp/hive/hadoop/97c28744-9396-409d-a714-ad71a548ec63/hive_2017-03-01_01-53-54_684_594441272444452517-1/-mr-10004/50488ee5-23e1-4d27-a707-ee0f32055a9b/map.xml could only be replicated to 0 nodes instead of minReplication (=1).  There are 2 datanode(s) running and 2 node(s) are excluded in this operation.

         at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1610)

         at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3315)

         at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:679)

         at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.addBlock(AuthorizationProviderProxyClientProtocol.java:214)

         at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:489)

         at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)

         at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)

         at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)

         at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)

         at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)

         at java.security.AccessController.doPrivileged(Native Method)

         at javax.security.auth.Subject.doAs(Subject.java:415)

         at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)

         at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

 

         at org.apache.hadoop.hive.ql.exec.Utilities.setBaseWork(Utilities.java:737)

         at org.apache.hadoop.hive.ql.exec.Utilities.setMapWork(Utilities.java:672)

         at org.apache.hadoop.hive.ql.exec.Utilities.setMapRedWork(Utilities.java:664)

         at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:374)

         at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:137)

         at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)

         at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)

         at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1782)

         at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1539)

         at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1318)

         at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1127)

         at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1115)

         at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:220)

         at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:172)

         at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:383)

         at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:775)

         at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:693)

         at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:628)

         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

         at java.lang.reflect.Method.invoke(Method.java:606)

         at org.apache.hadoop.util.RunJar.run(RunJar.java:221)

         at org.apache.hadoop.util.RunJar.main(RunJar.java:136)

Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /tmp/hive/hadoop/97c28744-9396-409d-a714-ad71a548ec63/hive_2017-03-01_01-53-54_684_594441272444452517-1/-mr-10004/50488ee5-23e1-4d27-a707-ee0f32055a9b/map.xml could only be replicated to 0 nodes instead of minReplication (=1).  There are 2 datanode(s) running and 2 node(s) are excluded in this operation.

         at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1610)

         at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3315)

         at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:679)

         at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.addBlock(AuthorizationProviderProxyClientProtocol.java:214)

         at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:489)

         at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)

         at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)

         at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)

         at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)

         at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)

         at java.security.AccessController.doPrivileged(Native Method)

         at javax.security.auth.Subject.doAs(Subject.java:415)

         at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)

         at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

 

         at org.apache.hadoop.ipc.Client.call(Client.java:1471)

         at org.apache.hadoop.ipc.Client.call(Client.java:1408)

         at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)

         at com.sun.proxy.$Proxy14.addBlock(Unknown Source)

         at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:409)

         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

         at java.lang.reflect.Method.invoke(Method.java:606)

         at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:256)

         at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)

         at com.sun.proxy.$Proxy15.addBlock(Unknown Source)

         at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1733)

         at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1529)

         at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:683)

Job Submission failed with exception 'java.lang.RuntimeException(Error caching map.xml: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /tmp/hive/hadoop/97c28744-9396-409d-a714-ad71a548ec63/hive_2017-03-01_01-53-54_684_594441272444452517-1/-mr-10004/50488ee5-23e1-4d27-a707-ee0f32055a9b/map.xml could only be replicated to 0 nodes instead of minReplication (=1).  There are 2 datanode(s) running and 2 node(s) are excluded in this operation.

         at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1610)

         at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3315)

         at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:679)

         at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.addBlock(AuthorizationProviderProxyClientProtocol.java:214)

         at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:489)

         at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)

         at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)

         at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)

         at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)

         at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)

         at java.security.AccessController.doPrivileged(Native Method)

         at javax.security.auth.Subject.doAs(Subject.java:415)

         at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)

         at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

)'

 

FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask


2. 该错误由 Hadoop 磁盘空间不足引起。

2017-03-15 18:03:07,202 DEBUG [main]: metastore.HiveMetaStore(HiveMetaStore.java:createDefaultRoles_core(734)) - Failed while granting global privs to admin

InvalidObjectException(message:All is already granted by admin)

      at org.apache.hadoop.hive.metastore.ObjectStore.grantPrivileges(ObjectStore.java:4186)

      at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

      at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

      at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

      at java.lang.reflect.Method.invoke(Method.java:606)

      at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)

      at com.sun.proxy.$Proxy5.grantPrivileges(Unknown Source)

      at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles_core(HiveMetaStore.java:731)

      at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:697)

      at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:482)

      at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:78)

      at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:84)

      at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5923)

      at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:201)

      at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)

      at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)

      at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)

      at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)

      at java.lang.reflect.Constructor.newInstance(Constructor.java:526)

      at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1501)

      at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:67)

      at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:82)

      at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3024)

      at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3043)

      at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3268)

      at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:215)

      at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:201)

      at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:312)

      at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:273)

      at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:248)

      at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:513)

      at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:689)

      at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:628)

      at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

      at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

      at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

      at java.lang.reflect.Method.invoke(Method.java:606)

      at org.apache.hadoop.util.RunJar.run(RunJar.java:221)

      at org.apache.hadoop.util.RunJar.main(RunJar.java:136)




0 0
原创粉丝点击