chaijunjie created HDFS-17355:
---------------------------------

             Summary: Restarting the NameNode causes the client to throw EOFException
                 Key: HDFS-17355
                 URL: https://issues.apache.org/jira/browse/HDFS-17355
             Project: Hadoop HDFS
          Issue Type: Bug
          Components: namenode
    Affects Versions: 3.3.1
            Reporter: chaijunjie
We run HBase on a 3-node cluster (including 3 DataNodes). After a NameNode failover, the HBase HMaster throws java.io.EOFException; after failing over again, the problem clears.

{code:java}
java.io.IOException: org.apache.hadoop.ipc.RemoteException(java.io.EOFException): java.io.EOFException
    at java.io.DataInputStream.readInt(DataInputStream.java:393)
    at org.apache.hadoop.io.WritableUtils.readString(WritableUtils.java:122)
    at org.apache.hadoop.hdfs.server.namenode.FSDirErasureCodingOp.getErasureCodingPolicyForPath(FSDirErasureCodingOp.java:477)
    at org.apache.hadoop.hdfs.server.namenode.FSDirErasureCodingOp.unprotectedGetErasureCodingPolicy(FSDirErasureCodingOp.java:422)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.createFileStatus(FSDirStatAndListingOp.java:514)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.createFileStatus(FSDirStatAndListingOp.java:483)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getFileInfo(FSDirStatAndListingOp.java:429)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getFileInfo(FSDirStatAndListingOp.java:448)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getFileInfo(FSDirStatAndListingOp.java:131)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFileInfo(FSNamesystem.java:3594)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1278)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:1094)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:602)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:570)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:554)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1084)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1007)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1890)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3037)
    at org.apache.hadoop.hbase.regionserver.HRegion.initializeStores(HRegion.java:1183)
    at org.apache.hadoop.hbase.regionserver.HRegion.initializeStores(HRegion.java:1126)
    at org.apache.hadoop.hbase.regionserver.HRegion.initializeRegionInternals(HRegion.java:1021)
    at org.apache.hadoop.hbase.regionserver.HRegion.initialize(HRegion.java:966)
    at org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:8024)
    at org.apache.hadoop.hbase.regionserver.HRegion.openHRegionFromTableDir(HRegion.java:7983)
    at org.apache.hadoop.hbase.master.region.MasterRegion.open(MasterRegion.java:266)
    at org.apache.hadoop.hbase.master.region.MasterRegion.create(MasterRegion.java:344)
    at org.apache.hadoop.hbase.master.region.MasterRegionFactory.create(MasterRegionFactory.java:104)
    at org.apache.hadoop.hbase.master.HMaster.finishActiveMasterInitialization(HMaster.java:881)
    at org.apache.hadoop.hbase.master.HMaster.startActiveMasterManager(HMaster.java:2360)
    at org.apache.hadoop.hbase.master.HMaster.lambda$run$0(HMaster.java:550)
    at java.lang.Thread.run(Thread.java:750)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.EOFException): java.io.EOFException
    at java.io.DataInputStream.readInt(DataInputStream.java:393)
    at org.apache.hadoop.io.WritableUtils.readString(WritableUtils.java:122)
    at org.apache.hadoop.hdfs.server.namenode.FSDirErasureCodingOp.getErasureCodingPolicyForPath(FSDirErasureCodingOp.java:477)
    at org.apache.hadoop.hdfs.server.namenode.FSDirErasureCodingOp.unprotectedGetErasureCodingPolicy(FSDirErasureCodingOp.java:422)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.createFileStatus(FSDirStatAndListingOp.java:514)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.createFileStatus(FSDirStatAndListingOp.java:483)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getFileInfo(FSDirStatAndListingOp.java:429)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getFileInfo(FSDirStatAndListingOp.java:448)
    at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getFileInfo(FSDirStatAndListingOp.java:131)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFileInfo(FSNamesystem.java:3594)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1278)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:1094)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:602)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:570)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:554)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1084)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1007)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1890)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3037)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1579)
    at org.apache.hadoop.ipc.Client.call(Client.java:1525)
    at org.apache.hadoop.ipc.Client.call(Client.java:1422)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:245)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:131)
    at com.sun.proxy.$Proxy19.getFileInfo(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:1022)
    at sun.reflect.GeneratedMethodAccessor5.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:435)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366)
    at com.sun.proxy.$Proxy20.getFileInfo(Unknown Source)
    at sun.reflect.GeneratedMethodAccessor5.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
    at com.sun.proxy.$Proxy21.getFileInfo(Unknown Source)
    at sun.reflect.GeneratedMethodAccessor5.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
    at com.sun.proxy.$Proxy21.getFileInfo(Unknown Source)
    at sun.reflect.GeneratedMethodAccessor5.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
    at com.sun.proxy.$Proxy21.getFileInfo(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1892)
    at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1812)
    at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1809)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1824)
    at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1811)
    at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.createStoreDir(HRegionFileSystem.java:203)
    at org.apache.hadoop.hbase.regionserver.HStore.<init>(HStore.java:241)
    at org.apache.hadoop.hbase.regionserver.HRegion.instantiateHStore(HRegion.java:6451)
    at org.apache.hadoop.hbase.regionserver.HRegion$1.call(HRegion.java:1149)
    at org.apache.hadoop.hbase.regionserver.HRegion$1.call(HRegion.java:1146)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    ... 1 more
{code}
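For context on the top two frames: FSDirErasureCodingOp.getErasureCodingPolicyForPath decodes the stored erasure-coding policy name with WritableUtils.readString, which (per the trace) starts with DataInputStream.readInt, so an empty or truncated value fails with exactly this EOFException. The snippet below is only a minimal, standalone sketch of that decoding step under the assumption of a truncated value; it does not claim to show what the actual on-disk state of this cluster was after the failover.

{code:java}
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.EOFException;

import org.apache.hadoop.io.WritableUtils;

public class EcPolicyReadSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical stand-in for a truncated/empty erasure-coding policy value;
    // the real bytes on the failing NameNode are unknown from this report.
    byte[] truncatedValue = new byte[0];

    try (DataInputStream in =
             new DataInputStream(new ByteArrayInputStream(truncatedValue))) {
      // WritableUtils.readString first calls DataInputStream.readInt to get the
      // string length; with no bytes available this throws EOFException, which
      // matches the top two frames of the NameNode-side stack trace above.
      String policyName = WritableUtils.readString(in);
      System.out.println("decoded policy name: " + policyName);
    } catch (EOFException e) {
      System.out.println("EOFException while decoding the policy name: " + e);
    }
  }
}
{code}

If that is indeed the failing step, the open question is why the stored value was unreadable right after the failover; the trace alone does not answer that.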