venkata ram kumar ch created HDFS-14014:
-------------------------------------------
Summary: Unable to change the state of DN to maintenance using dfs.hosts.maintenance
Key: HDFS-14014
URL: https://issues.apache.org/jira/browse/HDFS-14014
Project: Hadoop HDFS
Issue Type: Bug
Reporter: venkata ram kumar ch
hdfs-site.xml configuration:
<configuration>
  <property>
    <name>dfs.namenode.maintenance.replication.min</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.namenode.hosts.provider.classname</name>
    <value>org.apache.hadoop.hdfs.server.blockmanagement.CombinedHostFileManager</value>
  </property>
  <property>
    <name>dfs.hosts.maintenance</name>
    <value>/opt/lifeline2/install/hadoop/namenode/etc/hadoop/maintenance</value>
  </property>
</configuration>
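
For reference, the values the running NameNode actually picked up can be double-checked with hdfs getconf; the expected outputs below assume the snippet above is the effective hdfs-site.xml:

./hdfs getconf -confKey dfs.namenode.hosts.provider.classname
# expected: org.apache.hadoop.hdfs.server.blockmanagement.CombinedHostFileManager
./hdfs getconf -confKey dfs.hosts.maintenance
# expected: /opt/lifeline2/install/hadoop/namenode/etc/hadoop/maintenance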
maintenance file:

{
  "hostName": "vm1",
  "port": 50076,
  "adminState": "IN_MAINTENANCE",
  "maintenanceExpireTimeInMS": 1540204025000
}
Command:
/hadoop/namenode/bin # ./hdfs dfsadmin -refreshNodes
2018-10-22 17:45:54,286 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Refresh nodes failed for vm1:65110
Refresh nodes failed for vm2:65110
refreshNodes: 2 exceptions
[org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): (No such file or directory)
	at java.io.FileInputStream.open0(Native Method)
	at java.io.FileInputStream.open(FileInputStream.java:195)
	at java.io.FileInputStream.<init>(FileInputStream.java:138)
	at java.io.FileInputStream.<init>(FileInputStream.java:93)
	at org.apache.hadoop.hdfs.util.CombinedHostsFileReader.readFile(CombinedHostsFileReader.java:75)
	at org.apache.hadoop.hdfs.server.blockmanagement.CombinedHostFileManager.refresh(CombinedHostFileManager.java:215)
	at org.apache.hadoop.hdfs.server.blockmanagement.CombinedHostFileManager.refresh(CombinedHostFileManager.java:210)
	at org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager.refreshHostsReader(DatanodeManager.java:1195)
	at org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager.refreshNodes(DatanodeManager.java:1177)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.refreshNodes(FSNamesystem.java:4488)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.refreshNodes(NameNodeRpcServer.java:1270)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.refreshNodes(ClientNamenodeProtocolServerSideTranslatorPB.java:913)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
, org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): (No such file or directory)
	at java.io.FileInputStream.open0(Native Method)
	at java.io.FileInputStream.open(FileInputStream.java:195)
	at java.io.FileInputStream.<init>(FileInputStream.java:138)
	at java.io.FileInputStream.<init>(FileInputStream.java:93)
	at org.apache.hadoop.hdfs.util.CombinedHostsFileReader.readFile(CombinedHostsFileReader.java:75)
	at org.apache.hadoop.hdfs.server.blockmanagement.CombinedHostFileManager.refresh(CombinedHostFileManager.java:215)
	at org.apache.hadoop.hdfs.server.blockmanagement.CombinedHostFileManager.refresh(CombinedHostFileManager.java:210)
	at org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager.refreshHostsReader(DatanodeManager.java:1195)
	at org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager.refreshNodes(DatanodeManager.java:1177)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.refreshNodes(FSNamesystem.java:4488)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.refreshNodes(NameNodeRpcServer.java:1270)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.refreshNodes(ClientNamenodeProtocolServerSideTranslatorPB.java:913)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
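
Since both traces fail in CombinedHostsFileReader.readFile while opening a FileInputStream, a quick sanity check (an assumption, not something verified in this report) is whether the JSON hosts file is actually present and readable at the configured path on each NameNode host, e.g.:

# run on vm1 and vm2, the hosts reported by refreshNodes above
ls -l /opt/lifeline2/install/hadoop/namenode/etc/hadoop/maintenance
# stock CombinedHostFileManager reads the file named by dfs.hosts, so that value may be worth checking as well
./hdfs getconf -confKey dfs.hosts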