* Flink version: 1.16
* Deployment: Docker cluster, 2 JobManagers and 3 TaskManagers in high-availability mode
* Hadoop: CDP7 cluster with Kerberos enabled
* Flink configuration: the following security settings were added to conf/flink-conf.yaml:
security.kerberos.login.use-ticket-cache: true
security.kerberos.login.keytab: /opt/flink/flink.keytab
security.kerberos.login.principal: fl...@hadoop.com

* Symptom: the Flink cluster itself connects to the CDP cluster's ZooKeeper, HDFS, and other services without problems; high availability works and checkpoint data is written successfully. However, when sql-client is used to connect to Hive, creating the catalog fails with a GSS authentication error.
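For context, the catalog is created with the standard Flink Hive catalog DDL, roughly as sketched below (the catalog name and hive-conf-dir are placeholders, not the exact values used):

Flink SQL> CREATE CATALOG myhive WITH (
  'type' = 'hive',
  'hive-conf-dir' = '/opt/hive-conf'  -- placeholder: directory holding the cluster's hive-site.xml
);

It is this step that fails with the GSS error shown in the sql-client log below.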
The error in the sql-client log:

2023-01-29 16:24:16,340 DEBUG org.apache.hadoop.ipc.Client [] - closing ipc connection to cdlab01/172.16.16.150:8020: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at org.apache.hadoop.ipc.Client$Connection$1.run(Client.java:755) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_352]
    at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_352]
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1836) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection.handleSaslConnectionFailure(Client.java:718) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:811) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection.access$3500(Client.java:410) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client.getConnection(Client.java:1550) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client.call(Client.java:1381) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client.call(Client.java:1345) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:227) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at com.sun.proxy.$Proxy39.getFileInfo(Unknown Source) [?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:796) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_352]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_352]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_352]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_352]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:409) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:163) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:155) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:346) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at com.sun.proxy.$Proxy40.getFileInfo(Unknown Source) [?:?]
    at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1717) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.hdfs.DistributedFileSystem$27.doCall(DistributedFileSystem.java:1437) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.hdfs.DistributedFileSystem$27.doCall(DistributedFileSystem.java:1434) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1434) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1437) [flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.flink.runtime.fs.hdfs.HadoopFileSystem.exists(HadoopFileSystem.java:165) [flink-dist-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.SqlClient.start(SqlClient.java:95) [flink-sql-client-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.SqlClient.startClient(SqlClient.java:187) [flink-sql-client-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.SqlClient.main(SqlClient.java:161) [flink-sql-client-1.16.0.jar:1.16.0]
Caused by: javax.security.sasl.SaslException: GSS initiate failed
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211) ~[?:1.8.0_352]
    at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:406) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:614) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection.access$2200(Client.java:410) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:798) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:794) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_352]
    at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_352]
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1836) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:793) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    ... 43 more
Caused by: org.ietf.jgss.GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:162) ~[?:1.8.0_352]
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122) ~[?:1.8.0_352]
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:189) ~[?:1.8.0_352]
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224) ~[?:1.8.0_352]
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212) ~[?:1.8.0_352]
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179) ~[?:1.8.0_352]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192) ~[?:1.8.0_352]
    at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:406) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:614) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection.access$2200(Client.java:410) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:798) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:794) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_352]
    at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_352]
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1836) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:793) ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
    ... 43 more

What setting do I need to change to fix this?
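P.S. To help rule out a problem with the keytab file itself, it can be tested manually inside the sql-client container, roughly as below (a sketch assuming the MIT Kerberos client tools are available in the image; the principal is truncated here exactly as in the config above):

# Obtain a TGT using only the keytab, then list the resulting credentials
kinit -kt /opt/flink/flink.keytab fl...@hadoop.com
klist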