org.apache.hadoop.security.AccessControlException: Client cannot authenticate via: [TOKEN, KERBEROS] problem

I am using a Java client to access a Kerberos-secured HDFS. I ran klist on the server and it shows an existing valid ticket. The exception I get is "Client cannot authenticate via: [TOKEN, KERBEROS]". Any help would be appreciated. Here is the error log.

Java 8, CDH 5.12.1, Hadoop 2.6

Exception in thread "main" java.io.IOException: Failed on local exception: java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]; Host Details : local host is: 
            at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:772)
            at org.apache.hadoop.ipc.Client.call(Client.java:1508)
            at org.apache.hadoop.ipc.Client.call(Client.java:1441)
            at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
            at com.sun.proxy.$Proxy10.getListing(Unknown Source)
            at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getListing(ClientNamenodeProtocolTranslatorPB.java:588)
            at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
            at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
            at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
            at java.lang.reflect.Method.invoke(Method.java:498)
            at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:258)
            at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
            at com.sun.proxy.$Proxy11.getListing(Unknown Source)
            at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2145)
            at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2128)
            at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:743)
            at org.apache.hadoop.hdfs.DistributedFileSystem.access$600(DistributedFileSystem.java:113)
            at org.apache.hadoop.hdfs.DistributedFileSystem$16.doCall(DistributedFileSystem.java:808)
            at org.apache.hadoop.hdfs.DistributedFileSystem$16.doCall(DistributedFileSystem.java:804)
            at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
            at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:804)
            at hive.Hadoop_TEST.delete(Hadoop_TEST.java:54)
            at hive.NableHive.main(NableHive.java:47)
    Caused by: java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]
            at org.apache.hadoop.ipc.Client$Connection$1.run(Client.java:718)
            at java.security.AccessController.doPrivileged(Native Method)
            at javax.security.auth.Subject.doAs(Subject.java:422)
            at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
            at org.apache.hadoop.ipc.Client$Connection.handleSaslConnectionFailure(Client.java:681)
            at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:769)
            at org.apache.hadoop.ipc.Client$Connection.access$3000(Client.java:396)
            at org.apache.hadoop.ipc.Client.getConnection(Client.java:1557)
            at org.apache.hadoop.ipc.Client.call(Client.java:1480)
            ... 21 more
    Caused by: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]
            at org.apache.hadoop.security.SaslRpcClient.selectSaslClient(SaslRpcClient.java:172)
            at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:396)
            at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:594)
            at org.apache.hadoop.ipc.Client$Connection.access$2000(Client.java:396)
            at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:761)
            at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:757)
            at java.security.AccessController.doPrivileged(Native Method)
            at javax.security.auth.Subject.doAs(Subject.java:422)
            at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
            at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:756)
            ... 24 more
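
In this trace, `selectSaslClient` found no usable SASL mechanism: the NameNode offered TOKEN and KERBEROS, but the client's login carried neither a delegation token nor valid Kerberos credentials, so negotiation could not even start. A minimal sketch for surfacing what the Kerberos layer is doing, assuming the standard JDK 8 debug switches (these are JDK properties, not Hadoop ones), is to set them before any Hadoop class loads:

public class KerberosDebug {
    public static void main(String[] args) throws Exception {
        // JDK Kerberos trace: ticket cache and keytab lookups, KDC exchanges.
        System.setProperty("sun.security.krb5.debug", "true");
        // JAAS login tracing (also a standard JDK switch).
        System.setProperty("java.security.debug", "logincontext");
        // ... then run the HDFS client code shown below.
    }
}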

This is the Java code:

// Needed imports: java.io.File, java.io.FileNotFoundException, java.net.URI,
// org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileStatus,
// org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path,
// org.apache.hadoop.security.UserGroupInformation

final String CONF_CORE_SITE = "/etc/hadoop/conf/core-site.xml";
final String CONF_HDFS_SITE = "/etc/hadoop/conf/hdfs-site.xml";

Configuration configuration = new Configuration();

configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
configuration.set("hadoop.rpc.protection", "privacy");

File hadoopCoreConfig = new File(CONF_CORE_SITE);
File hadoopHdfsConfig = new File(CONF_HDFS_SITE);

if (!hadoopCoreConfig.exists() || !hadoopHdfsConfig.exists()) {
    throw new FileNotFoundException("Files core-site.xml or hdfs-site.xml are not found. Check /etc/hadoop/conf/ path.");
}

configuration.addResource(new Path(hadoopCoreConfig.toURI()));
configuration.addResource(new Path(hadoopHdfsConfig.toURI()));

// Keytab login, followed by a login from the existing security context
// created by $ kinit.
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab("dwh_udp@abc.com", "/home/dwh_udp/dwh_udp.keytab");
UserGroupInformation.loginUserFromSubject(null);

URI uri = URI.create("hdfs://****");
FileSystem fs = FileSystem.get(uri, configuration, "User");

FileStatus[] fsStatus = fs.listStatus(new Path("/"));

for (int i = 0; i < fsStatus.length; i++) {
    System.out.println(fsStatus[i].getPath().toString());
}
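
For contrast, a common single-login pattern is sketched below. Two things in the code above tend to produce exactly this exception: `loginUserFromSubject(null)` replaces the keytab login performed on the previous line, and the three-argument `FileSystem.get(uri, configuration, "User")` builds a separate UGI for the name "User" that carries no Kerberos credentials, leaving the RPC layer with nothing to offer for TOKEN or KERBEROS. This is a minimal sketch, not a confirmed fix; it reuses the principal and keytab from the question, and `hdfs://nameservice1` is a hypothetical placeholder for the redacted URI:

import java.net.URI;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class SecureHdfsList {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("/etc/hadoop/conf/core-site.xml"));
        conf.addResource(new Path("/etc/hadoop/conf/hdfs-site.xml"));
        // A secure cluster's core-site.xml should already carry this;
        // setting it explicitly guards against loading the wrong files.
        conf.set("hadoop.security.authentication", "kerberos");

        UserGroupInformation.setConfiguration(conf);
        // Single login: the keytab principal becomes the effective user.
        UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                "dwh_udp@abc.com", "/home/dwh_udp/dwh_udp.keytab");

        // Run all HDFS calls as that principal; note the two-argument
        // FileSystem.get, with no extra user name passed in.
        ugi.doAs((PrivilegedExceptionAction<Void>) () -> {
            FileSystem fs = FileSystem.get(URI.create("hdfs://nameservice1"), conf); // hypothetical URI
            for (FileStatus status : fs.listStatus(new Path("/"))) {
                System.out.println(status.getPath());
            }
            return null;
        });
    }
}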

Maven dependencies:

<properties>
    <hadoop.version>2.6.0</hadoop.version>
    <hadoop.release>cdh5.12.1</hadoop.release>
</properties>

<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-core</artifactId>
        <version>${hadoop.version}-mr1-${hadoop.release}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${hadoop.version}-${hadoop.release}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>${hadoop.version}-${hadoop.release}</version>
    </dependency>
</dependencies>
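
If the classpath is suspect, one alternative is the aggregate client artifact, assuming the Cloudera repository publishes `hadoop-client` for this release; `hadoop-core` with the `-mr1-` marker is an MR1-era jar, and mixing it with `hadoop-common`/`hadoop-hdfs` from the same tree can pull in conflicting RPC classes:

<!-- Assumed alternative, not verified against this cluster: -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>${hadoop.version}-${hadoop.release}</version>
</dependency>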


...