ERROR BlockingRpcConnection: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'
0 votes
26 October 2018

I am using 2 clusters: one EMR 5.17.0 with HBase, Phoenix and Kerberos, and another EMR 5.17.0 cluster with Spark and no Kerberos. With a standard Java application I can connect from the Spark EMR cluster to Phoenix and query tables. But when I try to connect to Phoenix from a Spark application, I get an error!!
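
For reference, the standalone access that works is plain Phoenix JDBC; below is only a minimal sketch of such a client, assuming the same quorum, principal and keytab that appear in the spark-submit command further down, and phoenix-client.jar on the classpath (Phoenix accepts the principal and keytab as extra segments of the JDBC URL):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;

// Minimal Phoenix JDBC smoke test (sketch only).
// URL form: jdbc:phoenix:<quorum>:<port>:<znode>:<principal>:<keytab>
public class PhoenixSmokeTest {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
        String url = "jdbc:phoenix:ip-172-31-73-159.ec2.internal:2181:/hbase-secure:"
                + "hadoop/ip-172-31-73-159.ec2.internal@EC2.INTERNAL:/home/hadoop/hadoop.keytab";
        try (Connection conn = DriverManager.getConnection(url);
             ResultSet rs = conn.createStatement().executeQuery(
                     "SELECT TABLE_NAME FROM SYSTEM.CATALOG LIMIT 5")) {
            // Print a few table names just to prove the secure connection works.
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}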

SPARK SUBMIT:

spark-submit --master yarn --deploy-mode cluster \
  --jars /home/hadoop/hbase-site.xml,/opt/silvershark/silvershark/jars/phoenix-4.14.0-HBase-1.4-client.jar,/opt/silvershark/silvershark/jars/phoenix-spark-4.14.0-HBase-1.4.jar,/opt/silvershark/silvershark/jars/spark-xml_2.11-0.4.1.jar \
  --conf "spark.executor.extraClassPath=/usr/lib/hbase/lib/hbase-common.jar:/usr/lib/hbase/lib/hbase-client.jar:/usr/lib/hbase/lib/hbase-server.jar:/usr/lib/hbase/lib/hbase-protocol.jar:/usr/lib/hbase/lib/guava-12.0.1.jar:/usr/lib/hbase/lib/htrace-core-3.1.0-incubating.jar:/usr/lib/spark/jars/spark-assembly-1.6.2.2.5.3.0-37-hadoop2.7.3.2.5.3.0-37.jar:/usr/lib/phoenix/jars/phoenix-client.jar" \
  --conf "spark.driver.extraClassPath=/usr/lib/hbase/lib/hbase-common.jar:/usr/lib/hbase/lib/hbase-client.jar:/usr/lib/hbase/lib/hbase-server.jar:/usr/lib/hbase/lib/hbase-protocol.jar:/usr/lib/hbase/lib/guava-12.0.1.jar:/usr/lib/hbase/lib/htrace-core-3.1.0-incubating.jar:/usr/lib/spark/jars/spark-assembly-1.6.2.2.5.3.0-37-hadoop2.7.3.2.5.3.0-37.jar:/usr/lib/phoenix/jars/phoenix-client.jar" \
  --conf "spark.executor.extraJavaOptions=-Djava.security.auth.login.config=/opt/silvershark/silvershark/conf/jaas-client.conf" \
  --conf "spark.driver.extraJavaOptions=-Djava.security.auth.login.config=/opt/silvershark/silvershark/conf/jaas-client.conf" \
  --conf "-Dsun.security.krb5.debug=true" \
  --class "com.silversharksolutions.importtablesesocial.SparkInit" \
  --keytab /home/hadoop/hadoop.keytab \
  --principal hadoop/ip-172-31-73-159.ec2.internal@EC2.INTERNAL \
  ImportTableESocial-1.0-SNAPSHOT.jar

Sample class >>

package com.silversharksolutions.importtablesesocial;

import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.phoenix.query.QueryServices;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SQLContext;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

/**
 *
 * @author root
 */
public class SparkInit {


    private static final Logger LOG = Logger.getLogger(SparkInit.class.getName());

    public static void main(String[] args) {

        // SparkConf conf = new SparkConf().setAppName("TwitterSentimentResults")
        //         .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        //         .set("spark.io.compression.codec", "snappy");

        //JavaSparkContext sc = new JavaSparkContext(conf);

        /*SparkSession spark = new SparkSession.Builder().appName("ImportTablesESocial").getOrCreate();
        spark.conf().set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        spark.conf().set("spark.io.compression.codec", "snappy");
        JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        //SQLContext sqlContext = new SQLContext(sc);

        //ImportTablesESocial imp = new ImportTablesESocial(spark);
        //imp.run();
        System.out.println(" Rotina  finalizada com Sucesso !!");
        spark.stop();
        */

       // SparkSession sparkConf = new SparkSession.Builder().appName("ImportTablesESocial").getOrCreate();
        SparkConf sparkConf = new SparkConf().setAppName("ImportTablesESocial");
        //sparkConf.setMaster("local");
        sparkConf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/hbase-secure");
        sparkConf.set(HConstants.ZOOKEEPER_QUORUM, "ip-172-31-73-159.ec2.internal");
        sparkConf.set(HConstants.ZOOKEEPER_CLIENT_PORT, "2181");
        sparkConf.set(HConstants.ZOOKEEPER_CONFIG_NAME, "/hbase-secure");
        sparkConf.set(QueryServices.HBASE_CLIENT_KEYTAB, "/home/hadoop/hadoop.keytab");
        sparkConf.set(QueryServices.HBASE_CLIENT_PRINCIPAL, "hadoop/ip-172-31-73-159.ec2.internal@EC2.INTERNAL");
        sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        sparkConf.set("spark.io.compression.codec", "snappy");

        JavaSparkContext conf = new JavaSparkContext(sparkConf);
        conf.setLocalProperty(HConstants.ZOOKEEPER_ZNODE_PARENT, "/hbase-secure");
        conf.setLocalProperty(HConstants.ZOOKEEPER_QUORUM, "ip-172-31-73-159.ec2.internal");
        conf.setLocalProperty(HConstants.ZOOKEEPER_CLIENT_PORT, "2181");
        conf.setLocalProperty(HConstants.ZOOKEEPER_CONFIG_NAME, "/hbase-secure");
        conf.setLocalProperty(QueryServices.HBASE_CLIENT_KEYTAB, "/home/hadoop/hadoop.keytab");
        conf.setLocalProperty(QueryServices.HBASE_CLIENT_PRINCIPAL, "hadoop/ip-172-31-73-159.ec2.internal@EC2.INTERNAL");

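        // Log in from the keytab on the driver JVM before any HBase/Phoenix RPC;
        // a failure here is only printed, not rethrown.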
        try {
            UserGroupInformation.setConfiguration(new Configuration());
            UserGroupInformation.loginUserFromKeytab("hadoop/ip-172-31-73-159.ec2.internal@EC2.INTERNAL",
                    "/home/hadoop/hadoop.keytab");
        } catch (Exception e) {
            System.out.println(e.toString());
        }


        String quorum = conf.getLocalProperty("hbase.zookeeper.quorum");
        String clientPort = conf.getLocalProperty("hbase.zookeeper.property.clientPort");
        String znodeParent = conf.getLocalProperty("zookeeper.znode.parent");


        System.out.println("Quorum = " + quorum);
        System.out.println("clientPort = " + clientPort);
        System.out.println("znodeParent = " + znodeParent);


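        // HBase configuration pointing at the secure znode, quorum, keytab and principal
        // (same values as the Spark conf above).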
        HBaseConfiguration hbaseConf = new HBaseConfiguration();
        hbaseConf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/hbase-secure");
        hbaseConf.set(HConstants.ZOOKEEPER_QUORUM, "ip-172-31-73-159.ec2.internal");
        hbaseConf.set(HConstants.ZOOKEEPER_CLIENT_PORT, "2181");
        hbaseConf.set(HConstants.ZOOKEEPER_CONFIG_NAME, "/hbase-secure");
        hbaseConf.set(QueryServices.HBASE_CLIENT_KEYTAB, "/home/hadoop/hadoop.keytab");
        hbaseConf.set(QueryServices.HBASE_CLIENT_PRINCIPAL, "hadoop/ip-172-31-73-159.ec2.internal@EC2.INTERNAL");


        final SQLContext sqlContext = new SQLContext(conf);


        // Map<String, String> options = new HashMap<String, String>();
        // options.put("zkUrl", "lnxhdp01.smrcy.com:2181:/hbase-secure");
        // options.put("table", "TABLE1");
        // sqlContext.load("org.apache.phoenix.spark", options);


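        // A few in-memory sample rows used to build a test DataFrame.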
        List<Table1> dataSet = new ArrayList<Table1>();
        dataSet.add(new Table1(1, "1"));
        dataSet.add(new Table1(2, "2"));
        dataSet.add(new Table1(3, "3"));


        JavaRDD<Table1> rdd = conf.parallelize(dataSet);


        Dataset df = sqlContext.createDataFrame(rdd, Table1.class);


      //  df.write().format("org.apache.phoenix.spark").mode(SaveMode.Overwrite)
        //        .options(ImmutableMap.of("zkUrl", "ZK_QUORUM:2181:/hbase-secure", "table", "TABLE1"))
        //        .save();


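        // Read TABLE1 from Phoenix through Spark's generic JDBC source, passing the
        // PhoenixDriver and the secure znode in the JDBC URL.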
        Dataset fromPhx = sqlContext.read().format("jdbc")
                .options(ImmutableMap.of("driver", "org.apache.phoenix.jdbc.PhoenixDriver", "url",
                        "jdbc:phoenix:ip-172-31-73-159.ec2.internal:2181:/hbase-secure", "dbtable", "TABLE1"))
                .load();


        fromPhx.collect();

    }

        public static class Table1 implements Serializable {
            int id;
            String col1;


            public Table1() {
            }


            public Table1(int id, String col1) {
                this.id = id;
                this.col1 = col1;
            }


            public int getId() {
                return id;
            }


            public void setId(int id) {
                this.id = id;
            }


            public String getCol1() {
                return col1;
            }


            public void setCol1(String col1) {
                this.col1 = col1;
            }

        }
    }
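
For comparison, the commented-out phoenix-spark options above would correspond to a read roughly like the following sketch (reusing the sqlContext built in main; the zkUrl combines the quorum, port and secure znode used elsewhere in the class, and Dataset/Row come from org.apache.spark.sql):

// Sketch only: reading TABLE1 through the phoenix-spark connector instead of the generic JDBC source.
Dataset<Row> viaConnector = sqlContext.read()
        .format("org.apache.phoenix.spark")
        .option("table", "TABLE1")
        .option("zkUrl", "ip-172-31-73-159.ec2.internal:2181:/hbase-secure")
        .load();
viaConnector.show();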

OUTPUT:

18/10/26 19:12:39 WARN BlockingRpcConnection: Exception encountered while connecting to the server : javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
18/10/26 19:12:39 ERROR BlockingRpcConnection: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.hadoop.hbase.security.AbstractHBaseSaslRpcClient.getInitialResponse(AbstractHBaseSaslRpcClient.java:130)
    at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:81)
    at org.apache.hadoop.hbase.ipc.BlockingRpcConnection.setupSaslConnection(BlockingRpcConnection.java:353)
    at org.apache.hadoop.hbase.ipc.BlockingRpcConnection.access$600(BlockingRpcConnection.java:85)
    at org.apache.hadoop.hbase.ipc.BlockingRpcConnection$2.run(BlockingRpcConnection.java:455)
    at org.apache.hadoop.hbase.ipc.BlockingRpcConnection$2.run(BlockingRpcConnection.java:452)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1840)
    at org.apache.hadoop.hbase.ipc.BlockingRpcConnection.setupIOstreams(BlockingRpcConnection.java:452)
    at org.apache.hadoop.hbase.ipc.BlockingRpcConnection.writeRequest(BlockingRpcConnection.java:540)
    at org.apache.hadoop.hbase.ipc.BlockingRpcConnection.tracedWriteRequest(BlockingRpcConnection.java:520)
    at org.apache.hadoop.hbase.ipc.BlockingRpcConnection.access$200(BlockingRpcConnection.java:85)
    at org.apache.hadoop.hbase.ipc.BlockingRpcConnection$4.run(BlockingRpcConnection.java:724)
    ...
    at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:327)
    ...
    at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:571)
    at org.apache.hadoop.hbase.protobuf.generated.MasterProtos ...
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$MasterServiceStubMaker.isMasterRunning(ConnectionManager.java:1644)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$StubMaker.makeStubNoRetries(ConnectionManager.java:1582)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$StubMaker.makeStub(ConnectionManager.java:1604)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$MasterServiceStubMaker.makeStub(ConnectionManager.java:1633)
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getKeepAliveMasterService(ConnectionManager.java:1790)
    at org.apache.hadoop.hbase.client.MasterCallable.prepare(MasterCallable.java:38)
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(...)
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4551)
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4543)
    at org.apache.hadoop.hbase.client.HBaseAdmin.getNamespaceDescriptor(HBaseAdmin.java:3181)
    at org.apache.phoenix.query.ConnectionQueryServicesImpl.ensureNamespaceCreated(ConnectionQueryServicesImpl.java:1028)
    ...
    at org.apache.phoenix.query.ConnectionQueryServicesImpl.createTable(ConnectionQueryServicesImpl.java:1491)
    at org.apache.phoenix.schema.MetaDataClient.createTableInternal(MetaDataClient.java:2717)
    at org.apache.phoenix.schema.MetaDataClient.createTable(MetaDataClient.java:1114)
    at org.apache.phoenix.compile.CreateTableCompiler$1.execute(CreateTableCompiler.java:192)
    at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:408)
    at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:391)
    at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
    ...
    at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:378)
    at org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1806)
    ...
    at org.apache.phoenix.query.ConnectionQueryServicesImpl$12.call(ConnectionQueryServicesImpl.java:2491)
    at org.apache.phoenix.util.PhoenixContextExecutor.call(PhoenixContextExecutor.java:76)
    ...
    at org.apache.phoenix.jdbc.PhoenixDriver.getConnectionQueryServices(PhoenixDriver.java:255)
    at org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.createConnection(...)
    at org.apache.phoenix.jdbc.PhoenixDriver.connect(PhoenixDriver.java:221)
    at org.apache.spark.sql.execution.datasources.jdbc.DriverWrapper.connect(DriverWrapper.scala:45)
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$1.apply(JdbcUtils.scala:63)
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$1.apply(JdbcUtils.scala:54)
    at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:56)
    at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.<init>(JDBCRelation.scala:115)
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:52)
    at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:340)
    at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:239)
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:227)
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:164)
    at com.silversharksolutions.importtablesesocial.SparkInit.main(SparkInit.java:132)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$4.run(ApplicationMaster.scala:721)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 66 more

...