SSL exception when using the Spark CosmosDB connector
1 vote / June 19, 2020

I wrote a Spark application that reads a DataFrame from ADLS, performs a lookup against Cosmos DB via a left anti-join to filter out records that already exist, and writes the remaining rows back to Cosmos DB. The job fails with the exception below.
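Roughly, the pipeline looks like the following sketch (the read/write API is the azure-cosmosdb-spark connector visible in the stack trace; the endpoint, key, database, collection, ADLS path, and the "id" join key are all placeholders):

    import org.apache.spark.sql.SparkSession
    import com.microsoft.azure.cosmosdb.spark._
    import com.microsoft.azure.cosmosdb.spark.schema._  // enables read.cosmosDB / write.cosmosDB
    import com.microsoft.azure.cosmosdb.spark.config.Config

    val spark = SparkSession.builder().appName("CosmosLookup").getOrCreate()

    // Cosmos DB connection settings (placeholder values)
    val cosmosConfig = Config(Map(
      "Endpoint"   -> "https://<account>.documents.azure.com:443/",
      "Masterkey"  -> "<master-key>",
      "Database"   -> "<database>",
      "Collection" -> "<collection>"
    ))

    // Source DataFrame from ADLS (placeholder path and format)
    val source = spark.read.parquet("adl://<account>.azuredatalakestore.net/<path>")

    // Records already present in Cosmos DB
    val existing = spark.read.cosmosDB(cosmosConfig)

    // Keep only rows whose "id" (assumed key) is not yet in Cosmos DB, then write them back
    val newRows = source.join(existing.select("id"), Seq("id"), "left_anti")
    newRows.write.cosmosDB(cosmosConfig)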

Driver stacktrace:
org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 3.0 failed 4 times, most recent failure: Lost task 1.3 in stage 3.0 (TID 3531, CO01AP54C51559F, executor 1): java.lang.IllegalStateException: javax.net.ssl.SSLException: Tag mismatch!
    at com.microsoft.azure.documentdb.internal.directconnectivity.StoreResponse.asInputStream(StoreResponse.java:119)
    at com.microsoft.azure.documentdb.internal.directconnectivity.StoreResponse.<init>(StoreResponse.java:44)
    at com.microsoft.azure.documentdb.internal.directconnectivity.StoreResponse.fromHttpResponse(StoreResponse.java:183)
    at com.microsoft.azure.documentdb.internal.directconnectivity.HttpTransportClient.processResponse(HttpTransportClient.java:195)
    at com.microsoft.azure.documentdb.internal.directconnectivity.HttpTransportClient.invokeStore(HttpTransportClient.java:133)
    at com.microsoft.azure.documentdb.internal.directconnectivity.HttpTransportClient.invokeStore(HttpTransportClient.java:166)
    at com.microsoft.azure.documentdb.internal.directconnectivity.TransportClient.invokeResourceOperation(TransportClient.java:12)
    at com.microsoft.azure.documentdb.internal.directconnectivity.StoreReader.readFromStore(StoreReader.java:395)
    at com.microsoft.azure.documentdb.internal.directconnectivity.StoreReader.createStoreReadResult(StoreReader.java:357)
    at com.microsoft.azure.documentdb.internal.directconnectivity.StoreReader.readOneReplica(StoreReader.java:201)
    at com.microsoft.azure.documentdb.internal.directconnectivity.StoreReader.readEventual(StoreReader.java:70)
    at com.microsoft.azure.documentdb.internal.directconnectivity.ConsistencyReader.readAny(ConsistencyReader.java:108)
    at com.microsoft.azure.documentdb.internal.directconnectivity.ConsistencyReader.read(ConsistencyReader.java:69)
    at com.microsoft.azure.documentdb.internal.directconnectivity.ReplicatedResourceClient.invoke(ReplicatedResourceClient.java:71)
    at com.microsoft.azure.documentdb.internal.directconnectivity.ServerStoreModel$1.apply(ServerStoreModel.java:105)
    at com.microsoft.azure.documentdb.internal.RetryUtility.executeStoreClientRequest(RetryUtility.java:148)
    at com.microsoft.azure.documentdb.internal.directconnectivity.ServerStoreModel.processMessage(ServerStoreModel.java:122)
    at com.microsoft.azure.documentdb.DocumentClient$11.apply(DocumentClient.java:3148)
    at com.microsoft.azure.documentdb.internal.RetryUtility.executeDocumentClientRequest(RetryUtility.java:73)
    at com.microsoft.azure.documentdb.DocumentClient.doQuery(DocumentClient.java:3154)
    at com.microsoft.azure.documentdb.DocumentQueryClientInternal.doQuery(DocumentQueryClientInternal.java:47)
    at com.microsoft.azure.documentdb.internal.query.AbstractQueryExecutionContext.executeRequest(AbstractQueryExecutionContext.java:219)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.executeOnce(DefaultQueryExecutionContext.java:150)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.fillBuffer(DefaultQueryExecutionContext.java:105)
    at com.microsoft.azure.documentdb.internal.query.DefaultQueryExecutionContext.hasNext(DefaultQueryExecutionContext.java:77)
    at com.microsoft.azure.documentdb.internal.query.ProxyQueryExecutionContext.hasNext(ProxyQueryExecutionContext.java:134)
    at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42)
    at com.microsoft.azure.cosmosdb.spark.rdd.CosmosDBRDDIterator.hasNext(CosmosDBRDDIterator.scala:453)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage3.processNext(Unknown Source)
    at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
    at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:636)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
    at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.write(UnsafeShuffleWriter.java:187)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
    at org.apache.spark.scheduler.Task.run(Task.scala:123)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1381)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
Caused by: javax.net.ssl.SSLException: Tag mismatch!
    at sun.security.ssl.Alerts.getSSLException(Alerts.java:208)
    at sun.security.ssl.SSLSocketImpl.fatal(SSLSocketImpl.java:1949)
    at sun.security.ssl.SSLSocketImpl.readRecord(SSLSocketImpl.java:1020)
    at sun.security.ssl.SSLSocketImpl.readDataRecord(SSLSocketImpl.java:930)
    at sun.security.ssl.AppInputStream.read(AppInputStream.java:105)
    at cosmosdb_connector_shaded.org.apache.http.impl.io.SessionInputBufferImpl.streamRead(SessionInputBufferImpl.java:137)
    at cosmosdb_connector_shaded.org.apache.http.impl.io.SessionInputBufferImpl.read(SessionInputBufferImpl.java:198)
    at cosmosdb_connector_shaded.org.apache.http.impl.io.ChunkedInputStream.read(ChunkedInputStream.java:189)
    at cosmosdb_connector_shaded.org.apache.http.conn.EofSensorInputStream.read(EofSensorInputStream.java:135)
    at cosmosdb_connector_shaded.org.apache.http.conn.EofSensorInputStream.read(EofSensorInputStream.java:148)
    at cosmosdb_connector_shaded.org.apache.http.util.EntityUtils.toByteArray(EntityUtils.java:136)
    at com.microsoft.azure.documentdb.internal.directconnectivity.StoreResponse.asInputStream(StoreResponse.java:115)
    ... 43 more
Caused by: javax.crypto.AEADBadTagException: Tag mismatch!
    at com.sun.crypto.provider.GaloisCounterMode.decryptFinal(GaloisCounterMode.java:571)
    at com.sun.crypto.provider.CipherCore.finalNoPadding(CipherCore.java:1046)
    at com.sun.crypto.provider.CipherCore.doFinal(CipherCore.java:983)
    at com.sun.crypto.provider.AESCipher.engineDoFinal(AESCipher.java:491)
    at javax.crypto.Cipher.doFinal(Cipher.java:2377)
    at sun.security.ssl.CipherBox.decrypt(CipherBox.java:461)
    at sun.security.ssl.InputRecord.decrypt(InputRecord.java:172)
    at sun.security.ssl.SSLSocketImpl.readRecord(SSLSocketImpl.java:1015)
    ... 52 more

Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1889)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1877)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1876)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1876)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:926)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2110)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2059)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2048)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:737)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2070)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2091)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2110)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2135)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:945)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:944)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.save(CosmosDBSpark.scala:153)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.save(CosmosDBSpark.scala:465)
    at com.microsoft.azure.cosmosdb.spark.DefaultSource.createRelation(DefaultSource.scala:74)
    at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
    at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
    at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
    at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:761)
    at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:761)
    at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
    at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:761)
    at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:296)
    at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:280)
    at com.microsoft.azure.cosmosdb.spark.CosmosDBSpark$.save(CosmosDBSpark.scala:484)
    at com.microsoft.azure.cosmosdb.spark.schema.DataFrameWriterFunctions.cosmosDB(DataFrameWriterFunctions.scala:41)
    at Utils.Lookups.CosmosDBUtils.Utils$Lookups$CosmosDBUtils$$batchWrite(CosmosDBUtils.scala:106)
    at Utils.Lookups.CosmosDBUtils.write(CosmosDBUtils.scala:97)
    at Workflows.Batch.DEV.WhoIs.FetchWhoIsCosmosDump$.main(FetchWhoIsCosmosDump.scala:40)
    at Workflows.Batch.DEV.WhoIs.FetchWhoIsCosmosDump.main(FetchWhoIsCosmosDump.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:759)

Note: I used the default values for all connection, write, and query configurations.
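In other words, I did not set anything like the options below; this sketch only illustrates what a non-default configuration would look like (option names are from the azure-cosmosdb-spark documentation, values are examples only):

    // Explicitly tuned connector options -- NOT used in my job, shown for reference
    val tunedConfig = Config(Map(
      "Endpoint"         -> "https://<account>.documents.azure.com:443/",
      "Masterkey"        -> "<master-key>",
      "Database"         -> "<database>",
      "Collection"       -> "<collection>",
      "ConnectionMode"   -> "Gateway", // route through the gateway instead of direct HTTPS
      "query_pagesize"   -> "1000",    // page size for query results
      "WritingBatchSize" -> "500"      // number of documents per write batch
    ))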

Please let me know what the problem might be.
