I'm working in spark-shell, using the mongo-spark connector to read/write data in MongoDB. I placed the required JARs as listed below, but I still hit the error shown further down. Can someone spot the problem and help me out?
Thanks in advance.
JARs:
mongodb-driver-3.4.2.jar;
mongodb-driver-sync-3.11.0.jar;
mongodb-driver-core-3.4.2.jar;
mongo-java-driver-3.4.2.jar;
mongo-spark-connector_2.11-2.2.0.jar;
mongo-spark-connector_2.11-2.2.7.jar
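
In case it matters, this is how I sanity-check inside the shell that the connector JAR itself is picked up (a quick diagnostic sketch, not part of the job):

```scala
// Print which JAR the connector's entry point was actually loaded from.
// Run inside spark-shell; if this fails, the connector isn't on the classpath.
println(
  com.mongodb.spark.MongoSpark.getClass
    .getProtectionDomain.getCodeSource.getLocation
)
```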
Error:
scala> MongoSpark.save(
     |   dfRestaurants.write
     |     .option("spark.mongodb.output.uri", "mongodb://username:password@server_name")
     |     .option("spark.mongodb.output.database", "admin")
     |     .option("spark.mongodb.output.collection", "myCollection")
     |     .mode("overwrite")
     | );
**java.lang.NoClassDefFoundError: com/mongodb/MongoDriverInformation**
at com.mongodb.spark.connection.DefaultMongoClientFactory.mongoDriverInformation$lzycompute(DefaultMongoClientFactory.scala:40)
at com.mongodb.spark.connection.DefaultMongoClientFactory.mongoDriverInformation(DefaultMongoClientFactory.scala:40)
at com.mongodb.spark.connection.DefaultMongoClientFactory.create(DefaultMongoClientFactory.scala:49)
at com.mongodb.spark.connection.MongoClientCache.acquire(MongoClientCache.scala:55)
at com.mongodb.spark.MongoConnector.acquireClient(MongoConnector.scala:242)
at com.mongodb.spark.MongoConnector.withMongoClientDo(MongoConnector.scala:155)
at com.mongodb.spark.MongoConnector.withDatabaseDo(MongoConnector.scala:174)
at com.mongodb.spark.MongoConnector.withCollectionDo(MongoConnector.scala:187)
at com.mongodb.spark.sql.DefaultSource.createRelation(DefaultSource.scala:72)
at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:46)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654)
at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:654)
at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:273)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:267)
at com.mongodb.spark.MongoSpark$.save(MongoSpark.scala:192)
... 59 elided
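
If it helps with diagnosing, the class the stack trace names can be probed directly from the shell; a sketch:

```scala
// Probe the class the NoClassDefFoundError complains about. If this throws
// ClassNotFoundException, no JAR on the classpath provides it; if it
// succeeds, the println shows which JAR it was resolved from.
try {
  val cls = Class.forName("com.mongodb.MongoDriverInformation")
  println(cls.getProtectionDomain.getCodeSource.getLocation)
} catch {
  case e: ClassNotFoundException => println(s"not on classpath: ${e.getMessage}")
}
```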