Java Spark SQL: writing a Dataset&lt;Row&gt; to Parquet
Asked: 06 February 2019

When writing a Dataset to Parquet, the following error occurs.

data.write().option("compression", "snappy").format("parquet").mode(SaveMode.Overwrite).save(filePath);
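For context, here is a minimal, self-contained sketch of the same write path. The class name, input/output paths, and the CSV read step are placeholders for illustration only, not the actual project code:

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SaveMode;
    import org.apache.spark.sql.SparkSession;

    public class ParquetWriteExample {
        public static void main(String[] args) {
            // Local session just for the sketch; the real job may run on a cluster.
            SparkSession spark = SparkSession.builder()
                    .appName("parquet-write-example")
                    .master("local[*]")
                    .getOrCreate();

            // Any Dataset<Row> will do; here a CSV is read, as in the import flow.
            Dataset<Row> data = spark.read()
                    .option("header", "true")
                    .csv("/tmp/input.csv");          // placeholder input path

            // The same write call that fails in the question.
            data.write()
                    .option("compression", "snappy")
                    .format("parquet")
                    .mode(SaveMode.Overwrite)
                    .save("/tmp/output.parquet");    // placeholder output path

            spark.stop();
        }
    }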

java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.execution.datasources.parquet.ParquetOptions$
    at org.apache.spark.sql.execution.datasources.parquet.ParquetOptions.<init>(ParquetOptions.scala:55)
    at org.apache.spark.sql.execution.datasources.parquet.ParquetOptions.<init>(ParquetOptions.scala:39)
    at org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat.prepareWrite(ParquetFileFormat.scala:80)
    at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala)
    at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:159)
    at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:104)
    at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:102)
    at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:122)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
    at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
    at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
    at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:668)
    at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:668)
    at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
    at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:668)
    at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:276)
    at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:270)
    at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:228)
    at package.common.dao.service.spark.utils.SparkDataBaseUtilImpl(SparkDataBaseUtilImpl.java:121)
    at package.common.dao.service.spark.SparkImportServiceImpl.csvFileImport(SparkImportServiceImpl.java:29)
    at package.common.dao.service.spark.SparkDaoServiceImpl.dataImport(SparkDaoServiceImpl.java)
    at package.common.dao.service.spark.SparkDaoServiceImpl.executeOperations(SparkDaoServiceImpl.java:37)
    at package.controller.ServiceDao.fetchJobDetailsByJobId(ServiceDao.java:28)
    ... (remaining frames go through sun.reflect, com.sun.jersey, javax.servlet, and org.apache.catalina / org.apache.coyote / org.apache.tomcat, ending at java.lang.Thread.run(Thread.java:748))

I have checked all of the dependencies and everything appears to be in order. I am using the latest Spark version, 2.4.0.
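As a quick sanity check alongside the dependency review, a small sketch like the one below prints the Spark version and the jar that the spark-sql classes are actually loaded from at runtime; since "Could not initialize class ... ParquetOptions$" often comes from the classpath rather than the write call itself, confirming what the JVM really loads can help. The class name is a placeholder:

    import org.apache.spark.sql.SparkSession;

    public class SparkClasspathCheck {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder()
                    .appName("classpath-check")
                    .master("local[*]")
                    .getOrCreate();

            // Version string reported by the SparkSession actually on the classpath.
            System.out.println("Spark version: " + spark.version());

            // Which jar the spark-sql classes were loaded from (helps spot duplicates).
            System.out.println("spark-sql loaded from: "
                    + org.apache.spark.sql.Dataset.class
                          .getProtectionDomain().getCodeSource().getLocation());

            spark.stop();
        }
    }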

...