Apache Spark Streaming - class not found
1 vote
/ 04 October 2019

I have an Apache Spark setup in IntelliJ. The StreamingContext class apparently cannot be found at runtime, even though the project has the corresponding dependencies. While writing the code I was able to resolve the StreamingContext class, and it compiles without errors. Running the code with IntelliJ's "Run" option throws the error below.

My code

import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming._
import org.apache.spark.streaming.StreamingContext._
import org.apache.spark._
import org.apache.spark.SparkContext

object SparkWithKafka {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .master("local")
      .appName("SparkTestProject")
      .getOrCreate()

    // Spark Legacy Streaming with Kafka
    val streamingContext = new StreamingContext(spark.sparkContext, Seconds(30))

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "app-n01.brm.sit4.ord1.corp.rackspace.com:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "testGroupId",
      "auto.offset.reset" -> "latest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    val topics = Array("testTopic")
    val stream = KafkaUtils.createDirectStream[String, String](
      streamingContext,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )

    val linesDStream = stream.map(record => (record.key, record.value))
    linesDStream.map(x => println(x))

    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
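A side note, separate from the runtime error: map is a lazy transformation, so linesDStream.map(x => println(x)) registers nothing, and a StreamingContext with no output operations fails at start(). A minimal sketch of an output step for the same tuple stream, assuming the surrounding program above:

    // DStream.print() is an output operation (unlike map); it prints
    // the first elements of every 30-second batch to the driver's stdout.
    linesDStream.print()

    // Alternatively, foreachRDD gives full control over each micro-batch.
    // The println here runs on the executors, which is fine in local mode.
    linesDStream.foreachRDD { rdd =>
      rdd.foreach { case (key, value) => println(s"$key -> $value") }
    }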

Error log

19/10/04 12:39:08 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 50690.
19/10/04 12:39:08 INFO NettyBlockTransferService: Server created on LBSX0TQ2.RACKSPACE.CORP:50690
19/10/04 12:39:08 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
19/10/04 12:39:08 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, LBSX0TQ2.RACKSPACE.CORP, 50690, None)
19/10/04 12:39:08 INFO BlockManagerMasterEndpoint: Registering block manager LBSX0TQ2.RACKSPACE.CORP:50690 with 1986.6 MB RAM, BlockManagerId(driver, LBSX0TQ2.RACKSPACE.CORP, 50690, None)
19/10/04 12:39:08 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, LBSX0TQ2.RACKSPACE.CORP, 50690, None)
19/10/04 12:39:08 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, LBSX0TQ2.RACKSPACE.CORP, 50690, None)
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/streaming/StreamingContext
    at SparkWithKafka$.main(SparkWithKafka.scala:28)
    at SparkWithKafka.main(SparkWithKafka.scala)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.streaming.StreamingContext
    at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    ... 2 more
19/10/04 12:39:09 INFO SparkContext: Invoking stop() from shutdown hook
19/10/04 12:39:09 INFO SparkUI: Stopped Spark web UI at http://LBSX0TQ2.RACKSPACE.CORP:4040
19/10/04 12:39:09 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
19/10/04 12:39:09 INFO MemoryStore: MemoryStore cleared
19/10/04 12:39:09 INFO BlockManager: BlockManager stopped
19/10/04 12:39:09 INFO BlockManagerMaster: BlockManagerMaster stopped
19/10/04 12:39:09 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
19/10/04 12:39:09 INFO SparkContext: Successfully stopped SparkContext
19/10/04 12:39:09 INFO ShutdownHookManager: Shutdown hook called

build.sbt

name := "SparkProject-IntelliJ-2"

version := "0.1"

scalaVersion := "2.11.12"

libraryDependencies += "org.apache.spark" % "spark-core_2.11" % "2.3.0"
libraryDependencies += "org.apache.spark" % "spark-hive_2.11" % "2.3.0"
libraryDependencies += "com.typesafe" % "config" % "1.2.1"
libraryDependencies += "mysql" % "mysql-connector-java" % "5.1.16"
libraryDependencies += "org.apache.hadoop" % "hadoop-aws" % "2.6.0"
libraryDependencies += "com.datastax.spark" %% "spark-cassandra-connector" % "2.0.0"
libraryDependencies += "org.apache.spark" %% "spark-streaming" % "2.3.0" % "provided"
libraryDependencies += "org.apache.kafka" % "kafka-clients" % "2.3.0"
//libraryDependencies += "org.apache.spark" %% "spark-streaming-kafka-0-10" % "2.0.0"

libraryDependencies +=  "org.apache.spark" %% "spark-streaming-kafka-0-10" % "2.3.0" excludeAll(
  ExclusionRule(organization = "org.spark-project.spark", name = "unused"),
  ExclusionRule(organization = "org.apache.spark", name = "spark-streaming"),
  ExclusionRule(organization = "org.apache.hadoop")
)
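For what it's worth, the symptom above (the class resolves at compile time but triggers NoClassDefFoundError at runtime) is consistent with the "provided" scope on spark-streaming: sbt keeps a provided dependency on the compile classpath but off the runtime classpath that IntelliJ's plain "Run" action uses, and the ExclusionRule above additionally strips spark-streaming from the Kafka connector's transitive dependencies. A minimal sketch of the dependency line without the provided scope, assuming the app is launched straight from the IDE rather than via spark-submit (where provided is the right choice):

libraryDependencies += "org.apache.spark" %% "spark-streaming" % "2.3.0"

Recent IntelliJ versions also offer an "Include dependencies with 'Provided' scope" checkbox in the run configuration, which achieves the same result without editing build.sbt.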