Spark and Kafka Integration in Scala
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
object sparkStreaming_Kafka {

  // @transient + lazy keeps the logger out of closures serialized to executors
  @transient lazy val log = org.apache.log4j.LogManager.getLogger("sparkStreaming_Kafka")

  def main(args: Array[String]): Unit = {
    log.debug("added the messages ****** ---->>>")

    val spark = SparkSession
      .builder()
      .appName("my_App")
      .getOrCreate()

    // Batch interval of 2 seconds; adjust it to match the latency/throughput requirements
    val ssc = new StreamingContext(spark.sparkContext, Seconds(2))
    log.debug("Before starting the Stream -->>")

    // Direct stream subscribed to the topic; keep only the record values
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](Array("my_topic"), getKafkaParams())
    ).map(record => record.value)

    stream.foreachRDD { rdd =>
      try {
        if (!rdd.isEmpty()) {
          rdd.foreach(x => postData(x))
        }
      } catch {
        case t: Throwable =>
          t.printStackTrace() // TODO: handle error
          log.error("Exception occurred while processing the data", t)
      }
    }

    ssc.start()
    log.debug("started now -->> " + System.currentTimeMillis())
    ssc.awaitTermination()
  }
  def getKafkaParams(): Map[String, Object] = {
    Map[String, Object](
      "bootstrap.servers" -> "localhost:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "group_id",
      "auto.offset.reset" -> "latest", // start from the latest offsets when no committed offset exists
      "enable.auto.commit" -> (true: java.lang.Boolean)
    )
  }
  def postData(event: String): Unit = {
    log.info("before KinesisSink.process call ==>> " + event)
    print(event) // use the event as per the requirement
  }
}
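
To compile and submit this example, the project needs the Spark Streaming and Kafka 0.10 integration artifacts on the classpath. Below is a minimal build.sbt sketch; the Scala and Spark versions are assumptions and should be aligned with the cluster you run against.

// build.sbt (sketch): versions here are illustrative, match them to your cluster
scalaVersion := "2.11.12"

libraryDependencies ++= Seq(
  // Spark core/SQL/streaming are usually provided by the cluster at runtime
  "org.apache.spark" %% "spark-core"      % "2.4.8" % "provided",
  "org.apache.spark" %% "spark-sql"       % "2.4.8" % "provided",
  "org.apache.spark" %% "spark-streaming" % "2.4.8" % "provided",
  // Kafka 0.10+ integration that supplies KafkaUtils.createDirectStream
  "org.apache.spark" %% "spark-streaming-kafka-0-10" % "2.4.8"
)

Once packaged, the job can be launched with spark-submit, for example: spark-submit --class sparkStreaming_Kafka my-app-assembly.jar (the jar name is illustrative).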