I took a DataFrame and tried to extract org.apache.hadoop.hbase.client.Result from it. This can be done by going through an RDD.
import org.apache.hadoop.hbase.{Cell, CellUtil}
import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer
import scala.math.BigInt
import org.apache.spark._
import org.apache.spark.rdd._
import org.apache.spark.sql._
import org.apache.hadoop.hbase.client.Result
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
object HbaseDFToResult extends App {
  val config = new SparkConf().setAppName("test").setMaster("local[*]")
  // org.apache.hadoop.hbase.client.Result is not Java-serializable,
  // so switch to Kryo and register the class with it
  config.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  config.registerKryoClasses(Array(classOf[org.apache.hadoop.hbase.client.Result]))
  val spark = SparkSession.builder().config(config).getOrCreate()
  val mytests = Seq((1, "test1"), (2, "test2"), (3, "test3"), (4, "test4"))
  import spark.implicits._
  val df = mytests.toDF("col1", "col2")
  val counts: RDD[(ImmutableBytesWritable, Result)] = df.rdd.map { row =>
    // build the HBase row key from col1
    val key = row.getAs[Int]("col1")
    val keyByteArray = BigInt(key).toByteArray
    val ibw = new ImmutableBytesWritable()
    ibw.set(keyByteArray)
    // wrap col2 in a Cell and a Result
    val value = row.getAs[String]("col2")
    val valueByteArray = value.getBytes()
    val cellList = List(CellUtil.createCell(valueByteArray))
    // JavaConversions supplies the implicit ListBuffer -> java.util.List conversion
    val cell: java.util.List[Cell] = ListBuffer(cellList: _*)
    val result = Result.create(cell)
    (ibw, result)
  }
  val results: Array[Result] = counts.map(x => x._2).collect()
  results.foreach(println)
}
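Side note: scala.collection.JavaConversions is deprecated as of Scala 2.12. A minimal sketch of the same conversion done explicitly with scala.collection.JavaConverters, which would also make the ListBuffer indirection unnecessary:

import scala.collection.JavaConverters._
// explicit conversion instead of the implicit one from JavaConversions
val cell: java.util.List[Cell] = cellList.asJava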
Log from running the program above:
/Library/Java/JavaVirtualMachines/jdk1.8.0_191.jdk/Contents/Home/bin/java "-javaagent:/Applications/IntelliJ IDEA CE.app/Contents/lib/idea_rt.jar=60498:/Applications/IntelliJ IDEA
....
2019-05-01 15:41:21 INFO DAGScheduler:54 - Job 0 finished: collect at HbaseDFToResult.scala:41, took 0.568670 s
keyvalues={test1//LATEST_TIMESTAMP/Maximum/vlen=0/seqid=0}
keyvalues={test2//LATEST_TIMESTAMP/Maximum/vlen=0/seqid=0}
keyvalues={test3//LATEST_TIMESTAMP/Maximum/vlen=0/seqid=0}
keyvalues={test4//LATEST_TIMESTAMP/Maximum/vlen=0/seqid=0}
2019-05-01 15:41:21 INFO SparkContext:54 - Invoking stop() from shutdown hook
2019-05-01 15:41:21 INFO AbstractConnector:310 - Stopped Spark@4215838f{HTTP/1.1,[http/1.1]}{0.0.0.0:4040}
2019-05-01 15:41:21 INFO SparkUI:54 - Stopped Spark web UI at http://10.219.20.238:4040
2019-05-01 15:41:21 INFO MapOutputTrackerMasterEndpoint:54 - MapOutputTrackerMasterEndpoint stopped!
2019-05-01 15:41:21 INFO MemoryStore:54 - MemoryStore cleared
2019-05-01 15:41:21 INFO BlockManager:54 - BlockManager stopped
2019-05-01 15:41:21 INFO BlockManagerMaster:54 - BlockManagerMaster stopped
2019-05-01 15:41:21 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint:54 - OutputCommitCoordinator stopped!
2019-05-01 15:41:21 INFO SparkContext:54 - Successfully stopped SparkContext
2019-05-01 15:41:21 INFO ShutdownHookManager:54 - Shutdown hook called
2019-05-01 15:41:21 INFO ShutdownHookManager:54 - Deleting directory /private/var/folders/mp/xydn5gdj4b51qgc7lsqzrft40000gp/T/spark-a9d46422-f21a-4f2b-98b0-a73238d20dee
Process finished with exit code 0
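In the output, vlen=0 and the strings landing in the row position are expected: CellUtil.createCell(byte[]) treats its single argument as the row key and leaves the value empty. If you want the cell to carry a real column family, qualifier, and value, here is a minimal sketch using the HBase KeyValue constructor; the family/qualifier names "cf" and "col2" are hypothetical:

import org.apache.hadoop.hbase.{Cell, KeyValue}
import org.apache.hadoop.hbase.client.Result
import org.apache.hadoop.hbase.util.Bytes

val fullCell: Cell = new KeyValue(
  Bytes.toBytes(1),       // row key
  Bytes.toBytes("cf"),    // column family (hypothetical name)
  Bytes.toBytes("col2"),  // qualifier (hypothetical name)
  Bytes.toBytes("test1")  // actual value, so vlen is no longer 0
)
val fullResult: Result = Result.create(java.util.Arrays.asList(fullCell))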