Create a DataFrame from a case class
0 votes
/ 13 May 2018

I have read the other related questions but did not find an answer.

I want to create a DataFrame from a case class in Spark 2.3, Scala 2.11.8.

Code

package org.XXX

import org.apache.spark.sql.SparkSession

object Test {

  // Define the case class outside main: a case class declared inside a
  // method has no TypeTag, so Spark cannot derive an Encoder for toDF.
  case class Employee(Name: String, Age: Int, Designation: String, Salary: Int, ZipCode: Int)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .appName("test")
      .getOrCreate()

    // Brings the Seq-to-DataFrame conversion (toDF) into scope.
    // spark-shell imports this automatically; a compiled app must do it explicitly.
    import spark.implicits._

    val EmployeesData = Seq(Employee("Anto", 21, "Software Engineer", 2000, 56798))
    val Employee_DataFrame = EmployeesData.toDF

    spark.stop()
  }
}

Here is what I tried in spark-shell:

case class Employee(Name:String, Age:Int, Designation:String, Salary:Int, ZipCode:Int)
val EmployeesData = Seq( Employee("Anto",   21, "Software Engineer", 2000, 56798))
val Employee_DataFrame = EmployeesData.toDF

Error

java.lang.VerifyError: class org.apache.spark.sql.hive.HiveExternalCatalog overrides final method alterDatabase.(Lorg/apache/spark/sql/catalyst/catalog/CatalogDatabase;)V
  at java.lang.ClassLoader.defineClass1(Native Method)
  at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
  at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
  at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
  at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
  at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
  at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
  at java.security.AccessController.doPrivileged(Native Method)
  at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
  at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
  at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:53)
  at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
  at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anon$1.<init>(HiveSessionStateBuilder.scala:69)
  at org.apache.spark.sql.hive.HiveSessionStateBuilder.analyzer(HiveSessionStateBuilder.scala:69)
  at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
  at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
  at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:79)
  at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:79)
  at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
  at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
  at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:172)
  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:178)
  at org.apache.spark.sql.Dataset$.apply(Dataset.scala:65)
  at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:470)
  at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:377)
  at org.apache.spark.sql.SQLImplicits.localSeqToDatasetHolder(SQLImplicits.scala:228)

1 Answer

0 votes
/ 13 May 2018

There is nothing wrong with the code you posted; as the stack trace shows, the failure comes from somewhere else (the exact result of running your code in my session is below). A java.lang.VerifyError saying a class "overrides final method" during class loading is the classic symptom of mixing Spark artifacts from different releases on one classpath, for example a spark-hive jar built against a different Spark version than spark-core/spark-sql; see the dependency sketch at the end of this answer.

case class Employee(Name:String, Age:Int, Designation:String, Salary:Int, ZipCode:Int)
val EmployeesData = Seq( Employee("Anto",   21, "Software Engineer", 2000, 56798))
val Employee_DataFrame = EmployeesData.toDF
Employee_DataFrame.show()

Employee_DataFrame: org.apache.spark.sql.DataFrame = [Name: string, Age: int ... 3 more fields]

+----+---+-----------------+------+-------+
|Name|Age|      Designation|Salary|ZipCode|
+----+---+-----------------+------+-------+
|Anto| 21|Software Engineer|  2000|  56798|
+----+---+-----------------+------+-------+
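
As an aside, if you prefer not to rely on the implicit conversion at all, SparkSession also exposes createDataFrame, which accepts a Seq of case-class instances directly. A minimal sketch, assuming the same Employee case class and EmployeesData value as above:

// No import spark.implicits._ needed for this variant
val viaCreateDataFrame = spark.createDataFrame(EmployeesData)
viaCreateDataFrame.show()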
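As for the VerifyError itself: make sure every Spark dependency in your build resolves to the same version as the Spark installation you run against. A hypothetical build.sbt sketch, assuming you build with sbt and target Spark 2.3.0 (adjust the version to match your cluster):

// Pin every Spark artifact to one version; mixing releases on the
// classpath is what produces "overrides final method" VerifyErrors.
val sparkVersion = "2.3.0"

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % sparkVersion % "provided",
  "org.apache.spark" %% "spark-sql"  % sparkVersion % "provided",
  "org.apache.spark" %% "spark-hive" % sparkVersion % "provided"
)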