package crickdata
import org.apache.spark._
import org.apache.spark.SparkContext._
import org.apache.spark.sql._
import org.apache.log4j._
object Analysis {

  /**
   * Fetches a JSON document from the given URL and parses it into a Spark DataFrame.
   *
   * @param url HTTP endpoint returning a JSON document
   * @return DataFrame with the schema inferred from the JSON response
   */
  def GetUrlContentJson(url: String): DataFrame = {
    // Read the whole response body, closing the underlying connection when done.
    val source = scala.io.Source.fromURL(url)
    val result = try source.mkString finally source.close()

    // Build (or reuse) a single SparkSession. Do NOT also create a raw
    // `new SparkContext(...)` — only one SparkContext may run per JVM, and
    // having both was the root cause of the original runtime failure.
    val spark = SparkSession
      .builder
      .appName("SparkSQL")
      .master("local[*]")
      .config("spark.sql.warehouse.dir", "file:///C:/temp") // Necessary to work around a Windows bug in Spark 2.0.0; omit if you're not on Windows.
      .getOrCreate()

    // The JSON reader expects one document per line, so strip the trailing newline.
    val jsonResponseOneLine = result.stripLineEnd
    val jsonRdd = spark.sparkContext.parallelize(jsonResponseOneLine :: Nil)

    // Last expression is the return value; no explicit `return` needed.
    spark.read.json(jsonRdd)
  }

  def main(args: Array[String]): Unit = {
    // Quiet Spark's verbose logging so the DataFrame output is readable.
    Logger.getLogger("org").setLevel(Level.ERROR)
    System.setProperty("hadoop.home.dir", "D:/spark/winutls")
    val response = GetUrlContentJson("https://cricapi.com/api/matches?apikey=go2vUFGz98UN1pX4e34RkcDLSRz1")
    response.show
  }
}
But when I run this, I get the following ERROR:
Exception in thread "main" java.lang.ArrayIndexOutOfBoundsException: 10582
    at com.thoughtworks.paranamer.BytecodeReadingParanamer$ClassReader.accept(BytecodeReadingParanamer.java)
    at com.thoughtworks.paranamer.BytecodeReadingParanamer$ClassReader.access$200(BytecodeReadingParanamer.java:338)
    at com.thoughtworks.paranamer.BytecodeReadingParanamer.lookupParameterNames(BytecodeReadingParanamer.java:103)
    at com.thoughtworks.paranamer.CachingParanamer.lookupParameterNames(CachingParanamer.java:90)
    at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.getCtorParams(BeanIntrospector.scala:44)
    at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$1(BeanIntrospec