I am running into the error below:
[error] at scala.tools.nsc.typechecker.Typers$Typer.typedApply$1(Typers.scala:4580)
[error] at scala.tools.nsc.typechecker.Typers$Typer.typedInAnyMode$1(Typers.scala:5343)
[error] at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5360)
[error] at scala.tools.nsc.typechecker.Typers$Typer.runTyper$1(Typers.scala:5396)
[error] (Compile / compileIncremental) java.lang.StackOverflowError
[error] Total time: 11 s, completed Apr 25, 2019 7:11:28 PM
I also tried increasing the JVM options with
javaOptions ++= Seq("-Xms512M", "-Xmx4048M", "-XX:MaxPermSize=4048M", "-XX:+CMSClassUnloadingEnabled")
but that did not help. All the dependencies seem to resolve properly, but this error is a show-stopper.
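One thing I am not sure about: with fork := true, javaOptions should only reach the forked run/test JVMs, while (Compile / compileIncremental) runs inside sbt's own JVM, so those flags may never get to the compiler at all. As far as I know, sbt's own JVM is configured through SBT_OPTS or a .sbtopts file in the project root, where a -J prefix passes the flag straight to the JVM. A minimal sketch of what I mean (the -Xss value is just a guess, and -XX:MaxPermSize is ignored on Java 8+ anyway):
-J-Xss8m
-J-Xmx4G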
build.properties
sbt.version=1.2.8
plugin.sbt
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.2.4")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9")
And the build.sbt
name := "ProjectNew"
version := "4.0"
scalaVersion := "2.11.8"
fork := true
libraryDependencies ++= Seq(
  "org.scalaz" %% "scalaz-core" % "7.1.0" % "test",
  ("org.apache.spark" %% "spark-core" % "2.1.0.cloudera1")
    .exclude("org.mortbay.jetty", "servlet-api")
    .exclude("commons-beanutils", "commons-beanutils-core")
    //.exclude("commons-collections", "commons-collections")
    .exclude("com.esotericsoftware.minlog", "minlog")
    //.exclude("org.apache.hadoop", "hadoop-client")
    .exclude("commons-logging", "commons-logging") % "provided",
  ("org.apache.spark" %% "spark-sql" % "2.1.0.cloudera1")
    .exclude("com.esotericsoftware.minlog", "minlog")
    //.exclude("org.apache.hadoop", "hadoop-client")
    % "provided",
  ("org.apache.spark" %% "spark-hive" % "2.1.0.cloudera1")
    .exclude("com.esotericsoftware.minlog", "minlog")
    //.exclude("org.apache.hadoop", "hadoop-client")
    % "provided",
  "spark.jobserver" % "job-server-api" % "0.4.0",
  "org.scalatest" %% "scalatest" % "2.2.4" % "test",
  "com.github.nscala-time" %% "nscala-time" % "1.6.0"
)
//libraryDependencies ++= Seq(
// "org.apache.spark" %% "spark-core" % "1.5.0-cdh5.5.0" % "provided",
// "org.apache.spark" %% "spark-sql" % "1.5.0-cdh5.5.0" % "provided",
// "org.scalatest"%"scalatest_2.10" % "2.2.4" % "test",
// "com.github.nscala-time" %% "nscala-time" % "1.6.0"
// )
resolvers ++= Seq(
  "cloudera" at "http://repository.cloudera.com/artifactory/cloudera-repos/",
  "Job Server Bintray" at "http://dl.bintray.com/spark-jobserver/maven"
)
scalacOptions ++= Seq("-unchecked", "-deprecation")
assemblyMergeStrategy in assembly := {
  case PathList("META-INF", xs @ _*) => MergeStrategy.discard
  case x => MergeStrategy.first
}
parallelExecution in Test := false
fork in Test := true
javaOptions ++= Seq("-Xms512M", "-Xmx4048M", "-XX:MaxPermSize=4048M", "-XX:+CMSClassUnloadingEnabled")
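On that last line: since compile is not forked, these options only apply to run/test. If the overflow also showed up in forked tests, I would expect a scoped setting with a larger per-thread stack to be the relevant knob, roughly along these lines (the -Xss value is again just a guess):
javaOptions in Test ++= Seq("-Xss8m")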