I am trying to set up a project with two main classes, SparkConsumer and KafkaProducer. To do this I introduced a multi-project structure in the sbt build file. The consumer and producer modules are meant to be separate projects, while the core project contains utilities used by both the producer and the consumer; the root is the main project. I also introduced common settings and shared library dependencies. However, for some reason the project does not compile: everything sbt-related is highlighted in red, even though plugins.sbt, with the sbt-assembly plugin defined, is in the root project.
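For reference, project/plugins.sbt contains only the assembly plugin, along these lines (the exact version here is an assumption):

addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.6")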
What could be the solution to this problem?
The project structure looks as follows:
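A sketch of the layout implied by the build definition below (the root directory name and the exact source paths are assumptions):

demo/
├── build.sbt
├── project/
│   └── plugins.sbt
├── core/
│   └── src/main/scala/            (shared utilities)
├── consumer/
│   └── src/main/scala/consumer/SparkConsumer.scala
└── producer/
    └── src/main/scala/producer/KafkaCheckinsProducer.scala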
Here is the build.sbt file:
lazy val overrides = Seq(
  "com.fasterxml.jackson.core" % "jackson-core" % "2.9.5",
  "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.5",
  "com.fasterxml.jackson.module" % "jackson-module-scala_2.11" % "2.9.5"
)

lazy val commonSettings = Seq(
  name := "Demo",
  version := "0.1",
  scalaVersion := "2.11.8",
  resolvers += "Spark Packages Repo" at "http://dl.bintray.com/spark-packages/maven",
  dependencyOverrides ++= overrides // ++= since overrides is a Seq; += expects a single ModuleID
)
lazy val assemblySettings = Seq(
  assemblyMergeStrategy in assembly := {
    case PathList("org", "aopalliance", xs @ _*) => MergeStrategy.last
    case PathList("javax", "inject", xs @ _*) => MergeStrategy.last
    case PathList("javax", "servlet", xs @ _*) => MergeStrategy.last
    case PathList("javax", "activation", xs @ _*) => MergeStrategy.last
    case PathList("org", "apache", xs @ _*) => MergeStrategy.last
    case PathList("com", "google", xs @ _*) => MergeStrategy.last
    case PathList("com", "esotericsoftware", xs @ _*) => MergeStrategy.last
    case PathList("com", "codahale", xs @ _*) => MergeStrategy.last
    case PathList("com", "yammer", xs @ _*) => MergeStrategy.last
    case PathList("org", "slf4j", xs @ _*) => MergeStrategy.last
    case PathList("org", "neo4j", xs @ _*) => MergeStrategy.last
    case PathList("com", "typesafe", xs @ _*) => MergeStrategy.last
    case PathList("net", "jpountz", xs @ _*) => MergeStrategy.last
    case PathList("META-INF", xs @ _*) => MergeStrategy.discard
    case "about.html" => MergeStrategy.rename
    case "META-INF/ECLIPSEF.RSA" => MergeStrategy.last
    case "META-INF/mailcap" => MergeStrategy.last
    case "META-INF/mimetypes.default" => MergeStrategy.last
    case "plugin.properties" => MergeStrategy.last
    case "log4j.properties" => MergeStrategy.last
    case x =>
      val oldStrategy = (assemblyMergeStrategy in assembly).value
      oldStrategy(x)
  }
)
val sparkVersion = "2.2.0"

lazy val commonDependencies = Seq(
  "org.apache.kafka" %% "kafka" % "1.1.0",
  "org.apache.spark" %% "spark-core" % sparkVersion % "provided",
  "org.apache.spark" %% "spark-sql" % sparkVersion,
  "org.apache.spark" %% "spark-streaming" % sparkVersion,
  "org.apache.spark" %% "spark-streaming-kafka-0-10" % sparkVersion,
  "neo4j-contrib" % "neo4j-spark-connector" % "2.1.0-M4",
  "com.typesafe" % "config" % "1.3.0",
  "org.neo4j.driver" % "neo4j-java-driver" % "1.5.1",
  "com.opencsv" % "opencsv" % "4.1",
  "com.databricks" %% "spark-csv" % "1.5.0",
  "com.github.tototoshi" %% "scala-csv" % "1.3.5",
  "org.elasticsearch" %% "elasticsearch-spark-20" % "6.2.4"
)
lazy val root = (project in file("."))
  .settings(
    commonSettings,
    assemblySettings,
    libraryDependencies ++= commonDependencies,
    assemblyJarName in assembly := "demo_root.jar"
  )
  .aggregate(core, consumer, producer)

lazy val core = project
  .settings(
    commonSettings,
    assemblySettings,
    libraryDependencies ++= commonDependencies
  )

lazy val consumer = project
  .settings(
    commonSettings,
    assemblySettings,
    libraryDependencies ++= commonDependencies,
    mainClass in assembly := Some("consumer.SparkConsumer"),
    assemblyJarName in assembly := "demo_consumer.jar"
  )
  .dependsOn(core)

lazy val producer = project
  .settings(
    commonSettings,
    assemblySettings,
    libraryDependencies ++= commonDependencies,
    mainClass in assembly := Some("producer.KafkaCheckinsProducer"),
    assemblyJarName in assembly := "demo_producer.jar"
  )
  .dependsOn(core)
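The intent is to assemble each module into its own fat jar; assuming sbt-assembly's default output locations, the jars would be built with:

sbt "project consumer" assembly   # consumer/target/scala-2.11/demo_consumer.jar
sbt "project producer" assembly   # producer/target/scala-2.11/demo_producer.jar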
UPDATE: stack trace
[error] (producer / update) java.lang.IllegalArgumentException: a module is not authorized to depend on itself: demo#demo_2.11;0.1
[error] (consumer / update) java.lang.IllegalArgumentException: a module is not authorized to depend on itself: demo#demo_2.11;0.1
[error] (core / Compile / compileIncremental) Compilation failed
[error] (update) sbt.librarymanagement.ResolveException: unresolved dependency: org.apache.spark#spark-sql_2.12;2.2.0: not found
[error] unresolved dependency: org.apache.spark#spark-streaming_2.12;2.2.0: not found
[error] unresolved dependency: org.apache.spark#spark-streaming-kafka-0-10_2.12;2.2.0: not found
[error] unresolved dependency: com.databricks#spark-csv_2.12;1.5.0: not found
[error] unresolved dependency: org.elasticsearch#elasticsearch-spark-20_2.12;6.2.4: not found
[error] unresolved dependency: org.apache.spark#spark-core_2.12;2.2.0: not found
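A detail that seems relevant to the self-dependency errors: commonSettings assigns the same name := "Demo" to root, core, consumer and producer, so all four projects resolve to the identical module ID demo#demo_2.11;0.1, and consumer.dependsOn(core) then looks like a module depending on itself; the unresolved _2.12 artifacts likewise suggest scalaVersion is not being applied everywhere. A minimal sketch of the change I suspect is needed (per-project names plus a build-wide Scala version; the demo-* names are made up):

// Remove name and scalaVersion from commonSettings, then:
scalaVersion in ThisBuild := "2.11.8"

lazy val core = project
  .settings(commonSettings, name := "demo-core")

lazy val consumer = project
  .settings(commonSettings, name := "demo-consumer")
  .dependsOn(core)

lazy val producer = project
  .settings(commonSettings, name := "demo-producer")
  .dependsOn(core)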