spark-shell не работает в Windows 8 | Невозможно загрузить нативную библиотеку hadoop - PullRequest
0 голосов
/ 15 мая 2019

Я совсем новичок в Apache Spark. Я пытаюсь настроить его в системе Windows 8. Я сделал следующее:

  1. Скачал Spark spark-2.4.1-bin-hadoop2.7.
  2. Скачал winutils.exe по этой ссылке https://github.com/steveloughran/winutils/blob/master/hadoop-2.7.1/bin/winutils.exe
  3. Поместил Spark в эту папку E:\InstalledSoftwares\Spark\spark-2.4.1-bin-hadoop2.7\spark-2.4.1-bin-hadoop2.7, а winutils.exe — в E:\InstalledSoftwares\winutils\bin
  4. Настроил SPARK_HOME = E:\InstalledSoftwares\Spark\spark-2.4.1-bin-hadoop2.7\spark-2.4.1-bin-hadoop2.7
  5. Настроил HADOOP_HOME = E:\InstalledSoftwares\winutils
  6. Изменил переменную PATH. Теперь она выглядит так: C:\Users\Sumit\AppData\Local\Programs\Python\Python37-32\Scripts\;C:\Users\Sumit\AppData\Local\Programs\Python\Python37-32\;%SPARK_HOME%\bin

echo %SPARK_HOME% и echo %HADOOP_HOME% дают те же значения, что я указал выше. Затем, когда я запускаю в cmd команду C:\Users\Sumit> spark-shell, я получаю следующее (первая часть журнала — на изображении; не весь журнал выводится в консоль): enter image description here

a:211)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apac
he$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apac
he$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$mumly$1.apply(ILoop.scala:
189)
        at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
        at scala.tools.nsc.interpreter.ILoop.mumly(ILoop.scala:186)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.org$apache$spark$
repl$SparkILoop$$anonfun$$loopPostInit$1(SparkILoop.scala:199)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$
1$1.apply(SparkILoop.scala:267)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$
1$1.apply(SparkILoop.scala:247)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.withSuppressedSet
tings$1(SparkILoop.scala:235)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.startup$1(SparkIL
oop.scala:247)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(Spar
kILoop.scala:282)
        at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)
        at org.apache.spark.repl.Main$.doMain(Main.scala:78)
        at org.apache.spark.repl.Main$.main(Main.scala:58)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.
java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAcces
sorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.sc
ala:52)
        at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubm
it$$runMain(SparkSubmit.scala:849)
        at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167
)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
        at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scal
a:924)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
19/05/15 20:41:34 ERROR Utils: Uncaught exception in thread main
java.lang.NullPointerException
        at org.apache.spark.scheduler.local.LocalSchedulerBackend.org$apache$spa
rk$scheduler$local$LocalSchedulerBackend$$stop(LocalSchedulerBackend.scala:162)
        at org.apache.spark.scheduler.local.LocalSchedulerBackend.stop(LocalSche
dulerBackend.scala:138)
        at org.apache.spark.scheduler.TaskSchedulerImpl.stop(TaskSchedulerImpl.s
cala:669)
        at org.apache.spark.scheduler.DAGScheduler.stop(DAGScheduler.scala:2042)

        at org.apache.spark.SparkContext$$anonfun$stop$6.apply$mcV$sp(SparkConte
xt.scala:1949)
        at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1340)
        at org.apache.spark.SparkContext.stop(SparkContext.scala:1948)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:585)
        at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
        at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSessi
on.scala:935)
        at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSessi
on.scala:926)
        at scala.Option.getOrElse(Option.scala:121)
        at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.sc
ala:926)
        at org.apache.spark.repl.Main$.createSparkSession(Main.scala:106)
        at $line3.$read$$iw$$iw.<init>(<console>:15)
        at $line3.$read$$iw.<init>(<console>:43)
        at $line3.$read.<init>(<console>:45)
        at $line3.$read$.<init>(<console>:49)
        at $line3.$read$.<clinit>(<console>)
        at $line3.$eval$.$print$lzycompute(<console>:7)
        at $line3.$eval$.$print(<console>:6)
        at $line3.$eval.$print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.
java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAcces
sorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:793)

        at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1054
)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunR
eq$1.apply(IMain.scala:645)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunR
eq$1.apply(IMain.scala:644)
        at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaCla
ssLoader.scala:31)
        at scala.reflect.internal.util.AbstractFileClassLoader.asContext(Abstrac
tFileClassLoader.scala:19)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.
scala:644)
        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:576)
        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:572)
        at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.sca
la:231)
        at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.sca
la:231)
        at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
        at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:231)
        at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$
apply$mcV$sp$1.apply(SparkILoop.scala:109)
        at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$
apply$mcV$sp$1.apply(SparkILoop.scala:109)
        at scala.collection.immutable.List.foreach(List.scala:392)
        at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV
$sp(SparkILoop.scala:109)
        at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(Spa
rkILoop.scala:109)
        at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(Spa
rkILoop.scala:109)
        at scala.tools.nsc.interpreter.ILoop.savingReplayStack(ILoop.scala:91)
        at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:108
)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apac
he$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply$mcV$sp(SparkILoop.scal
a:211)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apac
he$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apac
he$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$mumly$1.apply(ILoop.scala:
189)
        at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
        at scala.tools.nsc.interpreter.ILoop.mumly(ILoop.scala:186)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.org$apache$spark$
repl$SparkILoop$$anonfun$$loopPostInit$1(SparkILoop.scala:199)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$
1$1.apply(SparkILoop.scala:267)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$
1$1.apply(SparkILoop.scala:247)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.withSuppressedSet
tings$1(SparkILoop.scala:235)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.startup$1(SparkIL
oop.scala:247)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(Spar
kILoop.scala:282)
        at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)
        at org.apache.spark.repl.Main$.doMain(Main.scala:78)
        at org.apache.spark.repl.Main$.main(Main.scala:58)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.
java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAcces
sorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.sc
ala:52)
        at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubm
it$$runMain(SparkSubmit.scala:849)
        at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167
)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
        at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scal
a:924)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
19/05/15 20:41:34 WARN MetricsSystem: Stopping a MetricsSystem that is not runni
ng
19/05/15 20:41:34 ERROR Main: Failed to initialize Spark session.
org.apache.spark.SparkException: Invalid Spark URL: spark://HeartbeatReceiver@Ad
venture_World:50087
        at org.apache.spark.rpc.RpcEndpointAddress$.apply(RpcEndpointAddress.sca
la:66)
        at org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(Net
tyRpcEnv.scala:134)
        at org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:101)
        at org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:109)
        at org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:32)
        at org.apache.spark.executor.Executor.<init>(Executor.scala:179)
        at org.apache.spark.scheduler.local.LocalEndpoint.<init>(LocalSchedulerB
ackend.scala:59)
        at org.apache.spark.scheduler.local.LocalSchedulerBackend.start(LocalSch
edulerBackend.scala:127)
        at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.
scala:186)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:501)
        at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
        at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSessi
on.scala:935)
        at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSessi
on.scala:926)
        at scala.Option.getOrElse(Option.scala:121)
        at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.sc
ala:926)
        at org.apache.spark.repl.Main$.createSparkSession(Main.scala:106)
        at $line3.$read$$iw$$iw.<init>(<console>:15)
        at $line3.$read$$iw.<init>(<console>:43)
        at $line3.$read.<init>(<console>:45)
        at $line3.$read$.<init>(<console>:49)
        at $line3.$read$.<clinit>(<console>)
        at $line3.$eval$.$print$lzycompute(<console>:7)
        at $line3.$eval$.$print(<console>:6)
        at $line3.$eval.$print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.
java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAcces
sorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:793)

        at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1054
)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunR
eq$1.apply(IMain.scala:645)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunR
eq$1.apply(IMain.scala:644)
        at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaCla
ssLoader.scala:31)
        at scala.reflect.internal.util.AbstractFileClassLoader.asContext(Abstrac
tFileClassLoader.scala:19)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.
scala:644)
        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:576)
        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:572)
        at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.sca
la:231)
        at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.sca
la:231)
        at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
        at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:231)
        at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$
apply$mcV$sp$1.apply(SparkILoop.scala:109)
        at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$
apply$mcV$sp$1.apply(SparkILoop.scala:109)
        at scala.collection.immutable.List.foreach(List.scala:392)
        at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV
$sp(SparkILoop.scala:109)
        at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(Spa
rkILoop.scala:109)
        at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(Spa
rkILoop.scala:109)
        at scala.tools.nsc.interpreter.ILoop.savingReplayStack(ILoop.scala:91)
        at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:108
)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apac
he$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply$mcV$sp(SparkILoop.scal
a:211)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apac
he$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apac
he$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$mumly$1.apply(ILoop.scala:
189)
        at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
        at scala.tools.nsc.interpreter.ILoop.mumly(ILoop.scala:186)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.org$apache$spark$
repl$SparkILoop$$anonfun$$loopPostInit$1(SparkILoop.scala:199)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$
1$1.apply(SparkILoop.scala:267)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$
1$1.apply(SparkILoop.scala:247)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.withSuppressedSet
tings$1(SparkILoop.scala:235)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.startup$1(SparkIL
oop.scala:247)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(Spar
kILoop.scala:282)
        at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)
        at org.apache.spark.repl.Main$.doMain(Main.scala:78)
        at org.apache.spark.repl.Main$.main(Main.scala:58)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.
java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAcces
sorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.sc
ala:52)
        at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubm
it$$runMain(SparkSubmit.scala:849)
        at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167
)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
        at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scal
a:924)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
...