Reply
Explorer
Posts: 14
Registered: 04-03-2014

Unable to create SparkContext to Spark 1.3 Standalone service in CDH5.4.2

Hi all,

 

I have simple app that is simply trying to create a SparkContext to a Spark 1.3 Standalone service in CDH5.4.2. Here's my program:

 

package mytest

import org.apache.spark.{SparkContext, SparkConf}


object TestApp extends App {

val sparkConf = new SparkConf()
.setAppName("Terry TestApp")
.setMaster("spark://5K04.corp.pivotlink.com:7077")
.setSparkHome("/opt/cloudera/parcels/CDH/lib/spark")

val sc = new SparkContext(sparkConf)
sc.stop
}

 I've confirmed both the master URL and spark home install directories are correct. When I run this app, I get the following exception:

 

11:50:20.112 [sparkDriver-akka.actor.default-dispatcher-2] ERROR akka.actor.OneForOneStrategy - exception during creation
akka.actor.ActorInitializationException: exception during creation
at akka.actor.ActorInitializationException$.apply(Actor.scala:164) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ActorCell.create(ActorCell.scala:596) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ActorCell.invokeAll$1(ActorCell.scala:456) [akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ActorCell.systemInvoke(ActorCell.scala:478) [akka-actor_2.10-2.3.6.jar:?]
at akka.dispatch.Mailbox.processAllSystemMessages(Mailbox.scala:263) [akka-actor_2.10-2.3.6.jar:?]
at akka.dispatch.Mailbox.run(Mailbox.scala:219) [akka-actor_2.10-2.3.6.jar:?]
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:393) [akka-actor_2.10-2.3.6.jar:?]
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) [scala-library-2.10.5.jar:?]
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) [scala-library-2.10.5.jar:?]
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) [scala-library-2.10.5.jar:?]
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) [scala-library-2.10.5.jar:?]
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.7.0_60]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) ~[?:1.7.0_60]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.7.0_60]
at java.lang.reflect.Constructor.newInstance(Constructor.java:526) ~[?:1.7.0_60]
at akka.util.Reflect$.instantiate(Reflect.scala:66) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ArgsReflectConstructor.produce(Props.scala:352) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.Props.newActor(Props.scala:252) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ActorCell.newActor(ActorCell.scala:552) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ActorCell.create(ActorCell.scala:578) ~[akka-actor_2.10-2.3.6.jar:?]
... 9 more
Caused by: java.lang.AbstractMethodError: akka.remote.RemoteActorRefProvider$RemotingTerminator.akka$actor$FSM$_setter_$Event_$eq(Lakka/actor/FSM$Event$;)V
at akka.actor.FSM$class.$init$(FSM.scala:272) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.remote.RemoteActorRefProvider$RemotingTerminator.<init>(RemoteActorRefProvider.scala:36) ~[akka-remote_2.10-2.2.3-shaded-protobuf.jar:?]
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.7.0_60]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) ~[?:1.7.0_60]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.7.0_60]
at java.lang.reflect.Constructor.newInstance(Constructor.java:526) ~[?:1.7.0_60]
at akka.util.Reflect$.instantiate(Reflect.scala:66) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ArgsReflectConstructor.produce(Props.scala:352) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.Props.newActor(Props.scala:252) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ActorCell.newActor(ActorCell.scala:552) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ActorCell.create(ActorCell.scala:578) ~[akka-actor_2.10-2.3.6.jar:?]
... 9 more
11:50:20.122 [sparkDriver-akka.actor.default-dispatcher-2] ERROR akka.actor.ActorSystemImpl - Uncaught fatal error from thread [sparkDriver-akka.actor.default-dispatcher-3] shutting down ActorSystem [sparkDriver]
java.lang.AbstractMethodError
at akka.actor.ActorCell.create(ActorCell.scala:580) ~[akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ActorCell.invokeAll$1(ActorCell.scala:456) [akka-actor_2.10-2.3.6.jar:?]
at akka.actor.ActorCell.systemInvoke(ActorCell.scala:478) [akka-actor_2.10-2.3.6.jar:?]
at akka.dispatch.Mailbox.processAllSystemMessages(Mailbox.scala:263) [akka-actor_2.10-2.3.6.jar:?]
at akka.dispatch.Mailbox.run(Mailbox.scala:219) [akka-actor_2.10-2.3.6.jar:?]
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:393) [akka-actor_2.10-2.3.6.jar:?]
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) [scala-library-2.10.5.jar:?]
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) [scala-library-2.10.5.jar:?]
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) [scala-library-2.10.5.jar:?]
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) [scala-library-2.10.5.jar:?]
[ERROR] [07/01/2015 11:50:20.130] [sparkDriver-akka.actor.default-dispatcher-2] [ActorSystem(sparkDriver)] Uncaught fatal error from thread [sparkDriver-akka.actor.default-dispatcher-2] shutting down ActorSystem [sparkDriver]
java.lang.AbstractMethodError
at akka.actor.dungeon.FaultHandling$class.akka$actor$dungeon$FaultHandling$$finishTerminate(FaultHandling.scala:210)
at akka.actor.dungeon.FaultHandling$class.terminate(FaultHandling.scala:172)
at akka.actor.ActorCell.terminate(ActorCell.scala:369)
at akka.actor.ActorCell.invokeAll$1(ActorCell.scala:462)
at akka.actor.ActorCell.systemInvoke(ActorCell.scala:478)
at akka.dispatch.Mailbox.processAllSystemMessages(Mailbox.scala:263)
at akka.dispatch.Mailbox.run(Mailbox.scala:219)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:393)
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)

[ERROR] [07/01/2015 11:50:20.131] [sparkDriver-akka.actor.default-dispatcher-3] [ActorSystem(sparkDriver)] Uncaught fatal error from thread [sparkDriver-akka.actor.default-dispatcher-3] shutting down ActorSystem [sparkDriver]
java.lang.AbstractMethodError
at akka.actor.dungeon.FaultHandling$class.akka$actor$dungeon$FaultHandling$$finishTerminate(FaultHandling.scala:210)
at akka.actor.dungeon.FaultHandling$class.terminate(FaultHandling.scala:172)
at akka.actor.ActorCell.terminate(ActorCell.scala:369)
at akka.actor.ActorCell.invokeAll$1(ActorCell.scala:462)
at akka.actor.ActorCell.systemInvoke(ActorCell.scala:478)
at akka.dispatch.Mailbox.processAllSystemMessages(Mailbox.scala:263)
at akka.dispatch.Mailbox.run(Mailbox.scala:219)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:393)
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)

 

This app is run on a remote box, outside the cluster. Does the above exception ring a bell with anyone?

 

Thanks,

-Terry

 

Cloudera Employee
Posts: 481
Registered: 08-11-2014

Re: Unable to create SparkContext to Spark 1.3 Standalone service in CDH5.4.2

It's an Akka version problem, but, that leads me to guess you are trying to package Spark in your app and/or not using spark-submit

Explorer
Posts: 14
Registered: 04-03-2014

Re: Unable to create SparkContext to Spark 1.3 Standalone service in CDH5.4.2

Thanks, that will be helpful to track down the versioning. And yes, I'm trying to submit a job programmatically instead of via spark-submit due to someone's "legacy" code  :P

Cloudera Employee
Posts: 481
Registered: 08-11-2014

Re: Unable to create SparkContext to Spark 1.3 Standalone service in CDH5.4.2

OK, that's going a little off-road but it can be made to work. You'll want to depend on the CDH Spark artifacts to get all the various transitive dependencies harmonized. Ideally mark them 'provided' and construct the classpath containing Spark etc from the cluster's copy at runtime. Embedding the Spark code in your binary is still problematic in some corner cases.

Explorer
Posts: 14
Registered: 04-03-2014

Re: Unable to create SparkContext to Spark 1.3 Standalone service in CDH5.4.2

Just looking at the external libraries reported by IntelliJ shows that the project is pulling in Akka 2.3.6 (probably from a Spray dependency) and Akka 2.2.3 (from the org.spark-project, which is the CDH5.4.2 version?). The 2.2.3 version is reported as being shaded. Not sure if I can flip the shading around and get the other non-Spark related parts of the app working. I'll "off-road" a bit and see what I can figure out.

Cloudera Employee
Posts: 481
Registered: 08-11-2014

Re: Unable to create SparkContext to Spark 1.3 Standalone service in CDH5.4.2

Yep that's exactly it, you'll need Akka 2.2 as that is the version the Hadoop components in CDH 5.4 share. If you depend on the CDH Spark artifacts you'll get that transitively.

Explorer
Posts: 14
Registered: 04-03-2014

Re: Unable to create SparkContext to Spark 1.3 Standalone service in CDH5.4.2

Yeah, I had changed the dependencies to CDH Spark artifacts since this code previously depended on Apache's version of Spark 1.2.0 which has Akka 2.3.4. On top of that, this project also has explicit dependencies on Akka 2.3.6. Not sure why, but I like your off-roading analogy to getting this resolved!

Highlighted
Explorer
Posts: 14
Registered: 04-03-2014

Re: Unable to create SparkContext to Spark 1.3 Standalone service in CDH5.4.2

I ended up not messing with the Akka versioning, but instead dropped a Spark 1.3.1 assembly jar downloaded from Apache in place of Cloudera's. The Spark service comes up fine and I can now connect to the Spark master via the context. However, I'm now noticing the following exceptions which eventually leads to the application getting shut down by the master:

 

Exception in thread "main" java.lang.reflect.UndeclaredThrowableException
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1643)
at org.apache.spark.deploy.SparkHadoopUtil.runAsSparkUser(SparkHadoopUtil.scala:59)
at org.apache.spark.executor.CoarseGrainedExecutorBackend$.run(CoarseGrainedExecutorBackend.scala:128)
at org.apache.spark.executor.CoarseGrainedExecutorBackend$.main(CoarseGrainedExecutorBackend.scala:224)
at org.apache.spark.executor.CoarseGrainedExecutorBackend.main(CoarseGrainedExecutorBackend.scala)
Caused by: java.util.concurrent.TimeoutException: Futures timed out after [30 seconds]
at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:219)
at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
at scala.concurrent.Await$$anonfun$result$1.apply(package.scala:107)
at scala.concurrent.BlockContext$DefaultBlockContext$.blockOn(BlockContext.scala:53)
at scala.concurrent.Await$.result(package.scala:107)
at org.apache.spark.executor.CoarseGrainedExecutorBackend$$anonfun$run$1.apply$mcV$sp(CoarseGrainedExecutorBackend.scala:144)
at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:60)
at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:59)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
... 4 more

 

I looked at the master's logs and see that it's trying to launch the application on an executor, but fails. Excerpt from the master log:

 

2015-07-06 13:19:35,759 INFO org.apache.spark.deploy.master.Master: Launching executor app-20150706131935-0001/28 on worker worker-20150706124910-BK04.corp.pivotlink.com-7078
2015-07-06 13:19:35,763 WARN org.apache.spark.deploy.master.Master: Got status update for unknown executor app-20150706131935-0001/27
2015-07-06 13:19:35,768 INFO org.apache.spark.deploy.master.Master: Removing executor app-20150706131935-0001/28 because it is FAILED

 

After failing 11 times, the application is removed. Checked the $SPARK_HOME/work directory for the application logs, but it tells me no more than the stacktrace which I attached above. Any ideas what might be happening now?

 

-Terry

Explorer
Posts: 14
Registered: 04-03-2014

Re: Unable to create SparkContext to Spark 1.3 Standalone service in CDH5.4.2

Got it figured out. I had a firewall running on the machine the driver was launched from. Once I disabled that, the app runs, woohoo!

New Contributor
Posts: 1
Registered: 09-23-2015

Re: Unable to create SparkContext to Spark 1.3 Standalone service in CDH5.4.2

15/09/24 00:54:29 ERROR OneForOneStrategy: Actor not found for: ActorSelection[A
nchor(akka://sparkDriver/deadLetters), Path(/)]
akka.actor.ActorInitializationException: exception during creation
at akka.actor.ActorInitializationException$.apply(Actor.scala:164)
at akka.actor.ActorCell.create(ActorCell.scala:596)
at akka.actor.ActorCell.invokeAll$1(ActorCell.scala:456)
at akka.actor.ActorCell.systemInvoke(ActorCell.scala:478)
at akka.dispatch.Mailbox.processAllSystemMessages(Mailbox.scala:263)
at akka.dispatch.Mailbox.run(Mailbox.scala:219)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(Abst
ractDispatcher.scala:393)
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool
.java:1339)
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:19
79)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThre
ad.java:107)
Caused by: akka.actor.ActorNotFound: Actor not found for: ActorSelection[Anchor(
akka://sparkDriver/deadLetters), Path(/)]
at akka.actor.ActorSelection$$anonfun$resolveOne$1.apply(ActorSelection.
scala:65)
at akka.actor.ActorSelection$$anonfun$resolveOne$1.apply(ActorSelection.
scala:63)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:32)
at akka.dispatch.BatchingExecutor$Batch$$anonfun$run$1.processBatch$1(Ba
tchingExecutor.scala:67)
at akka.dispatch.BatchingExecutor$Batch$$anonfun$run$1.apply$mcV$sp(Batc
hingExecutor.scala:82)
at akka.dispatch.BatchingExecutor$Batch$$anonfun$run$1.apply(BatchingExe
cutor.scala:59)
at akka.dispatch.BatchingExecutor$Batch$$anonfun$run$1.apply(BatchingExe
cutor.scala:59)
at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:72
)
at akka.dispatch.BatchingExecutor$Batch.run(BatchingExecutor.scala:58)
at akka.dispatch.ExecutionContexts$sameThreadExecutionContext$.unbatched
Execute(Future.scala:74)
at akka.dispatch.BatchingExecutor$class.execute(BatchingExecutor.scala:1
10)
at akka.dispatch.ExecutionContexts$sameThreadExecutionContext$.execute(F
uture.scala:73)
at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala
:40)
at scala.concurrent.impl.Promise$DefaultPromise.scala$concurrent$impl$Pr
omise$DefaultPromise$$dispatchOrAddCallback(Promise.scala:280)
at scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala
:270)
at akka.actor.ActorSelection.resolveOne(ActorSelection.scala:63)
at akka.actor.ActorSelection.resolveOne(ActorSelection.scala:80)
at org.apache.spark.util.AkkaUtils$.makeDriverRef(AkkaUtils.scala:213)
at org.apache.spark.executor.Executor.startDriverHeartbeater(Executor.sc
ala:369)
at org.apache.spark.executor.Executor.<init>(Executor.scala:122)
at org.apache.spark.scheduler.local.LocalActor.<init>(LocalBackend.scala
:53)
at org.apache.spark.scheduler.local.LocalBackend$$anonfun$start$1.apply(
LocalBackend.scala:96)
at org.apache.spark.scheduler.local.LocalBackend$$anonfun$start$1.apply(
LocalBackend.scala:96)
at akka.actor.TypedCreatorFunctionConsumer.produce(Props.scala:343)
at akka.actor.Props.newActor(Props.scala:252)
at akka.actor.ActorCell.newActor(ActorCell.scala:552)
at akka.actor.ActorCell.create(ActorCell.scala:578)
... 9 more

Which firewall has to be stopped?