Created 11-16-2017 06:42 PM
I recently upgraded my cluster to HDP 2.6.3, which includes Spark 2.2.0 and Livy 0.4.0. My Livy job is failing every time with an RSC driver exception. I analysed the logs and found that the host is not able to add the JAR file from the given location, which is why it is failing.
Error: WARN DefaultPromise: An exception was thrown by org.apache.livy.rsc.Utils$2.operationComplete() java.util.concurrent.RejectedExecutionException: event executor terminated at io.netty.util.concurrent.SingleThreadEventExecutor.reject(SingleThreadEventExecutor.java:796) at io.netty.util.concurrent.SingleThreadEventExecutor.offerTask(SingleThreadEventExecutor.java:336) at io.netty.util.concurrent.SingleThreadEventExecutor.addTask(SingleThreadEventExecutor.java:329) at io.netty.util.concurrent.SingleThreadEventExecutor.execute(SingleThreadEventExecutor.java:739) at io.netty.util.concurrent.AbstractScheduledEventExecutor.schedule(AbstractScheduledEventExecutor.java:190) at io.netty.util.concurrent.AbstractScheduledEventExecutor.schedule(AbstractScheduledEventExecutor.java:134) at io.netty.util.concurrent.AbstractEventExecutorGroup.schedule(AbstractEventExecutorGroup.java:49) at org.apache.livy.rsc.driver.RSCDriver.setupIdleTimeout(RSCDriver.java:238) at org.apache.livy.rsc.driver.RSCDriver.access$100(RSCDriver.java:70) at org.apache.livy.rsc.driver.RSCDriver$2.onSuccess(RSCDriver.java:220) at org.apache.livy.rsc.driver.RSCDriver$2.onSuccess(RSCDriver.java:216) at org.apache.livy.rsc.Utils$2.operationComplete(Utils.java:108) at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:507) at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:481) at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:420) at io.netty.util.concurrent.DefaultPromise.trySuccess(DefaultPromise.java:104) at io.netty.channel.DefaultChannelPromise.trySuccess(DefaultChannelPromise.java:82) at io.netty.channel.AbstractChannel$CloseFuture.setClosed(AbstractChannel.java:1004) at io.netty.channel.AbstractChannel$AbstractUnsafe.doClose0(AbstractChannel.java:633) at io.netty.channel.AbstractChannel$AbstractUnsafe.close(AbstractChannel.java:611) at io.netty.channel.AbstractChannel$AbstractUnsafe.close(AbstractChannel.java:554) 
at io.netty.channel.DefaultChannelPipeline$HeadContext.close(DefaultChannelPipeline.java:1236) at io.netty.channel.AbstractChannelHandlerContext.invokeClose(AbstractChannelHandlerContext.java:619) at io.netty.channel.AbstractChannelHandlerContext.close(AbstractChannelHandlerContext.java:603) at io.netty.channel.ChannelDuplexHandler.close(ChannelDuplexHandler.java:73) at io.netty.channel.AbstractChannelHandlerContext.invokeClose(AbstractChannelHandlerContext.java:619) at io.netty.channel.AbstractChannelHandlerContext.close(AbstractChannelHandlerContext.java:603) at io.netty.channel.AbstractChannelHandlerContext.close(AbstractChannelHandlerContext.java:460) at io.netty.channel.DefaultChannelPipeline.close(DefaultChannelPipeline.java:949) at io.netty.channel.AbstractChannel.close(AbstractChannel.java:194) at org.apache.livy.rsc.rpc.Rpc.close(Rpc.java:307) at org.apache.livy.rsc.driver.RSCDriver.shutdownServer(RSCDriver.java:309) at org.apache.livy.rsc.driver.RSCDriver.shutdown(RSCDriver.java:133) at org.apache.livy.rsc.driver.RSCDriver.handle(RSCDriver.java:396) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:497) at org.apache.livy.rsc.rpc.RpcDispatcher.handleCall(RpcDispatcher.java:130) at org.apache.livy.rsc.rpc.RpcDispatcher.channelRead0(RpcDispatcher.java:77) at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336) at 
io.netty.channel.ChannelInboundHandlerAdapter.channelRead(ChannelInboundHandlerAdapter.java:86) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:293) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:267) at io.netty.handler.codec.ByteToMessageCodec.channelRead(ByteToMessageCodec.java:103) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:643) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:566) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:480) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:442) at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:131) at java.lang.Thread.run(Thread.java:745)
Created 12-18-2017 07:09 AM
I already did a few reboots, but it didn't help. I upgraded my HDP cluster via Ambari. I'm using Spark 1.6.3, so I don't need to use livy2, right? It seems to be an issue in Livy 0.4.0.
Created 02-08-2018 02:25 PM
I came across this same issue after a cluster upgrade from HDP 2.6.1 to HDP 2.6.4.
From HDP 2.6.3, livy was changed from cloudera 0.3.0 to apache 0.4.0-incubating and so I duly updated my sbt library dependencies to fetch the 0.4.0 from maven (e.g. "org.apache.livy" % "livy-api" % "0.4.0-incubating", etc). However, using pkgdiff I found that HDP's 0.4.0 is *not quite* the same as apache's 0.4.0. In particular there is a new "jobType" parameter added to org/apache/livy/client/common/HttpMessages$SerializedJob and the lack of this parameter being supplied by the client code (linked to apache 0.4.0) directly results in the "IllegalArgumentException: Invalid kind: null" exception and the RpcDispatcher's "NoSuchElementException: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx". This mismatch between the 0.4.0 client and "0.4.0" server can be fixed by forcing the livy client libraries to be picked up from the Hortonworks maven repo in build.sbt as follows (something I did not have to do for livy 0.3.0).
resolvers += "hortonworks repo" at "http://repo.hortonworks.com/content/repositories/releases" libraryDependencies ++= Seq( ... "org.apache.livy" % "livy-api" % "0.4.0.2.6.4.0-91", "org.apache.livy" %% "livy-scala-api" % "0.4.0.2.6.4.0-91", "org.apache.livy" % "livy-client-http" % "0.4.0.2.6.4.0-91" )
(Note that this new parameter is in 0.5.0-incubating (not yet on maven) so it seems that either Hortonworks took a later version or Apache removed this afterwards - either way it is not in the 0.4.0-incubating release at https://github.com/apache/incubator-livy made on August 30th 2017)
Hope this helps someone!
Created 02-20-2018 08:53 AM
@Simon George Yes, that did the trick for me! Thank you for sharing your work-around!
I downloaded the Livy jars, corresponding to my HDP Version from the Hortonworks repo (as shown above), and added them to my Java project: http://repo.hortonworks.com/content/repositories/releases/org/apache/livy/
After adding these libraries, the Spark Pi calculation example worked for me: https://github.com/cloudera/livy#using-the-programmatic-api
Created 02-20-2018 03:30 PM
Still facing the error in HDP 2.6.4 & Livy 0.4.0.
Steps:
1. <em>Livy session was created as spark.</em>
2. <em>Added Scala JAR (my function).</em>
3. <em>livyClient.run(livyJob())</em> gives the error:
<em> 18/02/20 14:53:44 INFO InteractiveSession: Interactive session 12 created [appid: application_1518698685392_0040, owner: null, proxyUser: Some(hive), state: idle, kind: spark, info: {driverLogUrl=http://hdp04d03.fuzzyl.com:8042/node/containerlogs/container_e04_1518698685392_0040_01_000001/hive, sparkUiUrl=http://ambari04.fuzzyl.com:8088/proxy/application_1518698685392_0040/}] 18/02/20 14:53:44 INFO RSCClient: Received result for 51f4a85f-a499-4dc4-ad9f-9a0e7a42ea64 18/02/20 14:53:44 ERROR SessionServlet$: internal error java.util.concurrent.ExecutionException: java.lang.RuntimeException: py4j.Py4JException: Error while obtaining a new communication channel py4j.CallbackClient.getConnectionLock(CallbackClient.java:218) py4j.CallbackClient.sendCommand(CallbackClient.java:337) py4j.CallbackClient.sendCommand(CallbackClient.java:316) py4j.reflection.PythonProxyHandler.invoke(PythonProxyHandler.java:103) com.sun.proxy.$Proxy24.getLocalTmpDirPath(Unknown Source) org.apache.livy.repl.PythonInterpreter.addPyFile(PythonInterpreter.scala:264) org.apache.livy.repl.ReplDriver$anonfun$addJarOrPyFile$1.apply(ReplDriver.scala:110) org.apache.livy.repl.ReplDriver$anonfun$addJarOrPyFile$1.apply(ReplDriver.scala:110) scala.Option.foreach(Option.scala:257) org.apache.livy.repl.ReplDriver.addJarOrPyFile(ReplDriver.scala:110) org.apache.livy.rsc.driver.JobContextImpl.addJarOrPyFile(JobContextImpl.java:100) org.apache.livy.rsc.driver.AddJarJob.call(AddJarJob.java:39) org.apache.livy.rsc.driver.JobWrapper.call(JobWrapper.java:57) org.apache.livy.rsc.driver.JobWrapper.call(JobWrapper.java:34) java.util.concurrent.FutureTask.run(FutureTask.java:266) java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) java.lang.Thread.run(Thread.java:745) at io.netty.util.concurrent.AbstractFuture.get(AbstractFuture.java:37) at org.apache.livy.rsc.JobHandleImpl.get(JobHandleImpl.java:60) at 
org.apache.livy.server.interactive.InteractiveSession.addJar(InteractiveSession.scala:542) at org.apache.livy.server.interactive.InteractiveSessionServlet.org$apache$livy$server$interactive$InteractiveSessionServlet$addJarOrPyFile(InteractiveSessionServlet.scala:241) at org.apache.livy.server.interactive.InteractiveSessionServlet$anonfun$19$anonfun$apply$16.apply(InteractiveSessionServlet.scala:208) at org.apache.livy.server.interactive.InteractiveSessionServlet$anonfun$19$anonfun$apply$16.apply(InteractiveSessionServlet.scala:207) at org.apache.livy.server.interactive.SessionHeartbeatNotifier$anonfun$withModifyAccessSession$1.apply(SessionHeartbeat.scala:76) at org.apache.livy.server.interactive.SessionHeartbeatNotifier$anonfun$withModifyAccessSession$1.apply(SessionHeartbeat.scala:74) at org.apache.livy.server.SessionServlet.doWithSession(SessionServlet.scala:221) at org.apache.livy.server.SessionServlet.withModifyAccessSession(SessionServlet.scala:212) at org.apache.livy.server.interactive.InteractiveSessionServlet.org$apache$livy$server$interactive$SessionHeartbeatNotifier$super$withModifyAccessSession(InteractiveSessionServlet.scala:40) at org.apache.livy.server.interactive.SessionHeartbeatNotifier$class.withModifyAccessSession(SessionHeartbeat.scala:74) at org.apache.livy.server.interactive.InteractiveSessionServlet.withModifyAccessSession(InteractiveSessionServlet.scala:40) at org.apache.livy.server.interactive.InteractiveSessionServlet$anonfun$19.apply(InteractiveSessionServlet.scala:207) at org.apache.livy.server.interactive.InteractiveSessionServlet$anonfun$19.apply(InteractiveSessionServlet.scala:206) at org.apache.livy.server.JsonServlet.org$apache$livy$server$JsonServlet$doAction(JsonServlet.scala:113) at org.apache.livy.server.JsonServlet$anonfun$jpost$1.apply(JsonServlet.scala:75) at org.scalatra.ScalatraBase$class.org$scalatra$ScalatraBase$liftAction(ScalatraBase.scala:270) at org.scalatra.ScalatraBase$anonfun$invoke$1.apply(ScalatraBase.scala:265) 
at org.scalatra.ScalatraBase$anonfun$invoke$1.apply(ScalatraBase.scala:265) at org.scalatra.ApiFormats$class.withRouteMultiParams(ApiFormats.scala:178) at org.apache.livy.server.JsonServlet.withRouteMultiParams(JsonServlet.scala:39) at org.scalatra.ScalatraBase$class.invoke(ScalatraBase.scala:264) at org.scalatra.ScalatraServlet.invoke(ScalatraServlet.scala:49) at org.scalatra.ScalatraBase$anonfun$runRoutes$1$anonfun$apply$8.apply(ScalatraBase.scala:240) at org.scalatra.ScalatraBase$anonfun$runRoutes$1$anonfun$apply$8.apply(ScalatraBase.scala:238) at scala.Option.flatMap(Option.scala:170) at org.scalatra.ScalatraBase$anonfun$runRoutes$1.apply(ScalatraBase.scala:238) at org.scalatra.ScalatraBase$anonfun$runRoutes$1.apply(ScalatraBase.scala:237) at scala.collection.immutable.Stream.flatMap(Stream.scala:446)</em>