Failed to execute spark task, with exception 'java.lang.Exception' (HIVE ON SPARK)

 

We recently moved to Hive on Spark on CDH 5.8.0 and drive our workflows with Oozie. We are now hitting the following issue on a regular basis, and it causes our workflows to fail: a workflow succeeds a few times and then fails most of the time. It would be great if someone could help us resolve this.
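For context, the failing queries are submitted to HiveServer2 from an Oozie workflow roughly like the sketch below (host names, the script name, and the inline engine property are illustrative, not our exact values; in our cluster hive.execution.engine=spark is set cluster-wide):

<workflow-app name="hive-on-spark-wf" xmlns="uri:oozie:workflow:0.5">
    <start to="hive2-node"/>
    <action name="hive2-node">
        <hive2 xmlns="uri:oozie:hive2-action:0.1">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <configuration>
                <property>
                    <!-- illustrative: normally inherited from hive-site.xml rather than set here -->
                    <name>hive.execution.engine</name>
                    <value>spark</value>
                </property>
            </configuration>
            <jdbc-url>jdbc:hive2://hiveserver2-host:10000/default</jdbc-url>
            <script>query.hql</script>
        </hive2>
        <ok to="end"/>
        <error to="fail"/>
    </action>
    <kill name="fail">
        <message>Hive2 action failed: ${wf:errorMessage(wf:lastErrorNode())}</message>
    </kill>
    <end name="end"/>
</workflow-app>

When the problem occurs, the error below is what HiveServer2 returns for the query launched by that action.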

 

ERROR : Failed to execute spark task, with exception 'java.lang.Exception(Failed to submit Spark work, please retry later)'
java.lang.Exception: Failed to submit Spark work, please retry later
	at org.apache.hadoop.hive.ql.exec.spark.RemoteHiveSparkClient.execute(RemoteHiveSparkClient.java:184)
	at org.apache.hadoop.hive.ql.exec.spark.session.SparkSessionImpl.submit(SparkSessionImpl.java:71)
	at org.apache.hadoop.hive.ql.exec.spark.SparkTask.execute(SparkTask.java:103)
	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1782)
	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1539)
	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1318)
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1127)
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1120)
	at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:178)
	at org.apache.hive.service.cli.operation.SQLOperation.access$100(SQLOperation.java:72)
	at org.apache.hive.service.cli.operation.SQLOperation$2$1.run(SQLOperation.java:232)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
	at org.apache.hive.service.cli.operation.SQLOperation$2.run(SQLOperation.java:245)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.IllegalStateException: zip file closed
	at java.util.zip.ZipFile.ensureOpen(ZipFile.java:634)
	at java.util.zip.ZipFile.getEntry(ZipFile.java:305)
	at java.util.jar.JarFile.getEntry(JarFile.java:227)
	at sun.net.www.protocol.jar.URLJarFile.getEntry(URLJarFile.java:128)
	at sun.net.www.protocol.jar.JarURLConnection.connect(JarURLConnection.java:132)
	at sun.net.www.protocol.jar.JarURLConnection.getInputStream(JarURLConnection.java:150)
	at java.net.URLClassLoader.getResourceAsStream(URLClassLoader.java:233)
	at javax.xml.parsers.SecuritySupport$4.run(SecuritySupport.java:94)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.xml.parsers.SecuritySupport.getResourceAsStream(SecuritySupport.java:87)
	at javax.xml.parsers.FactoryFinder.findJarServiceProvider(FactoryFinder.java:283)
	at javax.xml.parsers.FactoryFinder.find(FactoryFinder.java:255)
	at javax.xml.parsers.DocumentBuilderFactory.newInstance(DocumentBuilderFactory.java:121)
	at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:2526)
	at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:2503)
	at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2409)
	at org.apache.hadoop.conf.Configuration.set(Configuration.java:1144)
	at org.apache.hadoop.conf.Configuration.set(Configuration.java:1116)
	at org.apache.hadoop.mapred.JobConf.setJar(JobConf.java:525)
	at org.apache.hadoop.mapred.JobConf.setJarByClass(JobConf.java:543)
	at org.apache.hadoop.mapred.JobConf.<init>(JobConf.java:437)
	at org.apache.hadoop.hive.ql.exec.spark.RemoteHiveSparkClient.refreshLocalResources(RemoteHiveSparkClient.java:211)
	at org.apache.hadoop.hive.ql.exec.spark.RemoteHiveSparkClient.submit(RemoteHiveSparkClient.java:191)
	at org.apache.hadoop.hive.ql.exec.spark.RemoteHiveSparkClient.execute(RemoteHiveSparkClient.java:182)
	... 21 more

ERROR : FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.spark.SparkTask