Member since: 09-25-2015
Posts: 4
Kudos Received: 11
Solutions: 1

My Accepted Solutions
| Title | Views | Posted |
|---|---|---|
| | 6083 | 03-07-2017 12:51 AM |
03-25-2017 12:07 AM
2 Kudos
2017-03-24 02:02:36,756|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|scala> import org.apache.spark.SparkContext; import org.apache.spark.sql._; import org.apache.phoenix.spark._; val df = sqlContext.load("org.apache.phoenix.spark", Map("table" -> "INPUT_TABLE", "zkUrl" -> "MYHOSTNAME:2181:/hbase-unsecure")); df.save("org.apache.phoenix.spark", SaveMode.Overwrite, Map("table" -> "OUTPUT_TABLE", "zkUrl" -> "MYHOSTNAME:2181:/hbase-unsecure"))
2017-03-24 02:02:38,054|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|warning: there were 2 deprecation warning(s); re-run with -deprecation for details
2017-03-24 02:02:38,256|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|java.lang.NoClassDefFoundError: net/sourceforge/cobertura/coveragedata/TouchCollector
2017-03-24 02:02:38,257|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at org.apache.phoenix.spark.DefaultSource.__cobertura_init(DefaultSource.scala)
2017-03-24 02:02:38,257|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at org.apache.phoenix.spark.DefaultSource.<clinit>(DefaultSource.scala)
2017-03-24 02:02:38,257|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-03-24 02:02:38,257|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-03-24 02:02:38,257|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-03-24 02:02:38,257|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
2017-03-24 02:02:38,257|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at java.lang.Class.newInstance(Class.java:442)
2017-03-24 02:02:38,258|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:152)
2017-03-24 02:02:38,258|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:119)
2017-03-24 02:02:38,258|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at org.apache.spark.sql.SQLContext.load(SQLContext.scala:1153)
2017-03-24 02:02:38,258|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at $iwC$iwC$iwC$iwC$iwC$iwC$iwC$iwC.<init>(<console>:25)
2017-03-24 02:02:38,258|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at $iwC$iwC$iwC$iwC$iwC$iwC$iwC.<init>(<console>:36)
2017-03-24 02:02:38,259|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at $iwC$iwC$iwC$iwC$iwC$iwC.<init>(<console>:38)
2017-03-24 02:02:38,259|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at $iwC$iwC$iwC$iwC$iwC.<init>(<console>:40)
2017-03-24 02:02:38,259|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at $iwC$iwC$iwC$iwC.<init>(<console>:42)
2017-03-24 02:02:38,259|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at $iwC$iwC$iwC.<init>(<console>:44)
2017-03-24 02:02:38,259|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at $iwC$iwC.<init>(<console>:46)
2017-03-24 02:02:38,259|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at $iwC.<init>(<console>:48)
2017-03-24 02:02:38,259|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at <init>(<console>:50)
2017-03-24 02:02:38,259|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at .<init>(<console>:54)
2017-03-24 02:02:38,259|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at .<clinit>(<console>)
2017-03-24 02:02:38,260|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at .<init>(<console>:7)
2017-03-24 02:02:38,260|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at .<clinit>(<console>)
2017-03-24 02:02:38,260|INFO|MainThread|machine.py:154 - run()||GUID=403b8c97-9eaa-43f9-8303-d88c35ea2a2e|at $print(<console>)
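For readability, here is the spark-shell snippet embedded in the log above, reconstructed as plain Scala (Spark 1.x API; INPUT_TABLE, OUTPUT_TABLE, and MYHOSTNAME:2181:/hbase-unsecure are the placeholders from the log, not real values). The snippet itself is standard phoenix-spark usage; the NoClassDefFoundError on net/sourceforge/cobertura/coveragedata/TouchCollector suggests the phoenix-spark jar on the classpath was built with Cobertura code-coverage instrumentation while the Cobertura runtime jar is missing, rather than a problem with this code.

```scala
// Reconstructed from the spark-shell session in the log above.
// sqlContext is the SQLContext that spark-shell provides automatically.
import org.apache.spark.SparkContext
import org.apache.spark.sql._
import org.apache.phoenix.spark._

// Load a Phoenix table into a DataFrame via the phoenix-spark data source.
val df = sqlContext.load(
  "org.apache.phoenix.spark",
  Map("table" -> "INPUT_TABLE", "zkUrl" -> "MYHOSTNAME:2181:/hbase-unsecure"))

// Write the DataFrame back out to another Phoenix table, overwriting it.
df.save(
  "org.apache.phoenix.spark",
  SaveMode.Overwrite,
  Map("table" -> "OUTPUT_TABLE", "zkUrl" -> "MYHOSTNAME:2181:/hbase-unsecure"))
```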
Labels:
- Apache Phoenix
- Apache Spark
03-07-2017 12:51 AM
5 Kudos
@Prabhat Ratnala The error is coming from Hive rather than Oozie. Error 10096 is a SemanticException: dynamic partition strict mode requires at least one static partition column. To turn strict mode off, set hive.exec.dynamic.partition.mode=nonstrict. You can verify this by running your Hive script without Oozie; once that works, try running it through Oozie again.
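As a minimal sketch of the two ways around this error (the table and column names here are hypothetical, not from the original question), you can either relax strict mode at the top of the Hive script or make at least one partition column static:

```sql
-- Option 1: allow all partition columns to be dynamic.
SET hive.exec.dynamic.partition=true;
SET hive.exec.dynamic.partition.mode=nonstrict;

INSERT OVERWRITE TABLE sales_partitioned PARTITION (country, dt)
SELECT id, amount, country, dt FROM sales_staging;

-- Option 2: satisfy strict mode by making one partition column static.
-- INSERT OVERWRITE TABLE sales_partitioned PARTITION (country = 'US', dt)
-- SELECT id, amount, dt FROM sales_staging WHERE country = 'US';
```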