Member since
09-12-2017
24
Posts
4
Kudos Received
1
Solution
My Accepted Solutions
Title | Views | Posted |
---|---|---|
781 | 08-29-2018 06:25 AM |
07-01-2019
03:16 PM
Hi,
When clicking on slices in superset gui (/slicemodelview/list/), I'm getting an internal server error:
Traceback (most recent call last):
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask/app.py", line 1982, in wsgi_app
response = self.full_dispatch_request()
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask/app.py", line 1614, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask/app.py", line 1517, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask/_compat.py", line 33, in reraise
raise value
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask/app.py", line 1612, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask/app.py", line 1598, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask_appbuilder/security/decorators.py", line 26, in wraps
return f(self, *args, **kwargs)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask_appbuilder/views.py", line 478, in list
widgets=widgets)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask_appbuilder/baseviews.py", line 160, in render_template
return render_template(template, **dict(list(kwargs.items()) + list(self.extra_args.items())))
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask/templating.py", line 134, in render_template
context, ctx.app)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask/templating.py", line 116, in _render
rv = template.render(context)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/jinja2/environment.py", line 1008, in render
return self.environment.handle_exception(exc_info, True)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/jinja2/environment.py", line 780, in handle_exception
reraise(exc_type, exc_value, tb)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/jinja2/_compat.py", line 37, in reraise
raise value.with_traceback(tb)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/superset/templates/appbuilder/general/model/list.html", line 2, in top-level template code
{% import 'appbuilder/general/lib.html' as lib %}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask_appbuilder/templates/appbuilder/base.html", line 1, in top-level template code
{% extends base_template %}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/superset/templates/superset/base.html", line 1, in top-level template code
{% extends "appbuilder/baselayout.html" %}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/superset/templates/appbuilder/baselayout.html", line 2, in top-level template code
{% import 'appbuilder/baselib.html' as baselib %}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask_appbuilder/templates/appbuilder/init.html", line 46, in top-level template code
{% block body %}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/superset/templates/appbuilder/baselayout.html", line 21, in block "body"
{% block content %}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/superset/templates/appbuilder/general/model/list.html", line 12, in block "content"
{% block list_list scoped %}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/superset/templates/appbuilder/general/model/list.html", line 13, in block "list_list"
{{ widgets.get('list')()|safe }}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask_appbuilder/widgets.py", line 34, in __call__
return template.render(args)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/jinja2/environment.py", line 1008, in render
return self.environment.handle_exception(exc_info, True)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/jinja2/environment.py", line 780, in handle_exception
reraise(exc_type, exc_value, tb)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/jinja2/_compat.py", line 37, in reraise
raise value.with_traceback(tb)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask_appbuilder/templates/appbuilder/general/widgets/list.html", line 2, in top-level template code
{% extends 'appbuilder/general/widgets/base_list.html' %}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/superset/templates/appbuilder/general/widgets/base_list.html", line 25, in top-level template code
{% block begin_loop_values %}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask_appbuilder/templates/appbuilder/general/widgets/list.html", line 45, in block "begin_loop_values"
{% for item in value_columns %}
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/jinja2/runtime.py", line 435, in __init__
self._after = self._safe_next()
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/jinja2/runtime.py", line 455, in _safe_next
return next(self._iterator)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask_appbuilder/models/base.py", line 114, in get_values
retdict[col] = self._get_attr_value(item, col)
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/flask_appbuilder/models/base.py", line 66, in _get_attr_value
return getattr(item, col)()
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/superset/models/core.py", line 146, in datasource_link
return datasource.link if datasource else None
File "/usr/hdp/3.1.0.0-78/superset/lib/python3.4/site-packages/superset/connectors/druid/models.py", line 517, in link
return Markup('<a href="{self.url}">{name}</a>').format(**locals())
TypeError: format() got multiple values for argument 'self'
... View more
Labels:
- Labels:
-
Hortonworks Data Platform (HDP)
05-23-2019
12:37 PM
Hello, I'm still searching for a solution to use another timestamp instead of "processingTime". Background: I have a kafka message which contains a timestamp as string. I use a custom processor to calculate some values but I want to use the timestamp from the kafka message instead of the timestamp of calculation. Thank you in advance.
... View more
09-27-2018
11:47 AM
Hello, I'm getting kafka messages with a timestamp. Is it possible to use this timestamp as timestamp field in druid sink processor?
... View more
Labels:
- Labels:
-
Apache Kafka
09-17-2018
10:26 AM
My solution was to setup a new one node cluster with HDF3.2, connect the environment of the new streamline installation to the existing cluster, export SAM applications from existing SAM and import applications to new SAM.
... View more
08-31-2018
12:22 PM
For me the following aggregation is working: [{ "type" : "doubleMax" , "name" : "dummy_value", "fieldName" : "original_value" }]
... View more
08-29-2018
06:38 AM
Hi, I want to have a time series chart and display each value without aggregation. Superset requires a metric. How can I create a metric without aggregation?
... View more
08-29-2018
06:25 AM
1 Kudo
Got it: DruidTranquilityController - property "Aggregator JSON" should be []
... View more
08-24-2018
11:19 AM
I checked all symbolic links and can not find a link pointing to version 3.1.1.0. The streamline.log shows the following output: ERROR [2018-08-24 13:14:16.009] [ForkJoinPool-4-worker-9] c.h.s.s.a.t.s.TopologyStates - Trying to kill any running instance of topology 'test' ERROR [2018-08-24 13:14:16.026] [ForkJoinPool-4-worker-9] c.h.s.c.u.ParallelStreamUtil - Got exception while running async task java.lang.RuntimeException: java.lang.Exception: Topology could not be deployed successfully: storm deploy command failed with Exception in thread "main" java.lang.RuntimeException: org.eclipse.aether.resolution.DependencyResolutionException: Failed to collect dependencies at org.apache.kafka:kafka-clients:jar:0.10.2.1 -> org.apache.storm:storm-kafka-client:jar:1.1.1.3.1.1.0-35, Caused by: org.eclipse.aether.resolution.DependencyResolutionException: Failed to collect dependencies at org.apache.kafka:kafka-clients:jar:0.10.2.1 -> org.apache.storm:storm-kafka-client:jar:1.1.1.3.1.1.0-35, Caused by: org.eclipse.aether.collection.DependencyCollectionException: Failed to collect dependencies at org.apache.kafka:kafka-clients:jar:0.10.2.1 -> org.apache.storm:storm-kafka-client:jar:1.1.1.3.1.1.0-35, Caused by: org.eclipse.aether.resolution.ArtifactDescriptorException: Failed to read artifact descriptor for org.apache.storm:storm-kafka-client:jar:1.1.1.3.1.1.0-35, Caused by: org.eclipse.aether.resolution.ArtifactResolutionException: Could not transfer artifact org.apache.storm:storm-kafka-client:pom:1.1.1.3.1.1.0-35 from/to central (http://repo1.maven.org/maven2/): Connect to repo1.maven.org:80 timed out, Caused by: org.eclipse.aether.transfer.ArtifactTransferException: Could not transfer artifact org.apache.storm:storm-kafka-client:pom:1.1.1.3.1.1.0-35 from/to central (http://repo1.maven.org/maven2/): Connect to repo1.maven.org:80 timed out, Caused by: org.apache.http.conn.ConnectTimeoutException: Connect to repo1.maven.org:80 timed out at 
com.hortonworks.streamline.common.util.ParallelStreamUtil.lambda$runAsync$0(ParallelStreamUtil.java:58) at java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1590) at java.util.concurrent.CompletableFuture$AsyncSupply.exec(CompletableFuture.java:1582) at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289) at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056) at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692) at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157) Caused by: java.lang.Exception: Topology could not be deployed successfully: storm deploy command failed with Exception in thread "main" java.lang.RuntimeException: org.eclipse.aether.resolution.DependencyResolutionException: Failed to collect dependencies at org.apache.kafka:kafka-clients:jar:0.10.2.1 -> org.apache.storm:storm-kafka-client:jar:1.1.1.3.1.1.0-35, Caused by: org.eclipse.aether.resolution.DependencyResolutionException: Failed to collect dependencies at org.apache.kafka:kafka-clients:jar:0.10.2.1 -> org.apache.storm:storm-kafka-client:jar:1.1.1.3.1.1.0-35, Caused by: org.eclipse.aether.collection.DependencyCollectionException: Failed to collect dependencies at org.apache.kafka:kafka-clients:jar:0.10.2.1 -> org.apache.storm:storm-kafka-client:jar:1.1.1.3.1.1.0-35, Caused by: org.eclipse.aether.resolution.ArtifactDescriptorException: Failed to read artifact descriptor for org.apache.storm:storm-kafka-client:jar:1.1.1.3.1.1.0-35, Caused by: org.eclipse.aether.resolution.ArtifactResolutionException: Could not transfer artifact org.apache.storm:storm-kafka-client:pom:1.1.1.3.1.1.0-35 from/to central (http://repo1.maven.org/maven2/): Connect to repo1.maven.org:80 timed out, Caused by: org.eclipse.aether.transfer.ArtifactTransferException: Could not transfer artifact org.apache.storm:storm-kafka-client:pom:1.1.1.3.1.1.0-35 from/to central (http://repo1.maven.org/maven2/): Connect to 
repo1.maven.org:80 timed out, Caused by: org.apache.http.conn.ConnectTimeoutException: Connect to repo1.maven.org:80 timed out at com.hortonworks.streamline.streams.actions.storm.topology.StormTopologyActionsImpl.deploy(StormTopologyActionsImpl.java:288) at com.hortonworks.streamline.streams.actions.topology.state.TopologyStates$5.deploy(TopologyStates.java:123) at com.hortonworks.streamline.streams.actions.topology.state.TopologyContext.deploy(TopologyContext.java:87) at com.hortonworks.streamline.streams.actions.topology.service.TopologyActionsService.lambda$deployTopology$1(TopologyActionsService.java:125) at com.hortonworks.registries.storage.transaction.ManagedTransaction.lambda$executeConsumer$7(ManagedTransaction.java:165) at com.hortonworks.registries.storage.transaction.ManagedTransaction.executeTransactionBlockInternal(ManagedTransaction.java:243) at com.hortonworks.registries.storage.transaction.ManagedTransaction.executeConsumer(ManagedTransaction.java:164) at com.hortonworks.streamline.streams.actions.topology.service.TopologyActionsService.deployTopology(TopologyActionsService.java:123) at com.hortonworks.streamline.streams.service.TopologyActionResource.lambda$deploy$0(TopologyActionResource.java:161) at com.hortonworks.streamline.common.util.ParallelStreamUtil.lambda$runAsync$0(ParallelStreamUtil.java:56) ... 6 common frames omitted
... View more
08-24-2018
07:06 AM
Hi, I want to use NiFi for ingesting data into druid for realtime dashboarding (device_id, sensor_id, sensor_value - without aggregation). I have no clue how the DruidTranquilityController should be configured in order to achieve this. Thank you in advance.
... View more
Labels:
- Labels:
-
Apache NiFi
08-21-2018
06:14 AM
The version seems to be correct: :~# ls -lrth /usr/hdf/current/streamline lrwxrwxrwx 1 root root 31 Aug 17 11:25 /usr/hdf/current/streamline -> /usr/hdf/3.2.0.0-520/streamline :~# hdf-select | grep streamline streamline - 3.2.0.0-520 :/usr/hdf/current/streamline/bootstrap/components/sinks# cat kafka-sink-topology-component.json { "type": "SINK", "name": "Kafka", "subType": "KAFKA", "streamingEngine": "STORM", "builtin": true, "fieldHintProviderClass": "com.hortonworks.streamline.streams.cluster.bundle.impl.KafkaSinkBundleHintProvider", "transformationClass": "com.hortonworks.streamline.streams.layout.storm.KafkaBoltFluxComponent", "mavenDeps": "org.apache.kafka:kafka-clients:0.10.2.1,org.apache.storm:storm-kafka-client:1.2.1.3.2.0.0-520^org.slf4j:slf4j-log4j12^log4j:log4j^org.apache.zookeeper:zookeeper^org.apache.kafka:kafka-clients", "topologyComponentUISpecification": { "fields": [
... View more
08-20-2018
01:04 PM
I'm using SAM 0.6.0.3.2.0.0-520 and during application deployment I can see, that SAM tries to reach these URLs.
... View more
08-20-2018
07:11 AM
1 Kudo
Hi, I'm trying to deploy an application via SAM. The requested jars don't exist in repo: http://repo.hortonworks.com/content/groups/public/org/apache/storm/storm-kafka-client/1.1.1.3.1.1.0-35/storm-kafka-client-1.1.1.3.1.1.0-35.jar http://repo.hortonworks.com/content/groups/public/org/apache/storm/storm-hdfs/1.1.1.3.1.1.0-35/storm-hdfs-1.1.1.3.1.1.0-35.jar http://nexus-private.hortonworks.com/nexus/content/groups/public/org/apache/storm/storm-kafka-client/1.1.1.3.1.1.0-35/storm-kafka-client-1.1.1.3.1.1.0-35.jar http://nexus-private.hortonworks.com/nexus/content/groups/public/org/apache/storm/storm-hdfs/1.1.1.3.1.1.0-35/storm-hdfs-1.1.1.3.1.1.0-35.jar
... View more
Labels:
- Labels:
-
Apache Storm
-
Cloudera DataFlow (CDF)
08-17-2018
12:20 PM
2 Kudos
Hi, I'm using SAM behind a proxy. In HDF 3.1 application deployment was working with proxy settings (maven repo could be reached). After upgrading to HDF 3.2 I'm getting the error message "(http://repo1.maven.org/maven2/): repo1.maven.org: Name or service not known".
... View more
Labels:
- Labels:
-
Cloudera DataFlow (CDF)
07-27-2018
09:28 AM
ambari-2-7.txt
... View more
Labels:
- Labels:
-
Apache Ambari
07-26-2018
10:08 AM
hi @Akhil S Naik, Ubuntu 14 is listed in the upgrade documentation https://docs.hortonworks.com/HDPDocuments/Ambari-2.7.0.0/bk_ambari-upgrade/content/upgrade_ambari.html
... View more
07-26-2018
09:06 AM
Hi, I'm trying to upgrade Ambari to 2.7 and getting HTTP 404: wget -nv http://public-repo-1.hortonworks.com/ambari/ubuntu14/2.x/updates/2.7.0.0/ambari.list -O /etc/apt/sources.list.d/ambari.list
... View more
Labels:
- Labels:
-
Apache Ambari
03-07-2018
06:15 PM
@Pierre Villard I'm facing the same issue.
... View more
12-13-2017
02:29 PM
There is no option "hive.druid.broker.address.default"? Which version is required?
... View more
12-13-2017
01:05 PM
Hi, When I try to run a select including "where ..." I get an error message: Error: Error while processing statement: FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.tez.TezTask. Vertex failed, vertexName=Map 1, vertexId=vertex_1512397358181_0387_8_00, diagnostics=[Vertex vertex_1512397358181_0387_8_00 [Map 1] killed/failed due to:ROOT_INPUT_INIT_FAILURE, Vertex Input: druid_processalert initializer failed, vertex=vertex_1512397358181_0387_8_00 [Map 1], java.io.IOException: java.io.IOException: org.apache.hive.druid.org.jboss.netty.channel.ChannelException: Faulty channel in resource pool at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.submitRequest(DruidStorageHandlerUtils.java:201) at org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat.distributeSelectQuery(DruidQueryBasedInputFormat.java:203) at org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat.getInputSplits(DruidQueryBasedInputFormat.java:162) at org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat.getSplits(DruidQueryBasedInputFormat.java:106) at org.apache.hadoop.hive.ql.io.HiveInputFormat.addSplitsForGroup(HiveInputFormat.java:446) at org.apache.hadoop.hive.ql.io.HiveInputFormat.getSplits(HiveInputFormat.java:569) at org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator.initialize(HiveSplitGenerator.java:196) at org.apache.tez.dag.app.dag.RootInputInitializerManager$InputInitializerCallable$1.run(RootInputInitializerManager.java:278) at org.apache.tez.dag.app.dag.RootInputInitializerManager$InputInitializerCallable$1.run(RootInputInitializerManager.java:269) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866) at org.apache.tez.dag.app.dag.RootInputInitializerManager$InputInitializerCallable.call(RootInputInitializerManager.java:269) at 
org.apache.tez.dag.app.dag.RootInputInitializerManager$InputInitializerCallable.call(RootInputInitializerManager.java:253) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hive.druid.org.jboss.netty.channel.ChannelException: Faulty channel in resource pool at org.apache.hive.druid.com.metamx.http.client.NettyHttpClient.go(NettyHttpClient.java:137) at org.apache.hive.druid.com.metamx.http.client.AbstractHttpClient.go(AbstractHttpClient.java:14) at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.submitRequest(DruidStorageHandlerUtils.java:199) ... 17 more Caused by: java.net.ConnectException: Connection refused: localhost/127.0.0.1:8082 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method) at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717) at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.connect(NioClientBoss.java:152) at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.processSelectedKeys(NioClientBoss.java:105) at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.process(NioClientBoss.java:79) at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.AbstractNioSelector.run(AbstractNioSelector.java:337) at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.run(NioClientBoss.java:42) at org.apache.hive.druid.org.jboss.netty.util.ThreadRenamingRunnable.run(ThreadRenamingRunnable.java:108) at org.apache.hive.druid.org.jboss.netty.util.internal.DeadLockProofWorker$1.run(DeadLockProofWorker.java:42) ... 3 more There is a ConnectException to localhost? Is this the root cause and how can I fix it? Thank you in advance.
... View more
Labels:
- Labels:
-
Apache Hive
11-02-2017
07:54 PM
@Jay Kumar SenSharma
You are right. I changed the <LLAP app java opts> and deleted {% endif %}. Thanks
... View more
11-02-2017
04:58 PM
Can not start Hive LLAP: File
"/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py",
line 616, in <module>
HiveServerInteractive().execute() File
"/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py",
line 329, in execute
method(env) File
"/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py",
line 865, in restart self.start(env,
upgrade_type=upgrade_type) File
"/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py",
line 121, in start status =
self._llap_start(env) File
"/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py",
line 258, in _llap_start llap_java_args =
InlineTemplate(params.llap_app_java_opts).get_content() File
"/usr/lib/python2.6/site-packages/resource_management/core/source.py", line 150,
in __init__
super(InlineTemplate, self).__init__(name, extra_imports, **kwargs) File
"/usr/lib/python2.6/site-packages/resource_management/core/source.py", line 137,
in __init__ self.template =
self.template_env.get_template(self.name) File
"/usr/lib/python2.6/site-packages/ambari_jinja2/environment.py", line 716, in
get_template return
self._load_template(name, self.make_globals(globals)) File
"/usr/lib/python2.6/site-packages/ambari_jinja2/environment.py", line 690, in
_load_template template =
self.loader.load(self, name, globals) File
"/usr/lib/python2.6/site-packages/ambari_jinja2/loaders.py", line 127, in
load code =
environment.compile(source, name, filename) File
"/usr/lib/python2.6/site-packages/ambari_jinja2/environment.py", line 492, in
compile
self.handle_exception(exc_info, source_hint=source) File
"<unknown>", line 1, in template ambari_jinja2.exceptions.TemplateSyntaxError:
Unexpected end of template. Jinja was looking for the following tags: 'endif'.
The innermost block that needs to be closed is 'if'. Maybe this issue is related to the wrong python version? OS: Ubuntu 14.04.5 # /usr/bin/python -V Python 2.7.6
... View more
Labels:
- Labels:
-
Apache Hive
09-12-2017
10:03 AM
Which version will fix this issue?
... View more