2016-11-30 10:32:43,964 ERROR [put-hive-streaming-0] o.a.hive.hcatalog.streaming.HiveEndPoint Fatal error on {}; cause null
java.lang.NullPointerException: null
2016-11-30 10:32:43,965 ERROR [Timer-Driven Process Thread-6] o.a.n.processors.hive.PutHiveStreaming PutHiveStreaming[id=9b263042-0158-1000-bc12-8f7f1bebaf33] Failed to create HiveWriter for endpoint: {metaStoreUri='thrift://hdp001db1.hadoop.za.cellc.net:9083', database='starhome', table='olympics', partitionVals=[] }
2016-11-30 10:32:43,970 ERROR [Timer-Driven Process Thread-6] o.a.n.processors.hive.PutHiveStreaming
org.apache.nifi.util.hive.HiveWriter$ConnectFailure: Failed connecting to EndPoint {metaStoreUri='thrift://hdp001db1.hadoop.za.cellc.net:9083', database='starhome', table='olympics', partitionVals=[] }
        at org.apache.nifi.util.hive.HiveWriter.<init>(HiveWriter.java:80) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.util.hive.HiveUtils.makeHiveWriter(HiveUtils.java:45) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.processors.hive.PutHiveStreaming.makeHiveWriter(PutHiveStreaming.java:827) [nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.processors.hive.PutHiveStreaming.getOrCreateWriter(PutHiveStreaming.java:738) [nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.processors.hive.PutHiveStreaming.lambda$onTrigger$4(PutHiveStreaming.java:462) [nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.controller.repository.StandardProcessSession.read(StandardProcessSession.java:1880) ~[na:na]
        at org.apache.nifi.controller.repository.StandardProcessSession.read(StandardProcessSession.java:1851) ~[na:na]
        at org.apache.nifi.processors.hive.PutHiveStreaming.onTrigger(PutHiveStreaming.java:389) [nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.processor.AbstractProcessor.onTrigger(AbstractProcessor.java:27) ~[nifi-api-1.0.0.2.0.0.0-579.jar:1.0.0.2.0.0.0-579]
        at org.apache.nifi.controller.StandardProcessorNode.onTrigger(StandardProcessorNode.java:1064) ~[na:na]
        at org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:136) ~[na:na]
        at org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:47) ~[na:na]
        at org.apache.nifi.controller.scheduling.TimerDrivenSchedulingAgent$1.run(TimerDrivenSchedulingAgent.java:132) ~[na:na]
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) ~[na:1.8.0_77]
        at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308) ~[na:1.8.0_77]
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180) ~[na:1.8.0_77]
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294) ~[na:1.8.0_77]
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) ~[na:1.8.0_77]
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) ~[na:1.8.0_77]
        at java.lang.Thread.run(Thread.java:745) ~[na:1.8.0_77]
Caused by: org.apache.nifi.util.hive.HiveWriter$TxnBatchFailure: Failed acquiring Transaction Batch from EndPoint: {metaStoreUri='thrift://hdp001db1.hadoop.za.cellc.net:9083', database='starhome', table='olympics', partitionVals=[] }
        at org.apache.nifi.util.hive.HiveWriter.nextTxnBatch(HiveWriter.java:255) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.util.hive.HiveWriter.<init>(HiveWriter.java:74) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        ... 19 common frames omitted
Caused by: org.apache.hive.hcatalog.streaming.TransactionBatchUnAvailable: Unable to acquire transaction batch on end point: {metaStoreUri='thrift://hdp001db1.hadoop.za.cellc.net:9083', database='starhome', table='olympics', partitionVals=[] }
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.<init>(HiveEndPoint.java:610) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.<init>(HiveEndPoint.java:555) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$ConnectionImpl.fetchTransactionBatchImpl(HiveEndPoint.java:441) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$ConnectionImpl.fetchTransactionBatch(HiveEndPoint.java:421) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.nifi.util.hive.HiveWriter.lambda$nextTxnBatch$7(HiveWriter.java:250) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[na:1.8.0_77]
        ... 3 common frames omitted
Caused by: org.apache.thrift.TApplicationException: Internal error processing open_txns
        at org.apache.thrift.TApplicationException.read(TApplicationException.java:111) ~[hive-exec-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:79) ~[hive-exec-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_open_txns(ThriftHiveMetastore.java:3875) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.open_txns(ThriftHiveMetastore.java:3862) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.openTxns(HiveMetaStoreClient.java:1880) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at sun.reflect.GeneratedMethodAccessor60.invoke(Unknown Source) ~[na:na]
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_77]
        at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_77]
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:155) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at com.sun.proxy.$Proxy90.openTxns(Unknown Source) ~[na:na]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.openTxnImpl(HiveEndPoint.java:623) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.<init>(HiveEndPoint.java:604) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        ... 8 common frames omitted
2016-11-30 10:32:43,971 ERROR [Timer-Driven Process Thread-6] o.a.n.processors.hive.PutHiveStreaming PutHiveStreaming[id=9b263042-0158-1000-bc12-8f7f1bebaf33] Error connecting to Hive endpoint: table olympics at thrift://hdp001db1.hadoop.za.cellc.net:9083
2016-11-30 10:32:43,971 ERROR [Timer-Driven Process Thread-6] o.a.n.processors.hive.PutHiveStreaming PutHiveStreaming[id=9b263042-0158-1000-bc12-8f7f1bebaf33] Hive Streaming connect/write error, flow file will be penalized and routed to retry
2016-11-30 10:32:43,973 ERROR [Timer-Driven Process Thread-6] o.a.n.processors.hive.PutHiveStreaming
org.apache.nifi.util.hive.HiveWriter$ConnectFailure: Failed connecting to EndPoint {metaStoreUri='thrift://hdp001db1.hadoop.za.cellc.net:9083', database='starhome', table='olympics', partitionVals=[] }
        at org.apache.nifi.util.hive.HiveWriter.<init>(HiveWriter.java:80) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.util.hive.HiveUtils.makeHiveWriter(HiveUtils.java:45) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.processors.hive.PutHiveStreaming.makeHiveWriter(PutHiveStreaming.java:827) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.processors.hive.PutHiveStreaming.getOrCreateWriter(PutHiveStreaming.java:738) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.processors.hive.PutHiveStreaming.lambda$onTrigger$4(PutHiveStreaming.java:462) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.controller.repository.StandardProcessSession.read(StandardProcessSession.java:1880) ~[na:na]
        at org.apache.nifi.controller.repository.StandardProcessSession.read(StandardProcessSession.java:1851) ~[na:na]
        at org.apache.nifi.processors.hive.PutHiveStreaming.onTrigger(PutHiveStreaming.java:389) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.processor.AbstractProcessor.onTrigger(AbstractProcessor.java:27) [nifi-api-1.0.0.2.0.0.0-579.jar:1.0.0.2.0.0.0-579]
        at org.apache.nifi.controller.StandardProcessorNode.onTrigger(StandardProcessorNode.java:1064) [nifi-framework-core-1.0.0.2.0.0.0-579.jar:1.0.0.2.0.0.0-579]
        at org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:136) [nifi-framework-core-1.0.0.2.0.0.0-579.jar:1.0.0.2.0.0.0-579]
        at org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:47) [nifi-framework-core-1.0.0.2.0.0.0-579.jar:1.0.0.2.0.0.0-579]
        at org.apache.nifi.controller.scheduling.TimerDrivenSchedulingAgent$1.run(TimerDrivenSchedulingAgent.java:132) [nifi-framework-core-1.0.0.2.0.0.0-579.jar:1.0.0.2.0.0.0-579]
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [na:1.8.0_77]
        at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308) [na:1.8.0_77]
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180) [na:1.8.0_77]
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294) [na:1.8.0_77]
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_77]
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_77]
        at java.lang.Thread.run(Thread.java:745) [na:1.8.0_77]
Caused by: org.apache.nifi.util.hive.HiveWriter$TxnBatchFailure: Failed acquiring Transaction Batch from EndPoint: {metaStoreUri='thrift://hdp001db1.hadoop.za.cellc.net:9083', database='starhome', table='olympics', partitionVals=[] }
        at org.apache.nifi.util.hive.HiveWriter.nextTxnBatch(HiveWriter.java:255) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at org.apache.nifi.util.hive.HiveWriter.<init>(HiveWriter.java:74) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        ... 19 common frames omitted
Caused by: org.apache.hive.hcatalog.streaming.TransactionBatchUnAvailable: Unable to acquire transaction batch on end point: {metaStoreUri='thrift://hdp001db1.hadoop.za.cellc.net:9083', database='starhome', table='olympics', partitionVals=[] }
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.<init>(HiveEndPoint.java:610) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.<init>(HiveEndPoint.java:555) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$ConnectionImpl.fetchTransactionBatchImpl(HiveEndPoint.java:441) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$ConnectionImpl.fetchTransactionBatch(HiveEndPoint.java:421) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.nifi.util.hive.HiveWriter.lambda$nextTxnBatch$7(HiveWriter.java:250) ~[nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at java.util.concurrent.FutureTask.run(FutureTask.java:266) [na:1.8.0_77]
        ... 3 common frames omitted
Caused by: org.apache.thrift.TApplicationException: Internal error processing open_txns
        at org.apache.thrift.TApplicationException.read(TApplicationException.java:111) ~[hive-exec-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:79) ~[hive-exec-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_open_txns(ThriftHiveMetastore.java:3875) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.open_txns(ThriftHiveMetastore.java:3862) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.openTxns(HiveMetaStoreClient.java:1880) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at sun.reflect.GeneratedMethodAccessor60.invoke(Unknown Source) ~[na:na]
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_77]
        at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_77]
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:155) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at com.sun.proxy.$Proxy90.openTxns(Unknown Source) ~[na:na]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.openTxnImpl(HiveEndPoint.java:623) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.<init>(HiveEndPoint.java:604) ~[hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        ... 8 common frames omitted
2016-11-30 10:32:43,999 WARN [Timer-Driven Process Thread-9] o.a.nifi.processors.standard.PutFile PutFile[id=b03aa344-0158-1000-f97e-d74ae38e1b22] Penalizing StandardFlowFileRecord[uuid=f1933b81-f74b-4bfe-a8b3-c9155b56d4b7,claim=StandardContentClaim [resourceClaim=StandardResourceClaim[id=1480494358448-6799, container=default, section=655], offset=585075, length=40984],offset=0,name=olympics.csv,size=40984] and routing to failure as configured because file with the same name already exists
2016-11-30 10:32:44,099 INFO [Timer-Driven Process Thread-5] hive.metastore Trying to connect to metastore with URI thrift://hdp001db1.hadoop.za.cellc.net:9083
2016-11-30 10:32:44,100 INFO [Timer-Driven Process Thread-5] hive.metastore Connected to metastore.
2016-11-30 10:32:44,287 WARN [put-hive-streaming-0] o.a.h.h.m.RetryingMetaStoreClient MetaStoreClient lost connection. Attempting to reconnect.
org.apache.thrift.TApplicationException: Internal error processing open_txns
        at org.apache.thrift.TApplicationException.read(TApplicationException.java:111) ~[hive-exec-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:79) ~[hive-exec-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_open_txns(ThriftHiveMetastore.java:3875) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.open_txns(ThriftHiveMetastore.java:3862) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.openTxns(HiveMetaStoreClient.java:1880) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at sun.reflect.GeneratedMethodAccessor60.invoke(Unknown Source) ~[na:na]
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_77]
        at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_77]
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:155) ~[hive-metastore-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at com.sun.proxy.$Proxy90.openTxns(Unknown Source) [na:na]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.openTxnImpl(HiveEndPoint.java:623) [hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.<init>(HiveEndPoint.java:604) [hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$TransactionBatchImpl.<init>(HiveEndPoint.java:555) [hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$ConnectionImpl.fetchTransactionBatchImpl(HiveEndPoint.java:441) [hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.hive.hcatalog.streaming.HiveEndPoint$ConnectionImpl.fetchTransactionBatch(HiveEndPoint.java:421) [hive-hcatalog-streaming-1.2.1000.2.5.0.0-1245.jar:1.2.1000.2.5.0.0-1245]
        at org.apache.nifi.util.hive.HiveWriter.lambda$nextTxnBatch$7(HiveWriter.java:250) [nifi-hive-processors-1.1.0-SNAPSHOT.jar:1.1.0-SNAPSHOT]
        at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[na:1.8.0_77]
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) ~[na:1.8.0_77]
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) ~[na:1.8.0_77]
        at java.lang.Thread.run(Thread.java:745) ~[na:1.8.0_77]
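
The PutFile warning about olympics.csv is a separate, already-handled name collision; the Hive Streaming failure itself bottoms out in org.apache.thrift.TApplicationException: Internal error processing open_txns. A TApplicationException is generated by the Thrift server when its handler throws, so the real stack for this error lives in the Hive metastore log around 10:32:43-44, not in the NiFi log. The client-side call that triggers it is HiveEndPoint$ConnectionImpl.fetchTransactionBatch(), i.e. the first attempt to open a transaction batch against starhome.olympics. A minimal standalone probe along these lines (a sketch, not the NiFi code path; the class name OpenTxnsProbe, the column list and the delimiter are illustrative placeholders, and the endpoint values are taken from the log) exercises exactly that call and helps confirm whether the failure is on the metastore side, independent of NiFi:

// Standalone probe for the open_txns failure seen above. Requires
// hive-hcatalog-streaming (and its Hive/Hadoop dependencies) on the classpath.
import java.util.ArrayList;

import org.apache.hive.hcatalog.streaming.DelimitedInputWriter;
import org.apache.hive.hcatalog.streaming.HiveEndPoint;
import org.apache.hive.hcatalog.streaming.StreamingConnection;
import org.apache.hive.hcatalog.streaming.TransactionBatch;

public class OpenTxnsProbe {
    public static void main(String[] args) throws Exception {
        // Endpoint values copied from the log entries above.
        HiveEndPoint endPoint = new HiveEndPoint(
                "thrift://hdp001db1.hadoop.za.cellc.net:9083",  // metaStoreUri
                "starhome",                                     // database
                "olympics",                                     // table
                new ArrayList<String>());                       // partitionVals=[]

        // The plain metastore connection succeeds in the log ("Connected to metastore."),
        // so this step is expected to work.
        StreamingConnection connection = endPoint.newConnection(false);
        try {
            // Illustrative column names and delimiter; replace with the actual
            // schema of starhome.olympics before running.
            DelimitedInputWriter writer =
                    new DelimitedInputWriter(new String[] {"col1", "col2"}, ",", endPoint);

            // This is the call that issues open_txns against the metastore and
            // fails in the trace above (HiveEndPoint$ConnectionImpl.fetchTransactionBatch).
            TransactionBatch batch = connection.fetchTransactionBatch(10, writer);
            batch.beginNextTransaction();
            batch.write("1,test".getBytes());
            batch.commit();
            batch.close();
        } finally {
            connection.close();
        }
    }
}

If the probe fails with the same "Internal error processing open_txns", the metastore log will show the underlying exception. In practice that often points at an incomplete ACID setup on the metastore side (the transaction tables in the metastore database, or the hive.txn.* and hive.compactor.* settings that Hive Streaming depends on, plus the requirement that the target table be a bucketed ORC table with 'transactional'='true'), but treat that as a checklist to verify against the server-side stack, not a conclusion drawn from this client-side log alone.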