
flume-ng command failing in Kerberized cluster

Master Collaborator

Error:

1503619019573-ae6317dc-leader-finder-thread], Failed to find leader for Set([kafkahdfs,0])
kafka.common.BrokerEndPointNotAvailableException: End point with security protocol PLAINTEXT not found for broker 1001

Re: flume-ng command failing in Kerberized cluster

Master Collaborator

Take a look at the two debug files I uploaded: rawdata-debug.txt and debug-log.txt.

This is the link I am following for configuring Flume with Kafka in a Kerberized cluster:

https://community.hortonworks.com/articles/86079/flume-with-secured-kafka-channel.html

cd /etc/flume/conf
flume-ng agent -n flume1 -c ./conf/ -f conf/kafkahdfs.conf -Dflume.root.logger=INFO,console

Below are my configuration files

/etc/flume/conf/flume-env.sh

export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-0.b15.el6_8.x86_64
# Give Flume more memory and pre-allocate, enable remote monitoring via JMX
export JAVA_OPTS="-Xms100m -Xmx2000m -Dcom.sun.management.jmxremote -Dflume.monitoring.type=http -Dflume.monitoring.port=34545 -Djava.security.auth.login.config=/etc/flume/conf/flume_kafka_jaas.conf"
# Note that the Flume conf directory is always included in the classpath.
if [ -e "/usr/lib/flume/lib/ambari-metrics-flume-sink.jar" ]; then
  export FLUME_CLASSPATH=$FLUME_CLASSPATH:/usr/lib/flume/lib/ambari-metrics-flume-sink.jar
fi
export HIVE_HOME=/usr/hdp/2.5.3.0-37/hive
export HCAT_HOME=/usr/hdp/2.5.3.0-37/hive-hcatalog

/etc/flume/conf/flume_kafka_jaas.conf

KafkaClient {
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    storeKey=true
    serviceName="kafka"
    keyTab="/etc/security/keytabs/kafka.service.keytab"
    principal="kafka/hadoop1.tolls.dot.state.fl.us@TOLLS.DOT.STATE.FL.US";
};

/etc/flume/conf/kafkahdfs.conf

flume1.sources = kafka-source-1
flume1.channels = hdfs-channel-1
flume1.sinks = hdfs-sink-1
flume1.sources.kafka-source-1.type = org.apache.flume.source.kafka.KafkaSource
flume1.sources.kafka-source-1.zookeeperConnect = hadoop1:2181
flume1.sources.kafka-source-1.topic =kafkahdfs
flume1.sources.kafka-source-1.batchSize = 100
flume1.sources.kafka-source-1.channels = hdfs-channel-1
### Sinks #######
flume1.sinks.hdfs-sink-1.channel = hdfs-channel-1
flume1.sinks.hdfs-sink-1.type = hdfs
flume1.sinks.hdfs-sink-1.hdfs.writeFormat = Text
flume1.sinks.hdfs-sink-1.hdfs.fileType = DataStream
flume1.sinks.hdfs-sink-1.hdfs.filePrefix = test-events
flume1.sinks.hdfs-sink-1.hdfs.useLocalTimeStamp = true
flume1.sinks.hdfs-sink-1.hdfs.path = /tmp/kafka/%{topic}/%y-%m-%d
flume1.sinks.hdfs-sink-1.hdfs.rollCount=100
flume1.sinks.hdfs-sink-1.hdfs.rollSize=0
flume1.sinks.hdfs-sink-1.kerberosPrincipal=flume/hadoop1.tolls.dot.state.fl.us
flume1.sinks.hdfs-sink-1.hdfs.kerberosKeytab=/etc/security/keytabs/flume.service.keytab
### Channels ######
flume1.channels.hdfs-channel-1.capacity = 10000
flume1.channels.hdfs-channel-1.type = memory
flume1.channels.hdfs-channel-1.transactionCapacity = 1000
flume1.channels.hdfs-channel-1.kafka.bootstrap.servers = hadoop1.tolls.do.state.fl.us:6667
flume1.channels.hdfs-channel-1.topic = kafkahdfs
flume1.channels.hdfs-channel-1.producer.security.protocol = SASL_PLAINTEXT
flume1.channels.hdfs-channel-1.producer.sasl.mechanism = GSSAPI
flume1.channels.hdfs-channel-1.consumer.security.protocol = SASL_PLAINTEXT
flume1.channels.hdfs-channel-1.consumer.sasl.mechanism = GSSAPI


Re: flume-ng command failing in Kerberized cluster

Expert Contributor
@Sami Ahmad

The article https://community.hortonworks.com/articles/86079/flume-with-secured-kafka-channel.html you are referring to is for the Kafka channel, but in the configuration details you have given you are using a memory channel with an HDFS sink.
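
For reference, a secured Kafka channel (which is what that article configures) looks roughly like the sketch below. This is only illustrative: the channel name kafka-channel-1 and the consumer group id are placeholders, the broker, topic, and security settings are taken from your post, and the property names are the standard Flume KafkaChannel ones, so please verify them against the article and your Flume version.

# Kafka channel in place of the memory channel; the source "channels" and
# sink "channel" settings would then both point at kafka-channel-1
flume1.channels.kafka-channel-1.type = org.apache.flume.channel.kafka.KafkaChannel
flume1.channels.kafka-channel-1.kafka.bootstrap.servers = hadoop1.tolls.dot.state.fl.us:6667
flume1.channels.kafka-channel-1.kafka.topic = kafkahdfs
flume1.channels.kafka-channel-1.kafka.consumer.group.id = flume-kafka-channel
flume1.channels.kafka-channel-1.kafka.producer.security.protocol = SASL_PLAINTEXT
flume1.channels.kafka-channel-1.kafka.producer.sasl.kerberos.service.name = kafka
flume1.channels.kafka-channel-1.kafka.consumer.security.protocol = SASL_PLAINTEXT
flume1.channels.kafka-channel-1.kafka.consumer.sasl.kerberos.service.name = kafka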

Can you please confirm whether you are able to access the same topic (kafkahdfs) using the Kafka console producer/consumer?
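
For example, something along these lines can be run from a broker node. The HDP bin path and the client.properties file name here are assumptions for your install; the JAAS file is the one already referenced in your flume-env.sh, and on this Kafka version the console consumer may additionally need the --new-consumer flag.

# client.properties (create it in the current directory) only needs:
#   security.protocol=SASL_PLAINTEXT
#   sasl.kerberos.service.name=kafka
export KAFKA_OPTS="-Djava.security.auth.login.config=/etc/flume/conf/flume_kafka_jaas.conf"
cd /usr/hdp/current/kafka-broker/bin
./kafka-console-producer.sh --broker-list hadoop1.tolls.dot.state.fl.us:6667 --topic kafkahdfs --producer.config client.properties
./kafka-console-consumer.sh --bootstrap-server hadoop1.tolls.dot.state.fl.us:6667 --topic kafkahdfs --from-beginning --consumer.config client.properties

If these commands fail with the same "End point with security protocol PLAINTEXT not found" error, then the client security settings, not Flume, are the first thing to fix.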

If that works, then please modify your flume_kafka_jaas.conf to include the following:

KafkaClient {
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    storeKey=true
    serviceName="kafka"
    keyTab="/etc/security/keytabs/kafka.service.keytab"
    principal="kafka/hadoop1.tolls.dot.state.fl.us@TOLLS.DOT.STATE.FL.US";
};
// ZooKeeper client authentication
Client {
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    storeKey=true
    serviceName="zookeeper"
    keyTab="/etc/security/keytabs/kafka.service.keytab"
    principal="kafka/hadoop1.tolls.dot.state.fl.us@TOLLS.DOT.STATE.FL.US";
};

Thanks

Venkat
