INI_FILE_PATH=/app/list/bigdata/dev/common/conf/common.conf

#---------------------------------------------------------------------------
# initConf function
# description : extracts configuration values stored in the common.conf file
# syntax      : initConf <section> <key>
#---------------------------------------------------------------------------
function initConf(){
    if [[ $# -ne 2 ]]; then
        logger "error" "the initConf function takes two arguments. syntax: initConf <section> <key>"
        exit 1
    fi
    if [[ ! -f ${INI_FILE_PATH} ]]; then
        logger "error" "the file ${INI_FILE_PATH} does not exist"
        exit 1
    else
        # Normalize "key = value" lines, strip comments and surrounding whitespace,
        # quote the values, then keep only the lines of the requested [section].
        eval `sed -e 's/[[:space:]]*\=[[:space:]]*/=/g' \
                  -e 's/;.*$//' \
                  -e 's/[[:space:]]*$//' \
                  -e 's/^[[:space:]]*//' \
                  -e "s/^\(.*\)=\([^\"']*\)$/\1=\"\2\"/" \
                  < ${INI_FILE_PATH} | sed -n -e "/^\[$1\]/,/^\s*\[/{/^[^;].*\=.*/p;}"`
        if [[ $? -ne 0 ]]; then
            logger "error" "failed to retrieve the configuration from the file ${INI_FILE_PATH}"
            exit 1
        fi
        # Indirect expansion: print the value of the variable named by the <key> argument
        echo ${!2}
    fi
}

#---------------------------------------------------------------------------
# getKrbREALM function
# description : retrieves the @REALM value from the krb5.conf file
# syntax      : getKrbREALM
#---------------------------------------------------------------------------
function getKrbREALM(){
    local krbConfFile=$(initConf EDGE-FS KRB5CONF_PATH)
    if [[ ! -f ${krbConfFile} ]]; then
        logger "error" "the file ${krbConfFile} does not exist"
        exit 1
    else
        local ini_section="libdefaults"
        local ini_key="default_realm"
        eval `sed -e 's/[[:space:]]*\=[[:space:]]*/=/g' \
                  -e 's/;.*$//' \
                  -e 's/[[:space:]]*$//' \
                  -e 's/^[[:space:]]*//' \
                  -e "s/^\(.*\)=\([^\"']*\)$/\1=\"\2\"/" \
                  < ${krbConfFile} | sed -n -e "/^\[$ini_section\]/,/^\s*\[/{/^[^;].*\=.*/p;}"`
        if [[ $? -ne 0 ]]; then
            logger "error" "failed to retrieve the @REALM value from the file ${krbConfFile}"
            exit 1
        fi
        # Indirect expansion: read the value of the variable named by ${ini_key}
        local temp_value=${!ini_key}
        if [[ -z "$temp_value" ]]; then
            logger "error" "the ${ini_key} attribute in section '${ini_section}' is empty or does not exist"
            exit 1
        fi
        echo ${temp_value}
    fi
}

#---------------------------------------------------------------------------------
# setFiles function
# description : builds the --files option for the spark-submit command
# syntax      : setFiles
#---------------------------------------------------------------------------------
function setFiles(){
    local files=""
    if [[ -f ${HIVE_SITE_PATH} ]]; then
        files="${files}${HIVE_SITE_PATH},"
    fi
    if [[ -f ${HBASE_SITE_PATH} ]]; then
        files="${files}${HBASE_SITE_PATH},"
    fi
    if [[ -f ${KEYTAB} ]]; then
        files="${files}${JAAS_FILE}#client.jaas,${KEYTAB},"
    fi
    if [[ -z ${files} ]] || [[ ${files} == "," ]]; then
        echo ""
    else
        echo "--files ${files%,}"
    fi
}

#---------------------------------------------------------------------------------
# setJars function
# description : builds the --jars option for the spark-submit command
# syntax      : setJars
#---------------------------------------------------------------------------------
function setJars(){
    if [[ -d ${HBASE_LIB_PATH} ]]; then
        local jars=$(find ${HBASE_LIB_PATH} -name "*.jar" | tr -s '\n' ',')
        echo "--jars ${jars%,},/home/hervyjc/hbase-spark-1.0.0.jar,/home/hervyjc/hbase-protocol-2.1.0.jar"
    else
        echo ""
    fi
}
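# Usage sketch for the helpers above (illustrative only; the [EDGE-FS] layout shown
# here and the /etc/krb5.conf path are assumptions, not the actual contents of common.conf):
#
#   common.conf is expected to be INI-style, for example:
#       [EDGE-FS]
#       KRB5CONF_PATH = /etc/krb5.conf
#
#   KRB5CONF_PATH=$(initConf EDGE-FS KRB5CONF_PATH)   # -> /etc/krb5.conf
#   REALM=$(getKrbREALM)                              # -> value of default_realm in [libdefaults]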
#---------------------------------------------------------------------------------
# setPrincipalKeytab function
# description : builds the --principal and --keytab options for the spark-submit command
# syntax      : setPrincipalKeytab
#---------------------------------------------------------------------------------
function setPrincipalKeytab(){
    if [[ -f ${KEYTAB} ]]; then
        if [[ ! -f ${SYMLINK_KEYTAB} ]]; then
            ln -s ${KEYTAB} ${SYMLINK_KEYTAB}
        fi
        if [[ -f ${SYMLINK_KEYTAB} ]]; then
            echo "--principal ${WHOAMI}@${KRB_REALM} --keytab ${SYMLINK_KEYTAB}"
        else
            echo ""
        fi
    else
        echo ""
    fi
}

# VARIABLE INITIALIZATION
FILES=""
JARS=""

# VARIABLE ASSIGNMENT
export WHOAMI=$(whoami)
export KRB_REALM=$(getKrbREALM)
export KEYTAB=/home/hervyjc/hervyjc.keytab
CLEAN_USERNAME=$(echo "$(whoami)" | sed 's/[^a-zA-Z0-9]//g')
TRT_PERIOD=$3
FILE_SYSTEM=${10}
HIVE_SITE_PATH="/etc/hive/conf.cloudera.hive/hive-site.xml"
HBASE_SITE_PATH="/etc/hbase/conf.cloudera.hbase/hbase-site.xml"
HBASE_LIB_PATH="/opt/cloudera/parcels/CDH/lib/hbase"
SYMLINK_KEYTAB=/home/$(whoami)/SYMLINK_${CLEAN_USERNAME}.keytab
CONFIG_FILE=/${HDFS_INSTALL_HOME}/${appName}/${trtName}/current/config/action.yml

echo "The following command is about to be launched: "
echo spark-submit --class org.example.Main \
    --master yarn \
    --deploy-mode cluster \
    $(setPrincipalKeytab) \
    $(setFiles) \
    $(setJars) \
    --conf "spark.driver.extraJavaOptions=-Dlog4j.configurationFile=file:log4j2.yml -Djava.security.auth.login.config=client.jaas -Dprincipal=${WHOAMI}@${KRB_REALM} -DkeytabPath=${WHOAMI}.keytab" \
    --conf "spark.executor.extraJavaOptions=-Dlog4j.configurationFile=file:log4j2.yml -Djava.security.auth.login.config=client.jaas -Dprincipal=${WHOAMI}@${KRB_REALM} -DkeytabPath=${WHOAMI}.keytab" \
    --queue root.dev_regular \
    /app/list/bigdata/dev/jlhervy/hbase-spark-integration-1.0-SNAPSHOT.jar

spark-submit --class org.example.Main \
    --master yarn \
    --deploy-mode cluster \
    $(setPrincipalKeytab) \
    $(setFiles) \
    --jars /home/hervyjc/hbase-spark-1.0.0.jar,/home/hervyjc/hbase-protocol-2.1.0.jar \
    --conf "spark.driver.extraJavaOptions=-Dlog4j.configurationFile=file:log4j2.yml -Djava.security.auth.login.config=client.jaas -Dprincipal=${WHOAMI}@${KRB_REALM} -DkeytabPath=${WHOAMI}.keytab" \
    --conf "spark.executor.extraJavaOptions=-Dlog4j.configurationFile=file:log4j2.yml -Djava.security.auth.login.config=client.jaas -Dprincipal=${WHOAMI}@${KRB_REALM} -DkeytabPath=${WHOAMI}.keytab" \
    --queue root.dev_regular \
    --driver-memory 8g \
    --executor-memory 8g \
    --executor-cores 4 \
    --num-executors 12 \
    /app/list/bigdata/dev/jlhervy/hbase-spark-integration-1.0-SNAPSHOT.jar
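# A minimal follow-up sketch, commented out and not part of the original script:
# it assumes the caller wants the wrapper to fail when spark-submit fails, reusing
# the same logger convention as the functions above.
#
#   RC=$?
#   if [[ ${RC} -ne 0 ]]; then
#       logger "error" "spark-submit exited with return code ${RC}"
#       exit ${RC}
#   fi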