# Before running a Zeppelin notebook

[root@sandbox spark-client]# ps ax | grep spark
 3858 ?        Sl     0:13 /usr/lib/jvm/java/bin/java -Dhdp.version=2.4.0.0-169 -cp /usr/hdp/2.4.0.0-169/spark/sbin/../conf/:/usr/hdp/2.4.0.0-169/spark/lib/spark-assembly-1.6.0.2.4.0.0-169-hadoop2.7.1.2.4.0.0-169.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-api-jdo-3.2.6.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-core-3.2.10.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-rdbms-3.2.9.jar:/usr/hdp/current/hadoop-client/conf/ -Xms1g -Xmx1g -XX:MaxPermSize=256m org.apache.spark.deploy.history.HistoryServer
 4214 ?        Sl     0:16 /usr/lib/jvm/java/bin/java -Dhdp.version=2.4.0.0-169 -Dspark.executor.memory=512m -Dspark.executor.instances=2 -Dspark.yarn.queue=default -Dfile.encoding=UTF-8 -Xms1024m -Xmx1024m -XX:MaxPermSize=512m -Dzeppelin.log.file=/var/log/zeppelin/zeppelin-zeppelin-sandbox.hortonworks.com.log -cp ::/usr/hdp/current/zeppelin-server/lib/lib/*:/usr/hdp/current/zeppelin-server/lib/*::/usr/hdp/current/zeppelin-server/lib/conf org.apache.zeppelin.server.ZeppelinServer
 6669 pts/0    S+     0:00 grep spark
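At this point only two Spark-related JVMs exist: the Spark History Server (PID 3858) and the Zeppelin server itself (PID 4214); there is no interpreter, no driver, and no YARN container yet. As a quick cross-check (not part of the session above), you can also ask YARN directly; on an otherwise idle sandbox the list of running applications should be empty:

[root@sandbox spark-client]# yarn application -list -appStates RUNNING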
# After running a Zeppelin notebook

[root@sandbox spark-client]# ps ax | grep spark
 3858 ?        Sl     0:13 /usr/lib/jvm/java/bin/java -Dhdp.version=2.4.0.0-169 -cp /usr/hdp/2.4.0.0-169/spark/sbin/../conf/:/usr/hdp/2.4.0.0-169/spark/lib/spark-assembly-1.6.0.2.4.0.0-169-hadoop2.7.1.2.4.0.0-169.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-api-jdo-3.2.6.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-core-3.2.10.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-rdbms-3.2.9.jar:/usr/hdp/current/hadoop-client/conf/ -Xms1g -Xmx1g -XX:MaxPermSize=256m org.apache.spark.deploy.history.HistoryServer
 4214 ?        Sl     0:19 /usr/lib/jvm/java/bin/java -Dhdp.version=2.4.0.0-169 -Dspark.executor.memory=512m -Dspark.executor.instances=2 -Dspark.yarn.queue=default -Dfile.encoding=UTF-8 -Xms1024m -Xmx1024m -XX:MaxPermSize=512m -Dzeppelin.log.file=/var/log/zeppelin/zeppelin-zeppelin-sandbox.hortonworks.com.log -cp ::/usr/hdp/current/zeppelin-server/lib/lib/*:/usr/hdp/current/zeppelin-server/lib/*::/usr/hdp/current/zeppelin-server/lib/conf org.apache.zeppelin.server.ZeppelinServer
 6734 ?        S      0:00 /bin/bash /usr/hdp/current/zeppelin-server/lib/bin/interpreter.sh -d /usr/hdp/current/zeppelin-server/lib/interpreter/spark -p 56425
 6748 ?        Sl     1:04 /usr/lib/jvm/java/bin/java -Dhdp.version=2.4.0.0-169 -cp /usr/hdp/current/zeppelin-server/lib/interpreter/spark/zeppelin-spark-0.6.0.2.4.0.0-169.jar:/usr/hdp/current/spark-historyserver/conf/:/usr/hdp/2.4.0.0-169/spark/lib/spark-assembly-1.6.0.2.4.0.0-169-hadoop2.7.1.2.4.0.0-169.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-api-jdo-3.2.6.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-core-3.2.10.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-rdbms-3.2.9.jar:/usr/hdp/current/hadoop-client/conf/ -Xms1g -Xmx1g -Dhdp.version=2.4.0.0-169 -Dspark.executor.memory=512m -Dspark.executor.instances=2 -Dspark.yarn.queue=default -Dfile.encoding=UTF-8 -Dhdp.version=2.4.0.0-169 -Dspark.executor.memory=512m -Dspark.executor.instances=2 -Dspark.yarn.queue=default -Dfile.encoding=UTF-8 -Dzeppelin.log.file=/var/log/zeppelin/zeppelin-interpreter-spark-zeppelin-sandbox.hortonworks.com.log -XX:MaxPermSize=256m org.apache.spark.deploy.SparkSubmit --conf spark.driver.extraClassPath=::/usr/hdp/current/zeppelin-server/lib/interpreter/spark/zeppelin-spark-0.6.0.2.4.0.0-169.jar --conf spark.driver.extraJavaOptions= -Dhdp.version=2.4.0.0-169 -Dspark.executor.memory=512m -Dspark.executor.instances=2 -Dspark.yarn.queue=default -Dfile.encoding=UTF-8 -Dhdp.version=2.4.0.0-169 -Dspark.executor.memory=512m -Dspark.executor.instances=2 -Dspark.yarn.queue=default -Dfile.encoding=UTF-8 -Dzeppelin.log.file=/var/log/zeppelin/zeppelin-interpreter-spark-zeppelin-sandbox.hortonworks.com.log --class org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer /usr/hdp/current/zeppelin-server/lib/interpreter/spark/zeppelin-spark-0.6.0.2.4.0.0-169.jar 56425
 6945 ?        Ss     0:00 /bin/bash -c /usr/lib/jvm/java/bin/java -server -Xmx512m -Djava.io.tmpdir=/hadoop/yarn/local/usercache/zeppelin/appcache/application_1457964813770_0002/container_e11_1457964813770_0002_01_000001/tmp -Dhdp.version=2.4.0.0-169 -Dspark.yarn.app.container.log.dir=/hadoop/yarn/log/application_1457964813770_0002/container_e11_1457964813770_0002_01_000001 -XX:MaxPermSize=256m org.apache.spark.deploy.yarn.ExecutorLauncher --arg '192.168.1.106:37942' --executor-memory 512m --executor-cores 1 --properties-file /hadoop/yarn/local/usercache/zeppelin/appcache/application_1457964813770_0002/container_e11_1457964813770_0002_01_000001/__spark_conf__/__spark_conf__.properties 1> /hadoop/yarn/log/application_1457964813770_0002/container_e11_1457964813770_0002_01_000001/stdout 2> /hadoop/yarn/log/application_1457964813770_0002/container_e11_1457964813770_0002_01_000001/stderr
 6956 ?        Sl     0:07 /usr/lib/jvm/java/bin/java -server -Xmx512m -Djava.io.tmpdir=/hadoop/yarn/local/usercache/zeppelin/appcache/application_1457964813770_0002/container_e11_1457964813770_0002_01_000001/tmp -Dhdp.version=2.4.0.0-169 -Dspark.yarn.app.container.log.dir=/hadoop/yarn/log/application_1457964813770_0002/container_e11_1457964813770_0002_01_000001 -XX:MaxPermSize=256m org.apache.spark.deploy.yarn.ExecutorLauncher --arg 192.168.1.106:37942 --executor-memory 512m --executor-cores 1 --properties-file /hadoop/yarn/local/usercache/zeppelin/appcache/application_1457964813770_0002/container_e11_1457964813770_0002_01_000001/__spark_conf__/__spark_conf__.properties
 7013 ?        Ss     0:00 /bin/bash -c /usr/lib/jvm/java/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms512m -Xmx512m -Djava.io.tmpdir=/hadoop/yarn/local/usercache/zeppelin/appcache/application_1457964813770_0002/container_e11_1457964813770_0002_01_000002/tmp '-Dspark.driver.port=37942' '-Dspark.history.ui.port=18080' -Dspark.yarn.app.container.log.dir=/hadoop/yarn/log/application_1457964813770_0002/container_e11_1457964813770_0002_01_000002 -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@192.168.1.106:37942 --executor-id 1 --hostname sandbox.hortonworks.com --cores 1 --app-id application_1457964813770_0002 --user-class-path file:/hadoop/yarn/local/usercache/zeppelin/appcache/application_1457964813770_0002/container_e11_1457964813770_0002_01_000002/__app__.jar 1> /hadoop/yarn/log/application_1457964813770_0002/container_e11_1457964813770_0002_01_000002/stdout 2> /hadoop/yarn/log/application_1457964813770_0002/container_e11_1457964813770_0002_01_000002/stderr
 7023 ?        Sl     0:11 /usr/lib/jvm/java/bin/java -server -XX:OnOutOfMemoryError=kill %p -Xms512m -Xmx512m -Djava.io.tmpdir=/hadoop/yarn/local/usercache/zeppelin/appcache/application_1457964813770_0002/container_e11_1457964813770_0002_01_000002/tmp -Dspark.driver.port=37942 -Dspark.history.ui.port=18080 -Dspark.yarn.app.container.log.dir=/hadoop/yarn/log/application_1457964813770_0002/container_e11_1457964813770_0002_01_000002 -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@192.168.1.106:37942 --executor-id 1 --hostname sandbox.hortonworks.com --cores 1 --app-id application_1457964813770_0002 --user-class-path file:/hadoop/yarn/local/usercache/zeppelin/appcache/application_1457964813770_0002/container_e11_1457964813770_0002_01_000002/__app__.jar
 7195 pts/0    S+     0:00 grep spark
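The second listing shows the whole chain that one notebook run creates. The Zeppelin server (PID 4214) forks interpreter.sh (PID 6734), which launches SparkSubmit running Zeppelin's RemoteInterpreterServer (PID 6748); that JVM is the Spark driver, listening on the port passed via -p (56425). Since this is yarn-client mode, YARN then starts two containers under application_1457964813770_0002: the ApplicationMaster running ExecutorLauncher (launch script PID 6945, JVM PID 6956) and one executor running CoarseGrainedExecutorBackend (launch script PID 7013, JVM PID 7023). Note how the executor's -Dspark.driver.port=37942 matches the --arg 192.168.1.106:37942 handed to the ApplicationMaster; that is how the containers find their way back to the driver on the host.

To see the parent/child relationships directly instead of eyeballing PIDs, something like the following works (a sketch, assuming pstree from the psmisc package is installed and substituting the PIDs from the listing above; the two YARN container processes hang off the NodeManager, not off Zeppelin, so pstree on the Zeppelin server shows only the driver-side chain):

[root@sandbox spark-client]# pstree -p 4214
[root@sandbox spark-client]# ps -o pid,ppid,args -p 6734,6748,6956,7023

The yarn application -list check from earlier should now report application_1457964813770_0002 as RUNNING.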