<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
  <channel>
    <title>question Error in starting spark-shell -WARN AbstractLifeCycle: FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use +  HDP 2.3.4 on EC2 in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Error-in-starting-spark-shell-WARN-AbstractLifeCycle-FAILED/m-p/125197#M17849</link>
    <description>&lt;PRE&gt;16/02/03 22:55:25 WARN AbstractLifeCycle: FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use
java.net.BindException: Address already in use
        at sun.nio.ch.Net.bind0(Native Method)
        at sun.nio.ch.Net.bind(Net.java:444)
        at sun.nio.ch.Net.bind(Net.java:436)
        at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
        at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
        at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
        at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
        at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
        at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
        at org.spark-project.jetty.server.Server.doStart(Server.java:293)
        at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
        at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$connect$1(JettyUtils.scala:237)
        at org.apache.spark.ui.JettyUtils$anonfun$3.apply(JettyUtils.scala:247)
        at org.apache.spark.ui.JettyUtils$anonfun$3.apply(JettyUtils.scala:247)
        at org.apache.spark.util.Utils$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
        at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
        at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
        at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:247)
        at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
        at org.apache.spark.SparkContext$anonfun$13.apply(SparkContext.scala:474)
        at org.apache.spark.SparkContext$anonfun$13.apply(SparkContext.scala:474)
        at scala.Option.foreach(Option.scala:236)
        at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:474)
        at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1017)
        at $line3.$read$iwC$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:9)
        at $line3.$read$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:18)
        at $line3.$read.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:20)
        at $line3.$read$.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:24)
        at $line3.$read$.&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)
        at $line3.$eval$.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:7)
        at $line3.$eval$.&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)
        at $line3.$eval.$print(&amp;lt;console&amp;gt;)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
        at org.apache.spark.repl.SparkILoopInit$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:125)
        at org.apache.spark.repl.SparkILoopInit$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
        at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
        at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
        at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
        at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply$mcZ$sp(SparkILoop.scala:991)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply(SparkILoop.scala:945)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$process(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$runMain(SparkSubmit.scala:685)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
16/02/03 22:55:25 WARN AbstractLifeCycle: FAILED org.spark-project.jetty.server.Server@7509bf4e: java.net.BindException: Address already in use
java.net.BindException: Address already in use
        at sun.nio.ch.Net.bind0(Native Method)
        at sun.nio.ch.Net.bind(Net.java:444)
        at sun.nio.ch.Net.bind(Net.java:436)
        at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
        at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
        at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
        at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
        at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
        at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
        at org.spark-project.jetty.server.Server.doStart(Server.java:293)
        at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
        at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$connect$1(JettyUtils.scala:237)
        at org.apache.spark.ui.JettyUtils$anonfun$3.apply(JettyUtils.scala:247)
        at org.apache.spark.ui.JettyUtils$anonfun$3.apply(JettyUtils.scala:247)
        at org.apache.spark.util.Utils$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
        at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
        at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
        at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:247)
        at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
        at org.apache.spark.SparkContext$anonfun$13.apply(SparkContext.scala:474)
        at org.apache.spark.SparkContext$anonfun$13.apply(SparkContext.scala:474)
        at scala.Option.foreach(Option.scala:236)
        at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:474)
        at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1017)
        at $line3.$read$iwC$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:9)
        at $line3.$read$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:18)
        at $line3.$read.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:20)
        at $line3.$read$.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:24)
        at $line3.$read$.&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)
        at $line3.$eval$.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:7)
        at $line3.$eval$.&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)
        at $line3.$eval.$print(&amp;lt;console&amp;gt;)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
        at org.apache.spark.repl.SparkILoopInit$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:125)
        at org.apache.spark.repl.SparkILoopInit$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
        at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
        at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
        at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
        at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply$mcZ$sp(SparkILoop.scala:991)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply(SparkILoop.scala:945)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$process(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$runMain(SparkSubmit.scala:685)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/st
&lt;/PRE&gt;&lt;P&gt;[ec2-user@ip-172-31-29-201 ~]$  netstat -pnat | grep 404&lt;/P&gt;&lt;P&gt;(No info could be read for "-p": geteuid()=1000 but you should be root.)
tcp        1      0 xxx.xx.xx.xxx:34047     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp        1      0 xxx.xx.xx.xxx:34049     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp        1      0 xxx.xx.xx.xxx:34046     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp        1      0 xxx.xx.xx.xxx:34048    xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp6       0      0 :::4040                 :::*                    LISTEN      -
tcp6       0      0 :::4041                 :::*                    LISTEN      -
tcp6       1      0 xxx.xx.xx.xxx:34043     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp6       1      0 xxx.xx.xx.xxx:34045     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp6       1      0 xxx.xx.xx.xxx:34042     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp6       1      0 xxx.xx.xx.xxx:34044     xxx.xx.xx.xxx:6188      CLOSE_WAIT&lt;/P&gt;&lt;P&gt;Any pointers appreciated.&lt;/P&gt;</description>
    <pubDate>Thu, 04 Feb 2016 12:07:44 GMT</pubDate>
    <dc:creator>DivyaGehlot13</dc:creator>
    <dc:date>2016-02-04T12:07:44Z</dc:date>
    <item>
      <title>Error in starting spark-shell -WARN AbstractLifeCycle: FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use +  HDP 2.3.4 on EC2</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Error-in-starting-spark-shell-WARN-AbstractLifeCycle-FAILED/m-p/125197#M17849</link>
      <description>&lt;PRE&gt;16/02/03 22:55:25 WARN AbstractLifeCycle: FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use
java.net.BindException: Address already in use
        at sun.nio.ch.Net.bind0(Native Method)
        at sun.nio.ch.Net.bind(Net.java:444)
        at sun.nio.ch.Net.bind(Net.java:436)
        at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
        at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
        at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
        at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
        at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
        at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
        at org.spark-project.jetty.server.Server.doStart(Server.java:293)
        at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
        at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$connect$1(JettyUtils.scala:237)
        at org.apache.spark.ui.JettyUtils$anonfun$3.apply(JettyUtils.scala:247)
        at org.apache.spark.ui.JettyUtils$anonfun$3.apply(JettyUtils.scala:247)
        at org.apache.spark.util.Utils$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
        at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
        at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
        at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:247)
        at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
        at org.apache.spark.SparkContext$anonfun$13.apply(SparkContext.scala:474)
        at org.apache.spark.SparkContext$anonfun$13.apply(SparkContext.scala:474)
        at scala.Option.foreach(Option.scala:236)
        at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:474)
        at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1017)
        at $line3.$read$iwC$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:9)
        at $line3.$read$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:18)
        at $line3.$read.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:20)
        at $line3.$read$.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:24)
        at $line3.$read$.&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)
        at $line3.$eval$.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:7)
        at $line3.$eval$.&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)
        at $line3.$eval.$print(&amp;lt;console&amp;gt;)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
        at org.apache.spark.repl.SparkILoopInit$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:125)
        at org.apache.spark.repl.SparkILoopInit$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
        at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
        at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
        at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
        at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply$mcZ$sp(SparkILoop.scala:991)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply(SparkILoop.scala:945)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$process(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$runMain(SparkSubmit.scala:685)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
16/02/03 22:55:25 WARN AbstractLifeCycle: FAILED org.spark-project.jetty.server.Server@7509bf4e: java.net.BindException: Address already in use
java.net.BindException: Address already in use
        at sun.nio.ch.Net.bind0(Native Method)
        at sun.nio.ch.Net.bind(Net.java:444)
        at sun.nio.ch.Net.bind(Net.java:436)
        at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
        at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
        at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
        at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
        at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
        at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
        at org.spark-project.jetty.server.Server.doStart(Server.java:293)
        at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
        at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$connect$1(JettyUtils.scala:237)
        at org.apache.spark.ui.JettyUtils$anonfun$3.apply(JettyUtils.scala:247)
        at org.apache.spark.ui.JettyUtils$anonfun$3.apply(JettyUtils.scala:247)
        at org.apache.spark.util.Utils$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
        at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
        at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
        at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:247)
        at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
        at org.apache.spark.SparkContext$anonfun$13.apply(SparkContext.scala:474)
        at org.apache.spark.SparkContext$anonfun$13.apply(SparkContext.scala:474)
        at scala.Option.foreach(Option.scala:236)
        at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:474)
        at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1017)
        at $line3.$read$iwC$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:9)
        at $line3.$read$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:18)
        at $line3.$read.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:20)
        at $line3.$read$.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:24)
        at $line3.$read$.&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)
        at $line3.$eval$.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:7)
        at $line3.$eval$.&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)
        at $line3.$eval.$print(&amp;lt;console&amp;gt;)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
        at org.apache.spark.repl.SparkILoopInit$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:125)
        at org.apache.spark.repl.SparkILoopInit$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
        at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
        at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
        at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
        at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply$mcZ$sp(SparkILoop.scala:991)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop$anonfun$org$apache$spark$repl$SparkILoop$process$1.apply(SparkILoop.scala:945)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$process(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$runMain(SparkSubmit.scala:685)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
16/02/03 22:55:25 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/st
&lt;/PRE&gt;&lt;P&gt;[ec2-user@ip-172-31-29-201 ~]$  netstat -pnat | grep 404&lt;/P&gt;&lt;P&gt;(No info could be read for "-p": geteuid()=1000 but you should be root.)
tcp        1      0 xxx.xx.xx.xxx:34047     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp        1      0 xxx.xx.xx.xxx:34049     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp        1      0 xxx.xx.xx.xxx:34046     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp        1      0 xxx.xx.xx.xxx:34048    xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp6       0      0 :::4040                 :::*                    LISTEN      -
tcp6       0      0 :::4041                 :::*                    LISTEN      -
tcp6       1      0 xxx.xx.xx.xxx:34043     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp6       1      0 xxx.xx.xx.xxx:34045     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp6       1      0 xxx.xx.xx.xxx:34042     xxx.xx.xx.xxx:6188      CLOSE_WAIT  -
tcp6       1      0 xxx.xx.xx.xxx:34044     xxx.xx.xx.xxx:6188      CLOSE_WAIT&lt;/P&gt;&lt;P&gt;Any pointers appreciated.&lt;/P&gt;</description>
      <pubDate>Thu, 04 Feb 2016 12:07:44 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Error-in-starting-spark-shell-WARN-AbstractLifeCycle-FAILED/m-p/125197#M17849</guid>
      <dc:creator>DivyaGehlot13</dc:creator>
      <dc:date>2016-02-04T12:07:44Z</dc:date>
    </item>
    <item>
      <title>Re: Error in starting spark-shell -WARN AbstractLifeCycle: FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use +  HDP 2.3.4 on EC2</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Error-in-starting-spark-shell-WARN-AbstractLifeCycle-FAILED/m-p/125198#M17850</link>
      <description>&lt;P&gt;The "java.net.BindException:Address already in use" exception is just a warning that can be ignored: it will find a different port on its own if the default one (4040) is in use. From your netstat output it seems like it started on 4041 instead. To find the process id of the process listening on those ports, you can run the command below&lt;/P&gt;&lt;PRE&gt;netstat -tulpn | grep 404&lt;/PRE&gt;&lt;P&gt;You can also check status of Spark on YARN jobs using Resource Manager UI e.g. if running on sandbox you can access it on &lt;A href="http://VMs_IP_ADDRESS:8088/cluster" target="_blank"&gt;http://VMs_IP_ADDRESS:8088/cluster&lt;/A&gt; and click the Spark AM to get to the Spark UI.&lt;/P&gt;&lt;P&gt;Check this thread for more details &lt;A href="https://community.hortonworks.com/questions/8257/how-can-i-resolve-it.html" target="_blank"&gt;https://community.hortonworks.com/questions/8257/how-can-i-resolve-it.html&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 04 Feb 2016 12:34:49 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Error-in-starting-spark-shell-WARN-AbstractLifeCycle-FAILED/m-p/125198#M17850</guid>
      <dc:creator>abajwa</dc:creator>
      <dc:date>2016-02-04T12:34:49Z</dc:date>
    </item>
    <item>
      <title>Re: Error in starting spark-shell -WARN AbstractLifeCycle: FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use +  HDP 2.3.4 on EC2</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Error-in-starting-spark-shell-WARN-AbstractLifeCycle-FAILED/m-p/125199#M17851</link>
      <description>&lt;P&gt;&lt;A rel="user" href="https://community.cloudera.com/users/831/divyag.html" nodeid="831"&gt;@Divya Gehlot&lt;/A&gt; any update?&lt;/P&gt;</description>
      <pubDate>Fri, 05 Feb 2016 11:58:50 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Error-in-starting-spark-shell-WARN-AbstractLifeCycle-FAILED/m-p/125199#M17851</guid>
      <dc:creator>abajwa</dc:creator>
      <dc:date>2016-02-05T11:58:50Z</dc:date>
    </item>
    <item>
      <title>Re: Error in starting spark-shell -WARN AbstractLifeCycle: FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use +  HDP 2.3.4 on EC2</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Error-in-starting-spark-shell-WARN-AbstractLifeCycle-FAILED/m-p/125200#M17852</link>
      <description>&lt;P&gt;Thanks Ali for the help&lt;/P&gt;</description>
      <pubDate>Mon, 15 Feb 2016 10:37:33 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Error-in-starting-spark-shell-WARN-AbstractLifeCycle-FAILED/m-p/125200#M17852</guid>
      <dc:creator>DivyaGehlot13</dc:creator>
      <dc:date>2016-02-15T10:37:33Z</dc:date>
    </item>
  </channel>
</rss>

