<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: SparkSQL key not found: scale in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/64007#M52732</link>
    <description>&lt;P&gt;We are also facing the same issue. Is there any solution now?&lt;/P&gt;</description>
    <pubDate>Thu, 25 Jan 2018 15:21:40 GMT</pubDate>
    <dc:creator>d.diwakar</dc:creator>
    <dc:date>2018-01-25T15:21:40Z</dc:date>
    <item>
      <title>SparkSQL key not found: scale</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/50068#M52728</link>
      <description>&lt;P&gt;&lt;SPAN&gt;Hadoop version is&amp;nbsp;&lt;SPAN&gt;2.6.0-cdh.5.8.3.&amp;nbsp;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;&lt;SPAN&gt;Spark version is 1.6.0 with Scala version 2.10.5&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;I am trying to do a simple SQL query to the remote Oracle 11g DB by using Spark SQL in spark-shell.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;val jdbcDF = sqlContext.read.format("jdbc").options(
Map("url" -&amp;gt; "jdbc:oracle:thin:system/system@db-host:1521:orcl", 
"dbtable"-&amp;gt; "schema_name.table_name", 
"driver"-&amp;gt; "oracle.jdbc.OracleDriver", 
"username" -&amp;gt; "user", 
"password" -&amp;gt; "pwd")).load()&lt;/PRE&gt;&lt;P&gt;the result is:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;java.util.NoSuchElementException: key not found: scale
    at scala.collection.MapLike$class.default(MapLike.scala:228)
    at scala.collection.AbstractMap.default(Map.scala:58)
    at scala.collection.MapLike$class.apply(MapLike.scala:141)
    at scala.collection.AbstractMap.apply(Map.scala:58)
    at org.apache.spark.sql.types.Metadata.get(Metadata.scala:108)
    at org.apache.spark.sql.types.Metadata.getLong(Metadata.scala:51)
    at org.apache.spark.sql.jdbc.OracleDialect$.getCatalystType(OracleDialect.scala:33)
    at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:140)
    at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.&amp;lt;init&amp;gt;(JDBCRelation.scala:91)
    at org.apache.spark.sql.execution.datasources.jdbc.DefaultSource.createRelation(DefaultSource.scala:57)
    at org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:158)
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:119)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:25)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:30)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:32)
    at $iwC$$iwC$$iwC$$iwC$$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:34)
    at $iwC$$iwC$$iwC$$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:36)
    at $iwC$$iwC$$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:38)
    at $iwC$$iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:40)
    at $iwC.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:42)
    at &amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:44)
    at .&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:48)
    at .&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)
    at .&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:7)
    at .&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)
    at $print(&amp;lt;console&amp;gt;)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1045)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1326)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:821)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:852)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:800)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
    at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1064)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)&lt;/PRE&gt;&lt;P&gt;Can someone tell me, what is the reason of such error?&lt;/P&gt;</description>
      <pubDate>Fri, 16 Sep 2022 10:58:21 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/50068#M52728</guid>
      <dc:creator>katchpr</dc:creator>
      <dc:date>2022-09-16T10:58:21Z</dc:date>
    </item>
    <item>
      <title>Re: SparkSQL key not found: scale</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/50295#M52729</link>
      <description>&lt;P&gt;This is currently an issue with Numeric datatypes. &amp;nbsp;This is resolved in 2.0, but you can work around the issue by casting to Varchar or importing data into an RDD then converting to DataFrame.&lt;/P&gt;</description>
      <pubDate>Thu, 02 Feb 2017 14:34:09 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/50295#M52729</guid>
      <dc:creator>hubbarja</dc:creator>
      <dc:date>2017-02-02T14:34:09Z</dc:date>
    </item>
    <item>
      <title>Re: SparkSQL key not found: scale</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/60066#M52730</link>
      <description>&lt;P&gt;Can you explain how to do that briefly?&lt;BR /&gt;I'm trying to connect to oracle database to bring the count of the table from source and compare with the data moved to destination table in hive for validation.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks&lt;/P&gt;</description>
      <pubDate>Mon, 18 Sep 2017 19:41:55 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/60066#M52730</guid>
      <dc:creator>maduri</dc:creator>
      <dc:date>2017-09-18T19:41:55Z</dc:date>
    </item>
    <item>
      <title>Re: SparkSQL key not found: scale</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/60290#M52731</link>
      <description>&lt;P&gt;As I don't know the columns which are numeric in source table.&lt;/P&gt;</description>
      <pubDate>Mon, 25 Sep 2017 14:55:42 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/60290#M52731</guid>
      <dc:creator>maduri</dc:creator>
      <dc:date>2017-09-25T14:55:42Z</dc:date>
    </item>
    <item>
      <title>Re: SparkSQL key not found: scale</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/64007#M52732</link>
      <description>&lt;P&gt;We are also facing the same issue. Is there any solution now?&lt;/P&gt;</description>
      <pubDate>Thu, 25 Jan 2018 15:21:40 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/SparkSQL-key-not-found-scale/m-p/64007#M52732</guid>
      <dc:creator>d.diwakar</dc:creator>
      <dc:date>2018-01-25T15:21:40Z</dc:date>
    </item>
  </channel>
</rss>

