<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: pyspark convert unixtimestamp to datetime in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/pyspark-convert-unixtimestamp-to-datetime/m-p/187401#M70613</link>
    <description>&lt;A rel="user" href="https://community.cloudera.com/users/3057/sreeviswaathikala.html" nodeid="3057"&gt;@Viswa&lt;/A&gt;&lt;P&gt;Converting a regular unix timestamp field to a human-readable format without the T in it is a lot simpler, as you can use the conversion below.&lt;/P&gt;&lt;PRE&gt;pyspark
&amp;gt;&amp;gt;&amp;gt; hiveContext.sql("select from_unixtime(cast(1509672916 as bigint),'yyyy-MM-dd HH:mm:ss.SSS')").show(truncate=False)
+-----------------------+
|_c0                    |
+-----------------------+
|2017-11-02 21:35:16.000|
+-----------------------+&lt;/PRE&gt;&lt;PRE&gt;pyspark
&amp;gt;&amp;gt;&amp;gt;hiveContext.sql("select from_unixtime(cast(&amp;lt;unix-timestamp-column-name&amp;gt; as bigint),'yyyy-MM-dd HH:mm:ss.SSS')")&lt;/PRE&gt;&lt;P&gt;&lt;STRONG&gt;But you are expecting format as yyyy-MM-ddThh:mm:ss &lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;For this case you need to use &lt;B&gt;concat date and time &lt;/B&gt;with &lt;STRONG&gt;T &lt;/STRONG&gt;letter&lt;/P&gt;&lt;PRE&gt;pyspark
&amp;gt;&amp;gt;&amp;gt;hiveContext.sql("""select concat(concat(substr(cast(from_unixtime(cast(1509672916 as bigint),'yyyy-MM-dd HH:mm:ss.SS') as string),1,10),'T'),substr(cast(from_unixtime(cast(1509672916 as bigint),'yyyy-MM-dd HH:mm:ss.SS') as string),12))""").show(truncate=False) 
+-----------------------+
|_c0                    |
+-----------------------+
|2017-11-02T21:35:16.00|
+-----------------------+
&lt;/PRE&gt;&lt;P&gt;&lt;STRONG&gt;&lt;U&gt;Your query:-&lt;/U&gt;&lt;/STRONG&gt;&lt;/P&gt;&lt;PRE&gt;pyspark
&amp;gt;&amp;gt;&amp;gt;hiveContext.sql("""select concat(concat(substr(cast(from_unixtime(cast(&amp;lt;unix-timestamp-column-name&amp;gt; as bigint),'yyyy-MM-dd HH:mm:ss.SS') as string),1,10),'T'),
substr(cast(from_unixtime(cast(&amp;lt;unix-timestamp-column-name&amp;gt; as bigint),'yyyy-MM-dd HH:mm:ss.SS') as string),12))""").show(truncate=False) //replace &amp;lt;unix-timestamp-column-name&amp;gt; with your column name&lt;/PRE&gt;&lt;P&gt;In case you want to test in Hive, use the query below&lt;/P&gt;&lt;PRE&gt;hive# select concat(concat(substr(cast(from_unixtime(cast(1509672916 as bigint),'yyyy-MM-dd HH:mm:ss.SSS') as string),1,10),'T'),
substr(cast(from_unixtime(cast(1509672916 as bigint),'yyyy-MM-dd HH:mm:ss.SSS') as string),12));
+--------------------------+--+
|           _c0            |
+--------------------------+--+
| 2017-11-02T21:35:16.00  |
+--------------------------+--+

&lt;/PRE&gt;&lt;P&gt;Hope this will help to resolve your issue...!!!&lt;/P&gt;</description>
    <pubDate>Fri, 03 Nov 2017 11:34:48 GMT</pubDate>
    <dc:creator>Shu_ashu</dc:creator>
    <dc:date>2017-11-03T11:34:48Z</dc:date>
    <item>
      <title>pyspark convert unixtimestamp to datetime</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/pyspark-convert-unixtimestamp-to-datetime/m-p/187400#M70612</link>
      <description>&lt;P&gt;Hi team,&lt;/P&gt;&lt;P&gt;I am looking to convert a unix timestamp field to human readable format.&lt;/P&gt;&lt;P&gt;Can someone help me with this?&lt;/P&gt;&lt;P&gt;I am using from unix_timestamp('Timestamp', "yyyy-MM-ddThh:mm:ss"), but this is not working.&lt;/P&gt;&lt;P&gt;Any suggestions would be of great help&lt;/P&gt;</description>
      <pubDate>Thu, 02 Nov 2017 01:28:56 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/pyspark-convert-unixtimestamp-to-datetime/m-p/187400#M70612</guid>
      <dc:creator>sreeviswa_athic</dc:creator>
      <dc:date>2017-11-02T01:28:56Z</dc:date>
    </item>
    <item>
      <title>Re: pyspark convert unixtimestamp to datetime</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/pyspark-convert-unixtimestamp-to-datetime/m-p/187401#M70613</link>
      <description>&lt;A rel="user" href="https://community.cloudera.com/users/3057/sreeviswaathikala.html" nodeid="3057"&gt;@Viswa&lt;/A&gt;&lt;P&gt;Converting a regular unix timestamp field to a human-readable format without the T in it is a lot simpler, as you can use the conversion below.&lt;/P&gt;&lt;PRE&gt;pyspark
&amp;gt;&amp;gt;&amp;gt; hiveContext.sql("select from_unixtime(cast(1509672916 as bigint),'yyyy-MM-dd HH:mm:ss.SSS')").show(truncate=False)
+-----------------------+
|_c0                    |
+-----------------------+
|2017-11-02 21:35:16.000|
+-----------------------+&lt;/PRE&gt;&lt;PRE&gt;pyspark
&amp;gt;&amp;gt;&amp;gt;hiveContext.sql("select from_unixtime(cast(&amp;lt;unix-timestamp-column-name&amp;gt; as bigint),'yyyy-MM-dd HH:mm:ss.SSS')")&lt;/PRE&gt;&lt;P&gt;&lt;STRONG&gt;But you are expecting format as yyyy-MM-ddThh:mm:ss &lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;For this case you need to use &lt;B&gt;concat date and time &lt;/B&gt;with &lt;STRONG&gt;T &lt;/STRONG&gt;letter&lt;/P&gt;&lt;PRE&gt;pyspark
&amp;gt;&amp;gt;&amp;gt;hiveContext.sql("""select concat(concat(substr(cast(from_unixtime(cast(1509672916 as bigint),'yyyy-MM-dd HH:mm:ss.SS') as string),1,10),'T'),substr(cast(from_unixtime(cast(1509672916 as bigint),'yyyy-MM-dd HH:mm:ss.SS') as string),12))""").show(truncate=False) 
+-----------------------+
|_c0                    |
+-----------------------+
|2017-11-02T21:35:16.00|
+-----------------------+
&lt;/PRE&gt;&lt;P&gt;&lt;STRONG&gt;&lt;U&gt;Your query:-&lt;/U&gt;&lt;/STRONG&gt;&lt;/P&gt;&lt;PRE&gt;pyspark
&amp;gt;&amp;gt;&amp;gt;hiveContext.sql("""select concat(concat(substr(cast(from_unixtime(cast(&amp;lt;unix-timestamp-column-name&amp;gt; as bigint),'yyyy-MM-dd HH:mm:ss.SS') as string),1,10),'T'),
substr(cast(from_unixtime(cast(&amp;lt;unix-timestamp-column-name&amp;gt; as bigint),'yyyy-MM-dd HH:mm:ss.SS') as string),12))""").show(truncate=False) //replace &amp;lt;unix-timestamp-column-name&amp;gt; with your column name&lt;/PRE&gt;&lt;P&gt;In case you want to test in Hive, use the query below&lt;/P&gt;&lt;PRE&gt;hive# select concat(concat(substr(cast(from_unixtime(cast(1509672916 as bigint),'yyyy-MM-dd HH:mm:ss.SSS') as string),1,10),'T'),
substr(cast(from_unixtime(cast(1509672916 as bigint),'yyyy-MM-dd HH:mm:ss.SSS') as string),12));
+--------------------------+--+
|           _c0            |
+--------------------------+--+
| 2017-11-02T21:35:16.00  |
+--------------------------+--+

&lt;/PRE&gt;&lt;P&gt;Hope this will help to resolve your issue...!!!&lt;/P&gt;</description>
      <pubDate>Fri, 03 Nov 2017 11:34:48 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/pyspark-convert-unixtimestamp-to-datetime/m-p/187401#M70613</guid>
      <dc:creator>Shu_ashu</dc:creator>
      <dc:date>2017-11-03T11:34:48Z</dc:date>
    </item>
  </channel>
</rss>

