<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Hive-Druid Segment granularity MONTH of 31 days issue in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Hive-Druid-Segment-granularity-MONTH-of-31-days-issue/m-p/184660#M75704</link>
    <description>&lt;P&gt;I changed the segment granularity to YEAR and it worked fine. &lt;/P&gt;&lt;P&gt;"druid.segment.granularity" = "YEAR",&lt;A href="https://community.cloudera.com/legacyfs/online/attachments/62850-druid-benchmarking-results.png"&gt;druid-benchmarking-results.png&lt;/A&gt;&lt;/P&gt;</description>
    <pubDate>Mon, 12 Mar 2018 19:45:16 GMT</pubDate>
    <dc:creator>hamid_zorgani</dc:creator>
    <dc:date>2018-03-12T19:45:16Z</dc:date>
    <item>
      <title>Hive-Druid Segment granularity MONTH of 31 days issue</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Hive-Druid-Segment-granularity-MONTH-of-31-days-issue/m-p/184659#M75703</link>
      <description>&lt;P&gt;Hi, &lt;/P&gt;&lt;P&gt;When loading data into Hive using the druid storage handler, my table properties have "druid.segment.granularity" = "MONTH". It causes an exception error while loading data; it seems the segment of length one month processes only 30 days, and some months have 31 days.&lt;/P&gt;&lt;P&gt;Caused by: java.util.concurrent.ExecutionException: org.apache.hive.druid.io.druid.java.util.common.IAE: interval[&lt;STRONG&gt;1992-10-01&lt;/STRONG&gt;T00:00:00.000+01:00/&lt;STRONG&gt;1992-10-30&lt;/STRONG&gt;T23:00:00.000Z] &lt;STRONG&gt;does not encapsulate the full range of timestamps&lt;/STRONG&gt;[1992-10-01T00:00:00.000+01:00, &lt;STRONG&gt;1992-10-31&lt;/STRONG&gt;T00:00:00.000Z]
at org.apache.hive.druid.com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:299)
at org.apache.hive.druid.com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:286)
at org.apache.hive.druid.com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
at org.apache.hadoop.hive.druid.io.DruidRecordWriter.pushSegments(DruidRecordWriter.java:165)&lt;BR /&gt;&lt;/P&gt;</description>
      <pubDate>Fri, 16 Sep 2022 12:57:44 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Hive-Druid-Segment-granularity-MONTH-of-31-days-issue/m-p/184659#M75703</guid>
      <dc:creator>hamid_zorgani</dc:creator>
      <dc:date>2022-09-16T12:57:44Z</dc:date>
    </item>
    <item>
      <title>Re: Hive-Druid Segment granularity MONTH of 31 days issue</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Hive-Druid-Segment-granularity-MONTH-of-31-days-issue/m-p/184660#M75704</link>
      <description>&lt;P&gt;I changed the segment granularity to YEAR and it worked fine. &lt;/P&gt;&lt;P&gt;"druid.segment.granularity" = "YEAR",&lt;A href="https://community.cloudera.com/legacyfs/online/attachments/62850-druid-benchmarking-results.png"&gt;druid-benchmarking-results.png&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 12 Mar 2018 19:45:16 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Hive-Druid-Segment-granularity-MONTH-of-31-days-issue/m-p/184660#M75704</guid>
      <dc:creator>hamid_zorgani</dc:creator>
      <dc:date>2018-03-12T19:45:16Z</dc:date>
    </item>
  </channel>
</rss>

