hive-commits mailing list archives

From: jd...@apache.org
Subject: svn commit: r1673553 [3/4] - in /hive/trunk: metastore/if/ metastore/src/gen/thrift/gen-cpp/ metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen/thrift/gen-php/metastore/ metastore/src/gen/thrift/gen-py/hive_me...
Date: Tue, 14 Apr 2015 20:47:30 GMT
Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java Tue Apr 14 20:47:29 2015
@@ -17328,7 +17328,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list524.size);
                   for (int _i525 = 0; _i525 < _list524.size; ++_i525)
                   {
-                    String _elem526; // required
+                    String _elem526; // optional
                     _elem526 = iprot.readString();
                     struct.success.add(_elem526);
                   }
@@ -17427,7 +17427,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list529.size);
             for (int _i530 = 0; _i530 < _list529.size; ++_i530)
             {
-              String _elem531; // required
+              String _elem531; // optional
               _elem531 = iprot.readString();
               struct.success.add(_elem531);
             }
@@ -18090,7 +18090,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list532.size);
                   for (int _i533 = 0; _i533 < _list532.size; ++_i533)
                   {
-                    String _elem534; // required
+                    String _elem534; // optional
                     _elem534 = iprot.readString();
                     struct.success.add(_elem534);
                   }
@@ -18189,7 +18189,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list537.size);
             for (int _i538 = 0; _i538 < _list537.size; ++_i538)
             {
-              String _elem539; // required
+              String _elem539; // optional
               _elem539 = iprot.readString();
               struct.success.add(_elem539);
             }
@@ -23950,7 +23950,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<FieldSchema>(_list550.size);
                   for (int _i551 = 0; _i551 < _list550.size; ++_i551)
                   {
-                    FieldSchema _elem552; // required
+                    FieldSchema _elem552; // optional
                     _elem552 = new FieldSchema();
                     _elem552.read(iprot);
                     struct.success.add(_elem552);
@@ -24090,7 +24090,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<FieldSchema>(_list555.size);
             for (int _i556 = 0; _i556 < _list555.size; ++_i556)
             {
-              FieldSchema _elem557; // required
+              FieldSchema _elem557; // optional
               _elem557 = new FieldSchema();
               _elem557.read(iprot);
               struct.success.add(_elem557);
@@ -25251,7 +25251,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<FieldSchema>(_list558.size);
                   for (int _i559 = 0; _i559 < _list558.size; ++_i559)
                   {
-                    FieldSchema _elem560; // required
+                    FieldSchema _elem560; // optional
                     _elem560 = new FieldSchema();
                     _elem560.read(iprot);
                     struct.success.add(_elem560);
@@ -25391,7 +25391,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<FieldSchema>(_list563.size);
             for (int _i564 = 0; _i564 < _list563.size; ++_i564)
             {
-              FieldSchema _elem565; // required
+              FieldSchema _elem565; // optional
               _elem565 = new FieldSchema();
               _elem565.read(iprot);
               struct.success.add(_elem565);
@@ -26443,7 +26443,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<FieldSchema>(_list566.size);
                   for (int _i567 = 0; _i567 < _list566.size; ++_i567)
                   {
-                    FieldSchema _elem568; // required
+                    FieldSchema _elem568; // optional
                     _elem568 = new FieldSchema();
                     _elem568.read(iprot);
                     struct.success.add(_elem568);
@@ -26583,7 +26583,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<FieldSchema>(_list571.size);
             for (int _i572 = 0; _i572 < _list571.size; ++_i572)
             {
-              FieldSchema _elem573; // required
+              FieldSchema _elem573; // optional
               _elem573 = new FieldSchema();
               _elem573.read(iprot);
               struct.success.add(_elem573);
@@ -27744,7 +27744,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<FieldSchema>(_list574.size);
                   for (int _i575 = 0; _i575 < _list574.size; ++_i575)
                   {
-                    FieldSchema _elem576; // required
+                    FieldSchema _elem576; // optional
                     _elem576 = new FieldSchema();
                     _elem576.read(iprot);
                     struct.success.add(_elem576);
@@ -27884,7 +27884,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<FieldSchema>(_list579.size);
             for (int _i580 = 0; _i580 < _list579.size; ++_i580)
             {
-              FieldSchema _elem581; // required
+              FieldSchema _elem581; // optional
               _elem581 = new FieldSchema();
               _elem581.read(iprot);
               struct.success.add(_elem581);
@@ -33134,7 +33134,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list582.size);
                   for (int _i583 = 0; _i583 < _list582.size; ++_i583)
                   {
-                    String _elem584; // required
+                    String _elem584; // optional
                     _elem584 = iprot.readString();
                     struct.success.add(_elem584);
                   }
@@ -33233,7 +33233,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list587.size);
             for (int _i588 = 0; _i588 < _list587.size; ++_i588)
             {
-              String _elem589; // required
+              String _elem589; // optional
               _elem589 = iprot.readString();
               struct.success.add(_elem589);
             }
@@ -34008,7 +34008,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list590.size);
                   for (int _i591 = 0; _i591 < _list590.size; ++_i591)
                   {
-                    String _elem592; // required
+                    String _elem592; // optional
                     _elem592 = iprot.readString();
                     struct.success.add(_elem592);
                   }
@@ -34107,7 +34107,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list595.size);
             for (int _i596 = 0; _i596 < _list595.size; ++_i596)
             {
-              String _elem597; // required
+              String _elem597; // optional
               _elem597 = iprot.readString();
               struct.success.add(_elem597);
             }
@@ -35569,7 +35569,7 @@ public class ThriftHiveMetastore {
                   struct.tbl_names = new ArrayList<String>(_list598.size);
                   for (int _i599 = 0; _i599 < _list598.size; ++_i599)
                   {
-                    String _elem600; // required
+                    String _elem600; // optional
                     _elem600 = iprot.readString();
                     struct.tbl_names.add(_elem600);
                   }
@@ -35663,7 +35663,7 @@ public class ThriftHiveMetastore {
             struct.tbl_names = new ArrayList<String>(_list603.size);
             for (int _i604 = 0; _i604 < _list603.size; ++_i604)
             {
-              String _elem605; // required
+              String _elem605; // optional
               _elem605 = iprot.readString();
               struct.tbl_names.add(_elem605);
             }
@@ -36237,7 +36237,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<Table>(_list606.size);
                   for (int _i607 = 0; _i607 < _list606.size; ++_i607)
                   {
-                    Table _elem608; // required
+                    Table _elem608; // optional
                     _elem608 = new Table();
                     _elem608.read(iprot);
                     struct.success.add(_elem608);
@@ -36377,7 +36377,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<Table>(_list611.size);
             for (int _i612 = 0; _i612 < _list611.size; ++_i612)
             {
-              Table _elem613; // required
+              Table _elem613; // optional
               _elem613 = new Table();
               _elem613.read(iprot);
               struct.success.add(_elem613);
@@ -37533,7 +37533,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list614.size);
                   for (int _i615 = 0; _i615 < _list614.size; ++_i615)
                   {
-                    String _elem616; // required
+                    String _elem616; // optional
                     _elem616 = iprot.readString();
                     struct.success.add(_elem616);
                   }
@@ -37672,7 +37672,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list619.size);
             for (int _i620 = 0; _i620 < _list619.size; ++_i620)
             {
-              String _elem621; // required
+              String _elem621; // optional
               _elem621 = iprot.readString();
               struct.success.add(_elem621);
             }
@@ -43537,7 +43537,7 @@ public class ThriftHiveMetastore {
                   struct.new_parts = new ArrayList<Partition>(_list622.size);
                   for (int _i623 = 0; _i623 < _list622.size; ++_i623)
                   {
-                    Partition _elem624; // required
+                    Partition _elem624; // optional
                     _elem624 = new Partition();
                     _elem624.read(iprot);
                     struct.new_parts.add(_elem624);
@@ -43617,7 +43617,7 @@ public class ThriftHiveMetastore {
             struct.new_parts = new ArrayList<Partition>(_list627.size);
             for (int _i628 = 0; _i628 < _list627.size; ++_i628)
             {
-              Partition _elem629; // required
+              Partition _elem629; // optional
               _elem629 = new Partition();
               _elem629.read(iprot);
               struct.new_parts.add(_elem629);
@@ -44625,7 +44625,7 @@ public class ThriftHiveMetastore {
                   struct.new_parts = new ArrayList<PartitionSpec>(_list630.size);
                   for (int _i631 = 0; _i631 < _list630.size; ++_i631)
                   {
-                    PartitionSpec _elem632; // required
+                    PartitionSpec _elem632; // optional
                     _elem632 = new PartitionSpec();
                     _elem632.read(iprot);
                     struct.new_parts.add(_elem632);
@@ -44705,7 +44705,7 @@ public class ThriftHiveMetastore {
             struct.new_parts = new ArrayList<PartitionSpec>(_list635.size);
             for (int _i636 = 0; _i636 < _list635.size; ++_i636)
             {
-              PartitionSpec _elem637; // required
+              PartitionSpec _elem637; // optional
               _elem637 = new PartitionSpec();
               _elem637.read(iprot);
               struct.new_parts.add(_elem637);
@@ -45891,7 +45891,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list638.size);
                   for (int _i639 = 0; _i639 < _list638.size; ++_i639)
                   {
-                    String _elem640; // required
+                    String _elem640; // optional
                     _elem640 = iprot.readString();
                     struct.part_vals.add(_elem640);
                   }
@@ -46000,7 +46000,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list643.size);
             for (int _i644 = 0; _i644 < _list643.size; ++_i644)
             {
-              String _elem645; // required
+              String _elem645; // optional
               _elem645 = iprot.readString();
               struct.part_vals.add(_elem645);
             }
@@ -48318,7 +48318,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list646.size);
                   for (int _i647 = 0; _i647 < _list646.size; ++_i647)
                   {
-                    String _elem648; // required
+                    String _elem648; // optional
                     _elem648 = iprot.readString();
                     struct.part_vals.add(_elem648);
                   }
@@ -48447,7 +48447,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list651.size);
             for (int _i652 = 0; _i652 < _list651.size; ++_i652)
             {
-              String _elem653; // required
+              String _elem653; // optional
               _elem653 = iprot.readString();
               struct.part_vals.add(_elem653);
             }
@@ -52326,7 +52326,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list654.size);
                   for (int _i655 = 0; _i655 < _list654.size; ++_i655)
                   {
-                    String _elem656; // required
+                    String _elem656; // optional
                     _elem656 = iprot.readString();
                     struct.part_vals.add(_elem656);
                   }
@@ -52452,7 +52452,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list659.size);
             for (int _i660 = 0; _i660 < _list659.size; ++_i660)
             {
-              String _elem661; // required
+              String _elem661; // optional
               _elem661 = iprot.readString();
               struct.part_vals.add(_elem661);
             }
@@ -53700,7 +53700,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list662.size);
                   for (int _i663 = 0; _i663 < _list662.size; ++_i663)
                   {
-                    String _elem664; // required
+                    String _elem664; // optional
                     _elem664 = iprot.readString();
                     struct.part_vals.add(_elem664);
                   }
@@ -53846,7 +53846,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list667.size);
             for (int _i668 = 0; _i668 < _list667.size; ++_i668)
             {
-              String _elem669; // required
+              String _elem669; // optional
               _elem669 = iprot.readString();
               struct.part_vals.add(_elem669);
             }
@@ -58457,7 +58457,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list670.size);
                   for (int _i671 = 0; _i671 < _list670.size; ++_i671)
                   {
-                    String _elem672; // required
+                    String _elem672; // optional
                     _elem672 = iprot.readString();
                     struct.part_vals.add(_elem672);
                   }
@@ -58566,7 +58566,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list675.size);
             for (int _i676 = 0; _i676 < _list675.size; ++_i676)
             {
-              String _elem677; // required
+              String _elem677; // optional
               _elem677 = iprot.readString();
               struct.part_vals.add(_elem677);
             }
@@ -61456,7 +61456,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list688.size);
                   for (int _i689 = 0; _i689 < _list688.size; ++_i689)
                   {
-                    String _elem690; // required
+                    String _elem690; // optional
                     _elem690 = iprot.readString();
                     struct.part_vals.add(_elem690);
                   }
@@ -61482,7 +61482,7 @@ public class ThriftHiveMetastore {
                   struct.group_names = new ArrayList<String>(_list691.size);
                   for (int _i692 = 0; _i692 < _list691.size; ++_i692)
                   {
-                    String _elem693; // required
+                    String _elem693; // optional
                     _elem693 = iprot.readString();
                     struct.group_names.add(_elem693);
                   }
@@ -61626,7 +61626,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list698.size);
             for (int _i699 = 0; _i699 < _list698.size; ++_i699)
             {
-              String _elem700; // required
+              String _elem700; // optional
               _elem700 = iprot.readString();
               struct.part_vals.add(_elem700);
             }
@@ -61643,7 +61643,7 @@ public class ThriftHiveMetastore {
             struct.group_names = new ArrayList<String>(_list701.size);
             for (int _i702 = 0; _i702 < _list701.size; ++_i702)
             {
-              String _elem703; // required
+              String _elem703; // optional
               _elem703 = iprot.readString();
               struct.group_names.add(_elem703);
             }
@@ -64418,7 +64418,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<Partition>(_list704.size);
                   for (int _i705 = 0; _i705 < _list704.size; ++_i705)
                   {
-                    Partition _elem706; // required
+                    Partition _elem706; // optional
                     _elem706 = new Partition();
                     _elem706.read(iprot);
                     struct.success.add(_elem706);
@@ -64538,7 +64538,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<Partition>(_list709.size);
             for (int _i710 = 0; _i710 < _list709.size; ++_i710)
             {
-              Partition _elem711; // required
+              Partition _elem711; // optional
               _elem711 = new Partition();
               _elem711.read(iprot);
               struct.success.add(_elem711);
@@ -65238,7 +65238,7 @@ public class ThriftHiveMetastore {
                   struct.group_names = new ArrayList<String>(_list712.size);
                   for (int _i713 = 0; _i713 < _list712.size; ++_i713)
                   {
-                    String _elem714; // required
+                    String _elem714; // optional
                     _elem714 = iprot.readString();
                     struct.group_names.add(_elem714);
                   }
@@ -65375,7 +65375,7 @@ public class ThriftHiveMetastore {
             struct.group_names = new ArrayList<String>(_list717.size);
             for (int _i718 = 0; _i718 < _list717.size; ++_i718)
             {
-              String _elem719; // required
+              String _elem719; // optional
               _elem719 = iprot.readString();
               struct.group_names.add(_elem719);
             }
@@ -65868,7 +65868,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<Partition>(_list720.size);
                   for (int _i721 = 0; _i721 < _list720.size; ++_i721)
                   {
-                    Partition _elem722; // required
+                    Partition _elem722; // optional
                     _elem722 = new Partition();
                     _elem722.read(iprot);
                     struct.success.add(_elem722);
@@ -65988,7 +65988,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<Partition>(_list725.size);
             for (int _i726 = 0; _i726 < _list725.size; ++_i726)
             {
-              Partition _elem727; // required
+              Partition _elem727; // optional
               _elem727 = new Partition();
               _elem727.read(iprot);
               struct.success.add(_elem727);
@@ -67058,7 +67058,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<PartitionSpec>(_list728.size);
                   for (int _i729 = 0; _i729 < _list728.size; ++_i729)
                   {
-                    PartitionSpec _elem730; // required
+                    PartitionSpec _elem730; // optional
                     _elem730 = new PartitionSpec();
                     _elem730.read(iprot);
                     struct.success.add(_elem730);
@@ -67178,7 +67178,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<PartitionSpec>(_list733.size);
             for (int _i734 = 0; _i734 < _list733.size; ++_i734)
             {
-              PartitionSpec _elem735; // required
+              PartitionSpec _elem735; // optional
               _elem735 = new PartitionSpec();
               _elem735.read(iprot);
               struct.success.add(_elem735);
@@ -68167,7 +68167,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list736.size);
                   for (int _i737 = 0; _i737 < _list736.size; ++_i737)
                   {
-                    String _elem738; // required
+                    String _elem738; // optional
                     _elem738 = iprot.readString();
                     struct.success.add(_elem738);
                   }
@@ -68266,7 +68266,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list741.size);
             for (int _i742 = 0; _i742 < _list741.size; ++_i742)
             {
-              String _elem743; // required
+              String _elem743; // optional
               _elem743 = iprot.readString();
               struct.success.add(_elem743);
             }
@@ -68863,7 +68863,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list744.size);
                   for (int _i745 = 0; _i745 < _list744.size; ++_i745)
                   {
-                    String _elem746; // required
+                    String _elem746; // optional
                     _elem746 = iprot.readString();
                     struct.part_vals.add(_elem746);
                   }
@@ -68989,7 +68989,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list749.size);
             for (int _i750 = 0; _i750 < _list749.size; ++_i750)
             {
-              String _elem751; // required
+              String _elem751; // optional
               _elem751 = iprot.readString();
               struct.part_vals.add(_elem751);
             }
@@ -69486,7 +69486,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<Partition>(_list752.size);
                   for (int _i753 = 0; _i753 < _list752.size; ++_i753)
                   {
-                    Partition _elem754; // required
+                    Partition _elem754; // optional
                     _elem754 = new Partition();
                     _elem754.read(iprot);
                     struct.success.add(_elem754);
@@ -69606,7 +69606,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<Partition>(_list757.size);
             for (int _i758 = 0; _i758 < _list757.size; ++_i758)
             {
-              Partition _elem759; // required
+              Partition _elem759; // optional
               _elem759 = new Partition();
               _elem759.read(iprot);
               struct.success.add(_elem759);
@@ -70391,7 +70391,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list760.size);
                   for (int _i761 = 0; _i761 < _list760.size; ++_i761)
                   {
-                    String _elem762; // required
+                    String _elem762; // optional
                     _elem762 = iprot.readString();
                     struct.part_vals.add(_elem762);
                   }
@@ -70425,7 +70425,7 @@ public class ThriftHiveMetastore {
                   struct.group_names = new ArrayList<String>(_list763.size);
                   for (int _i764 = 0; _i764 < _list763.size; ++_i764)
                   {
-                    String _elem765; // required
+                    String _elem765; // optional
                     _elem765 = iprot.readString();
                     struct.group_names.add(_elem765);
                   }
@@ -70578,7 +70578,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list770.size);
             for (int _i771 = 0; _i771 < _list770.size; ++_i771)
             {
-              String _elem772; // required
+              String _elem772; // optional
               _elem772 = iprot.readString();
               struct.part_vals.add(_elem772);
             }
@@ -70599,7 +70599,7 @@ public class ThriftHiveMetastore {
             struct.group_names = new ArrayList<String>(_list773.size);
             for (int _i774 = 0; _i774 < _list773.size; ++_i774)
             {
-              String _elem775; // required
+              String _elem775; // optional
               _elem775 = iprot.readString();
               struct.group_names.add(_elem775);
             }
@@ -71092,7 +71092,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<Partition>(_list776.size);
                   for (int _i777 = 0; _i777 < _list776.size; ++_i777)
                   {
-                    Partition _elem778; // required
+                    Partition _elem778; // optional
                     _elem778 = new Partition();
                     _elem778.read(iprot);
                     struct.success.add(_elem778);
@@ -71212,7 +71212,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<Partition>(_list781.size);
             for (int _i782 = 0; _i782 < _list781.size; ++_i782)
             {
-              Partition _elem783; // required
+              Partition _elem783; // optional
               _elem783 = new Partition();
               _elem783.read(iprot);
               struct.success.add(_elem783);
@@ -71815,7 +71815,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list784.size);
                   for (int _i785 = 0; _i785 < _list784.size; ++_i785)
                   {
-                    String _elem786; // required
+                    String _elem786; // optional
                     _elem786 = iprot.readString();
                     struct.part_vals.add(_elem786);
                   }
@@ -71941,7 +71941,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list789.size);
             for (int _i790 = 0; _i790 < _list789.size; ++_i790)
             {
-              String _elem791; // required
+              String _elem791; // optional
               _elem791 = iprot.readString();
               struct.part_vals.add(_elem791);
             }
@@ -72438,7 +72438,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list792.size);
                   for (int _i793 = 0; _i793 < _list792.size; ++_i793)
                   {
-                    String _elem794; // required
+                    String _elem794; // optional
                     _elem794 = iprot.readString();
                     struct.success.add(_elem794);
                   }
@@ -72557,7 +72557,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list797.size);
             for (int _i798 = 0; _i798 < _list797.size; ++_i798)
             {
-              String _elem799; // required
+              String _elem799; // optional
               _elem799 = iprot.readString();
               struct.success.add(_elem799);
             }
@@ -73730,7 +73730,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<Partition>(_list800.size);
                   for (int _i801 = 0; _i801 < _list800.size; ++_i801)
                   {
-                    Partition _elem802; // required
+                    Partition _elem802; // optional
                     _elem802 = new Partition();
                     _elem802.read(iprot);
                     struct.success.add(_elem802);
@@ -73850,7 +73850,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<Partition>(_list805.size);
             for (int _i806 = 0; _i806 < _list805.size; ++_i806)
             {
-              Partition _elem807; // required
+              Partition _elem807; // optional
               _elem807 = new Partition();
               _elem807.read(iprot);
               struct.success.add(_elem807);
@@ -75024,7 +75024,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<PartitionSpec>(_list808.size);
                   for (int _i809 = 0; _i809 < _list808.size; ++_i809)
                   {
-                    PartitionSpec _elem810; // required
+                    PartitionSpec _elem810; // optional
                     _elem810 = new PartitionSpec();
                     _elem810.read(iprot);
                     struct.success.add(_elem810);
@@ -75144,7 +75144,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<PartitionSpec>(_list813.size);
             for (int _i814 = 0; _i814 < _list813.size; ++_i814)
             {
-              PartitionSpec _elem815; // required
+              PartitionSpec _elem815; // optional
               _elem815 = new PartitionSpec();
               _elem815.read(iprot);
               struct.success.add(_elem815);
@@ -76602,7 +76602,7 @@ public class ThriftHiveMetastore {
                   struct.names = new ArrayList<String>(_list816.size);
                   for (int _i817 = 0; _i817 < _list816.size; ++_i817)
                   {
-                    String _elem818; // required
+                    String _elem818; // optional
                     _elem818 = iprot.readString();
                     struct.names.add(_elem818);
                   }
@@ -76711,7 +76711,7 @@ public class ThriftHiveMetastore {
             struct.names = new ArrayList<String>(_list821.size);
             for (int _i822 = 0; _i822 < _list821.size; ++_i822)
             {
-              String _elem823; // required
+              String _elem823; // optional
               _elem823 = iprot.readString();
               struct.names.add(_elem823);
             }
@@ -77204,7 +77204,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<Partition>(_list824.size);
                   for (int _i825 = 0; _i825 < _list824.size; ++_i825)
                   {
-                    Partition _elem826; // required
+                    Partition _elem826; // optional
                     _elem826 = new Partition();
                     _elem826.read(iprot);
                     struct.success.add(_elem826);
@@ -77324,7 +77324,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<Partition>(_list829.size);
             for (int _i830 = 0; _i830 < _list829.size; ++_i830)
             {
-              Partition _elem831; // required
+              Partition _elem831; // optional
               _elem831 = new Partition();
               _elem831.read(iprot);
               struct.success.add(_elem831);
@@ -78881,7 +78881,7 @@ public class ThriftHiveMetastore {
                   struct.new_parts = new ArrayList<Partition>(_list832.size);
                   for (int _i833 = 0; _i833 < _list832.size; ++_i833)
                   {
-                    Partition _elem834; // required
+                    Partition _elem834; // optional
                     _elem834 = new Partition();
                     _elem834.read(iprot);
                     struct.new_parts.add(_elem834);
@@ -78991,7 +78991,7 @@ public class ThriftHiveMetastore {
             struct.new_parts = new ArrayList<Partition>(_list837.size);
             for (int _i838 = 0; _i838 < _list837.size; ++_i838)
             {
-              Partition _elem839; // required
+              Partition _elem839; // optional
               _elem839 = new Partition();
               _elem839.read(iprot);
               struct.new_parts.add(_elem839);
@@ -81197,7 +81197,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list840.size);
                   for (int _i841 = 0; _i841 < _list840.size; ++_i841)
                   {
-                    String _elem842; // required
+                    String _elem842; // optional
                     _elem842 = iprot.readString();
                     struct.part_vals.add(_elem842);
                   }
@@ -81326,7 +81326,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list845.size);
             for (int _i846 = 0; _i846 < _list845.size; ++_i846)
             {
-              String _elem847; // required
+              String _elem847; // optional
               _elem847 = iprot.readString();
               struct.part_vals.add(_elem847);
             }
@@ -82209,7 +82209,7 @@ public class ThriftHiveMetastore {
                   struct.part_vals = new ArrayList<String>(_list848.size);
                   for (int _i849 = 0; _i849 < _list848.size; ++_i849)
                   {
-                    String _elem850; // required
+                    String _elem850; // optional
                     _elem850 = iprot.readString();
                     struct.part_vals.add(_elem850);
                   }
@@ -82305,7 +82305,7 @@ public class ThriftHiveMetastore {
             struct.part_vals = new ArrayList<String>(_list853.size);
             for (int _i854 = 0; _i854 < _list853.size; ++_i854)
             {
-              String _elem855; // required
+              String _elem855; // optional
               _elem855 = iprot.readString();
               struct.part_vals.add(_elem855);
             }
@@ -84469,7 +84469,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list856.size);
                   for (int _i857 = 0; _i857 < _list856.size; ++_i857)
                   {
-                    String _elem858; // required
+                    String _elem858; // optional
                     _elem858 = iprot.readString();
                     struct.success.add(_elem858);
                   }
@@ -84568,7 +84568,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list861.size);
             for (int _i862 = 0; _i862 < _list861.size; ++_i862)
             {
-              String _elem863; // required
+              String _elem863; // optional
               _elem863 = iprot.readString();
               struct.success.add(_elem863);
             }
@@ -94564,7 +94564,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<Index>(_list894.size);
                   for (int _i895 = 0; _i895 < _list894.size; ++_i895)
                   {
-                    Index _elem896; // required
+                    Index _elem896; // optional
                     _elem896 = new Index();
                     _elem896.read(iprot);
                     struct.success.add(_elem896);
@@ -94684,7 +94684,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<Index>(_list899.size);
             for (int _i900 = 0; _i900 < _list899.size; ++_i900)
             {
-              Index _elem901; // required
+              Index _elem901; // optional
               _elem901 = new Index();
               _elem901.read(iprot);
               struct.success.add(_elem901);
@@ -95673,7 +95673,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list902.size);
                   for (int _i903 = 0; _i903 < _list902.size; ++_i903)
                   {
-                    String _elem904; // required
+                    String _elem904; // optional
                     _elem904 = iprot.readString();
                     struct.success.add(_elem904);
                   }
@@ -95772,7 +95772,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list907.size);
             for (int _i908 = 0; _i908 < _list907.size; ++_i908)
             {
-              String _elem909; // required
+              String _elem909; // optional
               _elem909 = iprot.readString();
               struct.success.add(_elem909);
             }
@@ -111516,7 +111516,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list910.size);
                   for (int _i911 = 0; _i911 < _list910.size; ++_i911)
                   {
-                    String _elem912; // required
+                    String _elem912; // optional
                     _elem912 = iprot.readString();
                     struct.success.add(_elem912);
                   }
@@ -111615,7 +111615,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list915.size);
             for (int _i916 = 0; _i916 < _list915.size; ++_i916)
             {
-              String _elem917; // required
+              String _elem917; // optional
               _elem917 = iprot.readString();
               struct.success.add(_elem917);
             }
@@ -114964,7 +114964,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list918.size);
                   for (int _i919 = 0; _i919 < _list918.size; ++_i919)
                   {
-                    String _elem920; // required
+                    String _elem920; // optional
                     _elem920 = iprot.readString();
                     struct.success.add(_elem920);
                   }
@@ -115063,7 +115063,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list923.size);
             for (int _i924 = 0; _i924 < _list923.size; ++_i924)
             {
-              String _elem925; // required
+              String _elem925; // optional
               _elem925 = iprot.readString();
               struct.success.add(_elem925);
             }
@@ -118360,7 +118360,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<Role>(_list926.size);
                   for (int _i927 = 0; _i927 < _list926.size; ++_i927)
                   {
-                    Role _elem928; // required
+                    Role _elem928; // optional
                     _elem928 = new Role();
                     _elem928.read(iprot);
                     struct.success.add(_elem928);
@@ -118460,7 +118460,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<Role>(_list931.size);
             for (int _i932 = 0; _i932 < _list931.size; ++_i932)
             {
-              Role _elem933; // required
+              Role _elem933; // optional
               _elem933 = new Role();
               _elem933.read(iprot);
               struct.success.add(_elem933);
@@ -121475,7 +121475,7 @@ public class ThriftHiveMetastore {
                   struct.group_names = new ArrayList<String>(_list934.size);
                   for (int _i935 = 0; _i935 < _list934.size; ++_i935)
                   {
-                    String _elem936; // required
+                    String _elem936; // optional
                     _elem936 = iprot.readString();
                     struct.group_names.add(_elem936);
                   }
@@ -121585,7 +121585,7 @@ public class ThriftHiveMetastore {
             struct.group_names = new ArrayList<String>(_list939.size);
             for (int _i940 = 0; _i940 < _list939.size; ++_i940)
             {
-              String _elem941; // required
+              String _elem941; // optional
               _elem941 = iprot.readString();
               struct.group_names.add(_elem941);
             }
@@ -123049,7 +123049,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<HiveObjectPrivilege>(_list942.size);
                   for (int _i943 = 0; _i943 < _list942.size; ++_i943)
                   {
-                    HiveObjectPrivilege _elem944; // required
+                    HiveObjectPrivilege _elem944; // optional
                     _elem944 = new HiveObjectPrivilege();
                     _elem944.read(iprot);
                     struct.success.add(_elem944);
@@ -123149,7 +123149,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<HiveObjectPrivilege>(_list947.size);
             for (int _i948 = 0; _i948 < _list947.size; ++_i948)
             {
-              HiveObjectPrivilege _elem949; // required
+              HiveObjectPrivilege _elem949; // optional
               _elem949 = new HiveObjectPrivilege();
               _elem949.read(iprot);
               struct.success.add(_elem949);
@@ -126061,7 +126061,7 @@ public class ThriftHiveMetastore {
                   struct.group_names = new ArrayList<String>(_list950.size);
                   for (int _i951 = 0; _i951 < _list950.size; ++_i951)
                   {
-                    String _elem952; // required
+                    String _elem952; // optional
                     _elem952 = iprot.readString();
                     struct.group_names.add(_elem952);
                   }
@@ -126155,7 +126155,7 @@ public class ThriftHiveMetastore {
             struct.group_names = new ArrayList<String>(_list955.size);
             for (int _i956 = 0; _i956 < _list955.size; ++_i956)
             {
-              String _elem957; // required
+              String _elem957; // optional
               _elem957 = iprot.readString();
               struct.group_names.add(_elem957);
             }
@@ -126567,7 +126567,7 @@ public class ThriftHiveMetastore {
                   struct.success = new ArrayList<String>(_list958.size);
                   for (int _i959 = 0; _i959 < _list958.size; ++_i959)
                   {
-                    String _elem960; // required
+                    String _elem960; // optional
                     _elem960 = iprot.readString();
                     struct.success.add(_elem960);
                   }
@@ -126666,7 +126666,7 @@ public class ThriftHiveMetastore {
             struct.success = new ArrayList<String>(_list963.size);
             for (int _i964 = 0; _i964 < _list963.size; ++_i964)
             {
-              String _elem965; // required
+              String _elem965; // optional
               _elem965 = iprot.readString();
               struct.success.add(_elem965);
             }

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java Tue Apr 14 20:47:29 2015
@@ -618,7 +618,7 @@ public class Type implements org.apache.
                 struct.fields = new ArrayList<FieldSchema>(_list0.size);
                 for (int _i1 = 0; _i1 < _list0.size; ++_i1)
                 {
-                  FieldSchema _elem2; // required
+                  FieldSchema _elem2; // optional
                   _elem2 = new FieldSchema();
                   _elem2.read(iprot);
                   struct.fields.add(_elem2);
@@ -749,7 +749,7 @@ public class Type implements org.apache.
           struct.fields = new ArrayList<FieldSchema>(_list5.size);
           for (int _i6 = 0; _i6 < _list5.size; ++_i6)
           {
-            FieldSchema _elem7; // required
+            FieldSchema _elem7; // optional
             _elem7 = new FieldSchema();
             _elem7.read(iprot);
             struct.fields.add(_elem7);

Modified: hive/trunk/metastore/src/gen/thrift/gen-php/metastore/Types.php
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-php/metastore/Types.php?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-php/metastore/Types.php (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-php/metastore/Types.php Tue Apr 14 20:47:29 2015
@@ -6074,6 +6074,220 @@ class DecimalColumnStatsData {
 
 }
 
+class Date {
+  static $_TSPEC;
+
+  public $daysSinceEpoch = null;
+
+  public function __construct($vals=null) {
+    if (!isset(self::$_TSPEC)) {
+      self::$_TSPEC = array(
+        1 => array(
+          'var' => 'daysSinceEpoch',
+          'type' => TType::I64,
+          ),
+        );
+    }
+    if (is_array($vals)) {
+      if (isset($vals['daysSinceEpoch'])) {
+        $this->daysSinceEpoch = $vals['daysSinceEpoch'];
+      }
+    }
+  }
+
+  public function getName() {
+    return 'Date';
+  }
+
+  public function read($input)
+  {
+    $xfer = 0;
+    $fname = null;
+    $ftype = 0;
+    $fid = 0;
+    $xfer += $input->readStructBegin($fname);
+    while (true)
+    {
+      $xfer += $input->readFieldBegin($fname, $ftype, $fid);
+      if ($ftype == TType::STOP) {
+        break;
+      }
+      switch ($fid)
+      {
+        case 1:
+          if ($ftype == TType::I64) {
+            $xfer += $input->readI64($this->daysSinceEpoch);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
+        default:
+          $xfer += $input->skip($ftype);
+          break;
+      }
+      $xfer += $input->readFieldEnd();
+    }
+    $xfer += $input->readStructEnd();
+    return $xfer;
+  }
+
+  public function write($output) {
+    $xfer = 0;
+    $xfer += $output->writeStructBegin('Date');
+    if ($this->daysSinceEpoch !== null) {
+      $xfer += $output->writeFieldBegin('daysSinceEpoch', TType::I64, 1);
+      $xfer += $output->writeI64($this->daysSinceEpoch);
+      $xfer += $output->writeFieldEnd();
+    }
+    $xfer += $output->writeFieldStop();
+    $xfer += $output->writeStructEnd();
+    return $xfer;
+  }
+
+}
+
+class DateColumnStatsData {
+  static $_TSPEC;
+
+  public $lowValue = null;
+  public $highValue = null;
+  public $numNulls = null;
+  public $numDVs = null;
+
+  public function __construct($vals=null) {
+    if (!isset(self::$_TSPEC)) {
+      self::$_TSPEC = array(
+        1 => array(
+          'var' => 'lowValue',
+          'type' => TType::STRUCT,
+          'class' => '\metastore\Date',
+          ),
+        2 => array(
+          'var' => 'highValue',
+          'type' => TType::STRUCT,
+          'class' => '\metastore\Date',
+          ),
+        3 => array(
+          'var' => 'numNulls',
+          'type' => TType::I64,
+          ),
+        4 => array(
+          'var' => 'numDVs',
+          'type' => TType::I64,
+          ),
+        );
+    }
+    if (is_array($vals)) {
+      if (isset($vals['lowValue'])) {
+        $this->lowValue = $vals['lowValue'];
+      }
+      if (isset($vals['highValue'])) {
+        $this->highValue = $vals['highValue'];
+      }
+      if (isset($vals['numNulls'])) {
+        $this->numNulls = $vals['numNulls'];
+      }
+      if (isset($vals['numDVs'])) {
+        $this->numDVs = $vals['numDVs'];
+      }
+    }
+  }
+
+  public function getName() {
+    return 'DateColumnStatsData';
+  }
+
+  public function read($input)
+  {
+    $xfer = 0;
+    $fname = null;
+    $ftype = 0;
+    $fid = 0;
+    $xfer += $input->readStructBegin($fname);
+    while (true)
+    {
+      $xfer += $input->readFieldBegin($fname, $ftype, $fid);
+      if ($ftype == TType::STOP) {
+        break;
+      }
+      switch ($fid)
+      {
+        case 1:
+          if ($ftype == TType::STRUCT) {
+            $this->lowValue = new \metastore\Date();
+            $xfer += $this->lowValue->read($input);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
+        case 2:
+          if ($ftype == TType::STRUCT) {
+            $this->highValue = new \metastore\Date();
+            $xfer += $this->highValue->read($input);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
+        case 3:
+          if ($ftype == TType::I64) {
+            $xfer += $input->readI64($this->numNulls);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
+        case 4:
+          if ($ftype == TType::I64) {
+            $xfer += $input->readI64($this->numDVs);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
+        default:
+          $xfer += $input->skip($ftype);
+          break;
+      }
+      $xfer += $input->readFieldEnd();
+    }
+    $xfer += $input->readStructEnd();
+    return $xfer;
+  }
+
+  public function write($output) {
+    $xfer = 0;
+    $xfer += $output->writeStructBegin('DateColumnStatsData');
+    if ($this->lowValue !== null) {
+      if (!is_object($this->lowValue)) {
+        throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
+      }
+      $xfer += $output->writeFieldBegin('lowValue', TType::STRUCT, 1);
+      $xfer += $this->lowValue->write($output);
+      $xfer += $output->writeFieldEnd();
+    }
+    if ($this->highValue !== null) {
+      if (!is_object($this->highValue)) {
+        throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
+      }
+      $xfer += $output->writeFieldBegin('highValue', TType::STRUCT, 2);
+      $xfer += $this->highValue->write($output);
+      $xfer += $output->writeFieldEnd();
+    }
+    if ($this->numNulls !== null) {
+      $xfer += $output->writeFieldBegin('numNulls', TType::I64, 3);
+      $xfer += $output->writeI64($this->numNulls);
+      $xfer += $output->writeFieldEnd();
+    }
+    if ($this->numDVs !== null) {
+      $xfer += $output->writeFieldBegin('numDVs', TType::I64, 4);
+      $xfer += $output->writeI64($this->numDVs);
+      $xfer += $output->writeFieldEnd();
+    }
+    $xfer += $output->writeFieldStop();
+    $xfer += $output->writeStructEnd();
+    return $xfer;
+  }
+
+}
+
 class ColumnStatisticsData {
   static $_TSPEC;
 
@@ -6083,6 +6297,7 @@ class ColumnStatisticsData {
   public $stringStats = null;
   public $binaryStats = null;
   public $decimalStats = null;
+  public $dateStats = null;
 
   public function __construct($vals=null) {
     if (!isset(self::$_TSPEC)) {
@@ -6117,6 +6332,11 @@ class ColumnStatisticsData {
           'type' => TType::STRUCT,
           'class' => '\metastore\DecimalColumnStatsData',
           ),
+        7 => array(
+          'var' => 'dateStats',
+          'type' => TType::STRUCT,
+          'class' => '\metastore\DateColumnStatsData',
+          ),
         );
     }
     if (is_array($vals)) {
@@ -6138,6 +6358,9 @@ class ColumnStatisticsData {
       if (isset($vals['decimalStats'])) {
         $this->decimalStats = $vals['decimalStats'];
       }
+      if (isset($vals['dateStats'])) {
+        $this->dateStats = $vals['dateStats'];
+      }
     }
   }
 
@@ -6208,6 +6431,14 @@ class ColumnStatisticsData {
             $xfer += $input->skip($ftype);
           }
           break;
+        case 7:
+          if ($ftype == TType::STRUCT) {
+            $this->dateStats = new \metastore\DateColumnStatsData();
+            $xfer += $this->dateStats->read($input);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
         default:
           $xfer += $input->skip($ftype);
           break;
@@ -6269,6 +6500,14 @@ class ColumnStatisticsData {
       $xfer += $this->decimalStats->write($output);
       $xfer += $output->writeFieldEnd();
     }
+    if ($this->dateStats !== null) {
+      if (!is_object($this->dateStats)) {
+        throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
+      }
+      $xfer += $output->writeFieldBegin('dateStats', TType::STRUCT, 7);
+      $xfer += $this->dateStats->write($output);
+      $xfer += $output->writeFieldEnd();
+    }
     $xfer += $output->writeFieldStop();
     $xfer += $output->writeStructEnd();
     return $xfer;
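
For reference, a minimal usage sketch of the newly generated PHP classes above (not part of the commit); the protocol handle $oprot and all field values are hypothetical:

  // Build date column statistics with the generated structs; values are illustrative.
  $stats = new \metastore\DateColumnStatsData(array(
    'lowValue'  => new \metastore\Date(array('daysSinceEpoch' => 15000)),
    'highValue' => new \metastore\Date(array('daysSinceEpoch' => 16500)),
    'numNulls'  => 0,
    'numDVs'    => 1200,
  ));
  // $oprot is assumed to be a Thrift TProtocol instance (e.g. TBinaryProtocol over a transport).
  $xfer = $stats->write($oprot);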

Modified: hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py Tue Apr 14 20:47:29 2015
@@ -4148,6 +4148,170 @@ class DecimalColumnStatsData:
   def __ne__(self, other):
     return not (self == other)
 
+class Date:
+  """
+  Attributes:
+   - daysSinceEpoch
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I64, 'daysSinceEpoch', None, None, ), # 1
+  )
+
+  def __init__(self, daysSinceEpoch=None,):
+    self.daysSinceEpoch = daysSinceEpoch
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I64:
+          self.daysSinceEpoch = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('Date')
+    if self.daysSinceEpoch is not None:
+      oprot.writeFieldBegin('daysSinceEpoch', TType.I64, 1)
+      oprot.writeI64(self.daysSinceEpoch)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.daysSinceEpoch is None:
+      raise TProtocol.TProtocolException(message='Required field daysSinceEpoch is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class DateColumnStatsData:
+  """
+  Attributes:
+   - lowValue
+   - highValue
+   - numNulls
+   - numDVs
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRUCT, 'lowValue', (Date, Date.thrift_spec), None, ), # 1
+    (2, TType.STRUCT, 'highValue', (Date, Date.thrift_spec), None, ), # 2
+    (3, TType.I64, 'numNulls', None, None, ), # 3
+    (4, TType.I64, 'numDVs', None, None, ), # 4
+  )
+
+  def __init__(self, lowValue=None, highValue=None, numNulls=None, numDVs=None,):
+    self.lowValue = lowValue
+    self.highValue = highValue
+    self.numNulls = numNulls
+    self.numDVs = numDVs
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRUCT:
+          self.lowValue = Date()
+          self.lowValue.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.highValue = Date()
+          self.highValue.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I64:
+          self.numNulls = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.I64:
+          self.numDVs = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('DateColumnStatsData')
+    if self.lowValue is not None:
+      oprot.writeFieldBegin('lowValue', TType.STRUCT, 1)
+      self.lowValue.write(oprot)
+      oprot.writeFieldEnd()
+    if self.highValue is not None:
+      oprot.writeFieldBegin('highValue', TType.STRUCT, 2)
+      self.highValue.write(oprot)
+      oprot.writeFieldEnd()
+    if self.numNulls is not None:
+      oprot.writeFieldBegin('numNulls', TType.I64, 3)
+      oprot.writeI64(self.numNulls)
+      oprot.writeFieldEnd()
+    if self.numDVs is not None:
+      oprot.writeFieldBegin('numDVs', TType.I64, 4)
+      oprot.writeI64(self.numDVs)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.numNulls is None:
+      raise TProtocol.TProtocolException(message='Required field numNulls is unset!')
+    if self.numDVs is None:
+      raise TProtocol.TProtocolException(message='Required field numDVs is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
 class ColumnStatisticsData:
   """
   Attributes:
@@ -4157,6 +4321,7 @@ class ColumnStatisticsData:
    - stringStats
    - binaryStats
    - decimalStats
+   - dateStats
   """
 
   thrift_spec = (
@@ -4167,15 +4332,17 @@ class ColumnStatisticsData:
     (4, TType.STRUCT, 'stringStats', (StringColumnStatsData, StringColumnStatsData.thrift_spec), None, ), # 4
     (5, TType.STRUCT, 'binaryStats', (BinaryColumnStatsData, BinaryColumnStatsData.thrift_spec), None, ), # 5
     (6, TType.STRUCT, 'decimalStats', (DecimalColumnStatsData, DecimalColumnStatsData.thrift_spec), None, ), # 6
+    (7, TType.STRUCT, 'dateStats', (DateColumnStatsData, DateColumnStatsData.thrift_spec), None, ), # 7
   )
 
-  def __init__(self, booleanStats=None, longStats=None, doubleStats=None, stringStats=None, binaryStats=None, decimalStats=None,):
+  def __init__(self, booleanStats=None, longStats=None, doubleStats=None, stringStats=None, binaryStats=None, decimalStats=None, dateStats=None,):
     self.booleanStats = booleanStats
     self.longStats = longStats
     self.doubleStats = doubleStats
     self.stringStats = stringStats
     self.binaryStats = binaryStats
     self.decimalStats = decimalStats
+    self.dateStats = dateStats
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -4222,6 +4389,12 @@ class ColumnStatisticsData:
           self.decimalStats.read(iprot)
         else:
           iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.STRUCT:
+          self.dateStats = DateColumnStatsData()
+          self.dateStats.read(iprot)
+        else:
+          iprot.skip(ftype)
       else:
         iprot.skip(ftype)
       iprot.readFieldEnd()
@@ -4256,6 +4429,10 @@ class ColumnStatisticsData:
       oprot.writeFieldBegin('decimalStats', TType.STRUCT, 6)
       self.decimalStats.write(oprot)
       oprot.writeFieldEnd()
+    if self.dateStats is not None:
+      oprot.writeFieldBegin('dateStats', TType.STRUCT, 7)
+      self.dateStats.write(oprot)
+      oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 

Modified: hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb Tue Apr 14 20:47:29 2015
@@ -977,6 +977,47 @@ class DecimalColumnStatsData
   ::Thrift::Struct.generate_accessors self
 end
 
+class Date
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  DAYSSINCEEPOCH = 1
+
+  FIELDS = {
+    DAYSSINCEEPOCH => {:type => ::Thrift::Types::I64, :name => 'daysSinceEpoch'}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field daysSinceEpoch is unset!') unless @daysSinceEpoch
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class DateColumnStatsData
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  LOWVALUE = 1
+  HIGHVALUE = 2
+  NUMNULLS = 3
+  NUMDVS = 4
+
+  FIELDS = {
+    LOWVALUE => {:type => ::Thrift::Types::STRUCT, :name => 'lowValue', :class => ::Date, :optional => true},
+    HIGHVALUE => {:type => ::Thrift::Types::STRUCT, :name => 'highValue', :class => ::Date, :optional => true},
+    NUMNULLS => {:type => ::Thrift::Types::I64, :name => 'numNulls'},
+    NUMDVS => {:type => ::Thrift::Types::I64, :name => 'numDVs'}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field numNulls is unset!') unless @numNulls
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field numDVs is unset!') unless @numDVs
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
 class ColumnStatisticsData < ::Thrift::Union
   include ::Thrift::Struct_Union
   class << self
@@ -1003,6 +1044,10 @@ class ColumnStatisticsData < ::Thrift::U
     def decimalStats(val)
       ColumnStatisticsData.new(:decimalStats, val)
     end
+
+    def dateStats(val)
+      ColumnStatisticsData.new(:dateStats, val)
+    end
   end
 
   BOOLEANSTATS = 1
@@ -1011,6 +1056,7 @@ class ColumnStatisticsData < ::Thrift::U
   STRINGSTATS = 4
   BINARYSTATS = 5
   DECIMALSTATS = 6
+  DATESTATS = 7
 
   FIELDS = {
     BOOLEANSTATS => {:type => ::Thrift::Types::STRUCT, :name => 'booleanStats', :class => ::BooleanColumnStatsData},
@@ -1018,7 +1064,8 @@ class ColumnStatisticsData < ::Thrift::U
     DOUBLESTATS => {:type => ::Thrift::Types::STRUCT, :name => 'doubleStats', :class => ::DoubleColumnStatsData},
     STRINGSTATS => {:type => ::Thrift::Types::STRUCT, :name => 'stringStats', :class => ::StringColumnStatsData},
     BINARYSTATS => {:type => ::Thrift::Types::STRUCT, :name => 'binaryStats', :class => ::BinaryColumnStatsData},
-    DECIMALSTATS => {:type => ::Thrift::Types::STRUCT, :name => 'decimalStats', :class => ::DecimalColumnStatsData}
+    DECIMALSTATS => {:type => ::Thrift::Types::STRUCT, :name => 'decimalStats', :class => ::DecimalColumnStatsData},
+    DATESTATS => {:type => ::Thrift::Types::STRUCT, :name => 'dateStats', :class => ::DateColumnStatsData}
   }
 
   def struct_fields; FIELDS; end
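
As a quick illustration of the new union member introduced by these generated bindings, here is a minimal sketch using the generated Java API; the class and setter names match those used later in this diff, while the concrete day values (0 for 1970-01-01, 16436 for 2015-01-01) are only examples:

    import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
    import org.apache.hadoop.hive.metastore.api.Date;
    import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;

    public class DateStatsSketch {
      public static void main(String[] args) {
        // Build date column statistics; high/low values are Date structs
        // holding a day count since the epoch.
        DateColumnStatsData dateStats = new DateColumnStatsData();
        dateStats.setLowValue(new Date(0L));       // 1970-01-01
        dateStats.setHighValue(new Date(16436L));  // 2015-01-01
        dateStats.setNumNulls(0);
        dateStats.setNumDVs(19);

        // Attach them as the new dateStats union field (field id 7).
        ColumnStatisticsData data = new ColumnStatisticsData();
        data.setDateStats(dateStats);
        System.out.println(data.isSetDateStats()); // expected: true
      }
    }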

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java Tue Apr 14 20:47:29 2015
@@ -27,6 +27,8 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.Date;
+import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.Decimal;
 import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
@@ -103,6 +105,13 @@ public class StatObjectConverter {
            binaryStats.isSetNumNulls() ? binaryStats.getNumNulls() : null,
            binaryStats.isSetMaxColLen() ? binaryStats.getMaxColLen() : null,
            binaryStats.isSetAvgColLen() ? binaryStats.getAvgColLen() : null);
+     } else if (statsObj.getStatsData().isSetDateStats()) {
+       DateColumnStatsData dateStats = statsObj.getStatsData().getDateStats();
+       mColStats.setDateStats(
+           dateStats.isSetNumNulls() ? dateStats.getNumNulls() : null,
+           dateStats.isSetNumDVs() ? dateStats.getNumDVs() : null,
+           dateStats.isSetLowValue() ? dateStats.getLowValue().getDaysSinceEpoch() : null,
+           dateStats.isSetHighValue() ? dateStats.getHighValue().getDaysSinceEpoch() : null);
      }
      return mColStats;
   }
@@ -258,6 +267,19 @@ public class StatObjectConverter {
       }
       decimalStats.setNumDVs(mStatsObj.getNumDVs());
       colStatsData.setDecimalStats(decimalStats);
+    } else if (colType.equals("date")) {
+      DateColumnStatsData dateStats = new DateColumnStatsData();
+      dateStats.setNumNulls(mStatsObj.getNumNulls());
+      Long highValue = mStatsObj.getLongHighValue();
+      if (highValue != null) {
+        dateStats.setHighValue(new Date(highValue));
+      }
+      Long lowValue = mStatsObj.getLongLowValue();
+      if (lowValue != null) {
+        dateStats.setLowValue(new Date(lowValue));
+      }
+      dateStats.setNumDVs(mStatsObj.getNumDVs());
+      colStatsData.setDateStats(dateStats);
     }
     statsObj.setStatsData(colStatsData);
     return statsObj;
@@ -330,6 +352,13 @@ public class StatObjectConverter {
           binaryStats.isSetNumNulls() ? binaryStats.getNumNulls() : null,
           binaryStats.isSetMaxColLen() ? binaryStats.getMaxColLen() : null,
           binaryStats.isSetAvgColLen() ? binaryStats.getAvgColLen() : null);
+    } else if (statsObj.getStatsData().isSetDateStats()) {
+      DateColumnStatsData dateStats = statsObj.getStatsData().getDateStats();
+      mColStats.setDateStats(
+          dateStats.isSetNumNulls() ? dateStats.getNumNulls() : null,
+          dateStats.isSetNumDVs() ? dateStats.getNumDVs() : null,
+          dateStats.isSetLowValue() ? dateStats.getLowValue().getDaysSinceEpoch() : null,
+          dateStats.isSetHighValue() ? dateStats.getHighValue().getDaysSinceEpoch() : null);
     }
     return mColStats;
   }
@@ -397,6 +426,13 @@ public class StatObjectConverter {
       }
       decimalStats.setNumDVs(mStatsObj.getNumDVs());
       colStatsData.setDecimalStats(decimalStats);
+    } else if (colType.equals("date")) {
+      DateColumnStatsData dateStats = new DateColumnStatsData();
+      dateStats.setNumNulls(mStatsObj.getNumNulls());
+      dateStats.setHighValue(new Date(mStatsObj.getLongHighValue()));
+      dateStats.setLowValue(new Date(mStatsObj.getLongLowValue()));
+      dateStats.setNumDVs(mStatsObj.getNumDVs());
+      colStatsData.setDateStats(dateStats);
     }
     statsObj.setStatsData(colStatsData);
     return statsObj;
@@ -473,6 +509,17 @@ public class StatObjectConverter {
       }
       decimalStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
       data.setDecimalStats(decimalStats);
+    } else if (colType.equals("date")) {
+      DateColumnStatsData dateStats = new DateColumnStatsData();
+      dateStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+      if (lhigh != null) {
+        dateStats.setHighValue(new Date(MetaStoreDirectSql.extractSqlLong(lhigh)));
+      }
+      if (llow != null) {
+        dateStats.setLowValue(new Date(MetaStoreDirectSql.extractSqlLong(llow)));
+      }
+      dateStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
+      data.setDateStats(dateStats);
     }
   }
 

Modified: hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPartitionColumnStatistics.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPartitionColumnStatistics.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPartitionColumnStatistics.java (original)
+++ hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPartitionColumnStatistics.java Tue Apr 14 20:47:29 2015
@@ -200,6 +200,14 @@ public class MPartitionColumnStatistics
     this.maxColLen = maxColLen;
     this.avgColLen = avgColLen;
   }
+
+  public void setDateStats(Long numNulls, Long numNDVs, Long lowValue, Long highValue) {
+    this.numNulls = numNulls;
+    this.numDVs = numNDVs;
+    this.longLowValue = lowValue;
+    this.longHighValue = highValue;
+  }
+
   public Long getLongLowValue() {
     return longLowValue;
   }

Modified: hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTableColumnStatistics.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTableColumnStatistics.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTableColumnStatistics.java (original)
+++ hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTableColumnStatistics.java Tue Apr 14 20:47:29 2015
@@ -191,6 +191,13 @@ public class MTableColumnStatistics {
     this.avgColLen = avgColLen;
   }
 
+  public void setDateStats(Long numNulls, Long numNDVs, Long lowValue, Long highValue) {
+    this.numNulls = numNulls;
+    this.numDVs = numNDVs;
+    this.longLowValue = lowValue;
+    this.longHighValue = highValue;
+  }
+
   public Long getLongLowValue() {
     return longLowValue;
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java Tue Apr 14 20:47:29 2015
@@ -36,6 +36,8 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.Date;
+import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.Decimal;
 import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
@@ -51,11 +53,13 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.plan.ColumnStatsWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
@@ -187,6 +191,23 @@ public class ColumnStatsTask extends Tas
     }
   }
 
+  private void unpackDateStats(ObjectInspector oi, Object o, String fName,
+      ColumnStatisticsObj statsObj) {
+    if (fName.equals("countnulls")) {
+      long v = ((LongObjectInspector) oi).get(o);
+      statsObj.getStatsData().getDateStats().setNumNulls(v);
+    } else if (fName.equals("numdistinctvalues")) {
+      long v = ((LongObjectInspector) oi).get(o);
+      statsObj.getStatsData().getDateStats().setNumDVs(v);
+    } else if (fName.equals("max")) {
+      DateWritable v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
+      statsObj.getStatsData().getDateStats().setHighValue(new Date(v.getDays()));
+    } else if (fName.equals("min")) {
+      DateWritable v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
+      statsObj.getStatsData().getDateStats().setLowValue(new Date(v.getDays()));
+    }
+  }
+
   private void unpackPrimitiveObject (ObjectInspector oi, Object o, String fieldName,
       ColumnStatisticsObj statsObj) {
     if (o == null) {
@@ -222,6 +243,10 @@ public class ColumnStatsTask extends Tas
         DecimalColumnStatsData decimalStats = new DecimalColumnStatsData();
         statsData.setDecimalStats(decimalStats);
         statsObj.setStatsData(statsData);
+      } else if (s.equalsIgnoreCase("date")) {
+        DateColumnStatsData dateStats = new DateColumnStatsData();
+        statsData.setDateStats(dateStats);
+        statsObj.setStatsData(statsData);
       }
     } else {
       // invoke the right unpack method depending on data type of the column
@@ -237,6 +262,8 @@ public class ColumnStatsTask extends Tas
         unpackBinaryStats(oi, o, fieldName, statsObj);
       } else if (statsObj.getStatsData().isSetDecimalStats()) {
         unpackDecimalStats(oi, o, fieldName, statsObj);
+      } else if (statsObj.getStatsData().isSetDateStats()) {
+        unpackDateStats(oi, o, fieldName, statsObj);
       }
     }
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java Tue Apr 14 20:47:29 2015
@@ -36,6 +36,8 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.Date;
+import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.Decimal;
 import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
@@ -50,6 +52,7 @@ import org.apache.hadoop.hive.ql.plan.Co
 import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
  * ColumnStatsUpdateTask implementation. For example, ALTER TABLE src_stat
@@ -235,6 +238,28 @@ public class ColumnStatsUpdateTask exten
       }
       statsData.setDecimalStats(decimalStats);
       statsObj.setStatsData(statsData);
+    } else if (columnType.equalsIgnoreCase("date")) {
+      DateColumnStatsData dateStats = new DateColumnStatsData();
+      Map<String, String> mapProp = work.getMapProp();
+      for (Entry<String, String> entry : mapProp.entrySet()) {
+        String fName = entry.getKey();
+        String value = entry.getValue();
+        if (fName.equals("numNulls")) {
+          dateStats.setNumNulls(Long.parseLong(value));
+        } else if (fName.equals("numDVs")) {
+          dateStats.setNumDVs(Long.parseLong(value));
+        } else if (fName.equals("lowValue")) {
+          // Date high/low value is stored as long in stats DB, but allow users to set high/low
+          // value using either date format (yyyy-mm-dd) or numeric format (days since epoch)
+          dateStats.setLowValue(readDateValue(value));
+        } else if (fName.equals("highValue")) {
+          dateStats.setHighValue(readDateValue(value));
+        } else {
+          throw new SemanticException("Unknown stat");
+        }
+      }
+      statsData.setDateStats(dateStats);
+      statsObj.setStatsData(statsData);
     } else {
       throw new SemanticException("Unsupported type");
     }
@@ -302,4 +327,16 @@ public class ColumnStatsUpdateTask exten
   public String getName() {
     return "COLUMNSTATS UPDATE TASK";
   }
+
+  private Date readDateValue(String dateStr) {
+    // try either yyyy-mm-dd, or integer representing days since epoch
+    try {
+      DateWritable writableVal = new DateWritable(java.sql.Date.valueOf(dateStr));
+      return new Date(writableVal.getDays());
+    } catch (IllegalArgumentException err) {
+      // Fallback to integer parsing
+      LOG.debug("Reading date value as days since epoch: " + dateStr);
+      return new Date(Long.parseLong(dateStr));
+    }
+  }
 }
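
Because the stats DB stores the date high/low values as longs, readDateValue above accepts either a date literal or a day count. A minimal sketch of that equivalence, reusing DateWritable the same way the method does (the literal 16436 is simply the day count for 2015-01-01):

    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class DateDaysSketch {
      public static void main(String[] args) {
        // 'highValue'='2015-01-01' and 'highValue'='16436' resolve to the same stored long.
        DateWritable fromString = new DateWritable(java.sql.Date.valueOf("2015-01-01"));
        long fromNumber = Long.parseLong("16436");
        System.out.println(fromString.getDays() == fromNumber); // expected: true
      }
    }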

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java Tue Apr 14 20:47:29 2015
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.Decimal;
 import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
@@ -52,6 +53,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.plan.DescTableDesc;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.ShowIndexesDesc;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 
 /**
@@ -145,9 +147,21 @@ public final class MetaDataFormatUtils {
   }
 
   private static String convertToString(Decimal val) {
+    if (val == null) {
+      return "";
+    }
     return HiveDecimal.create(new BigInteger(val.getUnscaled()), val.getScale()).toString();
   }
 
+  private static String convertToString(org.apache.hadoop.hive.metastore.api.Date val) {
+    if (val == null) {
+      return "";
+    }
+
+    DateWritable writableValue = new DateWritable((int) val.getDaysSinceEpoch());
+    return writableValue.toString();
+  }
+
   private static ColumnStatisticsObj getColumnStatisticsObject(String colName,
       String colType, List<ColumnStatisticsObj> colStats) {
     if (colStats != null && !colStats.isEmpty()) {
@@ -196,6 +210,12 @@ public final class MetaDataFormatUtils {
           LongColumnStatsData lcsd = csd.getLongStats();
           appendColumnStatsNoFormatting(colBuffer, lcsd.getLowValue(), lcsd.getHighValue(),
               lcsd.getNumNulls(), lcsd.getNumDVs(), "", "", "", "");
+        } else if (csd.isSetDateStats()) {
+          DateColumnStatsData dcsd = csd.getDateStats();
+          appendColumnStatsNoFormatting(colBuffer,
+              convertToString(dcsd.getLowValue()),
+              convertToString(dcsd.getHighValue()),
+              dcsd.getNumNulls(), dcsd.getNumDVs(), "", "", "", "");
         }
       } else {
         appendColumnStatsNoFormatting(colBuffer, "", "", "", "", "", "", "", "");
@@ -440,6 +460,12 @@ public final class MetaDataFormatUtils {
           LongColumnStatsData lcsd = csd.getLongStats();
           appendColumnStats(tableInfo, lcsd.getLowValue(), lcsd.getHighValue(), lcsd.getNumNulls(),
               lcsd.getNumDVs(), "", "", "", "");
+        } else if (csd.isSetDateStats()) {
+          DateColumnStatsData dcsd = csd.getDateStats();
+          appendColumnStats(tableInfo,
+              convertToString(dcsd.getLowValue()),
+              convertToString(dcsd.getHighValue()),
+              dcsd.getNumNulls(), dcsd.getNumDVs(), "", "", "", "");
         }
       } else {
         appendColumnStats(tableInfo, "", "", "", "", "", "", "", "");

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java Tue Apr 14 20:47:29 2015
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -86,9 +87,11 @@ public class GenericUDAFComputeStats ext
       return new GenericUDAFBinaryStatsEvaluator();
     case DECIMAL:
       return new GenericUDAFDecimalStatsEvaluator();
+    case DATE:
+      return new GenericUDAFDateStatsEvaluator();
     default:
       throw new UDFArgumentTypeException(0,
-          "Only integer/long/timestamp/float/double/string/binary/boolean/decimal type argument " +
+          "Only integer/long/timestamp/date/float/double/string/binary/boolean/decimal type argument " +
           "is accepted but "
           + parameters[0].getTypeName() + " is passed.");
     }
@@ -1314,4 +1317,73 @@ public class GenericUDAFComputeStats ext
       ((NumericStatsAgg)agg).reset("Decimal");
     }
   }
+
+  /**
+   * GenericUDAFDateStatsEvaluator
+   * High/low value will be saved in stats DB as long value representing days since epoch.
+   */
+  public static class GenericUDAFDateStatsEvaluator
+      extends GenericUDAFNumericStatsEvaluator<DateWritable, DateObjectInspector> {
+
+    @Override
+    protected DateObjectInspector getValueObjectInspector() {
+      return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
+    }
+
+    @AggregationType(estimable = true)
+    public class DateStatsAgg extends NumericStatsAgg {
+      @Override
+      public int estimate() {
+        JavaDataModel model = JavaDataModel.get();
+        return super.estimate() + model.primitive2() * 2;
+      }
+
+      @Override
+      protected void update(Object p, PrimitiveObjectInspector inputOI) {
+        // DateWritable is mutable, DateStatsAgg needs its own copy
+        DateWritable v = new DateWritable((DateWritable) inputOI.getPrimitiveWritableObject(p));
+
+        //Update min counter if new value is less than min seen so far
+        if (min == null || v.compareTo(min) < 0) {
+          min = v;
+        }
+        //Update max counter if new value is greater than max seen so far
+        if (max == null || v.compareTo(max) > 0) {
+          max = v;
+        }
+        // Add value to NumDistinctValue Estimator
+        numDV.addToEstimator(v.getDays());
+      }
+
+      @Override
+      protected void updateMin(Object minValue, DateObjectInspector minFieldOI) {
+        if ((minValue != null) && (min == null ||
+            min.compareTo(minFieldOI.getPrimitiveWritableObject(minValue)) > 0)) {
+          // DateWritable is mutable, DateStatsAgg needs its own copy
+          min = new DateWritable(minFieldOI.getPrimitiveWritableObject(minValue));
+        }
+      }
+
+      @Override
+      protected void updateMax(Object maxValue, DateObjectInspector maxFieldOI) {
+        if ((maxValue != null) && (max == null ||
+            max.compareTo(maxFieldOI.getPrimitiveWritableObject(maxValue)) < 0)) {
+          // DateWritable is mutable, DateStatsAgg needs its own copy
+          max = new DateWritable(maxFieldOI.getPrimitiveWritableObject(maxValue));
+        }
+      }
+    };
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      AggregationBuffer result = new DateStatsAgg();
+      reset(result);
+      return result;
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      ((NumericStatsAgg)agg).reset("Date");
+    }
+  }
 }

Added: hive/trunk/ql/src/test/queries/clientpositive/compute_stats_date.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/compute_stats_date.q?rev=1673553&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/compute_stats_date.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/compute_stats_date.q Tue Apr 14 20:47:29 2015
@@ -0,0 +1,28 @@
+
+create table tab_date (
+  origin_city_name string,
+  dest_city_name string,
+  fl_date date,
+  arr_delay float,
+  fl_num int
+);
+
+-- load some data
+load data local inpath '../../data/files/flights_join.txt' overwrite into table tab_date;
+
+select count(*) from tab_date;
+
+-- compute statistical summary of data
+select compute_stats(fl_date, 16) from tab_date;
+
+explain
+analyze table tab_date compute statistics for columns fl_date;
+
+analyze table tab_date compute statistics for columns fl_date;
+
+describe formatted tab_date fl_date;
+
+-- Update stats manually. Try both yyyy-mm-dd and integer value for high/low value
+alter table tab_date update statistics for column fl_date set ('numDVs'='19', 'highValue'='2015-01-01', 'lowValue'='0');
+
+describe formatted tab_date fl_date;


