hive-commits mailing list archives

From hashut...@apache.org
Subject svn commit: r1507713 [1/6] - in /hive/trunk: data/files/ jdbc/src/java/org/apache/hadoop/hive/jdbc/ jdbc/src/java/org/apache/hive/jdbc/ jdbc/src/test/org/apache/hadoop/hive/jdbc/ jdbc/src/test/org/apache/hive/jdbc/ ql/src/gen/protobuf/gen-java/org/apac...
Date Sat, 27 Jul 2013 21:20:06 GMT
Author: hashutosh
Date: Sat Jul 27 21:20:03 2013
New Revision: 1507713

URL: http://svn.apache.org/r1507713
Log:
HIVE-3472 : add Date data type (Jason Dere via Ashutosh Chauhan)

Added:
    hive/trunk/data/files/flights_join.txt
    hive/trunk/data/files/flights_tiny.txt.1
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/DateColumnStatistics.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java
    hive/trunk/ql/src/test/queries/clientnegative/date_literal1.q
    hive/trunk/ql/src/test/queries/clientnegative/date_literal2.q
    hive/trunk/ql/src/test/queries/clientnegative/date_literal3.q
    hive/trunk/ql/src/test/queries/clientpositive/ctas_date.q
    hive/trunk/ql/src/test/queries/clientpositive/date_1.q
    hive/trunk/ql/src/test/queries/clientpositive/date_2.q
    hive/trunk/ql/src/test/queries/clientpositive/date_3.q
    hive/trunk/ql/src/test/queries/clientpositive/date_4.q
    hive/trunk/ql/src/test/queries/clientpositive/date_comparison.q
    hive/trunk/ql/src/test/queries/clientpositive/date_join1.q
    hive/trunk/ql/src/test/queries/clientpositive/date_serde.q
    hive/trunk/ql/src/test/queries/clientpositive/date_udf.q
    hive/trunk/ql/src/test/queries/clientpositive/partition_date.q
    hive/trunk/ql/src/test/queries/clientpositive/union_date.q
    hive/trunk/ql/src/test/results/clientnegative/date_literal1.q.out
    hive/trunk/ql/src/test/results/clientnegative/date_literal2.q.out
    hive/trunk/ql/src/test/results/clientnegative/date_literal3.q.out
    hive/trunk/ql/src/test/results/clientpositive/ctas_date.q.out
    hive/trunk/ql/src/test/results/clientpositive/date_1.q.out
    hive/trunk/ql/src/test/results/clientpositive/date_2.q.out
    hive/trunk/ql/src/test/results/clientpositive/date_3.q.out
    hive/trunk/ql/src/test/results/clientpositive/date_4.q.out
    hive/trunk/ql/src/test/results/clientpositive/date_comparison.q.out
    hive/trunk/ql/src/test/results/clientpositive/date_join1.q.out
    hive/trunk/ql/src/test/results/clientpositive/date_serde.q.out
    hive/trunk/ql/src/test/results/clientpositive/date_udf.q.out
    hive/trunk/ql/src/test/results/clientpositive/partition_date.q.out
    hive/trunk/ql/src/test/results/clientpositive/union_date.q.out
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
    hive/trunk/service/README.txt
Removed:
    hive/trunk/ql/src/test/queries/clientnegative/invalid_t_create1.q
    hive/trunk/ql/src/test/results/clientnegative/invalid_t_create1.q.out
Modified:
    hive/trunk/data/files/datatypes.txt
    hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java
    hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java
    hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
    hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java
    hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java
    hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
    hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
    hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
    hive/trunk/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
    hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java
    hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
    hive/trunk/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ColumnStatisticsImpl.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
    hive/trunk/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
    hive/trunk/ql/src/test/queries/clientnegative/invalid_t_alter1.q
    hive/trunk/ql/src/test/queries/clientnegative/invalid_t_alter2.q
    hive/trunk/ql/src/test/queries/clientnegative/invalid_t_transform.q
    hive/trunk/ql/src/test/queries/clientnegative/serde_regex.q
    hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
    hive/trunk/ql/src/test/results/clientnegative/invalid_t_alter1.q.out
    hive/trunk/ql/src/test/results/clientnegative/invalid_t_alter2.q.out
    hive/trunk/ql/src/test/results/clientnegative/invalid_t_create2.q.out
    hive/trunk/ql/src/test/results/clientnegative/invalid_t_transform.q.out
    hive/trunk/ql/src/test/results/clientnegative/serde_regex.q.out
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java
    hive/trunk/service/if/TCLIService.thrift
    hive/trunk/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
    hive/trunk/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp
    hive/trunk/service/src/gen/thrift/gen-cpp/TCLIService_types.h
    hive/trunk/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java
    hive/trunk/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java
    hive/trunk/service/src/gen/thrift/gen-py/TCLIService/constants.py
    hive/trunk/service/src/gen/thrift/gen-py/TCLIService/ttypes.py
    hive/trunk/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
    hive/trunk/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
    hive/trunk/service/src/java/org/apache/hive/service/cli/ColumnValue.java
    hive/trunk/service/src/java/org/apache/hive/service/cli/Type.java
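
Taken together, the new classes store a DATE as a day count (DateWritable, ORC DateStatistics) while JDBC and HiveServer2 move it around as a "yyyy-MM-dd" string. A minimal end-to-end sketch of what the commit enables on the client side, assuming a HiveServer2 at the hypothetical URL below and the flights_tiny table loaded from the data file added here (the column names fl_date and fl_num are assumptions based on the test queries):

    import java.sql.Connection;
    import java.sql.Date;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class DateTypeExample {
      public static void main(String[] args) throws Exception {
        // Hypothetical connection URL; substitute your own HiveServer2 host.
        Connection con =
            DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "", "");
        Statement stmt = con.createStatement();
        // DATE literals and comparisons are what this commit adds to the parser.
        ResultSet rs = stmt.executeQuery(
            "SELECT fl_date, fl_num FROM flights_tiny WHERE fl_date = DATE '2010-10-20'");
        while (rs.next()) {
          Date d = rs.getDate(1);            // java.sql.Date via the new getDate() path
          System.out.println(d + " " + rs.getString(2));
        }
      }
    }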

Modified: hive/trunk/data/files/datatypes.txt
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/datatypes.txt?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/data/files/datatypes.txt (original)
+++ hive/trunk/data/files/datatypes.txt Sat Jul 27 21:20:03 2013
@@ -1,3 +1,3 @@
-\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N
--1false-1.1\N\N\N-1-1-1.0-1\N\N\N\N\N
-1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd22012-04-22 09:00:00.123456789123456789.0123456YWJjZA==
+\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N
+-1false-1.1\N\N\N-1-1-1.0-1\N\N\N\N\N\N
+1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd22012-04-22 09:00:00.123456789123456789.0123456YWJjZA==2013-01-01

Added: hive/trunk/data/files/flights_join.txt
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/flights_join.txt?rev=1507713&view=auto
==============================================================================
--- hive/trunk/data/files/flights_join.txt (added)
+++ hive/trunk/data/files/flights_join.txt Sat Jul 27 21:20:03 2013
@@ -0,0 +1,20 @@
+BaltimoreNew York2010-10-20-30.01064
+BaltimoreNew York2010-10-2123.01142
+BaltimoreNew York2010-10-226.01599
+ChicagoNew York2010-10-2342.0361
+ChicagoNew York2010-10-2424.0897
+ChicagoNew York2010-10-2515.01531
+ChicagoNew York2010-10-26-6.01610
+ChicagoNew York2010-10-27-2.03198
+BaltimoreNew York2010-10-2817.01064
+BaltimoreNew York2010-10-29105.01142
+BaltimoreNew York2000-11-20-30.01064
+BaltimoreNew York2000-11-2123.01142
+BaltimoreNew York2000-11-226.01599
+ChicagoNew York2000-11-2342.0361
+ChicagoNew York2000-11-2424.0897
+ChicagoNew York2000-11-2515.01531
+ChicagoNew York2000-11-26-6.01610
+ChicagoNew York2000-11-27-2.03198
+BaltimoreNew York2000-11-2817.01064
+BaltimoreNew York2000-11-28105.01142

Added: hive/trunk/data/files/flights_tiny.txt.1
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/flights_tiny.txt.1?rev=1507713&view=auto
==============================================================================
--- hive/trunk/data/files/flights_tiny.txt.1 (added)
+++ hive/trunk/data/files/flights_tiny.txt.1 Sat Jul 27 21:20:03 2013
@@ -0,0 +1,137 @@
+BaltimoreNew York2010-10-20-30.01064
+BaltimoreNew York2010-10-2023.01142
+BaltimoreNew York2010-10-206.01599
+ChicagoNew York2010-10-2042.0361
+ChicagoNew York2010-10-2024.0897
+ChicagoNew York2010-10-2015.01531
+ChicagoNew York2010-10-20-6.01610
+ChicagoNew York2010-10-20-2.03198
+BaltimoreNew York2010-10-2117.01064
+BaltimoreNew York2010-10-21105.01142
+BaltimoreNew York2010-10-2128.01599
+ChicagoNew York2010-10-21142.0361
+ChicagoNew York2010-10-2177.0897
+ChicagoNew York2010-10-2153.01531
+ChicagoNew York2010-10-21-5.01610
+ChicagoNew York2010-10-2151.03198
+BaltimoreNew York2010-10-22-12.01064
+BaltimoreNew York2010-10-2254.01142
+BaltimoreNew York2010-10-2218.01599
+ChicagoNew York2010-10-222.0361
+ChicagoNew York2010-10-2224.0897
+ChicagoNew York2010-10-2216.01531
+ChicagoNew York2010-10-22-6.01610
+ChicagoNew York2010-10-22-11.03198
+BaltimoreNew York2010-10-2318.0272
+BaltimoreNew York2010-10-23-10.01805
+BaltimoreNew York2010-10-236.03171
+ChicagoNew York2010-10-233.0384
+ChicagoNew York2010-10-2332.0426
+ChicagoNew York2010-10-231.0650
+ChicagoNew York2010-10-2311.03085
+BaltimoreNew York2010-10-2412.01599
+BaltimoreNew York2010-10-2420.02571
+ChicagoNew York2010-10-2410.0361
+ChicagoNew York2010-10-24113.0897
+ChicagoNew York2010-10-24-5.01531
+ChicagoNew York2010-10-24-17.01610
+ChicagoNew York2010-10-24-3.03198
+BaltimoreNew York2010-10-25-25.01064
+BaltimoreNew York2010-10-2592.01142
+BaltimoreNew York2010-10-25106.01599
+ChicagoNew York2010-10-2531.0361
+ChicagoNew York2010-10-25-1.0897
+ChicagoNew York2010-10-2543.01531
+ChicagoNew York2010-10-256.01610
+ChicagoNew York2010-10-25-16.03198
+BaltimoreNew York2010-10-26-22.01064
+BaltimoreNew York2010-10-26123.01142
+BaltimoreNew York2010-10-2690.01599
+ChicagoNew York2010-10-2612.0361
+ChicagoNew York2010-10-260.0897
+ChicagoNew York2010-10-2629.01531
+ChicagoNew York2010-10-26-17.01610
+ChicagoNew York2010-10-266.03198
+BaltimoreNew York2010-10-27-18.01064
+BaltimoreNew York2010-10-2749.01142
+BaltimoreNew York2010-10-2792.01599
+ChicagoNew York2010-10-27148.0361
+ChicagoNew York2010-10-27-11.0897
+ChicagoNew York2010-10-2770.01531
+ChicagoNew York2010-10-278.01610
+ChicagoNew York2010-10-2721.03198
+BaltimoreNew York2010-10-28-4.01064
+BaltimoreNew York2010-10-28-14.01142
+BaltimoreNew York2010-10-28-14.01599
+ChicagoNew York2010-10-282.0361
+ChicagoNew York2010-10-282.0897
+ChicagoNew York2010-10-28-11.01531
+ChicagoNew York2010-10-283.01610
+ChicagoNew York2010-10-28-18.03198
+BaltimoreNew York2010-10-29-24.01064
+BaltimoreNew York2010-10-2921.01142
+BaltimoreNew York2010-10-29-2.01599
+ChicagoNew York2010-10-29-12.0361
+ChicagoNew York2010-10-29-11.0897
+ChicagoNew York2010-10-2915.01531
+ChicagoNew York2010-10-29-18.01610
+ChicagoNew York2010-10-29-4.03198
+BaltimoreNew York2010-10-3014.0272
+BaltimoreNew York2010-10-30-1.01805
+BaltimoreNew York2010-10-305.03171
+ChicagoNew York2010-10-30-6.0384
+ChicagoNew York2010-10-30-10.0426
+ChicagoNew York2010-10-30-5.0650
+ChicagoNew York2010-10-30-5.03085
+BaltimoreNew York2010-10-31-1.01599
+BaltimoreNew York2010-10-31-14.02571
+ChicagoNew York2010-10-31-25.0361
+ChicagoNew York2010-10-31-18.0897
+ChicagoNew York2010-10-31-4.01531
+ChicagoNew York2010-10-31-22.01610
+ChicagoNew York2010-10-31-15.03198
+ClevelandNew York2010-10-30-23.02018
+ClevelandNew York2010-10-30-12.02932
+ClevelandNew York2010-10-29-4.02630
+ClevelandNew York2010-10-29-19.02646
+ClevelandNew York2010-10-29-12.03014
+ClevelandNew York2010-10-283.02630
+ClevelandNew York2010-10-28-6.02646
+ClevelandNew York2010-10-281.03014
+ClevelandNew York2010-10-2716.02630
+ClevelandNew York2010-10-2727.03014
+ClevelandNew York2010-10-264.02630
+ClevelandNew York2010-10-26-27.02646
+ClevelandNew York2010-10-26-11.02662
+ClevelandNew York2010-10-2613.03014
+ClevelandNew York2010-10-25-4.02630
+ClevelandNew York2010-10-2581.02646
+ClevelandNew York2010-10-2542.03014
+ClevelandNew York2010-10-245.02254
+ClevelandNew York2010-10-24-11.02630
+ClevelandNew York2010-10-24-20.02646
+ClevelandNew York2010-10-24-9.03014
+ClevelandNew York2010-10-23-21.02932
+ClevelandNew York2010-10-221.02630
+ClevelandNew York2010-10-22-25.02646
+ClevelandNew York2010-10-22-3.03014
+ClevelandNew York2010-10-213.02630
+ClevelandNew York2010-10-2129.02646
+ClevelandNew York2010-10-2172.03014
+ClevelandNew York2010-10-20-8.02630
+ClevelandNew York2010-10-20-15.03014
+WashingtonNew York2010-10-23-25.05832
+WashingtonNew York2010-10-23-21.05904
+WashingtonNew York2010-10-23-18.05917
+WashingtonNew York2010-10-30-27.05904
+WashingtonNew York2010-10-30-16.05917
+WashingtonNew York2010-10-20-2.07291
+WashingtonNew York2010-10-2122.07291
+WashingtonNew York2010-10-23-16.07274
+WashingtonNew York2010-10-24-26.07282
+WashingtonNew York2010-10-259.07291
+WashingtonNew York2010-10-264.07291
+WashingtonNew York2010-10-2726.07291
+WashingtonNew York2010-10-2845.07291
+WashingtonNew York2010-10-291.07291
+WashingtonNew York2010-10-31-18.07282

Modified: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java Sat Jul 27 21:20:03 2013
@@ -219,12 +219,20 @@ public abstract class HiveBaseResultSet 
       return null;
     }
 
+    if (obj instanceof Date) {
+      return (Date) obj;
+    }
+
     try {
-      return Date.valueOf((String) obj);
+      if (obj instanceof String) {
+        return Date.valueOf((String)obj);
+      }
     } catch (Exception e) {
       throw new SQLException("Cannot convert column " + columnIndex
               + " to date: " + e.toString());
     }
+
+    throw new SQLException("Illegal conversion");
   }
 
   public Date getDate(String columnName) throws SQLException {
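
The rewritten getDate() above accepts either a java.sql.Date the driver has already materialized or a "yyyy-MM-dd" string, and now rejects anything else instead of blindly casting to String. A sketch of the three outcomes, assuming obj is the raw column value the driver returned:

    import java.sql.Date;

    public class GetDateOutcomes {
      public static void main(String[] args) {
        Object obj = "2013-01-01";                        // raw value from the driver
        if (obj instanceof Date) {
          System.out.println((Date) obj);                 // already a Date: returned as-is
        } else if (obj instanceof String) {
          System.out.println(Date.valueOf((String) obj)); // parsed; a malformed string throws
        } else {                                          // and becomes "Cannot convert column ..."
          System.out.println("Illegal conversion");       // any other type is rejected outright
        }
      }
    }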

Modified: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java Sat Jul 27 21:20:03 2013
@@ -496,8 +496,7 @@ public class HivePreparedStatement imple
    */
 
   public void setDate(int parameterIndex, Date x) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    this.parameters.put(parameterIndex, x.toString());
   }
 
   /*
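
setDate() is no longer a stub: the Date is serialized with toString(), i.e. the same "yyyy-MM-dd" form the server-side DATE parser expects, and substituted like any other string parameter. A minimal usage sketch, mirroring the "date ?" pattern the updated tests use (the connection URL, table, and column names are assumptions):

    import java.sql.Connection;
    import java.sql.Date;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;

    public class SetDateExample {
      public static void main(String[] args) throws Exception {
        Connection con =
            DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "", "");
        PreparedStatement ps = con.prepareStatement(
            "SELECT fl_num FROM flights_tiny WHERE fl_date = DATE ?");
        ps.setDate(1, Date.valueOf("2010-10-20")); // bound as the string 2010-10-20
        ps.executeQuery();
      }
    }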

Modified: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java Sat Jul 27 21:20:03 2013
@@ -110,6 +110,8 @@ public class HiveResultSetMetaData imple
       return serdeConstants.INT_TYPE_NAME;
     } else if ("bigint".equalsIgnoreCase(type)) {
       return serdeConstants.BIGINT_TYPE_NAME;
+    } else if ("date".equalsIgnoreCase(type)) {
+      return serdeConstants.DATE_TYPE_NAME;
     } else if ("timestamp".equalsIgnoreCase(type)) {
       return serdeConstants.TIMESTAMP_TYPE_NAME;
     } else if ("decimal".equalsIgnoreCase(type)) {

Modified: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java Sat Jul 27 21:20:03 2013
@@ -74,6 +74,8 @@ public class JdbcColumn {
     case Types.INTEGER:
     case Types.BIGINT:
       return columnPrecision(columnType) + 1; // allow +/-
+    case Types.DATE:
+      return 10;
     case Types.TIMESTAMP:
       return columnPrecision(columnType);
     // see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Float.MAX_EXPONENT
@@ -108,6 +110,8 @@ public class JdbcColumn {
       return 7;
     case Types.DOUBLE:
       return 15;
+    case Types.DATE:
+      return 10;
     case Types.TIMESTAMP:
       return 29;
     case Types.DECIMAL:
@@ -126,6 +130,7 @@ public class JdbcColumn {
     case Types.SMALLINT:
     case Types.INTEGER:
     case Types.BIGINT:
+    case Types.DATE:
       return 0;
     case Types.FLOAT:
       return 7;

Modified: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java Sat Jul 27 21:20:03 2013
@@ -46,6 +46,8 @@ public class Utils {
       return Types.INTEGER;
     } else if ("bigint".equalsIgnoreCase(type)) {
       return Types.BIGINT;
+    } else if ("date".equalsIgnoreCase(type)) {
+      return Types.DATE;
     } else if ("timestamp".equalsIgnoreCase(type)) {
       return Types.TIMESTAMP;
     } else if ("decimal".equalsIgnoreCase(type)) {

Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java Sat Jul 27 21:20:03 2013
@@ -224,13 +224,20 @@ public abstract class HiveBaseResultSet 
     if (obj == null) {
       return null;
     }
-
+    if (obj instanceof Date) {
+      return (Date) obj;
+    }
     try {
-      return Date.valueOf((String) obj);
+      if (obj instanceof String) {
+        return Date.valueOf((String)obj);
+      }
     } catch (Exception e) {
       throw new SQLException("Cannot convert column " + columnIndex
               + " to date: " + e.toString(), e);
     }
+    // If we fell through to here this is not a valid type conversion
+    throw new SQLException("Cannot convert column " + columnIndex
+        + " to date: Illegal conversion");
   }
 
   public Date getDate(String columnName) throws SQLException {
@@ -434,6 +441,15 @@ public abstract class HiveBaseResultSet 
     return null;
   }
 
+  private Date getDateValue(TStringValue tStringValue) {
+    if (tStringValue.isSetValue()) {
+      wasNull = false;
+      return Date.valueOf(tStringValue.getValue());
+    }
+    wasNull = true;
+    return null;
+  }
+
   private Timestamp getTimestampValue(TStringValue tStringValue) {
     if (tStringValue.isSetValue()) {
       wasNull = false;
@@ -495,6 +511,8 @@ public abstract class HiveBaseResultSet 
       return getStringValue(tColumnValue.getStringVal());
     case BINARY_TYPE:
       return getBinaryValue(tColumnValue.getStringVal());
+    case DATE_TYPE:
+      return getDateValue(tColumnValue.getStringVal());
     case TIMESTAMP_TYPE:
       return getTimestampValue(tColumnValue.getStringVal());
     case DECIMAL_TYPE:
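
Note that DATE_TYPE values arrive from HiveServer2 inside a TStringValue, so the new getDateValue() helper parses the "yyyy-MM-dd" payload with Date.valueOf(); this change adds no dedicated Thrift container for dates.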

Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java Sat Jul 27 21:20:03 2013
@@ -43,10 +43,10 @@ import java.util.Calendar;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.hive.service.cli.thrift.TCLIService;
 import org.apache.hive.service.cli.thrift.TExecuteStatementReq;
 import org.apache.hive.service.cli.thrift.TExecuteStatementResp;
 import org.apache.hive.service.cli.thrift.TOperationHandle;
-import org.apache.hive.service.cli.thrift.TCLIService;
 import org.apache.hive.service.cli.thrift.TSessionHandle;
 
 /**
@@ -510,8 +510,7 @@ public class HivePreparedStatement imple
    */
 
   public void setDate(int parameterIndex, Date x) throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    this.parameters.put(parameterIndex, x.toString());
   }
 
   /*

Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java Sat Jul 27 21:20:03 2013
@@ -105,6 +105,8 @@ public class HiveResultSetMetaData imple
       return serdeConstants.BIGINT_TYPE_NAME;
     } else if ("timestamp".equalsIgnoreCase(type)) {
       return serdeConstants.TIMESTAMP_TYPE_NAME;
+    } else if ("date".equalsIgnoreCase(type)) {
+      return serdeConstants.DATE_TYPE_NAME;
     } else if ("decimal".equalsIgnoreCase(type)) {
       return serdeConstants.DECIMAL_TYPE_NAME;
     } else if ("binary".equalsIgnoreCase(type)) {

Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java Sat Jul 27 21:20:03 2013
@@ -75,6 +75,8 @@ public class JdbcColumn {
     case Types.INTEGER:
     case Types.BIGINT:
       return columnPrecision(columnType) + 1; // allow +/-
+    case Types.DATE:
+      return 10;
     case Types.TIMESTAMP:
       return columnPrecision(columnType);
 
@@ -110,6 +112,8 @@ public class JdbcColumn {
       return 7;
     case Types.DOUBLE:
       return 15;
+    case Types.DATE:
+      return 10;
     case Types.TIMESTAMP:
       return 29;
     case Types.DECIMAL:
@@ -128,6 +132,7 @@ public class JdbcColumn {
     case Types.SMALLINT:
     case Types.INTEGER:
     case Types.BIGINT:
+    case Types.DATE:
       return 0;
     case Types.FLOAT:
       return 7;

Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java Sat Jul 27 21:20:03 2013
@@ -128,6 +128,8 @@ public class Utils {
       return Types.INTEGER;
     } else if ("bigint".equalsIgnoreCase(type)) {
       return Types.BIGINT;
+    } else if ("date".equalsIgnoreCase(type)) {
+      return Types.DATE;
     } else if ("timestamp".equalsIgnoreCase(type)) {
       return Types.TIMESTAMP;
     } else if ("decimal".equalsIgnoreCase(type)) {

Modified: hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Sat Jul 27 21:20:03 2013
@@ -150,7 +150,9 @@ public class TestJdbcDriver extends Test
         + " c15 struct<r:int,s:struct<a:int,b:string>>,"
         + " c16 array<struct<m:map<string,string>,n:int>>,"
         + " c17 timestamp, "
-        + " c18 decimal) comment'" + dataTypeTableComment
+        + " c18 decimal,"
+        + " c19 binary,"
+        + " c20 date) comment'" + dataTypeTableComment
             +"' partitioned by (dt STRING)");
     assertFalse(res.next());
 
@@ -226,6 +228,7 @@ public class TestJdbcDriver extends Test
         + tableName
         + " where   'not?param?not?param' <> 'not_param??not_param' and ?=? "
         + " and 1=? and 2=? and 3.0=? and 4.0=? and 'test\\'string\"'=? and 5=? and ?=? "
+        + " and date '2012-01-01' = date ?"
         + " ) t  select '2011-03-25' ddate,'China',true bv, 10 num limit 10";
 
      ///////////////////////////////////////////////
@@ -296,7 +299,7 @@ public class TestJdbcDriver extends Test
     assertNotNull(
         "Execute the invalid setted sql statement should throw exception",
         expectedException);
-    
+
     // setObject to the yet unknown type java.util.Date
     expectedException = null;
     try {
@@ -326,6 +329,7 @@ public class TestJdbcDriver extends Test
     ps.setObject(8, 5L); //setLong
     ps.setObject(9, (byte) 1); //setByte
     ps.setObject(10, (byte) 1); //setByte
+    ps.setString(11, "2012-01-01"); //setString
 
     ps.setMaxRows(2);
     return ps;
@@ -345,6 +349,7 @@ public class TestJdbcDriver extends Test
     ps.setLong(8, 5L); //setLong
     ps.setByte(9, (byte) 1); //setByte
     ps.setByte(10, (byte) 1); //setByte
+    ps.setString(11, "2012-01-01"); //setString
 
     ps.setMaxRows(2);
     return ps;
@@ -438,6 +443,8 @@ public class TestJdbcDriver extends Test
     assertEquals(null, res.getString(17));
     assertEquals(null, res.getTimestamp(17));
     assertEquals(null, res.getBigDecimal(18));
+    assertEquals(null, res.getString(20));
+    assertEquals(null, res.getDate(20));
 
     // row 3
     assertTrue(res.next());
@@ -460,6 +467,8 @@ public class TestJdbcDriver extends Test
     assertEquals("2012-04-22 09:00:00.123456789", res.getString(17));
     assertEquals("2012-04-22 09:00:00.123456789", res.getTimestamp(17).toString());
     assertEquals("123456789.0123456", res.getBigDecimal(18).toString());
+    assertEquals("2013-01-01", res.getString(20));
+    assertEquals("2013-01-01", res.getDate(20).toString());
 
     // test getBoolean rules on non-boolean columns
     assertEquals(true, res.getBoolean(1));
@@ -850,13 +859,14 @@ public class TestJdbcDriver extends Test
 
     ResultSet res = stmt.executeQuery(
         "select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " +
-        "c1*2, sentences(null, null, null) as b, c17, c18 from " + dataTypeTableName + " limit 1");
+        "c1*2, sentences(null, null, null) as b, c17, c18, c20 from " + dataTypeTableName +
+        " limit 1");
     ResultSetMetaData meta = res.getMetaData();
 
     ResultSet colRS = con.getMetaData().getColumns(null, null,
         dataTypeTableName.toLowerCase(), null);
 
-    assertEquals(16, meta.getColumnCount());
+    assertEquals(17, meta.getColumnCount());
 
     assertTrue(colRS.next());
 
@@ -1066,6 +1076,13 @@ public class TestJdbcDriver extends Test
     assertEquals(Integer.MAX_VALUE, meta.getPrecision(16));
     assertEquals(Integer.MAX_VALUE, meta.getScale(16));
 
+    assertEquals("c20", meta.getColumnName(17));
+    assertEquals(Types.DATE, meta.getColumnType(17));
+    assertEquals("date", meta.getColumnTypeName(17));
+    assertEquals(10, meta.getColumnDisplaySize(17));
+    assertEquals(10, meta.getPrecision(17));
+    assertEquals(0, meta.getScale(17));
+
     for (int i = 1; i <= meta.getColumnCount(); i++) {
       assertFalse(meta.isAutoIncrement(i));
       assertFalse(meta.isCurrency(i));

Modified: hive/trunk/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/trunk/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java Sat Jul 27 21:20:03 2013
@@ -150,7 +150,8 @@ public class TestJdbcDriver2 extends Tes
         + " c16 array<struct<m:map<string,string>,n:int>>,"
         + " c17 timestamp, "
         + " c18 decimal, "
-        + " c19 binary) comment'" + dataTypeTableComment
+        + " c19 binary, "
+        + " c20 date) comment'" + dataTypeTableComment
             +"' partitioned by (dt STRING)");
 
     stmt.execute("load data local inpath '"
@@ -278,6 +279,7 @@ public class TestJdbcDriver2 extends Tes
         + tableName
         + " where   'not?param?not?param' <> 'not_param??not_param' and ?=? "
         + " and 1=? and 2=? and 3.0=? and 4.0=? and 'test\\'string\"'=? and 5=? and ?=? "
+        + " and date '2012-01-01' = date ?"
         + " ) t  select '2011-03-25' ddate,'China',true bv, 10 num limit 10";
 
      ///////////////////////////////////////////////
@@ -297,6 +299,7 @@ public class TestJdbcDriver2 extends Tes
       ps.setLong(8, 5L);
       ps.setByte(9, (byte) 1);
       ps.setByte(10, (byte) 1);
+      ps.setString(11, "2012-01-01");
 
       ps.setMaxRows(2);
 
@@ -445,6 +448,8 @@ public class TestJdbcDriver2 extends Tes
     assertEquals(null, res.getString(17));
     assertEquals(null, res.getString(18));
     assertEquals(null, res.getString(19));
+    assertEquals(null, res.getString(20));
+    assertEquals(null, res.getDate(20));
 
     // row 2
     assertTrue(res.next());
@@ -468,6 +473,8 @@ public class TestJdbcDriver2 extends Tes
     assertEquals(null, res.getTimestamp(17));
     assertEquals(null, res.getBigDecimal(18));
     assertEquals(null, res.getString(19));
+    assertEquals(null, res.getString(20));
+    assertEquals(null, res.getDate(20));
 
     // row 3
     assertTrue(res.next());
@@ -491,6 +498,8 @@ public class TestJdbcDriver2 extends Tes
     assertEquals("2012-04-22 09:00:00.123456789", res.getTimestamp(17).toString());
     assertEquals("123456789.0123456", res.getBigDecimal(18).toString());
     assertEquals("abcd", res.getString(19));
+    assertEquals("2013-01-01", res.getString(20));
+    assertEquals("2013-01-01", res.getDate(20).toString());
 
     // test getBoolean rules on non-boolean columns
     assertEquals(true, res.getBoolean(1));
@@ -899,13 +908,14 @@ public class TestJdbcDriver2 extends Tes
 
     ResultSet res = stmt.executeQuery(
         "select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " +
-        "c1*2, sentences(null, null, null) as b, c17, c18 from " + dataTypeTableName + " limit 1");
+        "c1*2, sentences(null, null, null) as b, c17, c18, c20 from " + dataTypeTableName +
+        " limit 1");
     ResultSetMetaData meta = res.getMetaData();
 
     ResultSet colRS = con.getMetaData().getColumns(null, null,
         dataTypeTableName.toLowerCase(), null);
 
-    assertEquals(16, meta.getColumnCount());
+    assertEquals(17, meta.getColumnCount());
 
     assertTrue(colRS.next());
 
@@ -1106,6 +1116,13 @@ public class TestJdbcDriver2 extends Tes
     assertEquals(Integer.MAX_VALUE, meta.getPrecision(16));
     assertEquals(Integer.MAX_VALUE, meta.getScale(16));
 
+    assertEquals("c20", meta.getColumnName(17));
+    assertEquals(Types.DATE, meta.getColumnType(17));
+    assertEquals("date", meta.getColumnTypeName(17));
+    assertEquals(10, meta.getColumnDisplaySize(17));
+    assertEquals(10, meta.getPrecision(17));
+    assertEquals(0, meta.getScale(17));
+
     for (int i = 1; i <= meta.getColumnCount(); i++) {
       assertFalse(meta.isAutoIncrement(i));
       assertFalse(meta.isCurrency(i));
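
The regenerated OrcProto below adds a DateStatistics message whose minimum and maximum are sint32 fields, which fits a days-since-the-Unix-epoch encoding of DATE rather than a string or millisecond one. A rough sketch of that encoding (hypothetical helper; the real DateWritable also normalizes for the local time zone so the day boundary lands correctly):

    import java.sql.Date;
    import java.util.concurrent.TimeUnit;

    public class DaysSinceEpoch {
      // Approximate: truncates milliseconds to whole days without the
      // time-zone compensation the real implementation performs.
      static int toDays(Date d) {
        return (int) TimeUnit.MILLISECONDS.toDays(d.getTime());
      }

      public static void main(String[] args) {
        // Prints 1 in UTC; the local offset can shift the raw result by a day.
        System.out.println(toDays(Date.valueOf("1970-01-02")));
      }
    }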

Modified: hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java (original)
+++ hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java Sat Jul 27 21:20:03 2013
@@ -2415,6 +2415,401 @@ public final class OrcProto {
     // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.ql.io.orc.DecimalStatistics)
   }
   
+  public interface DateStatisticsOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+    
+    // optional sint32 minimum = 1;
+    boolean hasMinimum();
+    int getMinimum();
+    
+    // optional sint32 maximum = 2;
+    boolean hasMaximum();
+    int getMaximum();
+  }
+  public static final class DateStatistics extends
+      com.google.protobuf.GeneratedMessage
+      implements DateStatisticsOrBuilder {
+    // Use DateStatistics.newBuilder() to construct.
+    private DateStatistics(Builder builder) {
+      super(builder);
+    }
+    private DateStatistics(boolean noInit) {}
+    
+    private static final DateStatistics defaultInstance;
+    public static DateStatistics getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public DateStatistics getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DateStatistics_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DateStatistics_fieldAccessorTable;
+    }
+    
+    private int bitField0_;
+    // optional sint32 minimum = 1;
+    public static final int MINIMUM_FIELD_NUMBER = 1;
+    private int minimum_;
+    public boolean hasMinimum() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public int getMinimum() {
+      return minimum_;
+    }
+    
+    // optional sint32 maximum = 2;
+    public static final int MAXIMUM_FIELD_NUMBER = 2;
+    private int maximum_;
+    public boolean hasMaximum() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    public int getMaximum() {
+      return maximum_;
+    }
+    
+    private void initFields() {
+      minimum_ = 0;
+      maximum_ = 0;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+      
+      memoizedIsInitialized = 1;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeSInt32(1, minimum_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeSInt32(2, maximum_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeSInt32Size(1, minimum_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeSInt32Size(2, maximum_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+    
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatisticsOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DateStatistics_descriptor;
+      }
+      
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DateStatistics_fieldAccessorTable;
+      }
+      
+      // Construct using org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+      
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+      
+      public Builder clear() {
+        super.clear();
+        minimum_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        maximum_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.getDescriptor();
+      }
+      
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics getDefaultInstanceForType() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.getDefaultInstance();
+      }
+      
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics build() {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+      
+      private org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics buildPartial() {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics result = new org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.minimum_ = minimum_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.maximum_ = maximum_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics) {
+          return mergeFrom((org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics other) {
+        if (other == org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.getDefaultInstance()) return this;
+        if (other.hasMinimum()) {
+          setMinimum(other.getMinimum());
+        }
+        if (other.hasMaximum()) {
+          setMaximum(other.getMaximum());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public final boolean isInitialized() {
+        return true;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              minimum_ = input.readSInt32();
+              break;
+            }
+            case 16: {
+              bitField0_ |= 0x00000002;
+              maximum_ = input.readSInt32();
+              break;
+            }
+          }
+        }
+      }
+      
+      private int bitField0_;
+      
+      // optional sint32 minimum = 1;
+      private int minimum_ ;
+      public boolean hasMinimum() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public int getMinimum() {
+        return minimum_;
+      }
+      public Builder setMinimum(int value) {
+        bitField0_ |= 0x00000001;
+        minimum_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearMinimum() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        minimum_ = 0;
+        onChanged();
+        return this;
+      }
+      
+      // optional sint32 maximum = 2;
+      private int maximum_ ;
+      public boolean hasMaximum() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      public int getMaximum() {
+        return maximum_;
+      }
+      public Builder setMaximum(int value) {
+        bitField0_ |= 0x00000002;
+        maximum_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearMaximum() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        maximum_ = 0;
+        onChanged();
+        return this;
+      }
+      
+      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.ql.io.orc.DateStatistics)
+    }
+    
+    static {
+      defaultInstance = new DateStatistics(true);
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.ql.io.orc.DateStatistics)
+  }
+  
   public interface ColumnStatisticsOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
@@ -2446,6 +2841,11 @@ public final class OrcProto {
     boolean hasDecimalStatistics();
     org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics getDecimalStatistics();
     org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatisticsOrBuilder getDecimalStatisticsOrBuilder();
+    
+    // optional .org.apache.hadoop.hive.ql.io.orc.DateStatistics dateStatistics = 7;
+    boolean hasDateStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics getDateStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatisticsOrBuilder getDateStatisticsOrBuilder();
   }
   public static final class ColumnStatistics extends
       com.google.protobuf.GeneratedMessage
@@ -2551,6 +2951,19 @@ public final class OrcProto {
       return decimalStatistics_;
     }
     
+    // optional .org.apache.hadoop.hive.ql.io.orc.DateStatistics dateStatistics = 7;
+    public static final int DATESTATISTICS_FIELD_NUMBER = 7;
+    private org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics dateStatistics_;
+    public boolean hasDateStatistics() {
+      return ((bitField0_ & 0x00000040) == 0x00000040);
+    }
+    public org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics getDateStatistics() {
+      return dateStatistics_;
+    }
+    public org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatisticsOrBuilder getDateStatisticsOrBuilder() {
+      return dateStatistics_;
+    }
+    
     private void initFields() {
       numberOfValues_ = 0L;
       intStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics.getDefaultInstance();
@@ -2558,6 +2971,7 @@ public final class OrcProto {
       stringStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics.getDefaultInstance();
       bucketStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics.getDefaultInstance();
       decimalStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.getDefaultInstance();
+      dateStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.getDefaultInstance();
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
@@ -2589,6 +3003,9 @@ public final class OrcProto {
       if (((bitField0_ & 0x00000020) == 0x00000020)) {
         output.writeMessage(6, decimalStatistics_);
       }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        output.writeMessage(7, dateStatistics_);
+      }
       getUnknownFields().writeTo(output);
     }
     
@@ -2622,6 +3039,10 @@ public final class OrcProto {
         size += com.google.protobuf.CodedOutputStream
           .computeMessageSize(6, decimalStatistics_);
       }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(7, dateStatistics_);
+      }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
       return size;
@@ -2743,6 +3164,7 @@ public final class OrcProto {
           getStringStatisticsFieldBuilder();
           getBucketStatisticsFieldBuilder();
           getDecimalStatisticsFieldBuilder();
+          getDateStatisticsFieldBuilder();
         }
       }
       private static Builder create() {
@@ -2783,6 +3205,12 @@ public final class OrcProto {
           decimalStatisticsBuilder_.clear();
         }
         bitField0_ = (bitField0_ & ~0x00000020);
+        if (dateStatisticsBuilder_ == null) {
+          dateStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.getDefaultInstance();
+        } else {
+          dateStatisticsBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000040);
         return this;
       }
       
@@ -2865,6 +3293,14 @@ public final class OrcProto {
         } else {
           result.decimalStatistics_ = decimalStatisticsBuilder_.build();
         }
+        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+          to_bitField0_ |= 0x00000040;
+        }
+        if (dateStatisticsBuilder_ == null) {
+          result.dateStatistics_ = dateStatistics_;
+        } else {
+          result.dateStatistics_ = dateStatisticsBuilder_.build();
+        }
         result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
@@ -2899,6 +3335,9 @@ public final class OrcProto {
         if (other.hasDecimalStatistics()) {
           mergeDecimalStatistics(other.getDecimalStatistics());
         }
+        if (other.hasDateStatistics()) {
+          mergeDateStatistics(other.getDateStatistics());
+        }
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
@@ -2980,6 +3419,15 @@ public final class OrcProto {
               setDecimalStatistics(subBuilder.buildPartial());
               break;
             }
+            case 58: {
+              org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.Builder subBuilder = org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.newBuilder();
+              if (hasDateStatistics()) {
+                subBuilder.mergeFrom(getDateStatistics());
+              }
+              input.readMessage(subBuilder, extensionRegistry);
+              setDateStatistics(subBuilder.buildPartial());
+              break;
+            }
           }
         }
       }
@@ -3457,6 +3905,96 @@ public final class OrcProto {
         return decimalStatisticsBuilder_;
       }
       
+      // optional .org.apache.hadoop.hive.ql.io.orc.DateStatistics dateStatistics = 7;
+      private org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics dateStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics, org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.Builder, org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatisticsOrBuilder> dateStatisticsBuilder_;
+      public boolean hasDateStatistics() {
+        return ((bitField0_ & 0x00000040) == 0x00000040);
+      }
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics getDateStatistics() {
+        if (dateStatisticsBuilder_ == null) {
+          return dateStatistics_;
+        } else {
+          return dateStatisticsBuilder_.getMessage();
+        }
+      }
+      public Builder setDateStatistics(org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics value) {
+        if (dateStatisticsBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          dateStatistics_ = value;
+          onChanged();
+        } else {
+          dateStatisticsBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000040;
+        return this;
+      }
+      public Builder setDateStatistics(
+          org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.Builder builderForValue) {
+        if (dateStatisticsBuilder_ == null) {
+          dateStatistics_ = builderForValue.build();
+          onChanged();
+        } else {
+          dateStatisticsBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000040;
+        return this;
+      }
+      public Builder mergeDateStatistics(org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics value) {
+        if (dateStatisticsBuilder_ == null) {
+          if (((bitField0_ & 0x00000040) == 0x00000040) &&
+              dateStatistics_ != org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.getDefaultInstance()) {
+            dateStatistics_ =
+              org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.newBuilder(dateStatistics_).mergeFrom(value).buildPartial();
+          } else {
+            dateStatistics_ = value;
+          }
+          onChanged();
+        } else {
+          dateStatisticsBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000040;
+        return this;
+      }
+      public Builder clearDateStatistics() {
+        if (dateStatisticsBuilder_ == null) {
+          dateStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.getDefaultInstance();
+          onChanged();
+        } else {
+          dateStatisticsBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000040);
+        return this;
+      }
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.Builder getDateStatisticsBuilder() {
+        bitField0_ |= 0x00000040;
+        onChanged();
+        return getDateStatisticsFieldBuilder().getBuilder();
+      }
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatisticsOrBuilder getDateStatisticsOrBuilder() {
+        if (dateStatisticsBuilder_ != null) {
+          return dateStatisticsBuilder_.getMessageOrBuilder();
+        } else {
+          return dateStatistics_;
+        }
+      }
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics, org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.Builder, org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatisticsOrBuilder> 
+          getDateStatisticsFieldBuilder() {
+        if (dateStatisticsBuilder_ == null) {
+          dateStatisticsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+              org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics, org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.Builder, org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatisticsOrBuilder>(
+                  dateStatistics_,
+                  getParentForChildren(),
+                  isClean());
+          dateStatistics_ = null;
+        }
+        return dateStatisticsBuilder_;
+      }
+      
       // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.ql.io.orc.ColumnStatistics)
     }
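[Continuing the sketch above, again illustrative only: a serialize/parse round trip using the standard generated toByteArray/parseFrom methods. Field number 7 with wire type 2 gives the tag value (7 << 3) | 2 = 58, which is exactly the case 58 handled in the Builder's mergeFrom above.]

    // Inside the same main as the previous sketch: round-trip the message.
    byte[] bytes = colStats.toByteArray();
    OrcProto.ColumnStatistics parsed =
        OrcProto.ColumnStatistics.parseFrom(bytes);
    System.out.println(parsed.getDateStatistics().getMaximum()); // 15706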
     
@@ -6505,6 +7043,7 @@ public final class OrcProto {
       STRUCT(12, 12),
       UNION(13, 13),
       DECIMAL(14, 14),
+      DATE(15, 15),
       ;
       
       public static final int BOOLEAN_VALUE = 0;
@@ -6522,6 +7061,7 @@ public final class OrcProto {
       public static final int STRUCT_VALUE = 12;
       public static final int UNION_VALUE = 13;
       public static final int DECIMAL_VALUE = 14;
+      public static final int DATE_VALUE = 15;
       
       
       public final int getNumber() { return value; }
@@ -6543,6 +7083,7 @@ public final class OrcProto {
           case 12: return STRUCT;
           case 13: return UNION;
           case 14: return DECIMAL;
+          case 15: return DATE;
           default: return null;
         }
       }
@@ -6573,7 +7114,7 @@ public final class OrcProto {
       }
       
       private static final Kind[] VALUES = {
-        BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, BINARY, TIMESTAMP, LIST, MAP, STRUCT, UNION, DECIMAL, 
+        BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, BINARY, TIMESTAMP, LIST, MAP, STRUCT, UNION, DECIMAL, DATE, 
       };
       
       public static Kind valueOf(
@@ -10476,6 +11017,11 @@ public final class OrcProto {
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internal_static_org_apache_hadoop_hive_ql_io_orc_DecimalStatistics_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_apache_hadoop_hive_ql_io_orc_DateStatistics_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_apache_hadoop_hive_ql_io_orc_DateStatistics_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
     internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
@@ -10547,62 +11093,66 @@ public final class OrcProto {
       "istics\022\017\n\007minimum\030\001 \001(\t\022\017\n\007maximum\030\002 \001(\t" +
       "\"%\n\020BucketStatistics\022\021\n\005count\030\001 \003(\004B\002\020\001\"" +
       "B\n\021DecimalStatistics\022\017\n\007minimum\030\001 \001(\t\022\017\n" +
-      "\007maximum\030\002 \001(\t\022\013\n\003sum\030\003 \001(\t\"\260\003\n\020ColumnSt" +
-      "atistics\022\026\n\016numberOfValues\030\001 \001(\004\022J\n\rintS",
-      "tatistics\030\002 \001(\01323.org.apache.hadoop.hive" +
-      ".ql.io.orc.IntegerStatistics\022L\n\020doubleSt" +
-      "atistics\030\003 \001(\01322.org.apache.hadoop.hive." +
-      "ql.io.orc.DoubleStatistics\022L\n\020stringStat" +
-      "istics\030\004 \001(\01322.org.apache.hadoop.hive.ql" +
-      ".io.orc.StringStatistics\022L\n\020bucketStatis" +
-      "tics\030\005 \001(\01322.org.apache.hadoop.hive.ql.i" +
-      "o.orc.BucketStatistics\022N\n\021decimalStatist" +
-      "ics\030\006 \001(\01323.org.apache.hadoop.hive.ql.io" +
-      ".orc.DecimalStatistics\"n\n\rRowIndexEntry\022",
-      "\025\n\tpositions\030\001 \003(\004B\002\020\001\022F\n\nstatistics\030\002 \001" +
-      "(\01322.org.apache.hadoop.hive.ql.io.orc.Co" +
-      "lumnStatistics\"J\n\010RowIndex\022>\n\005entry\030\001 \003(" +
-      "\0132/.org.apache.hadoop.hive.ql.io.orc.Row" +
-      "IndexEntry\"\331\001\n\006Stream\022;\n\004kind\030\001 \002(\0162-.or" +
-      "g.apache.hadoop.hive.ql.io.orc.Stream.Ki" +
-      "nd\022\016\n\006column\030\002 \001(\r\022\016\n\006length\030\003 \001(\004\"r\n\004Ki" +
-      "nd\022\013\n\007PRESENT\020\000\022\010\n\004DATA\020\001\022\n\n\006LENGTH\020\002\022\023\n" +
-      "\017DICTIONARY_DATA\020\003\022\024\n\020DICTIONARY_COUNT\020\004" +
-      "\022\r\n\tSECONDARY\020\005\022\r\n\tROW_INDEX\020\006\"\221\001\n\016Colum",
-      "nEncoding\022C\n\004kind\030\001 \002(\01625.org.apache.had" +
-      "oop.hive.ql.io.orc.ColumnEncoding.Kind\022\026" +
-      "\n\016dictionarySize\030\002 \001(\r\"\"\n\004Kind\022\n\n\006DIRECT" +
-      "\020\000\022\016\n\nDICTIONARY\020\001\"\214\001\n\014StripeFooter\0229\n\007s" +
-      "treams\030\001 \003(\0132(.org.apache.hadoop.hive.ql" +
-      ".io.orc.Stream\022A\n\007columns\030\002 \003(\01320.org.ap" +
-      "ache.hadoop.hive.ql.io.orc.ColumnEncodin" +
-      "g\"\236\002\n\004Type\0229\n\004kind\030\001 \002(\0162+.org.apache.ha" +
-      "doop.hive.ql.io.orc.Type.Kind\022\024\n\010subtype" +
-      "s\030\002 \003(\rB\002\020\001\022\022\n\nfieldNames\030\003 \003(\t\"\260\001\n\004Kind",
-      "\022\013\n\007BOOLEAN\020\000\022\010\n\004BYTE\020\001\022\t\n\005SHORT\020\002\022\007\n\003IN" +
-      "T\020\003\022\010\n\004LONG\020\004\022\t\n\005FLOAT\020\005\022\n\n\006DOUBLE\020\006\022\n\n\006" +
-      "STRING\020\007\022\n\n\006BINARY\020\010\022\r\n\tTIMESTAMP\020\t\022\010\n\004L" +
-      "IST\020\n\022\007\n\003MAP\020\013\022\n\n\006STRUCT\020\014\022\t\n\005UNION\020\r\022\013\n" +
-      "\007DECIMAL\020\016\"x\n\021StripeInformation\022\016\n\006offse" +
-      "t\030\001 \001(\004\022\023\n\013indexLength\030\002 \001(\004\022\022\n\ndataLeng" +
-      "th\030\003 \001(\004\022\024\n\014footerLength\030\004 \001(\004\022\024\n\014number" +
-      "OfRows\030\005 \001(\004\"/\n\020UserMetadataItem\022\014\n\004name" +
-      "\030\001 \002(\t\022\r\n\005value\030\002 \002(\014\"\356\002\n\006Footer\022\024\n\014head" +
-      "erLength\030\001 \001(\004\022\025\n\rcontentLength\030\002 \001(\004\022D\n",
-      "\007stripes\030\003 \003(\01323.org.apache.hadoop.hive." +
-      "ql.io.orc.StripeInformation\0225\n\005types\030\004 \003" +
-      "(\0132&.org.apache.hadoop.hive.ql.io.orc.Ty" +
-      "pe\022D\n\010metadata\030\005 \003(\01322.org.apache.hadoop" +
-      ".hive.ql.io.orc.UserMetadataItem\022\024\n\014numb" +
-      "erOfRows\030\006 \001(\004\022F\n\nstatistics\030\007 \003(\01322.org" +
-      ".apache.hadoop.hive.ql.io.orc.ColumnStat" +
-      "istics\022\026\n\016rowIndexStride\030\010 \001(\r\"\255\001\n\nPostS" +
-      "cript\022\024\n\014footerLength\030\001 \001(\004\022F\n\013compressi" +
-      "on\030\002 \001(\01621.org.apache.hadoop.hive.ql.io.",
-      "orc.CompressionKind\022\034\n\024compressionBlockS" +
-      "ize\030\003 \001(\004\022\023\n\007version\030\004 \003(\rB\002\020\001\022\016\n\005magic\030" +
-      "\300> \001(\t*:\n\017CompressionKind\022\010\n\004NONE\020\000\022\010\n\004Z" +
-      "LIB\020\001\022\n\n\006SNAPPY\020\002\022\007\n\003LZO\020\003"
+      "\007maximum\030\002 \001(\t\022\013\n\003sum\030\003 \001(\t\"2\n\016DateStati" +
+      "stics\022\017\n\007minimum\030\001 \001(\021\022\017\n\007maximum\030\002 \001(\021\"",
+      "\372\003\n\020ColumnStatistics\022\026\n\016numberOfValues\030\001" +
+      " \001(\004\022J\n\rintStatistics\030\002 \001(\01323.org.apache" +
+      ".hadoop.hive.ql.io.orc.IntegerStatistics" +
+      "\022L\n\020doubleStatistics\030\003 \001(\01322.org.apache." +
+      "hadoop.hive.ql.io.orc.DoubleStatistics\022L" +
+      "\n\020stringStatistics\030\004 \001(\01322.org.apache.ha" +
+      "doop.hive.ql.io.orc.StringStatistics\022L\n\020" +
+      "bucketStatistics\030\005 \001(\01322.org.apache.hado" +
+      "op.hive.ql.io.orc.BucketStatistics\022N\n\021de" +
+      "cimalStatistics\030\006 \001(\01323.org.apache.hadoo",
+      "p.hive.ql.io.orc.DecimalStatistics\022H\n\016da" +
+      "teStatistics\030\007 \001(\01320.org.apache.hadoop.h" +
+      "ive.ql.io.orc.DateStatistics\"n\n\rRowIndex" +
+      "Entry\022\025\n\tpositions\030\001 \003(\004B\002\020\001\022F\n\nstatisti" +
+      "cs\030\002 \001(\01322.org.apache.hadoop.hive.ql.io." +
+      "orc.ColumnStatistics\"J\n\010RowIndex\022>\n\005entr" +
+      "y\030\001 \003(\0132/.org.apache.hadoop.hive.ql.io.o" +
+      "rc.RowIndexEntry\"\331\001\n\006Stream\022;\n\004kind\030\001 \002(" +
+      "\0162-.org.apache.hadoop.hive.ql.io.orc.Str" +
+      "eam.Kind\022\016\n\006column\030\002 \001(\r\022\016\n\006length\030\003 \001(\004",
+      "\"r\n\004Kind\022\013\n\007PRESENT\020\000\022\010\n\004DATA\020\001\022\n\n\006LENGT" +
+      "H\020\002\022\023\n\017DICTIONARY_DATA\020\003\022\024\n\020DICTIONARY_C" +
+      "OUNT\020\004\022\r\n\tSECONDARY\020\005\022\r\n\tROW_INDEX\020\006\"\221\001\n" +
+      "\016ColumnEncoding\022C\n\004kind\030\001 \002(\01625.org.apac" +
+      "he.hadoop.hive.ql.io.orc.ColumnEncoding." +
+      "Kind\022\026\n\016dictionarySize\030\002 \001(\r\"\"\n\004Kind\022\n\n\006" +
+      "DIRECT\020\000\022\016\n\nDICTIONARY\020\001\"\214\001\n\014StripeFoote" +
+      "r\0229\n\007streams\030\001 \003(\0132(.org.apache.hadoop.h" +
+      "ive.ql.io.orc.Stream\022A\n\007columns\030\002 \003(\01320." +
+      "org.apache.hadoop.hive.ql.io.orc.ColumnE",
+      "ncoding\"\250\002\n\004Type\0229\n\004kind\030\001 \002(\0162+.org.apa" +
+      "che.hadoop.hive.ql.io.orc.Type.Kind\022\024\n\010s" +
+      "ubtypes\030\002 \003(\rB\002\020\001\022\022\n\nfieldNames\030\003 \003(\t\"\272\001" +
+      "\n\004Kind\022\013\n\007BOOLEAN\020\000\022\010\n\004BYTE\020\001\022\t\n\005SHORT\020\002" +
+      "\022\007\n\003INT\020\003\022\010\n\004LONG\020\004\022\t\n\005FLOAT\020\005\022\n\n\006DOUBLE" +
+      "\020\006\022\n\n\006STRING\020\007\022\n\n\006BINARY\020\010\022\r\n\tTIMESTAMP\020" +
+      "\t\022\010\n\004LIST\020\n\022\007\n\003MAP\020\013\022\n\n\006STRUCT\020\014\022\t\n\005UNIO" +
+      "N\020\r\022\013\n\007DECIMAL\020\016\022\010\n\004DATE\020\017\"x\n\021StripeInfo" +
+      "rmation\022\016\n\006offset\030\001 \001(\004\022\023\n\013indexLength\030\002" +
+      " \001(\004\022\022\n\ndataLength\030\003 \001(\004\022\024\n\014footerLength",
+      "\030\004 \001(\004\022\024\n\014numberOfRows\030\005 \001(\004\"/\n\020UserMeta" +
+      "dataItem\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\014\"\356\002" +
+      "\n\006Footer\022\024\n\014headerLength\030\001 \001(\004\022\025\n\rconten" +
+      "tLength\030\002 \001(\004\022D\n\007stripes\030\003 \003(\01323.org.apa" +
+      "che.hadoop.hive.ql.io.orc.StripeInformat" +
+      "ion\0225\n\005types\030\004 \003(\0132&.org.apache.hadoop.h" +
+      "ive.ql.io.orc.Type\022D\n\010metadata\030\005 \003(\01322.o" +
+      "rg.apache.hadoop.hive.ql.io.orc.UserMeta" +
+      "dataItem\022\024\n\014numberOfRows\030\006 \001(\004\022F\n\nstatis" +
+      "tics\030\007 \003(\01322.org.apache.hadoop.hive.ql.i",
+      "o.orc.ColumnStatistics\022\026\n\016rowIndexStride" +
+      "\030\010 \001(\r\"\255\001\n\nPostScript\022\024\n\014footerLength\030\001 " +
+      "\001(\004\022F\n\013compression\030\002 \001(\01621.org.apache.ha" +
+      "doop.hive.ql.io.orc.CompressionKind\022\034\n\024c" +
+      "ompressionBlockSize\030\003 \001(\004\022\023\n\007version\030\004 \003" +
+      "(\rB\002\020\001\022\016\n\005magic\030\300> \001(\t*:\n\017CompressionKin" +
+      "d\022\010\n\004NONE\020\000\022\010\n\004ZLIB\020\001\022\n\n\006SNAPPY\020\002\022\007\n\003LZO" +
+      "\020\003"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -10649,16 +11199,24 @@ public final class OrcProto {
               new java.lang.String[] { "Minimum", "Maximum", "Sum", },
               org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics.Builder.class);
-          internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_descriptor =
+          internal_static_org_apache_hadoop_hive_ql_io_orc_DateStatistics_descriptor =
             getDescriptor().getMessageTypes().get(5);
+          internal_static_org_apache_hadoop_hive_ql_io_orc_DateStatistics_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_org_apache_hadoop_hive_ql_io_orc_DateStatistics_descriptor,
+              new java.lang.String[] { "Minimum", "Maximum", },
+              org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.class,
+              org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics.Builder.class);
+          internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_descriptor =
+            getDescriptor().getMessageTypes().get(6);
           internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_descriptor,
-              new java.lang.String[] { "NumberOfValues", "IntStatistics", "DoubleStatistics", "StringStatistics", "BucketStatistics", "DecimalStatistics", },
+              new java.lang.String[] { "NumberOfValues", "IntStatistics", "DoubleStatistics", "StringStatistics", "BucketStatistics", "DecimalStatistics", "DateStatistics", },
               org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndexEntry_descriptor =
-            getDescriptor().getMessageTypes().get(6);
+            getDescriptor().getMessageTypes().get(7);
           internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndexEntry_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndexEntry_descriptor,
@@ -10666,7 +11224,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndexEntry.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndexEntry.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndex_descriptor =
-            getDescriptor().getMessageTypes().get(7);
+            getDescriptor().getMessageTypes().get(8);
           internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndex_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_RowIndex_descriptor,
@@ -10674,7 +11232,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndex.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndex.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Stream_descriptor =
-            getDescriptor().getMessageTypes().get(8);
+            getDescriptor().getMessageTypes().get(9);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Stream_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_Stream_descriptor,
@@ -10682,7 +11240,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Stream.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Stream.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnEncoding_descriptor =
-            getDescriptor().getMessageTypes().get(9);
+            getDescriptor().getMessageTypes().get(10);
           internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnEncoding_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnEncoding_descriptor,
@@ -10690,7 +11248,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnEncoding.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnEncoding.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_StripeFooter_descriptor =
-            getDescriptor().getMessageTypes().get(10);
+            getDescriptor().getMessageTypes().get(11);
           internal_static_org_apache_hadoop_hive_ql_io_orc_StripeFooter_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_StripeFooter_descriptor,
@@ -10698,7 +11256,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeFooter.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeFooter.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Type_descriptor =
-            getDescriptor().getMessageTypes().get(11);
+            getDescriptor().getMessageTypes().get(12);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Type_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_Type_descriptor,
@@ -10706,7 +11264,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Type.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Type.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_StripeInformation_descriptor =
-            getDescriptor().getMessageTypes().get(12);
+            getDescriptor().getMessageTypes().get(13);
           internal_static_org_apache_hadoop_hive_ql_io_orc_StripeInformation_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_StripeInformation_descriptor,
@@ -10714,7 +11272,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeInformation.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeInformation.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_UserMetadataItem_descriptor =
-            getDescriptor().getMessageTypes().get(13);
+            getDescriptor().getMessageTypes().get(14);
           internal_static_org_apache_hadoop_hive_ql_io_orc_UserMetadataItem_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_UserMetadataItem_descriptor,
@@ -10722,7 +11280,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.UserMetadataItem.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.UserMetadataItem.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Footer_descriptor =
-            getDescriptor().getMessageTypes().get(14);
+            getDescriptor().getMessageTypes().get(15);
           internal_static_org_apache_hadoop_hive_ql_io_orc_Footer_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_Footer_descriptor,
@@ -10730,7 +11288,7 @@ public final class OrcProto {
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Footer.class,
               org.apache.hadoop.hive.ql.io.orc.OrcProto.Footer.Builder.class);
           internal_static_org_apache_hadoop_hive_ql_io_orc_PostScript_descriptor =
-            getDescriptor().getMessageTypes().get(15);
+            getDescriptor().getMessageTypes().get(16);
           internal_static_org_apache_hadoop_hive_ql_io_orc_PostScript_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_org_apache_hadoop_hive_ql_io_orc_PostScript_descriptor,

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java Sat Jul 27 21:20:03 2013
@@ -172,8 +172,8 @@ public enum ErrorMsg {
   DYNAMIC_PARTITION_STRICT_MODE(10096, "Dynamic partition strict mode requires at least one "
       + "static partition column. To turn this off set hive.exec.dynamic.partition.mode=nonstrict"),
   NONEXISTPARTCOL(10098, "Non-Partition column appears in the partition specification: "),
-  UNSUPPORTED_TYPE(10099, "DATE and DATETIME types aren't supported yet. Please use "
-      + "TIMESTAMP instead"),
+  UNSUPPORTED_TYPE(10099, "DATETIME type isn't supported yet. Please use "
+      + "DATE or TIMESTAMP instead"),
   CREATE_NON_NATIVE_AS(10100, "CREATE TABLE AS SELECT cannot be used for a non-native table"),
   LOAD_INTO_NON_NATIVE(10101, "A non-native table cannot be used as target for LOAD"),
   LOCKMGR_NOT_SPECIFIED(10102, "Lock manager not specified correctly, set hive.lock.manager"),

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1507713&r1=1507712&r2=1507713&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Sat Jul 27 21:20:03 2013
@@ -343,6 +343,8 @@ public final class FunctionRegistry {
     registerUDF(serdeConstants.STRING_TYPE_NAME, UDFToString.class, false,
         UDFToString.class.getSimpleName());
 
+    registerGenericUDF(serdeConstants.DATE_TYPE_NAME,
+        GenericUDFToDate.class);
     registerGenericUDF(serdeConstants.TIMESTAMP_TYPE_NAME,
         GenericUDFTimestamp.class);
     registerGenericUDF(serdeConstants.BINARY_TYPE_NAME,
@@ -707,6 +709,11 @@ public final class FunctionRegistry {
     if (from.equals(TypeInfoFactory.voidTypeInfo)) {
       return true;
     }
+    // Allow implicit Date to String conversion
+    if (from.equals(TypeInfoFactory.dateTypeInfo)
+        && to.equals(TypeInfoFactory.stringTypeInfo)) {
+      return true;
+    }
 
     if (from.equals(TypeInfoFactory.timestampTypeInfo)
         && to.equals(TypeInfoFactory.stringTypeInfo)) {
@@ -1267,7 +1274,8 @@ public final class FunctionRegistry {
         udfClass == UDFToDouble.class || udfClass == UDFToFloat.class ||
         udfClass == UDFToInteger.class || udfClass == UDFToLong.class ||
         udfClass == UDFToShort.class || udfClass == UDFToString.class ||
-        udfClass == GenericUDFTimestamp.class || udfClass == GenericUDFToBinary.class;
+        udfClass == GenericUDFTimestamp.class || udfClass == GenericUDFToBinary.class ||
+        udfClass == GenericUDFToDate.class;
   }
 
   /**

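[To make the FunctionRegistry changes concrete: the new registration wires the DATE type name to GenericUDFToDate (the function used for CAST ... AS DATE), and the new clause in the implicit-conversion check permits DATE-to-STRING coercion, mirroring the TIMESTAMP-to-STRING rule just below it. A minimal sketch of the added rule, illustrative only: the helper and class names are invented, while the TypeInfoFactory constants are the ones referenced in the patch.]

    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class DateConversionSketch {
      // Mirrors the clause added to the implicit-conversion check above.
      static boolean dateToStringAllowed(TypeInfo from, TypeInfo to) {
        return from.equals(TypeInfoFactory.dateTypeInfo)
            && to.equals(TypeInfoFactory.stringTypeInfo);
      }

      public static void main(String[] args) {
        System.out.println(dateToStringAllowed(
            TypeInfoFactory.dateTypeInfo, TypeInfoFactory.stringTypeInfo)); // true
        System.out.println(dateToStringAllowed(
            TypeInfoFactory.stringTypeInfo, TypeInfoFactory.dateTypeInfo)); // false
      }
    }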

