hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From gunt...@apache.org
Subject svn commit: r1550684 [2/24] - in /hive/branches/tez: ./ ant/src/org/apache/hadoop/hive/ant/ beeline/ beeline/src/java/org/apache/hive/beeline/ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/test/results/clientnegative/ contrib/src/test/result...
Date Fri, 13 Dec 2013 10:56:54 GMT
Modified: hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java (original)
+++ hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java Fri Dec 13 10:56:38 2013
@@ -386,9 +386,25 @@ public class GenVectorCode extends Task 
       // See org.apache.hadoop.hive.ql.exec.vector.expressions for remaining cast VectorExpression
       // classes
 
-        {"ColumnUnaryMinus", "long"},
-        {"ColumnUnaryMinus", "double"},
+      {"ColumnUnaryMinus", "long"},
+      {"ColumnUnaryMinus", "double"},
 
+      // IF conditional expression
+      // fileHeader, resultType, arg2Type, arg3Type
+      {"IfExprColumnColumn", "long"},
+      {"IfExprColumnColumn", "double"},
+      {"IfExprColumnScalar", "long", "long"},
+      {"IfExprColumnScalar", "double", "long"},
+      {"IfExprColumnScalar", "long", "double"},
+      {"IfExprColumnScalar", "double", "double"},
+      {"IfExprScalarColumn", "long", "long"},
+      {"IfExprScalarColumn", "double", "long"},
+      {"IfExprScalarColumn", "long", "double"},
+      {"IfExprScalarColumn", "double", "double"},
+      {"IfExprScalarScalar", "long", "long"},
+      {"IfExprScalarScalar", "double", "long"},
+      {"IfExprScalarScalar", "long", "double"},
+      {"IfExprScalarScalar", "double", "double"},
 
       // template, <ClassName>, <ValueType>, <OperatorSymbol>, <DescriptionName>, <DescriptionValue>
       {"VectorUDAFMinMax", "VectorUDAFMinLong", "long", "<", "min",
@@ -567,6 +583,14 @@ public class GenVectorCode extends Task 
         generateFilterStringColumnCompareColumn(tdesc);
       } else if (tdesc[0].equals("StringColumnCompareColumn")) {
         generateStringColumnCompareColumn(tdesc);
+      } else if (tdesc[0].equals("IfExprColumnColumn")) {
+        generateIfExprColumnColumn(tdesc);
+      } else if (tdesc[0].equals("IfExprColumnScalar")) {
+        generateIfExprColumnScalar(tdesc);
+      } else if (tdesc[0].equals("IfExprScalarColumn")) {
+        generateIfExprScalarColumn(tdesc);
+      } else if (tdesc[0].equals("IfExprScalarScalar")) {
+        generateIfExprScalarScalar(tdesc);
       } else {
         continue;
       }
@@ -800,6 +824,89 @@ public class GenVectorCode extends Task 
         className, templateString);
   }
 
+  private void generateIfExprColumnColumn(String[] tdesc) throws IOException {
+    String operandType = tdesc[1];
+    String inputColumnVectorType = this.getColumnVectorType(operandType);
+    String outputColumnVectorType = inputColumnVectorType;
+    String returnType = operandType;
+    String className = "IfExpr" + getCamelCaseType(operandType) + "Column"
+        + getCamelCaseType(operandType) + "Column";
+    String outputFile = joinPath(this.expressionOutputDirectory, className + ".java");
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    // Expand, and write result
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
+    templateString = templateString.replaceAll("<OperandType>", operandType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateIfExprColumnScalar(String[] tdesc) throws IOException {
+    String operandType2 = tdesc[1];
+    String operandType3 = tdesc[2];
+    String arg2ColumnVectorType = this.getColumnVectorType(operandType2);
+    String returnType = getArithmeticReturnType(operandType2, operandType3);
+    String outputColumnVectorType = getColumnVectorType(returnType);
+    String className = "IfExpr" + getCamelCaseType(operandType2) + "Column"
+        + getCamelCaseType(operandType3) + "Scalar";
+    String outputFile = joinPath(this.expressionOutputDirectory, className + ".java");
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    // Expand, and write result
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<Arg2ColumnVectorType>", arg2ColumnVectorType);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<OperandType3>", operandType3);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateIfExprScalarColumn(String[] tdesc) throws IOException {
+    String operandType2 = tdesc[1];
+    String operandType3 = tdesc[2];
+    String arg3ColumnVectorType = this.getColumnVectorType(operandType3);
+    String returnType = getArithmeticReturnType(operandType2, operandType3);
+    String outputColumnVectorType = getColumnVectorType(returnType);
+    String className = "IfExpr" + getCamelCaseType(operandType2) + "Scalar"
+        + getCamelCaseType(operandType3) + "Column";
+    String outputFile = joinPath(this.expressionOutputDirectory, className + ".java");
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    // Expand, and write result
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<Arg3ColumnVectorType>", arg3ColumnVectorType);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<OperandType3>", operandType3);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateIfExprScalarScalar(String[] tdesc) throws IOException {
+    String operandType2 = tdesc[1];
+    String operandType3 = tdesc[2];
+    String arg3ColumnVectorType = this.getColumnVectorType(operandType3);
+    String returnType = getArithmeticReturnType(operandType2, operandType3);
+    String outputColumnVectorType = getColumnVectorType(returnType);
+    String className = "IfExpr" + getCamelCaseType(operandType2) + "Scalar"
+        + getCamelCaseType(operandType3) + "Scalar";
+    String outputFile = joinPath(this.expressionOutputDirectory, className + ".java");
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    // Expand, and write result
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<OperandType3>", operandType3);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
   // template, <ClassNamePrefix>, <ReturnType>, <OperandType>, <FuncName>, <OperandCast>, <ResultCast>
   private void generateColumnUnaryFunc(String[] tdesc) throws IOException {
     String classNamePrefix = tdesc[1];

Modified: hive/branches/tez/beeline/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/beeline/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/beeline/pom.xml (original)
+++ hive/branches/tez/beeline/pom.xml Fri Dec 13 10:56:38 2013
@@ -118,7 +118,7 @@
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-core</artifactId>
           <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
+          <optional>true</optional>
         </dependency>
       </dependencies>
     </profile>
@@ -131,6 +131,13 @@
           <version>${hadoop-23.version}</version>
           <optional>true</optional>
         </dependency>
+        <!-- test inter-project -->
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>

Modified: hive/branches/tez/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java (original)
+++ hive/branches/tez/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java Fri Dec 13 10:56:38 2013
@@ -138,7 +138,8 @@ class BeeLineOpts implements Completor {
   public int complete(String buf, int pos, List cand) {
     try {
       return new SimpleCompletor(propertyNames()).complete(buf, pos, cand);
-    } catch (Throwable t) {
+    } catch (Exception e) {
+      beeLine.handleException(e);
       return -1;
     }
   }

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri Dec 13 10:56:38 2013
@@ -648,6 +648,17 @@ public class HiveConf extends Configurat
     // statistics annotation fetches column statistics for all required columns and for all
     // required partitions which can be very expensive sometimes
     HIVE_STATS_FETCH_COLUMN_STATS("hive.stats.fetch.column.stats", false),
+    // in the absence of table/partition stats, average row size will be used to
+    // estimate the number of rows/data size
+    HIVE_STATS_AVG_ROW_SIZE("hive.stats.avg.row.size", 10000),
+    // in the absence of column statistics, the estimated number of rows/data size that will
+    // emitted from join operator will depend on t factor
+    HIVE_STATS_JOIN_FACTOR("hive.stats.join.factor", (float) 1.1),
+    // in the absence of uncompressed/raw data size, total file size will be used for statistics
+    // annotation. But the file may be compressed, encoded and serialized which may be lesser in size
+    // than the actual uncompressed/raw data size. This factor will be multiplied to file size to estimate
+    // the raw data size.
+    HIVE_STATS_DESERIALIZATION_FACTOR("hive.stats.deserialization.factor", (float) 1.0),
 
     // Concurrency
     HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", false),

Modified: hive/branches/tez/contrib/src/test/results/clientnegative/serde_regex.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientnegative/serde_regex.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientnegative/serde_regex.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientnegative/serde_regex.q.out Fri Dec 13 10:56:38 2013
@@ -62,7 +62,6 @@ STAGE PLANS:
           name: serde_regex
           isExternal: false
 
-
 PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/dboutput.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/dboutput.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/dboutput.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/dboutput.q.out Fri Dec 13 10:56:38 2013
@@ -80,7 +80,6 @@ STAGE PLANS:
     Fetch Operator
       limit: 1
 
-
 PREHOOK: query: FROM src 
 
 SELECT dboutput ( 'jdbc:derby:../build/test_dboutput_db;create=true','','',
@@ -161,7 +160,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: SELECT
 
 dboutput('jdbc:derby:../build/test_dboutput_db','','',

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/fileformat_base64.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/fileformat_base64.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/fileformat_base64.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/fileformat_base64.q.out Fri Dec 13 10:56:38 2013
@@ -30,7 +30,6 @@ STAGE PLANS:
           name: base64_test
           isExternal: false
 
-
 PREHOOK: query: CREATE TABLE base64_test(key INT, value STRING) STORED AS
   INPUTFORMAT 'org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat'
   OUTPUTFORMAT 'org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat'

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/serde_regex.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/serde_regex.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/serde_regex.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/serde_regex.q.out Fri Dec 13 10:56:38 2013
@@ -56,7 +56,6 @@ STAGE PLANS:
           name: serde_regex
           isExternal: false
 
-
 PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes.q.out Fri Dec 13 10:56:38 2013
@@ -134,7 +134,6 @@ STAGE PLANS:
           hdfs directory: true
 #### A masked pattern was here ####
 
-
 PREHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out Fri Dec 13 10:56:38 2013
@@ -134,7 +134,6 @@ STAGE PLANS:
           hdfs directory: true
 #### A masked pattern was here ####
 
-
 PREHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(cast(src.key as smallint), src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out Fri Dec 13 10:56:38 2013
@@ -134,7 +134,6 @@ STAGE PLANS:
           hdfs directory: true
 #### A masked pattern was here ####
 
-
 PREHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(cast(src.key as smallint), src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out Fri Dec 13 10:56:38 2013
@@ -107,7 +107,6 @@ STAGE PLANS:
   Stage: Stage-2
     Stats-Aggr Operator
 
-
 PREHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(cast(src.key as tinyint), src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out Fri Dec 13 10:56:38 2013
@@ -134,7 +134,6 @@ STAGE PLANS:
           hdfs directory: true
 #### A masked pattern was here ####
 
-
 PREHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_avg.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_avg.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_avg.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_avg.q.out Fri Dec 13 10:56:38 2013
@@ -73,7 +73,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: SELECT example_avg(substr(value,5)),
        example_avg(IF(substr(value,5) > 250, NULL, substr(value,5)))
 FROM src

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out Fri Dec 13 10:56:38 2013
@@ -83,7 +83,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: SELECT substr(value,5,1), example_group_concat("(", key, ":", value, ")")
 FROM src
 GROUP BY substr(value,5,1)

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_max.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_max.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_max.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_max.q.out Fri Dec 13 10:56:38 2013
@@ -78,7 +78,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: SELECT example_max(substr(value,5)),
        example_max(IF(substr(value,5) > 250, NULL, substr(value,5)))
 FROM src

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out Fri Dec 13 10:56:38 2013
@@ -73,7 +73,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: SELECT example_max_n(substr(value,5),10),
        example_max_n(IF(substr(value,5) > 250, NULL, substr(value,5)),10)
 FROM src

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_min.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_min.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_min.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_min.q.out Fri Dec 13 10:56:38 2013
@@ -78,7 +78,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: SELECT example_min(substr(value,5)),
        example_min(IF(substr(value,5) > 250, NULL, substr(value,5)))
 FROM src

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out Fri Dec 13 10:56:38 2013
@@ -73,7 +73,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: SELECT example_min_n(substr(value,5),10),
        example_min_n(IF(substr(value,5) < 250, NULL, substr(value,5)),10)
 FROM src

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_add.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_add.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_add.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_add.q.out Fri Dec 13 10:56:38 2013
@@ -66,7 +66,6 @@ STAGE PLANS:
     Fetch Operator
       limit: 1
 
-
 PREHOOK: query: SELECT example_add(1, 2),
        example_add(1, 2, 3),
        example_add(1, 2, 3, 4),

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out Fri Dec 13 10:56:38 2013
@@ -53,7 +53,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: SELECT example_arraysum(lint), example_mapconcat(mstringstring), example_structprint(lintstring[0])
 FROM src_thrift
 PREHOOK: type: QUERY

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_format.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_format.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_format.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/udf_example_format.q.out Fri Dec 13 10:56:38 2013
@@ -54,7 +54,6 @@ STAGE PLANS:
     Fetch Operator
       limit: 1
 
-
 PREHOOK: query: SELECT example_format("abc"),
        example_format("%1$s", 1.1),
        example_format("%1$s %2$e", 1.1, 1.2),

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/udf_row_sequence.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/udf_row_sequence.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/udf_row_sequence.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/udf_row_sequence.q.out Fri Dec 13 10:56:38 2013
@@ -107,7 +107,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select key, row_sequence() as r
 from (select key from src order by key) x
 order by r

Modified: hive/branches/tez/data/files/alltypes.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/data/files/alltypes.txt?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/data/files/alltypes.txt (original)
+++ hive/branches/tez/data/files/alltypes.txt Fri Dec 13 10:56:38 2013
@@ -1,2 +1,2 @@
-true|10|100|1000|10000|4.0|20.0|2.2222|1969-12-31 15:59:58.174|1970-01-01 00:00:00|hello|k1:v1,k2:v2|100,200|{10, "foo"}
-true|20|200|2000|20000|8.0|40.0|4.2222|1970-12-31 15:59:58.174|1971-01-01 00:00:00||k3:v3,k4:v4|200,300|{20, "bar"}
+true|10|100|1000|10000|4.0|20.0|2.2222|1969-12-31 15:59:58.174|1970-01-01 00:00:00|hello|hello|k1:v1,k2:v2|100,200|{10, "foo"}
+true|20|200|2000|20000|8.0|40.0|4.2222|1970-12-31 15:59:58.174|1971-01-01 00:00:00|||k3:v3,k4:v4|200,300|{20, "bar"}

Modified: hive/branches/tez/hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out (original)
+++ hive/branches/tez/hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out Fri Dec 13 10:56:38 2013
@@ -37,7 +37,11 @@ Found 3 items
 #### A masked pattern was here ####
 PREHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE
 PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:hbasedb
+PREHOOK: Output: database:hbasedb
 POSTHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE
 POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:hbasedb
+POSTHOOK: Output: database:hbasedb
 Command failed with exit code = -1
 Query returned non-zero code: -1, cause: null

Modified: hive/branches/tez/hbase-handler/src/test/results/positive/external_table_ppd.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/results/positive/external_table_ppd.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/test/results/positive/external_table_ppd.q.out (original)
+++ hive/branches/tez/hbase-handler/src/test/results/positive/external_table_ppd.q.out Fri Dec 13 10:56:38 2013
@@ -166,7 +166,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: SELECT * FROM t_hbase where int_col > 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t_hbase

Modified: hive/branches/tez/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out (original)
+++ hive/branches/tez/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out Fri Dec 13 10:56:38 2013
@@ -65,7 +65,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_pushdown where key>'90'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_pushdown
@@ -229,7 +228,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_pushdown where key>=cast(40 + 50 as string)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_pushdown
@@ -292,7 +290,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_pushdown where key>'90' and value like '%9%'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_pushdown
@@ -356,7 +353,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_pushdown
 where key>='90' and value like '%9%' and key=cast(value as int)
 PREHOOK: type: QUERY
@@ -417,7 +413,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_pushdown
 where key<'80' and key>'90' and value like '%90%'
 PREHOOK: type: QUERY
@@ -458,7 +453,6 @@ STAGE PLANS:
             outputColumnNames: _col0, _col1
             ListSink
 
-
 PREHOOK: query: -- with a predicate which is not actually part of the filter, so
 -- it should be ignored by pushdown
 
@@ -508,7 +502,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: -- with a predicate which is under an OR, so it should
 -- be ignored by pushdown
 
@@ -558,7 +551,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: -- following will get pushed into hbase after HIVE-2819
 explain select * from hbase_pushdown where key > '281' 
 and key < '287'
@@ -607,7 +599,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_pushdown where key > '281' 
 and key < '287'
 PREHOOK: type: QUERY
@@ -668,4 +659,3 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-

Modified: hive/branches/tez/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/results/positive/hbase_pushdown.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/test/results/positive/hbase_pushdown.q.out (original)
+++ hive/branches/tez/hbase-handler/src/test/results/positive/hbase_pushdown.q.out Fri Dec 13 10:56:38 2013
@@ -65,7 +65,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_pushdown where key=90
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_pushdown
@@ -123,7 +122,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_pushdown where key=90 and value like '%90%'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_pushdown
@@ -183,7 +181,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: -- with contradictory pushdowns
 
 explain select * from hbase_pushdown
@@ -231,7 +228,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_pushdown
 where key=80 and key=90 and value like '%90%'
 PREHOOK: type: QUERY
@@ -272,7 +268,6 @@ STAGE PLANS:
             outputColumnNames: _col0, _col1
             ListSink
 
-
 PREHOOK: query: -- with a predicate which is not actually part of the filter, so
 -- it should be ignored by pushdown
 
@@ -322,7 +317,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: -- with a predicate which is under an OR, so it should
 -- be ignored by pushdown
 
@@ -372,7 +366,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: -- with pushdown disabled
 
 explain select * from hbase_pushdown where key=90
@@ -418,4 +411,3 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-

Modified: hive/branches/tez/hbase-handler/src/test/results/positive/hbase_queries.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/results/positive/hbase_queries.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/test/results/positive/hbase_queries.q.out (original)
+++ hive/branches/tez/hbase-handler/src/test/results/positive/hbase_queries.q.out Fri Dec 13 10:56:38 2013
@@ -66,7 +66,6 @@ STAGE PLANS:
                       serde: org.apache.hadoop.hive.hbase.HBaseSerDe
                       name: default.hbase_table_1
 
-
 PREHOOK: query: FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -218,7 +217,6 @@ STAGE PLANS:
     Fetch Operator
       limit: 20
 
-
 PREHOOK: query: SELECT Y.* 
 FROM 
 (SELECT hbase_table_1.* FROM hbase_table_1) x
@@ -396,7 +394,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: SELECT Y.*
 FROM 
 (SELECT hbase_table_1.* FROM hbase_table_1 WHERE hbase_table_1.key > 100) x
@@ -657,7 +654,6 @@ STAGE PLANS:
                   serde: org.apache.hadoop.hive.hbase.HBaseSerDe
                   name: default.hbase_table_3
 
-
 PREHOOK: query: INSERT OVERWRITE TABLE hbase_table_3
 SELECT x.key, x.value, Y.count 
 FROM 

Modified: hive/branches/tez/hbase-handler/src/test/results/positive/hbase_single_sourced_multi_insert.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/results/positive/hbase_single_sourced_multi_insert.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/test/results/positive/hbase_single_sourced_multi_insert.q.out (original)
+++ hive/branches/tez/hbase-handler/src/test/results/positive/hbase_single_sourced_multi_insert.q.out Fri Dec 13 10:56:38 2013
@@ -143,7 +143,6 @@ STAGE PLANS:
           hdfs directory: true
 #### A masked pattern was here ####
 
-
 PREHOOK: query: from src a
 insert overwrite table src_x1
 select key,"" where a.key > 0 AND a.key < 50

Modified: hive/branches/tez/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out (original)
+++ hive/branches/tez/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out Fri Dec 13 10:56:38 2013
@@ -63,7 +63,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_ppd_keyrange where key > 8 and key < 21
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_ppd_keyrange
@@ -125,7 +124,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_ppd_keyrange where key > 8 and key <= 17
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_ppd_keyrange
@@ -184,7 +182,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_ppd_keyrange where key > 8 and key <= 17 and value like '%11%'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_ppd_keyrange
@@ -235,7 +232,6 @@ STAGE PLANS:
     Fetch Operator
       limit: -1
 
-
 PREHOOK: query: select * from hbase_ppd_keyrange where key >=9  and key < 17 and key = 11
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_ppd_keyrange

Modified: hive/branches/tez/hcatalog/core/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/pom.xml (original)
+++ hive/branches/tez/hcatalog/core/pom.xml Fri Dec 13 10:56:38 2013
@@ -76,9 +76,6 @@
   <profiles>
     <profile>
       <id>hadoop-1</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
@@ -130,6 +127,19 @@
         </dependency>
         <!-- test -->
         <dependency>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-servlet</artifactId>
+          <version>${jersey.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-hdfs</artifactId>
           <version>${hadoop-23.version}</version>
@@ -144,12 +154,31 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-hs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
           <version>${hadoop-23.version}</version>
           <classifier>tests</classifier>
           <scope>test</scope>
         </dependency>
         <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-yarn-server-tests</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
           <groupId>org.apache.pig</groupId>
           <artifactId>pig</artifactId>
           <version>${pig.version}</version>

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java Fri Dec 13 10:56:38 2013
@@ -93,7 +93,7 @@ public class HdfsAuthorizationProvider e
 
     switch (priv.getPriv()) {
     case ALL:
-      throw new AuthorizationException("no matching Action for Privilege.All");
+      return FsAction.READ_WRITE;
     case ALTER_DATA:
       return FsAction.WRITE;
     case ALTER_METADATA:

Modified: hive/branches/tez/hcatalog/hcatalog-pig-adapter/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/hcatalog-pig-adapter/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/hcatalog-pig-adapter/pom.xml (original)
+++ hive/branches/tez/hcatalog/hcatalog-pig-adapter/pom.xml Fri Dec 13 10:56:38 2013
@@ -59,9 +59,6 @@
   <profiles>
     <profile>
       <id>hadoop-1</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>

Modified: hive/branches/tez/hcatalog/server-extensions/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/server-extensions/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/server-extensions/pom.xml (original)
+++ hive/branches/tez/hcatalog/server-extensions/pom.xml Fri Dec 13 10:56:38 2013
@@ -100,9 +100,6 @@
   <profiles>
     <profile>
       <id>hadoop-1</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>

Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/pom.xml (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/pom.xml Fri Dec 13 10:56:38 2013
@@ -91,9 +91,6 @@
   <profiles>
     <profile>
       <id>hadoop-1</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
@@ -170,6 +167,12 @@
           <version>${hadoop-23.version}</version>
         </dependency>
         <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+        <dependency>
           <groupId>org.apache.hbase</groupId>
           <artifactId>hbase-client</artifactId>
           <version>${hbase.hadoop2.version}</version>
@@ -197,28 +200,41 @@
         <!-- test -->
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <artifactId>hadoop-common</artifactId>
           <version>${hadoop-23.version}</version>
           <classifier>tests</classifier>
           <scope>test</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-hs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-hdfs</artifactId>
           <version>${hadoop-23.version}</version>
           <classifier>tests</classifier>
           <scope>test</scope>
         </dependency>
         <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <type>test-jar</type>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
           <scope>test</scope>
         </dependency>
+       <dependency>
+         <groupId>org.apache.hadoop</groupId>
+         <artifactId>hadoop-yarn-server-tests</artifactId>
+         <version>${hadoop-23.version}</version>
+         <classifier>tests</classifier>
+         <scope>test</scope>
+       </dependency>
         <dependency>
           <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
+          <artifactId>hbase-common</artifactId>
           <version>${hbase.hadoop2.version}</version>
           <type>test-jar</type>
           <scope>test</scope>
@@ -237,6 +253,13 @@
           <classifier>tests</classifier>
           <scope>test</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-server</artifactId>
+          <version>${hbase.hadoop2.version}</version>
+          <type>test-jar</type>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>

Modified: hive/branches/tez/hcatalog/webhcat/java-client/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/java-client/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/java-client/pom.xml (original)
+++ hive/branches/tez/hcatalog/webhcat/java-client/pom.xml Fri Dec 13 10:56:38 2013
@@ -58,9 +58,6 @@
   <profiles>
     <profile>
       <id>hadoop-1</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>

Modified: hive/branches/tez/hcatalog/webhcat/svr/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/pom.xml (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/pom.xml Fri Dec 13 10:56:38 2013
@@ -105,9 +105,6 @@
   <profiles>
     <profile>
       <id>hadoop-1</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>

Modified: hive/branches/tez/itests/hcatalog-unit/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/hcatalog-unit/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/itests/hcatalog-unit/pom.xml (original)
+++ hive/branches/tez/itests/hcatalog-unit/pom.xml Fri Dec 13 10:56:38 2013
@@ -218,8 +218,9 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <artifactId>hadoop-common</artifactId>
           <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
           <scope>test</scope>
         </dependency>
         <dependency>
@@ -237,12 +238,36 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
           <version>${hadoop-23.version}</version>
           <classifier>tests</classifier>
           <scope>test</scope>
         </dependency>
         <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-yarn-server-tests</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+          <classifier>tests</classifier>
+        </dependency>
+        <dependency>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-servlet</artifactId>
+          <version>${jersey.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-hs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
           <groupId>org.apache.hbase</groupId>
           <artifactId>hbase-client</artifactId>
           <version>${hbase.hadoop2.version}</version>

Modified: hive/branches/tez/itests/hive-unit/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/hive-unit/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/itests/hive-unit/pom.xml (original)
+++ hive/branches/tez/itests/hive-unit/pom.xml Fri Dec 13 10:56:38 2013
@@ -127,9 +127,6 @@
   <profiles>
     <profile>
       <id>hadoop-1</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
@@ -143,6 +140,58 @@
           <version>${hadoop-20S.version}</version>
           <scope>test</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-common</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-common</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+          <classifier>tests</classifier>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-hadoop-compat</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-hadoop-compat</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+          <classifier>tests</classifier>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-hadoop1-compat</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-hadoop1-compat</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+          <classifier>tests</classifier>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-server</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-server</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
    <profile>
@@ -174,6 +223,12 @@
           <version>${hadoop-23.version}</version>
           <scope>test</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-server</artifactId>
+          <version>${hbase.hadoop2.version}</version>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>

Modified: hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java (original)
+++ hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java Fri Dec 13 10:56:38 2013
@@ -118,11 +118,15 @@ public class TestClientSideAuthorization
 
     String userName = ugi.getUserName();
 
+    allowCreateDatabase(userName);
+
     CommandProcessorResponse ret = driver.run("create database " + dbName);
     assertEquals(0,ret.getResponseCode());
     Database db = msc.getDatabase(dbName);
     String dbLocn = db.getLocationUri();
 
+    disallowCreateDatabase(userName);
+
     validateCreateDb(db,dbName);
     disallowCreateInDb(dbName, userName, dbLocn);
 
@@ -183,6 +187,15 @@ public class TestClientSideAuthorization
     // nothing needed here by default
   }
 
+  protected void allowCreateDatabase(String userName)
+      throws Exception {
+    driver.run("grant create to user "+userName);
+  }
+
+  protected void disallowCreateDatabase(String userName)
+      throws Exception {
+    driver.run("revoke create from user "+userName);
+  }
 
   protected void allowCreateInDb(String dbName, String userName, String location)
       throws Exception {

Modified: hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java (original)
+++ hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java Fri Dec 13 10:56:38 2013
@@ -137,6 +137,8 @@ public class TestMetastoreAuthorizationP
     String tblName = getTestTableName();
     String userName = ugi.getUserName();
 
+    allowCreateDatabase(userName);
+
     CommandProcessorResponse ret = driver.run("create database " + dbName);
     assertEquals(0,ret.getResponseCode());
     Database db = msc.getDatabase(dbName);
@@ -145,6 +147,8 @@ public class TestMetastoreAuthorizationP
     validateCreateDb(db,dbName);
     disallowCreateInDb(dbName, userName, dbLocn);
 
+    disallowCreateDatabase(userName);
+
     driver.run("use " + dbName);
     ret = driver.run(
         String.format("create table %s (a string) partitioned by (b string)", tblName));
@@ -248,6 +252,16 @@ public class TestMetastoreAuthorizationP
 
   }
 
+  protected void allowCreateDatabase(String userName)
+      throws Exception {
+    driver.run("grant create to user "+userName);
+  }
+
+  protected void disallowCreateDatabase(String userName)
+      throws Exception {
+    driver.run("revoke create from user "+userName);
+  }
+
   protected void allowCreateInTbl(String tableName, String userName, String location)
       throws Exception{
     driver.run("grant create on table "+tableName+" to user "+userName);

Modified: hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Fri Dec 13 10:56:38 2013
@@ -48,6 +48,8 @@ import java.util.regex.Pattern;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.ql.processors.DfsProcessor;
+import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hive.common.util.HiveVersionInfo;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.cli.operation.ClassicTableTypeMapping;
@@ -1315,6 +1317,10 @@ public class TestJdbcDriver2 {
     assertFalse(meta.supportsMultipleResultSets());
     assertFalse(meta.supportsStoredProcedures());
     assertTrue(meta.supportsAlterTableWithAddColumn());
+
+    //-1 indicates malformed version.
+    assertTrue(meta.getDatabaseMajorVersion() > -1);
+    assertTrue(meta.getDatabaseMinorVersion() > -1);
   }
 
   @Test
@@ -1807,4 +1813,119 @@ public class TestJdbcDriver2 {
     Connection conn = driver.connect("jdbc:derby://localhost:10000/default", new Properties());
     assertNull(conn);
   }
+
+  /**
+   * Test the cursor repositioning to start of resultset
+   * @throws Exception
+   */
+  public void testFetchFirstQuery() throws Exception {
+    execFetchFirst("select c4 from " + dataTypeTableName + " order by c1", "c4", false);
+    execFetchFirst("select c4 from " + dataTypeTableName + " order by c1", "c4",  true);
+  }
+
+  /**
+   * Test the cursor repositioning to start of resultset from non-mr query
+   * @throws Exception
+   */
+  public void testFetchFirstNonMR() throws Exception {
+    execFetchFirst("select * from " + dataTypeTableName, "c4", false);
+  }
+
+  /**
+   *  Test for cursor repositioning to start of resultset for non-sql commands
+   * @throws Exception
+   */
+  public void testFetchFirstSetCmds() throws Exception {
+    execFetchFirst("set -v", SetProcessor.SET_COLUMN_NAME, false);
+  }
+
+  /**
+   *  Test for cursor repositioning to start of resultset for non-sql commands
+   * @throws Exception
+   */
+  public void testFetchFirstDfsCmds() throws Exception {
+    String wareHouseDir = conf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname);
+    execFetchFirst("dfs -ls " + wareHouseDir, DfsProcessor.DFS_RESULT_HEADER, false);
+  }
+
+
+  /**
+   * Negative Test for cursor repositioning to start of resultset
+   * Verify unsupported JDBC resultset attributes
+   * @throws Exception
+   */
+  public void testUnsupportedFetchTypes() throws Exception {
+    try {
+      con.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,
+        ResultSet.CONCUR_READ_ONLY);
+      fail("createStatement with TYPE_SCROLL_SENSITIVE should fail");
+    } catch(SQLException e) {
+      assertEquals("HYC00", e.getSQLState().trim());
+    }
+
+    try {
+      con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
+        ResultSet.CONCUR_UPDATABLE);
+      fail("createStatement with CONCUR_UPDATABLE should fail");
+    } catch(SQLException e) {
+      assertEquals("HYC00", e.getSQLState().trim());
+    }
+  }
+
+  /**
+   * Negative Test for cursor repositioning to start of resultset
+   * Verify unsupported JDBC resultset methods
+   * @throws Exception
+   */
+  public void testFetchFirstError() throws Exception {
+    Statement stmt = con.createStatement();
+    ResultSet res = stmt.executeQuery("select * from " + tableName);
+    try {
+      res.beforeFirst();
+      fail("beforeFirst() should fail for normal resultset");
+    } catch (SQLException e) {
+      assertEquals("Method not supported for TYPE_FORWARD_ONLY resultset", e.getMessage());
+    }
+  }
+
+  /**
+   * Read the results locally. Then reset the read position to start and read the
+   * rows again verify that we get the same results next time.
+   * @param sqlStmt - SQL statement to execute
+   * @param colName - columns name to read
+   * @param oneRowOnly -  read and compare only one row from the resultset
+   * @throws Exception
+   */
+  private void execFetchFirst(String sqlStmt, String colName, boolean oneRowOnly)
+      throws Exception {
+    Statement stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
+          ResultSet.CONCUR_READ_ONLY);
+    ResultSet res = stmt.executeQuery(sqlStmt);
+
+    List<String> results = new ArrayList<String> ();
+    assertTrue(res.isBeforeFirst());
+    int rowNum = 0;
+    while (res.next()) {
+      results.add(res.getString(colName));
+      assertEquals(++rowNum, res.getRow());
+      assertFalse(res.isBeforeFirst());
+      if (oneRowOnly) {
+        break;
+      }
+    }
+    // reposition at the begining
+    res.beforeFirst();
+    assertTrue(res.isBeforeFirst());
+    rowNum = 0;
+    while (res.next()) {
+      // compare the results fetched last time
+      assertEquals(results.get(rowNum++), res.getString(colName));
+      assertEquals(rowNum, res.getRow());
+      assertFalse(res.isBeforeFirst());
+      if (oneRowOnly) {
+        break;
+      }
+    }
+  }
+
 }

Modified: hive/branches/tez/itests/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/itests/pom.xml (original)
+++ hive/branches/tez/itests/pom.xml Fri Dec 13 10:56:38 2013
@@ -34,6 +34,7 @@
   <modules>
    <module>custom-serde</module>
    <module>hcatalog-unit</module>
+   <module>hive-unit</module>
    <module>util</module>
    <module>test-serde</module>
    <module>qtest</module>

Modified: hive/branches/tez/itests/qtest/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/qtest/pom.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/itests/qtest/pom.xml (original)
+++ hive/branches/tez/itests/qtest/pom.xml Fri Dec 13 10:56:38 2013
@@ -36,7 +36,7 @@
     <run_disabled>false</run_disabled>
     <clustermode></clustermode>
     <execute.beeline.tests>false</execute.beeline.tests>
-    <minimr.query.files>list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q,stats_counter.q</minimr.query.files>
+    <minimr.query.files>stats_counter_partitioned.q,list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q,stats_counter.q</minimr.query.files>
     <minimr.query.negative.files>cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q</minimr.query.negative.files>
     <minitez.query.files>tez_join_tests.q,tez_joins_explain.q,mrr.q,tez_dml.q,tez_insert_overwrite_local_directory_1.q</minitez.query.files>
     <beeline.positive.exclude>add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rena
 me.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_o
 verwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q</beeline.positive.exclude>
@@ -210,6 +210,12 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-archives</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
           <version>${hadoop-23.version}</version>
           <scope>test</scope>
@@ -314,12 +320,6 @@
           <groupId>org.apache.hbase</groupId>
           <artifactId>hbase-server</artifactId>
           <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
           <classifier>tests</classifier>
           <scope>test</scope>
         </dependency>

Modified: hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Fri Dec 13 10:56:38 2013
@@ -27,6 +27,7 @@ import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
 import java.sql.NClob;
 import java.sql.PreparedStatement;
+import java.sql.ResultSet;
 import java.sql.SQLClientInfoException;
 import java.sql.SQLException;
 import java.sql.SQLWarning;
@@ -452,8 +453,16 @@ public class HiveConnection implements j
 
   public Statement createStatement(int resultSetType, int resultSetConcurrency)
       throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    if (resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) {
+      throw new SQLException("Statement with resultset concurrency " +
+          resultSetConcurrency + " is not supported", "HYC00"); // Optional feature not implemented
+    }
+    if (resultSetType == ResultSet.TYPE_SCROLL_SENSITIVE) {
+      throw new SQLException("Statement with resultset type " + resultSetType +
+          " is not supported", "HYC00"); // Optional feature not implemented
+    }
+    return new HiveStatement(this, client, sessHandle,
+        resultSetType == ResultSet.TYPE_SCROLL_INSENSITIVE);
   }
 
   /*
@@ -540,6 +549,9 @@ public class HiveConnection implements j
    */
 
   public DatabaseMetaData getMetaData() throws SQLException {
+    if (isClosed) {
+      throw new SQLException("Connection is closed");
+    }
     return new HiveDatabaseMetaData(this, client, sessHandle);
   }
 

Modified: hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java (original)
+++ hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java Fri Dec 13 10:56:38 2013
@@ -65,6 +65,9 @@ public class HiveDatabaseMetaData implem
   //  The maximum column length = MFieldSchema.FNAME in metastore/src/model/package.jdo
   private static final int maxColumnNameLength = 128;
 
+  //  Cached values, to save on round trips to database.
+  private String dbVersion = null;
+
   /**
    *
    */
@@ -254,11 +257,11 @@ public class HiveDatabaseMetaData implem
   }
 
   public int getDatabaseMajorVersion() throws SQLException {
-    throw new SQLException("Method not supported");
+    return Utils.getVersionPart(getDatabaseProductVersion(), 1);
   }
 
   public int getDatabaseMinorVersion() throws SQLException {
-    throw new SQLException("Method not supported");
+    return Utils.getVersionPart(getDatabaseProductVersion(), 2);
   }
 
   public String getDatabaseProductName() throws SQLException {
@@ -266,6 +269,9 @@ public class HiveDatabaseMetaData implem
   }
 
   public String getDatabaseProductVersion() throws SQLException {
+    if (dbVersion != null) { //lazy-caching of the version.
+      return dbVersion;
+    }
 
     TGetInfoReq req = new TGetInfoReq(sessHandle, GetInfoType.CLI_DBMS_VER.toTGetInfoType());
     TGetInfoResp resp;
@@ -276,7 +282,8 @@ public class HiveDatabaseMetaData implem
     }
     Utils.verifySuccess(resp.getStatus());
 
-    return resp.getInfoValue().getStringValue();
+    this.dbVersion = resp.getInfoValue().getStringValue();
+    return dbVersion;
   }
 
   public int getDefaultTransactionIsolation() throws SQLException {

Modified: hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java (original)
+++ hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java Fri Dec 13 10:56:38 2013
@@ -20,6 +20,7 @@ package org.apache.hive.jdbc;
 
 import static org.apache.hive.service.cli.thrift.TCLIServiceConstants.TYPE_NAMES;
 
+import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.Statement;
 import java.sql.SQLException;
@@ -66,6 +67,8 @@ public class HiveQueryResultSet extends 
   private Iterator<TRow> fetchedRowsItr;
   private boolean isClosed = false;
   private boolean emptyResultSet = false;
+  private boolean isScrollable = false;
+  private boolean fetchFirst = false;
 
   public static class Builder {
 
@@ -86,6 +89,7 @@ public class HiveQueryResultSet extends 
     private List<JdbcColumnAttributes> colAttributes;
     private int fetchSize = 50;
     private boolean emptyResultSet = false;
+    private boolean isScrollable = false;
 
     public Builder(Statement statement) {
       this.statement = statement;
@@ -143,6 +147,11 @@ public class HiveQueryResultSet extends 
       return this;
     }
 
+    public Builder setScrollable(boolean setScrollable) {
+      this.isScrollable = setScrollable;
+      return this;
+    }
+
     public HiveQueryResultSet build() throws SQLException {
       return new HiveQueryResultSet(this);
     }
@@ -168,6 +177,7 @@ public class HiveQueryResultSet extends 
     } else {
       this.maxRows = builder.maxRows;
     }
+    this.isScrollable = builder.isScrollable;
   }
 
   /**
@@ -286,9 +296,18 @@ public class HiveQueryResultSet extends 
     }
 
     try {
+      TFetchOrientation orientation = TFetchOrientation.FETCH_NEXT;
+      if (fetchFirst) {
+        // If we are asked to start from the beginning, clear the current fetched resultset
+        orientation = TFetchOrientation.FETCH_FIRST;
+        fetchedRows = null;
+        fetchedRowsItr = null;
+        fetchFirst = false;
+      }
+
       if (fetchedRows == null || !fetchedRowsItr.hasNext()) {
         TFetchResultsReq fetchReq = new TFetchResultsReq(stmtHandle,
-            TFetchOrientation.FETCH_NEXT, fetchSize);
+            orientation, fetchSize);
         TFetchResultsResp fetchResp = client.FetchResults(fetchReq);
         Utils.verifySuccessWithInfo(fetchResp.getStatus());
         fetchedRows = fetchResp.getResults().getRows();
@@ -334,6 +353,18 @@ public class HiveQueryResultSet extends 
   }
 
   @Override
+  public int getType() throws SQLException {
+    if (isClosed) {
+      throw new SQLException("Resultset is closed");
+    }
+    if (isScrollable) {
+      return ResultSet.TYPE_SCROLL_INSENSITIVE;
+    } else {
+      return ResultSet.TYPE_FORWARD_ONLY;
+    }
+  }
+
+  @Override
   public int getFetchSize() throws SQLException {
     if (isClosed) {
       throw new SQLException("Resultset is closed");
@@ -350,4 +381,36 @@ public class HiveQueryResultSet extends 
     //JDK 1.7
     throw new SQLException("Method not supported");
   }
+
+  /**
+   * Moves the cursor before the first row of the resultset.
+   *
+   * @see java.sql.ResultSet#next()
+   * @throws SQLException
+   *           if a database access error occurs.
+   */
+  @Override
+  public void beforeFirst() throws SQLException {
+    if (isClosed) {
+      throw new SQLException("Resultset is closed");
+    }
+    if (!isScrollable) {
+      throw new SQLException("Method not supported for TYPE_FORWARD_ONLY resultset");
+    }
+    fetchFirst = true;
+    rowsFetched = 0;
+  }
+
+  @Override
+  public boolean isBeforeFirst() throws SQLException {
+    if (isClosed) {
+      throw new SQLException("Resultset is closed");
+    }
+    return (rowsFetched == 0);
+  }
+
+  @Override
+  public int getRow() throws SQLException {
+    return rowsFetched;
+  }
 }

Modified: hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java (original)
+++ hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java Fri Dec 13 10:56:38 2013
@@ -48,6 +48,7 @@ public class HiveStatement implements ja
   private final TSessionHandle sessHandle;
   Map<String,String> sessConf = new HashMap<String,String>();
   private int fetchSize = 50;
+  private boolean isScrollableResultset = false;
   /**
    * We need to keep a reference to the result set to support the following:
    * <code>
@@ -79,9 +80,15 @@ public class HiveStatement implements ja
    */
   public HiveStatement(HiveConnection connection, TCLIService.Iface client,
       TSessionHandle sessHandle) {
+    this(connection, client, sessHandle, false);
+  }
+
+  public HiveStatement(HiveConnection connection, TCLIService.Iface client,
+        TSessionHandle sessHandle, boolean isScrollableResultset) {
     this.connection = connection;
     this.client = client;
     this.sessHandle = sessHandle;
+    this.isScrollableResultset = isScrollableResultset;
   }
 
   /*
@@ -249,6 +256,7 @@ public class HiveStatement implements ja
     }
     resultSet =  new HiveQueryResultSet.Builder(this).setClient(client).setSessionHandle(sessHandle)
         .setStmtHandle(stmtHandle).setMaxRows(maxRows).setFetchSize(fetchSize)
+        .setScrollable(isScrollableResultset)
         .build();
     return true;
   }

Modified: hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/Utils.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/Utils.java (original)
+++ hive/branches/tez/jdbc/src/java/org/apache/hive/jdbc/Utils.java Fri Dec 13 10:56:38 2013
@@ -270,4 +270,30 @@ public class Utils {
 
     return connParams;
   }
+
+  /**
+   * Takes a version string delimited by '.' and '-' characters
+   * and returns a partial version.
+   *
+   * @param fullVersion
+   *          version string.
+   * @param position
+   *          position of the version component to get, starting at 1. E.g., for
+   *          an X.x.xxx string, 1 will return the major version and 2 will
+   *          return the minor version.
+   * @return version part, or -1 if version string was malformed.
+   */
+  static int getVersionPart(String fullVersion, int position) {
+    int version = -1;
+    try {
+      String[] tokens = fullVersion.split("[\\.-]"); //$NON-NLS-1$
+
+      if (tokens != null && tokens.length > 1 && tokens[position] != null) {
+        version = Integer.parseInt(tokens[position]);
+      }
+    } catch (Exception e) {
+      version = -1;
+    }
+    return version;
+  }
 }

Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java Fri Dec 13 10:56:38 2013
@@ -758,7 +758,7 @@ class MetaStoreDirectSql {
         }
       }
       if (joins.get(partColIndex) == null) {
-        joins.set(partColIndex, "inner join \"PARTITION_KEY_VALS\" as \"FILTER" + partColIndex
+        joins.set(partColIndex, "inner join \"PARTITION_KEY_VALS\" \"FILTER" + partColIndex
             + "\" on \"FILTER"  + partColIndex + "\".\"PART_ID\" = \"PARTITIONS\".\"PART_ID\""
             + " and \"FILTER" + partColIndex + "\".\"INTEGER_IDX\" = " + partColIndex);
       }

Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Fri Dec 13 10:56:38 2013
@@ -310,42 +310,6 @@ public class MetaStoreUtils {
   /**
    * getDeserializer
    *
-   * Get the Deserializer for a table given its name and properties.
-   *
-   * @param conf
-   *          hadoop config
-   * @param schema
-   *          the properties to use to instantiate the deserializer
-   * @return
-   *   Returns instantiated deserializer by looking up class name of deserializer stored in passed
-   *   in properties. Also, initializes the deserializer with schema stored in passed in properties.
-   * @exception MetaException
-   *              if any problems instantiating the Deserializer
-   *
-   *              todo - this should move somewhere into serde.jar
-   *
-   */
-  static public Deserializer getDeserializer(Configuration conf,
-      Properties schema) throws MetaException {
-    try {
-      String clazzName = schema.getProperty(serdeConstants.SERIALIZATION_LIB);
-      if(clazzName == null) {
-        throw new IllegalStateException("Property " + serdeConstants.SERIALIZATION_LIB + " cannot be null");
-      }
-      Deserializer deserializer = ReflectionUtils.newInstance(conf.getClassByName(clazzName)
-          .asSubclass(Deserializer.class), conf);
-      deserializer.initialize(conf, schema);
-      return deserializer;
-    } catch (Exception e) {
-      LOG.error("error in initSerDe: " + e.getClass().getName() + " "
-          + e.getMessage(), e);
-      throw new MetaException(e.getClass().getName() + " " + e.getMessage());
-    }
-  }
-
-  /**
-   * getDeserializer
-   *
    * Get the Deserializer for a table.
    *
    * @param conf

Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Fri Dec 13 10:56:38 2013
@@ -4943,38 +4943,39 @@ public class ObjectStore implements RawS
     ColumnStatisticsObj statsObj = new ColumnStatisticsObj();
     statsObj.setColType(mStatsObj.getColType());
     statsObj.setColName(mStatsObj.getColName());
-    String colType = mStatsObj.getColType();
+    String colType = mStatsObj.getColType().toLowerCase();
     ColumnStatisticsData colStatsData = new ColumnStatisticsData();
 
-    if (colType.equalsIgnoreCase("boolean")) {
+    if (colType.equals("boolean")) {
       BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
       boolStats.setNumFalses(mStatsObj.getNumFalses());
       boolStats.setNumTrues(mStatsObj.getNumTrues());
       boolStats.setNumNulls(mStatsObj.getNumNulls());
       colStatsData.setBooleanStats(boolStats);
-    } else if (colType.equalsIgnoreCase("string")) {
+    } else if (colType.equals("string") ||
+        colType.startsWith("varchar") || colType.startsWith("char")) {
       StringColumnStatsData stringStats = new StringColumnStatsData();
       stringStats.setNumNulls(mStatsObj.getNumNulls());
       stringStats.setAvgColLen(mStatsObj.getAvgColLen());
       stringStats.setMaxColLen(mStatsObj.getMaxColLen());
       stringStats.setNumDVs(mStatsObj.getNumDVs());
       colStatsData.setStringStats(stringStats);
-    } else if (colType.equalsIgnoreCase("binary")) {
+    } else if (colType.equals("binary")) {
       BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
       binaryStats.setNumNulls(mStatsObj.getNumNulls());
       binaryStats.setAvgColLen(mStatsObj.getAvgColLen());
       binaryStats.setMaxColLen(mStatsObj.getMaxColLen());
       colStatsData.setBinaryStats(binaryStats);
-    } else if (colType.equalsIgnoreCase("bigint") || colType.equalsIgnoreCase("int") ||
-        colType.equalsIgnoreCase("smallint") || colType.equalsIgnoreCase("tinyint") ||
-        colType.equalsIgnoreCase("timestamp")) {
+    } else if (colType.equals("bigint") || colType.equals("int") ||
+        colType.equals("smallint") || colType.equals("tinyint") ||
+        colType.equals("timestamp")) {
       LongColumnStatsData longStats = new LongColumnStatsData();
       longStats.setNumNulls(mStatsObj.getNumNulls());
       longStats.setHighValue(mStatsObj.getLongHighValue());
       longStats.setLowValue(mStatsObj.getLongLowValue());
       longStats.setNumDVs(mStatsObj.getNumDVs());
       colStatsData.setLongStats(longStats);
-   } else if (colType.equalsIgnoreCase("double") || colType.equalsIgnoreCase("float")) {
+   } else if (colType.equals("double") || colType.equals("float")) {
      DoubleColumnStatsData doubleStats = new DoubleColumnStatsData();
      doubleStats.setNumNulls(mStatsObj.getNumNulls());
      doubleStats.setHighValue(mStatsObj.getDoubleHighValue());
@@ -5119,38 +5120,39 @@ public class ObjectStore implements RawS
     ColumnStatisticsObj statsObj = new ColumnStatisticsObj();
     statsObj.setColType(mStatsObj.getColType());
     statsObj.setColName(mStatsObj.getColName());
-    String colType = mStatsObj.getColType();
+    String colType = mStatsObj.getColType().toLowerCase();
     ColumnStatisticsData colStatsData = new ColumnStatisticsData();
 
-    if (colType.equalsIgnoreCase("boolean")) {
+    if (colType.equals("boolean")) {
       BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
       boolStats.setNumFalses(mStatsObj.getNumFalses());
       boolStats.setNumTrues(mStatsObj.getNumTrues());
       boolStats.setNumNulls(mStatsObj.getNumNulls());
       colStatsData.setBooleanStats(boolStats);
-    } else if (colType.equalsIgnoreCase("string")) {
+    } else if (colType.equals("string") ||
+        colType.startsWith("varchar") || colType.startsWith("char")) {
       StringColumnStatsData stringStats = new StringColumnStatsData();
       stringStats.setNumNulls(mStatsObj.getNumNulls());
       stringStats.setAvgColLen(mStatsObj.getAvgColLen());
       stringStats.setMaxColLen(mStatsObj.getMaxColLen());
       stringStats.setNumDVs(mStatsObj.getNumDVs());
       colStatsData.setStringStats(stringStats);
-    } else if (colType.equalsIgnoreCase("binary")) {
+    } else if (colType.equals("binary")) {
       BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
       binaryStats.setNumNulls(mStatsObj.getNumNulls());
       binaryStats.setAvgColLen(mStatsObj.getAvgColLen());
       binaryStats.setMaxColLen(mStatsObj.getMaxColLen());
       colStatsData.setBinaryStats(binaryStats);
-    } else if (colType.equalsIgnoreCase("tinyint") || colType.equalsIgnoreCase("smallint") ||
-        colType.equalsIgnoreCase("int") || colType.equalsIgnoreCase("bigint") ||
-        colType.equalsIgnoreCase("timestamp")) {
+    } else if (colType.equals("tinyint") || colType.equals("smallint") ||
+        colType.equals("int") || colType.equals("bigint") ||
+        colType.equals("timestamp")) {
       LongColumnStatsData longStats = new LongColumnStatsData();
       longStats.setNumNulls(mStatsObj.getNumNulls());
       longStats.setHighValue(mStatsObj.getLongHighValue());
       longStats.setLowValue(mStatsObj.getLongLowValue());
       longStats.setNumDVs(mStatsObj.getNumDVs());
       colStatsData.setLongStats(longStats);
-   } else if (colType.equalsIgnoreCase("double") || colType.equalsIgnoreCase("float")) {
+   } else if (colType.equals("double") || colType.equals("float")) {
      DoubleColumnStatsData doubleStats = new DoubleColumnStatsData();
      doubleStats.setNumNulls(mStatsObj.getNumNulls());
      doubleStats.setHighValue(mStatsObj.getDoubleHighValue());

Modified: hive/branches/tez/packaging/src/main/assembly/bin.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/packaging/src/main/assembly/bin.xml?rev=1550684&r1=1550683&r2=1550684&view=diff
==============================================================================
--- hive/branches/tez/packaging/src/main/assembly/bin.xml (original)
+++ hive/branches/tez/packaging/src/main/assembly/bin.xml Fri Dec 13 10:56:38 2013
@@ -39,8 +39,6 @@
       <useStrictFiltering>true</useStrictFiltering>
       <useTransitiveFiltering>true</useTransitiveFiltering>
       <excludes>
-        <exclude>org.apache.hadoop:*</exclude>
-        <exclude>org.apache.hbase:*</exclude>
         <exclude>org.apache.hive.hcatalog:*</exclude>
       </excludes>
     </dependencySet>



Mime
View raw message