hive-commits mailing list archives

From: ser...@apache.org
Subject: svn commit: r1622813 - in /hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql: optimizer/optiq/translator/ parse/ udf/generic/
Date: Fri, 05 Sep 2014 22:24:44 GMT
Author: sershe
Date: Fri Sep  5 22:24:43 2014
New Revision: 1622813

URL: http://svn.apache.org/r1622813
Log:
HIVE-8003 : CBO: Handle Literal casting, Restrict CBO to select queries, Translate Strings, Optiq Log (Laljo John Pullokkaran, reviewed by Sergey Shelukhin)

Modified:
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java?rev=1622813&r1=1622812&r2=1622813&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java Fri Sep  5 22:24:43 2014
@@ -1,5 +1,10 @@
 package org.apache.hadoop.hive.ql.optimizer.optiq.translator;
 
+import java.sql.Date;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+
 import org.apache.hadoop.hive.ql.optimizer.optiq.RelOptHiveTable;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -41,30 +46,26 @@ class ASTBuilder {
     return b.node();
   }
 
-  static ASTNode join(ASTNode left, ASTNode right, JoinRelType joinType,
-      ASTNode cond, boolean semiJoin) {
+  static ASTNode join(ASTNode left, ASTNode right, JoinRelType joinType, ASTNode cond,
+      boolean semiJoin) {
     ASTBuilder b = null;
 
     switch (joinType) {
     case INNER:
       if (semiJoin) {
-        b = ASTBuilder.construct(HiveParser.TOK_LEFTSEMIJOIN,
-            "TOK_LEFTSEMIJOIN");
+        b = ASTBuilder.construct(HiveParser.TOK_LEFTSEMIJOIN, "TOK_LEFTSEMIJOIN");
       } else {
         b = ASTBuilder.construct(HiveParser.TOK_JOIN, "TOK_JOIN");
       }
       break;
     case LEFT:
-      b = ASTBuilder.construct(HiveParser.TOK_LEFTOUTERJOIN,
-          "TOK_LEFTOUTERJOIN");
+      b = ASTBuilder.construct(HiveParser.TOK_LEFTOUTERJOIN, "TOK_LEFTOUTERJOIN");
       break;
     case RIGHT:
-      b = ASTBuilder.construct(HiveParser.TOK_RIGHTOUTERJOIN,
-          "TOK_RIGHTOUTERJOIN");
+      b = ASTBuilder.construct(HiveParser.TOK_RIGHTOUTERJOIN, "TOK_RIGHTOUTERJOIN");
       break;
     case FULL:
-      b = ASTBuilder.construct(HiveParser.TOK_FULLOUTERJOIN,
-          "TOK_FULLOUTERJOIN");
+      b = ASTBuilder.construct(HiveParser.TOK_FULLOUTERJOIN, "TOK_FULLOUTERJOIN");
       break;
     }
 
@@ -87,9 +88,8 @@ class ASTBuilder {
   }
 
   static ASTNode unqualifiedName(String colName) {
-    ASTBuilder b = ASTBuilder
-.construct(HiveParser.TOK_TABLE_OR_COL,
-        "TOK_TABLE_OR_COL").add(HiveParser.Identifier, colName);
+    ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL").add(
+        HiveParser.Identifier, colName);
     return b.node();
   }
 
@@ -108,39 +108,61 @@ class ASTBuilder {
 
   static ASTNode selectExpr(ASTNode expr, String alias) {
     return ASTBuilder.construct(HiveParser.TOK_SELEXPR, "TOK_SELEXPR").add(expr)
-      .add(HiveParser.Identifier, alias).node();
+        .add(HiveParser.Identifier, alias).node();
   }
 
   static ASTNode literal(RexLiteral literal) {
-    Object val = literal.getValue3();
+    Object val = null;
     int type = 0;
     SqlTypeName sqlType = literal.getType().getSqlTypeName();
 
     switch (sqlType) {
     case TINYINT:
+      val = literal.getValue3();
       type = HiveParser.TinyintLiteral;
       break;
     case SMALLINT:
+      val = literal.getValue3();
       type = HiveParser.SmallintLiteral;
       break;
     case INTEGER:
     case BIGINT:
+      val = literal.getValue3();
       type = HiveParser.BigintLiteral;
       break;
     case DECIMAL:
     case FLOAT:
     case DOUBLE:
     case REAL:
+      val = literal.getValue3();
       type = HiveParser.Number;
       break;
     case VARCHAR:
     case CHAR:
+      val = literal.getValue3();
       type = HiveParser.StringLiteral;
       val = "'" + String.valueOf(val) + "'";
       break;
     case BOOLEAN:
-      type = ((Boolean) val).booleanValue() ? HiveParser.KW_TRUE
-          : HiveParser.KW_FALSE;
+      val = literal.getValue3();
+      type = ((Boolean) val).booleanValue() ? HiveParser.KW_TRUE : HiveParser.KW_FALSE;
+      break;
+    case DATE: {
+      val = literal.getValue();
+      type = HiveParser.TOK_DATELITERAL;
+      DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
+      val = df.format(((Calendar) val).getTime());
+      val = "'" + val + "'";
+    }
+      break;
+    case TIME:
+    case TIMESTAMP: {
+      val = literal.getValue();
+      type = HiveParser.TOK_TIMESTAMP;
+      DateFormat df = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
+      val = df.format(((Calendar) val).getTime());
+      val = "'" + val + "'";
+    }
       break;
     case NULL:
       type = HiveParser.TOK_NULL;

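For reference, a minimal standalone sketch (plain JDK classes, hypothetical class and method names) of the formatting the new DATE and TIME/TIMESTAMP branches above perform: the Calendar carried by the RexLiteral is rendered with the same SimpleDateFormat patterns the patch uses and wrapped in single quotes before being attached to the AST as a literal token.

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.GregorianCalendar;

class DateLiteralFormatSketch {
  // Mirrors the DATE branch of ASTBuilder.literal(): Calendar -> 'yyyy-MM-dd'.
  static String formatDateLiteral(Calendar cal) {
    return "'" + new SimpleDateFormat("yyyy-MM-dd").format(cal.getTime()) + "'";
  }

  // Mirrors the TIME/TIMESTAMP branch, using the same pattern string as the patch.
  static String formatTimestampLiteral(Calendar cal) {
    return "'" + new SimpleDateFormat("yyyy-MM-dd hh:mm:ss").format(cal.getTime()) + "'";
  }

  public static void main(String[] args) {
    Calendar cal = new GregorianCalendar(2014, Calendar.SEPTEMBER, 5);
    System.out.println(formatDateLiteral(cal));       // '2014-09-05'
    System.out.println(formatTimestampLiteral(cal));  // midnight renders as '2014-09-05 12:00:00' with hh
  }
}
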
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java?rev=1622813&r1=1622812&r2=1622813&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java Fri Sep  5 22:24:43 2014
@@ -2,11 +2,18 @@ package org.apache.hadoop.hive.ql.optimi
 
 import java.math.BigDecimal;
 import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
 import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
@@ -20,12 +27,15 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseNumeric;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToBinary;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToChar;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToVarchar;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -81,8 +91,9 @@ public class RexNodeConverter {
 
   public RexNode convert(ExprNodeDesc expr) throws SemanticException {
     if (expr instanceof ExprNodeNullDesc) {
-      return m_cluster.getRexBuilder().makeNullLiteral(TypeConverter.convert(
-        expr.getTypeInfo(), m_cluster.getRexBuilder().getTypeFactory()).getSqlTypeName());
+      return m_cluster.getRexBuilder().makeNullLiteral(
+          TypeConverter.convert(expr.getTypeInfo(), m_cluster.getRexBuilder().getTypeFactory())
+              .getSqlTypeName());
     }
     if (expr instanceof ExprNodeGenericFuncDesc) {
       return convert((ExprNodeGenericFuncDesc) expr);
@@ -182,8 +193,9 @@ public class RexNodeConverter {
       GenericUDF udf = func.getGenericUDF();
       if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar)
           || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate)
-          || (udf instanceof GenericUDFToBinary) || (udf instanceof GenericUDFToUnixTimeStamp)
-          || castExprUsingUDFBridge(udf)) {
+          || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) {
+        // || (udf instanceof GenericUDFToUnixTimeStamp) || (udf instanceof
+        // GenericUDFTimestamp) || castExprUsingUDFBridge(udf)) {
         castExpr = m_cluster.getRexBuilder().makeCast(
             TypeConverter.convert(func.getTypeInfo(), m_cluster.getTypeFactory()),
             childRexNodeLst.get(0));
@@ -233,7 +245,9 @@ public class RexNodeConverter {
 
     PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
 
-    Object value = literal.getValue();
+    ConstantObjectInspector coi = literal.getWritableObjectInspector();
+    Object value = ObjectInspectorUtils.copyToStandardJavaObject(literal
+        .getWritableObjectInspector().getWritableConstantValue(), coi);
 
     RexNode optiqLiteral = null;
     // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
@@ -255,6 +269,10 @@ public class RexNodeConverter {
       break;
     // TODO: is Decimal an exact numeric or approximate numeric?
     case DECIMAL:
+      if (value instanceof HiveDecimal)
+        value = ((HiveDecimal) value).bigDecimalValue();
+      if (value instanceof Decimal128)
+        value = ((Decimal128) value).toBigDecimal();
       optiqLiteral = rexBuilder.makeExactLiteral((BigDecimal) value);
       break;
     case FLOAT:
@@ -263,11 +281,28 @@ public class RexNodeConverter {
     case DOUBLE:
       optiqLiteral = rexBuilder.makeApproxLiteral(new BigDecimal((Double) value), optiqDataType);
       break;
+    case CHAR:
+      if (value instanceof HiveChar)
+        value = ((HiveChar) value).getValue();
+      optiqLiteral = rexBuilder.makeLiteral((String) value);
+      break;
+    case VARCHAR:
+      if (value instanceof HiveVarchar)
+        value = ((HiveVarchar) value).getValue();
+      optiqLiteral = rexBuilder.makeLiteral((String) value);
+      break;
     case STRING:
       optiqLiteral = rexBuilder.makeLiteral((String) value);
       break;
     case DATE:
+      Calendar cal = new GregorianCalendar();
+      cal.setTime((Date) value);
+      optiqLiteral = rexBuilder.makeDateLiteral(cal);
+      break;
     case TIMESTAMP:
+      optiqLiteral = rexBuilder.makeTimestampLiteral((Calendar) value,
+          RelDataType.PRECISION_NOT_SPECIFIED);
+      break;
     case BINARY:
     case VOID:
     case UNKNOWN:

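As a rough companion to the new constant handling above, a hedged sketch (hypothetical helper name; requires the Hive common type classes on the classpath) of the unwrapping the patch performs before values reach the Optiq RexBuilder: wrapper types such as HiveDecimal, Decimal128, HiveChar and HiveVarchar are reduced to the plain BigDecimal/String values that makeExactLiteral and makeLiteral expect, and a java.util.Date constant is copied into a Calendar for makeDateLiteral.

import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

import org.apache.hadoop.hive.common.type.Decimal128;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;

class ConstantUnwrapSketch {
  // Hypothetical helper mirroring the DECIMAL/CHAR/VARCHAR/DATE branches added above.
  static Object unwrap(Object value) {
    if (value instanceof HiveDecimal) {
      return ((HiveDecimal) value).bigDecimalValue();   // BigDecimal for makeExactLiteral
    }
    if (value instanceof Decimal128) {
      return ((Decimal128) value).toBigDecimal();       // BigDecimal for makeExactLiteral
    }
    if (value instanceof HiveChar) {
      return ((HiveChar) value).getValue();             // String for makeLiteral
    }
    if (value instanceof HiveVarchar) {
      return ((HiveVarchar) value).getValue();          // String for makeLiteral
    }
    if (value instanceof Date) {
      Calendar cal = new GregorianCalendar();
      cal.setTime((Date) value);                        // Calendar for makeDateLiteral
      return cal;
    }
    return value;                                       // already a plain Java value
  }
}
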
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java?rev=1622813&r1=1622812&r2=1622813&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java Fri Sep  5 22:24:43 2014
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.eigenbase.relopt.RelOptCluster;
 import org.eigenbase.reltype.RelDataType;
 import org.eigenbase.reltype.RelDataTypeFactory;
+import org.eigenbase.reltype.RelDataTypeFactoryImpl.JavaType;
 import org.eigenbase.reltype.RelDataTypeField;
 import org.eigenbase.rex.RexBuilder;
 import org.eigenbase.sql.type.SqlTypeName;
@@ -51,8 +52,7 @@ public class TypeConverter {
   };
 
   /*********************** Convert Hive Types To Optiq Types ***********************/
-  public static RelDataType getType(RelOptCluster cluster,
-      List<ColumnInfo> cInfoLst) {
+  public static RelDataType getType(RelOptCluster cluster, List<ColumnInfo> cInfoLst) {
     RexBuilder rexBuilder = cluster.getRexBuilder();
     RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
     List<RelDataType> fieldTypes = new LinkedList<RelDataType>();
@@ -65,8 +65,7 @@ public class TypeConverter {
     return dtFactory.createStructType(fieldTypes, fieldNames);
   }
 
-  public static RelDataType getType(RelOptCluster cluster, RowResolver rr,
-      List<String> neededCols) {
+  public static RelDataType getType(RelOptCluster cluster, RowResolver rr, List<String> neededCols) {
     RexBuilder rexBuilder = cluster.getRexBuilder();
     RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
     RowSchema rs = rr.getRowSchema();
@@ -105,8 +104,7 @@ public class TypeConverter {
     return convertedType;
   }
 
-  public static RelDataType convert(PrimitiveTypeInfo type,
-      RelDataTypeFactory dtFactory) {
+  public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
     RelDataType convertedType = null;
 
     switch (type.getPrimitiveCategory()) {
@@ -135,9 +133,7 @@ public class TypeConverter {
       convertedType = dtFactory.createSqlType(SqlTypeName.DOUBLE);
       break;
     case STRING:
-      //TODO: shall we pass -1 for len to distinguish between STRING & VARCHAR on way out
-      convertedType = dtFactory.createSqlType(SqlTypeName.VARCHAR,
-          RelDataType.PRECISION_NOT_SPECIFIED);
+      convertedType = dtFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE);
       break;
     case DATE:
       convertedType = dtFactory.createSqlType(SqlTypeName.DATE);
@@ -149,8 +145,9 @@ public class TypeConverter {
       convertedType = dtFactory.createSqlType(SqlTypeName.BINARY);
       break;
     case DECIMAL:
-      DecimalTypeInfo dtInf = (DecimalTypeInfo)type;
-      convertedType = dtFactory.createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
+      DecimalTypeInfo dtInf = (DecimalTypeInfo) type;
+      convertedType = dtFactory
+          .createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
       break;
     case VARCHAR:
       convertedType = dtFactory.createSqlType(SqlTypeName.VARCHAR,
@@ -172,45 +169,39 @@ public class TypeConverter {
     return convertedType;
   }
 
-  public static RelDataType convert(ListTypeInfo lstType,
-      RelDataTypeFactory dtFactory) {
+  public static RelDataType convert(ListTypeInfo lstType, RelDataTypeFactory dtFactory) {
     RelDataType elemType = convert(lstType.getListElementTypeInfo(), dtFactory);
     return dtFactory.createArrayType(elemType, -1);
   }
 
-  public static RelDataType convert(MapTypeInfo mapType,
-      RelDataTypeFactory dtFactory) {
+  public static RelDataType convert(MapTypeInfo mapType, RelDataTypeFactory dtFactory) {
     RelDataType keyType = convert(mapType.getMapKeyTypeInfo(), dtFactory);
     RelDataType valueType = convert(mapType.getMapValueTypeInfo(), dtFactory);
     return dtFactory.createMapType(keyType, valueType);
   }
 
-  public static RelDataType convert(StructTypeInfo structType,
-      final RelDataTypeFactory dtFactory) {
-    List<RelDataType> fTypes = Lists.transform(
-        structType.getAllStructFieldTypeInfos(),
+  public static RelDataType convert(StructTypeInfo structType, final RelDataTypeFactory dtFactory) {
+    List<RelDataType> fTypes = Lists.transform(structType.getAllStructFieldTypeInfos(),
         new Function<TypeInfo, RelDataType>() {
           @Override
           public RelDataType apply(TypeInfo tI) {
             return convert(tI, dtFactory);
           }
         });
-    return dtFactory.createStructType(fTypes,
-        structType.getAllStructFieldNames());
+    return dtFactory.createStructType(fTypes, structType.getAllStructFieldNames());
   }
 
-  public static RelDataType convert(UnionTypeInfo unionType,
-      RelDataTypeFactory dtFactory) {
+  public static RelDataType convert(UnionTypeInfo unionType, RelDataTypeFactory dtFactory) {
     // @todo what do we about unions?
     throw new UnsupportedOperationException();
   }
 
   public static TypeInfo convert(RelDataType rType) {
-    if ( rType.isStruct() ) {
+    if (rType.isStruct()) {
       return convertStructType(rType);
-    } else if ( rType.getComponentType() != null ) {
+    } else if (rType.getComponentType() != null) {
       return convertListType(rType);
-    } else if ( rType.getKeyType() != null ) {
+    } else if (rType.getKeyType() != null) {
       return convertMapType(rType);
     } else {
       return convertPrimtiveType(rType);
@@ -218,16 +209,14 @@ public class TypeConverter {
   }
 
   public static TypeInfo convertStructType(RelDataType rType) {
-    List<TypeInfo> fTypes = Lists.transform(
-        rType.getFieldList(),
+    List<TypeInfo> fTypes = Lists.transform(rType.getFieldList(),
         new Function<RelDataTypeField, TypeInfo>() {
           @Override
           public TypeInfo apply(RelDataTypeField f) {
             return convert(f.getType());
           }
         });
-    List<String> fNames = Lists.transform(
-        rType.getFieldList(),
+    List<String> fNames = Lists.transform(rType.getFieldList(),
         new Function<RelDataTypeField, String>() {
           @Override
           public String apply(RelDataTypeField f) {
@@ -247,7 +236,7 @@ public class TypeConverter {
   }
 
   public static TypeInfo convertPrimtiveType(RelDataType rType) {
-    switch(rType.getSqlTypeName()) {
+    switch (rType.getSqlTypeName()) {
     case BOOLEAN:
       return TypeInfoFactory.booleanTypeInfo;
     case TINYINT:
@@ -271,14 +260,14 @@ public class TypeConverter {
     case DECIMAL:
       return TypeInfoFactory.getDecimalTypeInfo(rType.getPrecision(), rType.getScale());
     case VARCHAR:
-      if (rType.getPrecision() == RelDataType.PRECISION_NOT_SPECIFIED)
+      if (rType.getPrecision() == Integer.MAX_VALUE)
         return TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
       else
         return TypeInfoFactory.getVarcharTypeInfo(rType.getPrecision());
     case CHAR:
       return TypeInfoFactory.getCharTypeInfo(rType.getPrecision());
     case OTHER:
-      default:
+    default:
       return TypeInfoFactory.voidTypeInfo;
     }
 
@@ -290,19 +279,21 @@ public class TypeConverter {
 
     switch (optiqType.getSqlTypeName()) {
     case CHAR: {
-      ht = new HiveToken(HiveParser.TOK_CHAR, "TOK_CHAR",
-          String.valueOf(optiqType.getPrecision()));
+      ht = new HiveToken(HiveParser.TOK_CHAR, "TOK_CHAR", String.valueOf(optiqType.getPrecision()));
     }
       break;
     case VARCHAR: {
-      ht = new HiveToken(HiveParser.TOK_VARCHAR, "TOK_VARCHAR",
-          String.valueOf(optiqType.getPrecision()));
+      if (optiqType.getPrecision() == Integer.MAX_VALUE)
+        ht = new HiveToken(HiveParser.TOK_STRING, "TOK_STRING", String.valueOf(optiqType
+            .getPrecision()));
+      else
+        ht = new HiveToken(HiveParser.TOK_VARCHAR, "TOK_VARCHAR", String.valueOf(optiqType
+            .getPrecision()));
     }
       break;
     case DECIMAL: {
-      ht = new HiveToken(HiveParser.TOK_DECIMAL, "TOK_DECIMAL",
-          String.valueOf(optiqType.getPrecision()), String.valueOf(optiqType
-              .getScale()));
+      ht = new HiveToken(HiveParser.TOK_DECIMAL, "TOK_DECIMAL", String.valueOf(optiqType
+          .getPrecision()), String.valueOf(optiqType.getScale()));
     }
       break;
     default:

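A small sketch (plain JDK, hypothetical names) of the round-trip convention this file now adopts for Hive's unbounded STRING type: on the way into Optiq, STRING is mapped to VARCHAR with Integer.MAX_VALUE as its precision, and on the way back any VARCHAR carrying that sentinel precision is treated as STRING rather than varchar(n).

class StringVarcharConventionSketch {
  // Sentinel precision standing in for Hive STRING while inside the Optiq type system.
  static final int STRING_PRECISION = Integer.MAX_VALUE;

  // Hypothetical helper echoing convertPrimtiveType()/hiveToken(): decide whether a
  // VARCHAR coming back from Optiq should be rendered as STRING or VARCHAR(n).
  static String hiveTypeNameForVarchar(int precision) {
    return precision == STRING_PRECISION ? "string" : "varchar(" + precision + ")";
  }

  public static void main(String[] args) {
    System.out.println(hiveTypeNameForVarchar(Integer.MAX_VALUE)); // string
    System.out.println(hiveTypeNameForVarchar(20));                // varchar(20)
  }
}
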
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1622813&r1=1622812&r2=1622813&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Sep  5 22:24:43 2014
@@ -11847,14 +11847,13 @@ public class SemanticAnalyzer extends Ba
   // TODO: Extend QP to indicate LV, Multi Insert, Cubes, Rollups...
   private boolean canHandleQuery() {
     boolean runOptiqPlanner = false;
-
-    if (((queryProperties.getJoinCount() > 1) || conf.getBoolVar(ConfVars.HIVE_IN_TEST))
-        && !queryProperties.hasClusterBy()
-        && !queryProperties.hasDistributeBy()
-        && !queryProperties.hasSortBy()
-        && !queryProperties.hasPTF()
-        && !queryProperties.usesScript()
-        && !queryProperties.hasMultiDestQuery()) {
+    // Assumption: If top level QB is query then everything below it must also
+    // be Query
+    if (qb.getIsQuery()
+        && ((queryProperties.getJoinCount() > 1) || conf.getBoolVar(ConfVars.HIVE_IN_TEST))
+        && !queryProperties.hasClusterBy() && !queryProperties.hasDistributeBy()
+        && !queryProperties.hasSortBy() && !queryProperties.hasPTF()
+        && !queryProperties.usesScript() && !queryProperties.hasMultiDestQuery()) {
       runOptiqPlanner = true;
     } else {
       LOG.info("Can not invoke CBO; query contains operators not supported for CBO.");
@@ -11944,7 +11943,7 @@ public class SemanticAnalyzer extends Ba
 
       optiqOptimizedPlan = hepPlanner.findBestExp();
 
-      if (LOG.isDebugEnabled()) {
+      if (LOG.isDebugEnabled() && !conf.getBoolVar(ConfVars.HIVE_IN_TEST)) {
         LOG.debug("CBO Planning details:\n");
         LOG.debug("Original Plan:\n");
         LOG.debug(RelOptUtil.toString(optiqGenPlan, SqlExplainLevel.ALL_ATTRIBUTES));

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java?rev=1622813&r1=1622812&r2=1622813&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java Fri Sep  5 22:24:43 2014
@@ -22,6 +22,7 @@ import java.util.TimeZone;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -33,7 +34,9 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
-
+@Description(name = "from_utc_timestamp",
+             value = "from_utc_timestamp(timestamp, string timezone) - "
+                     + "Assumes given timestamp ist UTC and converts to given timezone (as of Hive 0.8.0)")
 public class GenericUDFFromUtcTimestamp extends GenericUDF {
 
   static final Log LOG = LogFactory.getLog(GenericUDFFromUtcTimestamp.class);

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java?rev=1622813&r1=1622812&r2=1622813&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java Fri Sep  5 22:24:43 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
@@ -39,6 +40,8 @@ import org.apache.hadoop.hive.serde2.obj
  * Creates a TimestampWritable object using PrimitiveObjectInspectorConverter
  *
  */
+@Description(name = "timestamp",
+value = "cast(date as timestamp) - Returns timestamp")
 @VectorizedExpressions({CastLongToTimestampViaLongToLong.class,
   CastDoubleToTimestampViaDoubleToLong.class, CastDecimalToTimestamp.class})
 public class GenericUDFTimestamp extends GenericUDF {

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java?rev=1622813&r1=1622812&r2=1622813&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java Fri Sep  5 22:24:43 2014
@@ -17,7 +17,11 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 
+@Description(name = "to_utc_timestamp",
+             value = "to_utc_timestamp(timestamp, string timezone) - "
+                     + "Assumes given timestamp is in given timezone and converts to UTC (as of Hive 0.8.0)")
 public class GenericUDFToUtcTimestamp extends
     GenericUDFFromUtcTimestamp {
 


