hive-commits mailing list archives

From: xu...@apache.org
Subject: svn commit: r1669775 [18/35] - in /hive/branches/spark: ./ ant/src/org/apache/hadoop/hive/ant/ beeline/src/java/org/apache/hive/beeline/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/...
Date: Sat, 28 Mar 2015 14:03:49 GMT
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java Sat Mar 28 14:03:43 2015
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.ql.exec.Ut
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.session.SessionState;
 
 /**
  * ColumnStatsSemanticAnalyzer.
@@ -94,13 +95,9 @@ public class ColumnStatsSemanticAnalyzer
 
   private Table getTable(ASTNode tree) throws SemanticException {
     String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
-    try {
-      return db.getTable(tableName);
-    } catch (InvalidTableException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e);
-    } catch (HiveException e) {
-      throw new SemanticException(e.getMessage(), e);
-    }
+    String currentDb = SessionState.get().getCurrentDatabase();
+    String [] names = Utilities.getDbTableName(currentDb, tableName);
+    return getTable(names[0], names[1], true);
   }
 
   private Map<String,String> getPartKeyValuePairsFromAST(ASTNode tree) {
@@ -315,6 +312,8 @@ public class ColumnStatsSemanticAnalyzer
       }
     }
     rewrittenQueryBuilder.append(" from ");
+    rewrittenQueryBuilder.append(tbl.getDbName());
+    rewrittenQueryBuilder.append(".");
     rewrittenQueryBuilder.append(tbl.getTableName());
     isRewritten = true;
 
@@ -428,7 +427,7 @@ public class ColumnStatsSemanticAnalyzer
       qb.setAnalyzeRewrite(true);
       qbp = qb.getParseInfo();
       analyzeRewrite = new AnalyzeRewriteContext();
-      analyzeRewrite.setTableName(tbl.getTableName());
+      analyzeRewrite.setTableName(tbl.getDbName() + "." + tbl.getTableName());
       analyzeRewrite.setTblLvl(isTableLevel);
       analyzeRewrite.setColName(colNames);
       analyzeRewrite.setColType(colType);
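
For context on the new lookup path: Utilities.getDbTableName() resolves an optionally db-qualified table name against the session's current database. A minimal standalone sketch of that splitting behavior follows; the helper below is a hypothetical stand-in for illustration only, not the Hive utility itself.

public class DbTableNameSketch {
  // Hypothetical stand-in for Utilities.getDbTableName(): splits an optionally
  // qualified "db.table" name into {db, table}, defaulting to the current database.
  static String[] getDbTableName(String currentDb, String name) {
    String[] parts = name.split("\\.");
    if (parts.length == 1) {
      return new String[] { currentDb, name };
    }
    return parts; // assumes at most one dot, i.e. "db.table"
  }

  public static void main(String[] args) {
    // Unqualified names resolve against the session's current database ...
    System.out.println(String.join(".", getDbTableName("sales", "t1")));       // sales.t1
    // ... while qualified names keep their explicit database.
    System.out.println(String.join(".", getDbTableName("sales", "other.t1"))); // other.t1
  }
}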

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Sat Mar 28 14:03:43 2015
@@ -509,7 +509,7 @@ public class DDLSemanticAnalyzer extends
     if (colType == null)
       throw new SemanticException("column type not found");
 
-    ColumnStatsDesc cStatsDesc = new ColumnStatsDesc(tbl.getTableName(),
+    ColumnStatsDesc cStatsDesc = new ColumnStatsDesc(tbl.getDbName() + "." + tbl.getTableName(),
         Arrays.asList(colName), Arrays.asList(colType), partSpec == null);
     ColumnStatsUpdateTask cStatsUpdateTask = (ColumnStatsUpdateTask) TaskFactory
         .get(new ColumnStatsUpdateWork(cStatsDesc, partName, mapProp), conf);
@@ -1093,7 +1093,7 @@ public class DDLSemanticAnalyzer extends
       }
     }
 
-    storageFormat.fillDefaultStorageFormat();
+    storageFormat.fillDefaultStorageFormat(false);
     if (indexTableName == null) {
       indexTableName = MetaStoreUtils.getIndexTableName(qTabName[0], qTabName[1], indexName);
       indexTableName = qTabName[0] + "." + indexTableName; // on same database with base table

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g Sat Mar 28 14:03:43 2015
@@ -35,6 +35,9 @@ k=3;
       RecognitionException e) {
     gParent.errors.add(new ParseError(gParent, e, tokenNames));
   }
+  protected boolean useSQL11ReservedKeywordsForIdentifier() {
+    return gParent.useSQL11ReservedKeywordsForIdentifier();
+  }
 }
 
 @rulecatch {
@@ -126,7 +129,7 @@ lateralView
 @init {gParent.pushMsg("lateral view", state); }
 @after {gParent.popMsg(state); }
 	:
-	KW_LATERAL KW_VIEW KW_OUTER function tableAlias (KW_AS identifier ((COMMA)=> COMMA identifier)*)?
+	(KW_LATERAL KW_VIEW KW_OUTER) => KW_LATERAL KW_VIEW KW_OUTER function tableAlias (KW_AS identifier ((COMMA)=> COMMA identifier)*)?
 	-> ^(TOK_LATERAL_VIEW_OUTER ^(TOK_SELECT ^(TOK_SELEXPR function identifier* tableAlias)))
 	|
 	KW_LATERAL KW_VIEW function tableAlias (KW_AS identifier ((COMMA)=> COMMA identifier)*)?
@@ -177,7 +180,12 @@ tableSample
 tableSource
 @init { gParent.pushMsg("table source", state); }
 @after { gParent.popMsg(state); }
-    : tabname=tableName (props=tableProperties)? (ts=tableSample)? (KW_AS? alias=Identifier)?
+    : tabname=tableName 
+    ((tableProperties) => props=tableProperties)?
+    ((tableSample) => ts=tableSample)? 
+    ((KW_AS) => (KW_AS alias=Identifier) 
+    |
+    (Identifier) => (alias=Identifier))?
     -> ^(TOK_TABREF $tabname $props? $ts? $alias?)
     ;
 
@@ -232,11 +240,11 @@ partitionedTableFunction
 @init { gParent.pushMsg("ptf clause", state); }
 @after { gParent.popMsg(state); } 
    :
-   name=Identifier
-   LPAREN KW_ON ptfsrc=partitionTableFunctionSource partitioningSpec?
-     ((Identifier LPAREN expression RPAREN ) => Identifier LPAREN expression RPAREN ( COMMA Identifier LPAREN expression RPAREN)*)? 
-   RPAREN alias=Identifier?
-   ->   ^(TOK_PTBLFUNCTION $name $alias? partitionTableFunctionSource partitioningSpec? expression*)
+   name=Identifier LPAREN KW_ON 
+   ((partitionTableFunctionSource) => (ptfsrc=partitionTableFunctionSource spec=partitioningSpec?))
+   ((Identifier LPAREN expression RPAREN ) => Identifier LPAREN expression RPAREN ( COMMA Identifier LPAREN expression RPAREN)*)?
+   ((RPAREN) => (RPAREN)) ((Identifier) => alias=Identifier)?
+   ->   ^(TOK_PTBLFUNCTION $name $alias? $ptfsrc $spec? expression*)
    ; 
 
 //----------------------- Rules for parsing whereClause -----------------------------

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g Sat Mar 28 14:03:43 2015
@@ -42,7 +42,6 @@ KW_TRUE : 'TRUE';
 KW_FALSE : 'FALSE';
 KW_ALL : 'ALL';
 KW_NONE: 'NONE';
-KW_DEFAULT : 'DEFAULT';
 KW_AND : 'AND';
 KW_OR : 'OR';
 KW_NOT : 'NOT' | '!';
@@ -123,6 +122,7 @@ KW_DOUBLE: 'DOUBLE';
 KW_DATE: 'DATE';
 KW_DATETIME: 'DATETIME';
 KW_TIMESTAMP: 'TIMESTAMP';
+KW_INTERVAL: 'INTERVAL';
 KW_DECIMAL: 'DECIMAL';
 KW_STRING: 'STRING';
 KW_CHAR: 'CHAR';
@@ -298,6 +298,12 @@ KW_AUTHORIZATION: 'AUTHORIZATION';
 KW_CONF: 'CONF';
 KW_VALUES: 'VALUES';
 KW_RELOAD: 'RELOAD';
+KW_YEAR: 'YEAR';
+KW_MONTH: 'MONTH';
+KW_DAY: 'DAY';
+KW_HOUR: 'HOUR';
+KW_MINUTE: 'MINUTE';
+KW_SECOND: 'SECOND';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Sat Mar 28 14:03:43 2015
@@ -111,6 +111,16 @@ TOK_DATELITERAL;
 TOK_DATETIME;
 TOK_TIMESTAMP;
 TOK_TIMESTAMPLITERAL;
+TOK_INTERVAL_YEAR_MONTH;
+TOK_INTERVAL_YEAR_MONTH_LITERAL;
+TOK_INTERVAL_DAY_TIME;
+TOK_INTERVAL_DAY_TIME_LITERAL;
+TOK_INTERVAL_YEAR_LITERAL;
+TOK_INTERVAL_MONTH_LITERAL;
+TOK_INTERVAL_DAY_LITERAL;
+TOK_INTERVAL_HOUR_LITERAL;
+TOK_INTERVAL_MINUTE_LITERAL;
+TOK_INTERVAL_SECOND_LITERAL;
 TOK_STRING;
 TOK_CHAR;
 TOK_VARCHAR;
@@ -354,6 +364,8 @@ package org.apache.hadoop.hive.ql.parse;
 
 import java.util.Collection;
 import java.util.HashMap;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
 }
 
 
@@ -370,7 +382,6 @@ import java.util.HashMap;
     xlateMap.put("KW_FALSE", "FALSE");
     xlateMap.put("KW_ALL", "ALL");
     xlateMap.put("KW_NONE", "NONE");
-    xlateMap.put("KW_DEFAULT", "DEFAULT");
     xlateMap.put("KW_AND", "AND");
     xlateMap.put("KW_OR", "OR");
     xlateMap.put("KW_NOT", "NOT");
@@ -620,6 +631,13 @@ import java.util.HashMap;
   private CommonTree throwSetOpException() throws RecognitionException {
     throw new FailedPredicateException(input, "orderByClause clusterByClause distributeByClause sortByClause limitClause can only be applied to the whole union.", "");
   }
+  private Configuration hiveConf;
+  public void setHiveConf(Configuration hiveConf) {
+    this.hiveConf = hiveConf;
+  }
+  protected boolean useSQL11ReservedKeywordsForIdentifier() {
+    return !HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_SUPPORT_SQL11_RESERVED_KEYWORDS);
+  }
 }
 
 @rulecatch {
@@ -712,8 +730,8 @@ ddlStatement
     | unlockDatabase
     | createRoleStatement
     | dropRoleStatement
-    | grantPrivileges
-    | revokePrivileges
+    | (grantPrivileges) => grantPrivileges
+    | (revokePrivileges) => revokePrivileges
     | showGrants
     | showRoleGrants
     | showRolePrincipals
@@ -955,8 +973,7 @@ alterStatement
 alterTableStatementSuffix
 @init { pushMsg("alter table statement", state); }
 @after { popMsg(state); }
-    : alterStatementSuffixRename[true]
-    | alterStatementSuffixUpdateStatsCol
+    : (alterStatementSuffixRename[true]) => alterStatementSuffixRename[true]
     | alterStatementSuffixDropPartitions[true]
     | alterStatementSuffixAddPartitions[true]
     | alterStatementSuffixTouch
@@ -1297,15 +1314,15 @@ fileFormat
 tabTypeExpr
 @init { pushMsg("specifying table types", state); }
 @after { popMsg(state); }
-
-   : identifier (DOT^ (KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier))*
-   ;
-
-descTabTypeExpr
-@init { pushMsg("specifying describe table types", state); }
-@after { popMsg(state); }
-
-   : identifier (DOT^ (KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier))* identifier?
+   : identifier (DOT^ 
+   (
+   (KW_ELEM_TYPE) => KW_ELEM_TYPE
+   | 
+   (KW_KEY_TYPE) => KW_KEY_TYPE
+   | 
+   (KW_VALUE_TYPE) => KW_VALUE_TYPE 
+   | identifier
+   ))* identifier?
    ;
 
 partTypeExpr
@@ -1314,21 +1331,22 @@ partTypeExpr
     :  tabTypeExpr partitionSpec? -> ^(TOK_TABTYPE tabTypeExpr partitionSpec?)
     ;
 
-descPartTypeExpr
-@init { pushMsg("specifying describe table partitions", state); }
-@after { popMsg(state); }
-    :  descTabTypeExpr partitionSpec? -> ^(TOK_TABTYPE descTabTypeExpr partitionSpec?)
-    ;
-
 descStatement
 @init { pushMsg("describe statement", state); }
 @after { popMsg(state); }
-    : (KW_DESCRIBE|KW_DESC) (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? (dbName=identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
-    | (KW_DESCRIBE|KW_DESC) (descOptions=KW_FORMATTED|descOptions=KW_EXTENDED|descOptions=KW_PRETTY)? (parttype=descPartTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions?)
-    | (KW_DESCRIBE|KW_DESC) KW_FUNCTION KW_EXTENDED? (name=descFuncNames) -> ^(TOK_DESCFUNCTION $name KW_EXTENDED?)
+    :
+    (KW_DESCRIBE|KW_DESC)
+    (
+    (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? (dbName=identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
+    |
+    (KW_FUNCTION) => KW_FUNCTION KW_EXTENDED? (name=descFuncNames) -> ^(TOK_DESCFUNCTION $name KW_EXTENDED?)
+    |
+    (KW_FORMATTED|KW_EXTENDED|KW_PRETTY) => ((descOptions=KW_FORMATTED|descOptions=KW_EXTENDED|descOptions=KW_PRETTY) parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions)
+    |
+    parttype=partTypeExpr -> ^(TOK_DESCTABLE $parttype)
+    )
     ;
 
-
 analyzeStatement
 @init { pushMsg("analyze statement", state); }
 @after { popMsg(state); }
@@ -1350,8 +1368,12 @@ showStatement
     | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=identifier)? KW_LIKE showStmtIdentifier partitionSpec?
     -> ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?)
     | KW_SHOW KW_TBLPROPERTIES tableName (LPAREN prptyName=StringLiteral RPAREN)? -> ^(TOK_SHOW_TBLPROPERTIES tableName $prptyName?)
-    | KW_SHOW KW_LOCKS (parttype=partTypeExpr)? (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWLOCKS $parttype? $isExtended?)
-    | KW_SHOW KW_LOCKS (KW_DATABASE|KW_SCHEMA) (dbName=Identifier) (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWDBLOCKS $dbName $isExtended?)
+    | KW_SHOW KW_LOCKS 
+      (
+      (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) (dbName=Identifier) (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWDBLOCKS $dbName $isExtended?)
+      |
+      (parttype=partTypeExpr)? (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWLOCKS $parttype? $isExtended?)
+      )
     | KW_SHOW (showOptions=KW_FORMATTED)? (KW_INDEX|KW_INDEXES) KW_ON showStmtIdentifier ((KW_FROM|KW_IN) db_name=identifier)?
     -> ^(TOK_SHOWINDEXES showStmtIdentifier $showOptions? $db_name?)
     | KW_SHOW KW_COMPACTIONS -> ^(TOK_SHOW_COMPACTIONS)
@@ -1459,8 +1481,12 @@ showCurrentRole
 setRole
 @init {pushMsg("set role", state);}
 @after {popMsg(state);}
-    : KW_SET KW_ROLE roleName=identifier
-    -> ^(TOK_SHOW_SET_ROLE $roleName)
+    : KW_SET KW_ROLE 
+    (
+    (KW_ALL) => (all=KW_ALL) -> ^(TOK_SHOW_SET_ROLE Identifier[$all.text])
+    |
+    identifier -> ^(TOK_SHOW_SET_ROLE identifier)
+    )
     ;
 
 showGrants
@@ -1481,7 +1507,7 @@ showRolePrincipals
 privilegeIncludeColObject
 @init {pushMsg("privilege object including columns", state);}
 @after {popMsg(state);}
-    : KW_ALL -> ^(TOK_RESOURCE_ALL)
+    : (KW_ALL) => KW_ALL -> ^(TOK_RESOURCE_ALL)
     | privObjectCols -> ^(TOK_PRIV_OBJECT_COL privObjectCols)
     ;
 
@@ -1720,7 +1746,7 @@ tableSkewed
 @init { pushMsg("table skewed specification", state); }
 @after { popMsg(state); }
     :
-     KW_SKEWED KW_BY LPAREN skewedCols=columnNameList RPAREN KW_ON LPAREN (skewedValues=skewedValueElement) RPAREN (storedAsDirs)?
+     KW_SKEWED KW_BY LPAREN skewedCols=columnNameList RPAREN KW_ON LPAREN (skewedValues=skewedValueElement) RPAREN ((storedAsDirs) => storedAsDirs)?
     -> ^(TOK_TABLESKEWED $skewedCols $skewedValues storedAsDirs?)
     ;
 
@@ -1851,7 +1877,7 @@ tableFileFormat
 @init { pushMsg("table file format specification", state); }
 @after { popMsg(state); }
     :
-      KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
+      (KW_STORED KW_AS KW_INPUTFORMAT) => KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
       -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
       | KW_STORED KW_BY storageHandler=StringLiteral
          (KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
@@ -2018,6 +2044,9 @@ primitiveType
     | KW_DATE          ->    TOK_DATE
     | KW_DATETIME      ->    TOK_DATETIME
     | KW_TIMESTAMP     ->    TOK_TIMESTAMP
+    // Uncomment to allow intervals as table column types
+    //| KW_INTERVAL KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH
+    //| KW_INTERVAL KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME
     | KW_STRING        ->    TOK_STRING
     | KW_BINARY        ->    TOK_BINARY
     | KW_DECIMAL (LPAREN prec=Number (COMMA scale=Number)? RPAREN)? -> ^(TOK_DECIMAL $prec? $scale?)
@@ -2231,7 +2260,7 @@ simpleSelectStatement
    whereClause?
    groupByClause?
    havingClause?
-   window_clause?
+   ((window_clause) => window_clause)?
    -> ^(TOK_QUERY fromClause? ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
                      selectClause whereClause? groupByClause? havingClause? window_clause?))
    ;

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g Sat Mar 28 14:03:43 2015
@@ -35,6 +35,9 @@ k=3;
       RecognitionException e) {
     gParent.errors.add(new ParseError(gParent, e, tokenNames));
   }
+  protected boolean useSQL11ReservedKeywordsForIdentifier() {
+    return gParent.useSQL11ReservedKeywordsForIdentifier();
+  }
 }
 
 @rulecatch {
@@ -51,40 +54,41 @@ groupByClause
 @after { gParent.popMsg(state); }
     :
     KW_GROUP KW_BY
-    groupByExpression
-    ( COMMA groupByExpression )*
+    expression
+    ( COMMA expression)*
     ((rollup=KW_WITH KW_ROLLUP) | (cube=KW_WITH KW_CUBE)) ?
     (sets=KW_GROUPING KW_SETS 
     LPAREN groupingSetExpression ( COMMA groupingSetExpression)*  RPAREN ) ?
-    -> {rollup != null}? ^(TOK_ROLLUP_GROUPBY groupByExpression+)
-    -> {cube != null}? ^(TOK_CUBE_GROUPBY groupByExpression+)
-    -> {sets != null}? ^(TOK_GROUPING_SETS groupByExpression+ groupingSetExpression+)
-    -> ^(TOK_GROUPBY groupByExpression+)
+    -> {rollup != null}? ^(TOK_ROLLUP_GROUPBY expression+)
+    -> {cube != null}? ^(TOK_CUBE_GROUPBY expression+)
+    -> {sets != null}? ^(TOK_GROUPING_SETS expression+ groupingSetExpression+)
+    -> ^(TOK_GROUPBY expression+)
     ;
 
 groupingSetExpression
 @init {gParent.pushMsg("grouping set expression", state); }
 @after {gParent.popMsg(state); }
    :
-   groupByExpression
-   -> ^(TOK_GROUPING_SETS_EXPRESSION groupByExpression)
+   (LPAREN) => groupingSetExpressionMultiple 
    |
+   groupingExpressionSingle
+   ;
+
+groupingSetExpressionMultiple
+@init {gParent.pushMsg("grouping set part expression", state); }
+@after {gParent.popMsg(state); }
+   :
    LPAREN 
-   groupByExpression (COMMA groupByExpression)*
+   expression? (COMMA expression)*
    RPAREN
-   -> ^(TOK_GROUPING_SETS_EXPRESSION groupByExpression+)
-   |
-   LPAREN
-   RPAREN
-   -> ^(TOK_GROUPING_SETS_EXPRESSION)
+   -> ^(TOK_GROUPING_SETS_EXPRESSION expression*)
    ;
 
-
-groupByExpression
-@init { gParent.pushMsg("group by expression", state); }
+groupingExpressionSingle
+@init { gParent.pushMsg("groupingExpression expression", state); }
 @after { gParent.popMsg(state); }
     :
-    expression
+    expression -> ^(TOK_GROUPING_SETS_EXPRESSION expression)
     ;
 
 havingClause
@@ -101,6 +105,26 @@ havingCondition
     expression
     ;
 
+expressionsInParenthese
+    :
+    LPAREN expression (COMMA expression)* RPAREN -> expression+
+    ;
+
+expressionsNotInParenthese
+    :
+    expression (COMMA expression)* -> expression+
+    ;
+
+columnRefOrderInParenthese
+    :
+    LPAREN columnRefOrder (COMMA columnRefOrder)* RPAREN -> columnRefOrder+
+    ;
+
+columnRefOrderNotInParenthese
+    :
+    columnRefOrder (COMMA columnRefOrder)* -> columnRefOrder+
+    ;
+    
 // order by a,b
 orderByClause
 @init { gParent.pushMsg("order by clause", state); }
@@ -108,17 +132,17 @@ orderByClause
     :
     KW_ORDER KW_BY columnRefOrder ( COMMA columnRefOrder)* -> ^(TOK_ORDERBY columnRefOrder+)
     ;
-
+    
 clusterByClause
 @init { gParent.pushMsg("cluster by clause", state); }
 @after { gParent.popMsg(state); }
     :
     KW_CLUSTER KW_BY
-    LPAREN expression (COMMA expression)* RPAREN -> ^(TOK_CLUSTERBY expression+)
+    (
+    (LPAREN) => expressionsInParenthese -> ^(TOK_CLUSTERBY expressionsInParenthese)
     |
-    KW_CLUSTER KW_BY
-    expression
-    ( (COMMA)=>COMMA expression )* -> ^(TOK_CLUSTERBY expression+)
+    expressionsNotInParenthese -> ^(TOK_CLUSTERBY expressionsNotInParenthese)
+    )
     ;
 
 partitionByClause
@@ -126,10 +150,11 @@ partitionByClause
 @after { gParent.popMsg(state); }
     :
     KW_PARTITION KW_BY
-    LPAREN expression (COMMA expression)* RPAREN -> ^(TOK_DISTRIBUTEBY expression+)
+    (
+    (LPAREN) => expressionsInParenthese -> ^(TOK_DISTRIBUTEBY expressionsInParenthese)
     |
-    KW_PARTITION KW_BY
-    expression ((COMMA)=> COMMA expression)* -> ^(TOK_DISTRIBUTEBY expression+)
+    expressionsNotInParenthese -> ^(TOK_DISTRIBUTEBY expressionsNotInParenthese)
+    )
     ;
 
 distributeByClause
@@ -137,10 +162,11 @@ distributeByClause
 @after { gParent.popMsg(state); }
     :
     KW_DISTRIBUTE KW_BY
-    LPAREN expression (COMMA expression)* RPAREN -> ^(TOK_DISTRIBUTEBY expression+)
+    (
+    (LPAREN) => expressionsInParenthese -> ^(TOK_DISTRIBUTEBY expressionsInParenthese)
     |
-    KW_DISTRIBUTE KW_BY
-    expression ((COMMA)=> COMMA expression)* -> ^(TOK_DISTRIBUTEBY expression+)
+    expressionsNotInParenthese -> ^(TOK_DISTRIBUTEBY expressionsNotInParenthese)
+    )
     ;
 
 sortByClause
@@ -148,12 +174,11 @@ sortByClause
 @after { gParent.popMsg(state); }
     :
     KW_SORT KW_BY
-    LPAREN columnRefOrder
-    ( COMMA columnRefOrder)* RPAREN -> ^(TOK_SORTBY columnRefOrder+)
+    (
+    (LPAREN) => columnRefOrderInParenthese -> ^(TOK_SORTBY columnRefOrderInParenthese)
     |
-    KW_SORT KW_BY
-    columnRefOrder
-    ( (COMMA)=> COMMA columnRefOrder)* -> ^(TOK_SORTBY columnRefOrder+)
+    columnRefOrderNotInParenthese -> ^(TOK_SORTBY columnRefOrderNotInParenthese)
+    )
     ;
 
 // fun(par1, par2, par3)
@@ -164,7 +189,7 @@ function
     functionName
     LPAREN
       (
-        (star=STAR)
+        (STAR) => (star=STAR)
         | (dist=KW_DISTINCT)? (selectExpression (COMMA selectExpression)*)?
       )
     RPAREN (KW_OVER ws=window_specification)?
@@ -173,29 +198,15 @@ function
                             -> ^(TOK_FUNCTIONDI functionName (selectExpression+)?)
     ;
 
-nonParenthesizedFunction
-@init { gParent.pushMsg("non-parenthesized function name", state); }
-@after { gParent.popMsg(state); }
-    :
-    nonParenthesizedFunctionName
-        -> ^(TOK_FUNCTION nonParenthesizedFunctionName)
-    ;
-
-nonParenthesizedFunctionName
-@init { gParent.pushMsg("non-parenthesized function name", state); }
-@after { gParent.popMsg(state); }
-    :
-    KW_CURRENT_DATE | KW_CURRENT_TIMESTAMP
-    ;
-
 functionName
 @init { gParent.pushMsg("function name", state); }
 @after { gParent.popMsg(state); }
     : // Keyword IF is also a function name
-    KW_IF | KW_ARRAY | KW_MAP | KW_STRUCT | KW_UNIONTYPE | functionIdentifier
+    (KW_IF | KW_ARRAY | KW_MAP | KW_STRUCT | KW_UNIONTYPE) => (KW_IF | KW_ARRAY | KW_MAP | KW_STRUCT | KW_UNIONTYPE)
+    | 
+    (functionIdentifier) => functionIdentifier
     |
-    // This allows current_timestamp() to work as well as current_timestamp
-    nonParenthesizedFunctionName
+    {!useSQL11ReservedKeywordsForIdentifier()}? sql11ReservedKeywordsUsedAsCastFunctionName -> Identifier[$sql11ReservedKeywordsUsedAsCastFunctionName.text]
     ;
 
 castExpression
@@ -237,6 +248,7 @@ constant
     Number
     | dateLiteral
     | timestampLiteral
+    | intervalLiteral
     | StringLiteral
     | stringLiteralSequence
     | BigintLiteral
@@ -267,6 +279,8 @@ dateLiteral
       // This makes the dateLiteral more consistent with the other type literals.
       adaptor.create(TOK_DATELITERAL, $StringLiteral.text)
     }
+    |
+    KW_CURRENT_DATE -> ^(TOK_FUNCTION KW_CURRENT_DATE)
     ;
 
 timestampLiteral
@@ -275,6 +289,28 @@ timestampLiteral
     {
       adaptor.create(TOK_TIMESTAMPLITERAL, $StringLiteral.text)
     }
+    |
+    KW_CURRENT_TIMESTAMP -> ^(TOK_FUNCTION KW_CURRENT_TIMESTAMP)
+    ;
+
+intervalLiteral
+    :
+    KW_INTERVAL StringLiteral qualifiers=intervalQualifiers ->
+    {
+      adaptor.create(qualifiers.tree.token.getType(), $StringLiteral.text)
+    }
+    ;
+
+intervalQualifiers
+    :
+    KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH_LITERAL
+    | KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME_LITERAL
+    | KW_YEAR -> TOK_INTERVAL_YEAR_LITERAL
+    | KW_MONTH -> TOK_INTERVAL_MONTH_LITERAL
+    | KW_DAY -> TOK_INTERVAL_DAY_LITERAL
+    | KW_HOUR -> TOK_INTERVAL_HOUR_LITERAL
+    | KW_MINUTE -> TOK_INTERVAL_MINUTE_LITERAL
+    | KW_SECOND -> TOK_INTERVAL_SECOND_LITERAL
     ;
 
 expression
@@ -286,12 +322,11 @@ expression
 
 atomExpression
     :
-    KW_NULL -> TOK_NULL
-    | constant
+    (KW_NULL) => KW_NULL -> TOK_NULL
+    | (constant) => constant
     | castExpression
     | caseExpression
     | whenExpression
-    | nonParenthesizedFunction
     | (functionName LPAREN) => function
     | tableOrColumn
     | LPAREN! expression RPAREN!
@@ -543,7 +578,7 @@ sysFuncNames
 
 descFuncNames
     :
-      sysFuncNames
+      (sysFuncNames) => sysFuncNames
     | StringLiteral
     | functionIdentifier
     ;
@@ -552,6 +587,9 @@ identifier
     :
     Identifier
     | nonReserved -> Identifier[$nonReserved.text]
+    // If it is decided to support SQL11 reserved keywords, i.e., useSQL11ReservedKeywordsForIdentifier()=false,
+    // the SQL11 keywords in existing q tests will NOT be added back.
+    | {useSQL11ReservedKeywordsForIdentifier()}? sql11ReservedKeywordsUsedAsIdentifier -> Identifier[$sql11ReservedKeywordsUsedAsIdentifier.text]
     ;
 
 functionIdentifier
@@ -570,7 +608,44 @@ principalIdentifier
     | QuotedIdentifier
     ;
 
+//the new version of nonReserved + sql11ReservedKeywordsUsedAsIdentifier = old version of nonReserved 
 nonReserved
     :
-    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE | KW_AUTHORIZATION | KW_VALUES | KW_URI | KW_SERVER | KW_RELOAD
+    KW_ADD | KW_ADMIN | KW_AFTER | KW_ANALYZE | KW_ARCHIVE | KW_ASC | KW_BEFORE | KW_BUCKET | KW_BUCKETS
+    | KW_CASCADE | KW_CHANGE | KW_CLUSTER | KW_CLUSTERED | KW_CLUSTERSTATUS | KW_COLLECTION | KW_COLUMNS
+    | KW_COMMENT | KW_COMPACT | KW_COMPACTIONS | KW_COMPUTE | KW_CONCATENATE | KW_CONTINUE | KW_DATA | KW_DAY
+    | KW_DATABASES | KW_DATETIME | KW_DBPROPERTIES | KW_DEFERRED | KW_DEFINED | KW_DELIMITED | KW_DEPENDENCY 
+    | KW_DESC | KW_DIRECTORIES | KW_DIRECTORY | KW_DISABLE | KW_DISTRIBUTE | KW_ELEM_TYPE 
+    | KW_ENABLE | KW_ESCAPED | KW_EXCLUSIVE | KW_EXPLAIN | KW_EXPORT | KW_FIELDS | KW_FILE | KW_FILEFORMAT
+    | KW_FIRST | KW_FORMAT | KW_FORMATTED | KW_FUNCTIONS | KW_HOLD_DDLTIME | KW_HOUR | KW_IDXPROPERTIES | KW_IGNORE
+    | KW_INDEX | KW_INDEXES | KW_INPATH | KW_INPUTDRIVER | KW_INPUTFORMAT | KW_ITEMS | KW_JAR
+    | KW_KEYS | KW_KEY_TYPE | KW_LIMIT | KW_LINES | KW_LOAD | KW_LOCATION | KW_LOCK | KW_LOCKS | KW_LOGICAL | KW_LONG
+    | KW_MAPJOIN | KW_MATERIALIZED | KW_MINUS | KW_MINUTE | KW_MONTH | KW_MSCK | KW_NOSCAN | KW_NO_DROP | KW_OFFLINE | KW_OPTION
+    | KW_OUTPUTDRIVER | KW_OUTPUTFORMAT | KW_OVERWRITE | KW_OWNER | KW_PARTITIONED | KW_PARTITIONS | KW_PLUS | KW_PRETTY | KW_PRINCIPALS
+    | KW_PROTECTION | KW_PURGE | KW_READ | KW_READONLY | KW_REBUILD | KW_RECORDREADER | KW_RECORDWRITER
+    | KW_REGEXP | KW_RELOAD | KW_RENAME | KW_REPAIR | KW_REPLACE | KW_RESTRICT | KW_REWRITE | KW_RLIKE
+    | KW_ROLE | KW_ROLES | KW_SCHEMA | KW_SCHEMAS | KW_SECOND | KW_SEMI | KW_SERDE | KW_SERDEPROPERTIES | KW_SERVER | KW_SETS | KW_SHARED
+    | KW_SHOW | KW_SHOW_DATABASE | KW_SKEWED | KW_SORT | KW_SORTED | KW_SSL | KW_STATISTICS | KW_STORED
+    | KW_STREAMTABLE | KW_STRING | KW_STRUCT | KW_TABLES | KW_TBLPROPERTIES | KW_TEMPORARY | KW_TERMINATED
+    | KW_TINYINT | KW_TOUCH | KW_TRANSACTIONS | KW_UNARCHIVE | KW_UNDO | KW_UNIONTYPE | KW_UNLOCK | KW_UNSET
+    | KW_UNSIGNED | KW_URI | KW_USE | KW_UTC | KW_UTCTIMESTAMP | KW_VALUE_TYPE | KW_VIEW | KW_WHILE | KW_YEAR
+    ;
+
+//The following SQL2011 reserved keywords are used as cast function names only; they are a subset of sql11ReservedKeywordsUsedAsIdentifier.
+sql11ReservedKeywordsUsedAsCastFunctionName
+    :
+    KW_BIGINT | KW_BINARY | KW_BOOLEAN | KW_CURRENT_DATE | KW_CURRENT_TIMESTAMP | KW_DATE | KW_DOUBLE | KW_FLOAT | KW_INT | KW_SMALLINT | KW_TIMESTAMP
+    ;
+
+//The following SQL2011 reserved keywords are used as identifiers in many q tests; they may be added back for backward compatibility.
+sql11ReservedKeywordsUsedAsIdentifier
+    :
+    KW_ALL | KW_ALTER | KW_ARRAY | KW_AS | KW_AUTHORIZATION | KW_BETWEEN | KW_BIGINT | KW_BINARY | KW_BOOLEAN 
+    | KW_BOTH | KW_BY | KW_CREATE | KW_CUBE | KW_CURRENT_DATE | KW_CURRENT_TIMESTAMP | KW_CURSOR | KW_DATE | KW_DECIMAL | KW_DELETE | KW_DESCRIBE 
+    | KW_DOUBLE | KW_DROP | KW_EXISTS | KW_EXTERNAL | KW_FALSE | KW_FETCH | KW_FLOAT | KW_FOR | KW_FULL | KW_GRANT 
+    | KW_GROUP | KW_GROUPING | KW_IMPORT | KW_IN | KW_INNER | KW_INSERT | KW_INT | KW_INTERSECT | KW_INTO | KW_IS | KW_LATERAL 
+    | KW_LEFT | KW_LIKE | KW_LOCAL | KW_NONE | KW_NULL | KW_OF | KW_ORDER | KW_OUT | KW_OUTER | KW_PARTITION 
+    | KW_PERCENT | KW_PROCEDURE | KW_RANGE | KW_READS | KW_REVOKE | KW_RIGHT 
+    | KW_ROLLUP | KW_ROW | KW_ROWS | KW_SET | KW_SMALLINT | KW_TABLE | KW_TIMESTAMP | KW_TO | KW_TRIGGER | KW_TRUE 
+    | KW_TRUNCATE | KW_UNION | KW_UPDATE | KW_USER | KW_USING | KW_VALUES | KW_WITH
     ;
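
To make the double negative easier to follow, here is a small standalone sketch (not Hive code) of how the new semantic predicate gates reserved keywords in the identifier rule. The keyword set below is a tiny illustrative subset of sql11ReservedKeywordsUsedAsIdentifier, and the boolean parameter stands in for the HIVE_SUPPORT_SQL11_RESERVED_KEYWORDS setting read by useSQL11ReservedKeywordsForIdentifier().

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class Sql11KeywordGateSketch {
  // Illustrative subset of the keywords listed in sql11ReservedKeywordsUsedAsIdentifier.
  static final Set<String> SQL11_RESERVED =
      new HashSet<String>(Arrays.asList("DATE", "UPDATE", "USER", "TABLE"));

  // Mirrors useSQL11ReservedKeywordsForIdentifier(): when the config flag is OFF,
  // the old (backward-compatible) behavior is kept and the keywords stay usable.
  static boolean useSQL11ReservedKeywordsForIdentifier(boolean supportSql11ReservedKeywords) {
    return !supportSql11ReservedKeywords;
  }

  // A word may be used as an identifier if it is not reserved at all, or if the
  // grammar is still accepting SQL11 reserved keywords as identifiers.
  static boolean allowedAsIdentifier(String word, boolean supportSql11ReservedKeywords) {
    return !SQL11_RESERVED.contains(word.toUpperCase())
        || useSQL11ReservedKeywordsForIdentifier(supportSql11ReservedKeywords);
  }

  public static void main(String[] args) {
    System.out.println(allowedAsIdentifier("date", false)); // true  (legacy behavior)
    System.out.println(allowedAsIdentifier("date", true));  // false (keyword is reserved)
    System.out.println(allowedAsIdentifier("ds", true));    // true  (not a reserved word)
  }
}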

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java Sat Mar 28 14:03:43 2015
@@ -152,10 +152,6 @@ public class MapReduceCompiler extends T
       topOp.setChildOperators(null);
     }
 
-    if (topOp.getChildOperators() == null) {
-      return;
-    }
-
     for (Operator<? extends OperatorDesc> op : topOp.getChildOperators()) {
       breakOperatorTree(op);
     }
@@ -194,6 +190,7 @@ public class MapReduceCompiler extends T
 
     final Context lCtx = ctx;
     PathFilter p = new PathFilter() {
+      @Override
       public boolean accept(Path file) {
         return !lCtx.isMRTmpFileURI(file.toUri().getPath());
       }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java Sat Mar 28 14:03:43 2015
@@ -193,6 +193,9 @@ public class ParseDriver {
       lexer.setHiveConf(ctx.getConf());
     }
     HiveParser parser = new HiveParser(tokens);
+    if (ctx != null) {
+      parser.setHiveConf(ctx.getConf());
+    }
     parser.setTreeAdaptor(adaptor);
     HiveParser.statement_return r = null;
     try {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SelectClauseParser.g
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SelectClauseParser.g?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SelectClauseParser.g (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SelectClauseParser.g Sat Mar 28 14:03:43 2015
@@ -35,6 +35,9 @@ k=3;
       RecognitionException e) {
     gParent.errors.add(new ParseError(gParent, e, tokenNames));
   }
+  protected boolean useSQL11ReservedKeywordsForIdentifier() {
+    return gParent.useSQL11ReservedKeywordsForIdentifier();
+  }
 }
 
 @rulecatch {
@@ -125,10 +128,11 @@ selectItem
 @init { gParent.pushMsg("selection target", state); }
 @after { gParent.popMsg(state); }
     :
+    (tableAllColumns) => tableAllColumns -> ^(TOK_SELEXPR tableAllColumns)
+    |
     ( expression
       ((KW_AS? identifier) | (KW_AS LPAREN identifier (COMMA identifier)* RPAREN))?
     ) -> ^(TOK_SELEXPR expression identifier*)
-    | tableAllColumns -> ^(TOK_SELEXPR tableAllColumns)
     ;
 
 trfmClause
@@ -148,7 +152,9 @@ selectExpression
 @init { gParent.pushMsg("select expression", state); }
 @after { gParent.popMsg(state); }
     :
-    expression | tableAllColumns
+    (tableAllColumns) => tableAllColumns
+    |
+    expression
     ;
 
 selectExpressionList

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Sat Mar 28 14:03:43 2015
@@ -1856,7 +1856,7 @@ public class SemanticAnalyzer extends Ba
           return true;
         }
       } catch (Exception e) {
-        throw new HiveException("Unable to determine if " + path + "is encrypted: " + e, e);
+        throw new HiveException("Unable to determine if " + path + " is encrypted: " + e, e);
       }
     }
 
@@ -1919,7 +1919,7 @@ public class SemanticAnalyzer extends Ba
    * Gets the strongest encrypted table path.
    *
    * @param qb The QB object that contains a list of all table locations.
-   * @return The strongest encrypted path
+   * @return The strongest encrypted path. It may return NULL if no tables are encrypted, or if none of them are HDFS tables.
    * @throws HiveException if an error occurred attempting to compare the encryption strength
    */
   private Path getStrongestEncryptedTablePath(QB qb) throws HiveException {
@@ -1932,17 +1932,14 @@ public class SemanticAnalyzer extends Ba
       if (tab != null) {
         Path tablePath = tab.getDataLocation();
         if (tablePath != null) {
-          try {
-            if (strongestPath == null) {
-              strongestPath = tablePath;
-            } else if ("hdfs".equals(tablePath.toUri().getScheme())
-                && isPathEncrypted(tablePath)
-                && comparePathKeyStrength(tablePath, strongestPath) > 0)
-            {
-              strongestPath = tablePath;
+          if ("hdfs".equalsIgnoreCase(tablePath.toUri().getScheme())) {
+            if (isPathEncrypted(tablePath)) {
+              if (strongestPath == null) {
+                strongestPath = tablePath;
+              } else if (comparePathKeyStrength(tablePath, strongestPath) > 0) {
+                strongestPath = tablePath;
+              }
             }
-          } catch (HiveException e) {
-            throw new HiveException("Unable to find the most secure table path: " + e, e);
           }
         }
       }
@@ -1966,22 +1963,19 @@ public class SemanticAnalyzer extends Ba
   private Path getStagingDirectoryPathname(QB qb) throws HiveException {
     Path stagingPath = null, tablePath;
 
-    // Looks for the most encrypted table location (if there is one)
+    // Looks for the most strongly encrypted table location
+    // It may return null if no tables are encrypted, or if none of them are on HDFS
     tablePath = getStrongestEncryptedTablePath(qb);
-    if (tablePath != null && isPathEncrypted(tablePath)) {
-      // Only HDFS paths can be checked for encryption
-      if ("hdfs".equals(tablePath.toUri().getScheme())) {
-        if (isPathReadOnly(tablePath)) {
-          Path tmpPath = ctx.getMRTmpPath();
-          if (comparePathKeyStrength(tablePath, tmpPath) < 0) {
-            throw new HiveException("Read-only encrypted tables cannot be read " +
-                "if the scratch directory is not encrypted (or encryption is weak)");
-          } else {
-            stagingPath = tmpPath;
-          }
+    if (tablePath != null) {
+      // At this point, tablePath is part of HDFS and it is encrypted
+      if (isPathReadOnly(tablePath)) {
+        Path tmpPath = ctx.getMRTmpPath();
+        if (comparePathKeyStrength(tablePath, tmpPath) < 0) {
+          throw new HiveException("Read-only encrypted tables cannot be read " +
+              "if the scratch directory is not encrypted (or encryption is weak)");
+        } else {
+          stagingPath = tmpPath;
         }
-      } else {
-        LOG.debug("Encryption is not applicable to table path " + tablePath.toString());
       }
 
       if (stagingPath == null) {
@@ -10757,7 +10751,7 @@ public class SemanticAnalyzer extends Ba
       }
     }
 
-    storageFormat.fillDefaultStorageFormat();
+    storageFormat.fillDefaultStorageFormat(isExt);
 
     if ((command_type == CTAS) && (storageFormat.getStorageHandler() != null)) {
       throw new SemanticException(ErrorMsg.CREATE_NON_NATIVE_AS.getMsg());
@@ -10776,7 +10770,7 @@ public class SemanticAnalyzer extends Ba
       }
     }
 
-    addDbAndTabToOutputs(qualifiedTabName);
+    addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE);
 
     if (isTemporary) {
       if (partCols.size() > 0) {
@@ -10876,11 +10870,13 @@ public class SemanticAnalyzer extends Ba
     return null;
   }
 
-  private void addDbAndTabToOutputs(String[] qualifiedTabName) throws SemanticException {
+  private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type) throws SemanticException {
     Database database  = getDatabase(qualifiedTabName[0]);
     outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
-    outputs.add(new WriteEntity(new Table(qualifiedTabName[0], qualifiedTabName[1]),
-        WriteEntity.WriteType.DDL_NO_LOCK));
+
+    Table t = new Table(qualifiedTabName[0], qualifiedTabName[1]);
+    t.setTableType(type);
+    outputs.add(new WriteEntity(t, WriteEntity.WriteType.DDL_NO_LOCK));
   }
 
   private ASTNode analyzeCreateView(ASTNode ast, QB qb)
@@ -10946,7 +10942,7 @@ public class SemanticAnalyzer extends Ba
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         createVwDesc), conf));
 
-    addDbAndTabToOutputs(qualTabName);
+    addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW);
     return selectStmt;
   }
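
The restructured getStrongestEncryptedTablePath() now considers only encrypted HDFS locations and keeps the one protected by the strongest key. A rough standalone sketch of that selection is below; the Candidate type and its keyStrength field are hypothetical stand-ins for Path plus the comparison done by comparePathKeyStrength().

import java.util.ArrayList;
import java.util.List;

public class StrongestPathSketch {
  static class Candidate {
    final String uri; final boolean encrypted; final int keyStrength;
    Candidate(String uri, boolean encrypted, int keyStrength) {
      this.uri = uri; this.encrypted = encrypted; this.keyStrength = keyStrength;
    }
  }

  static Candidate strongest(List<Candidate> tables) {
    Candidate best = null;
    for (Candidate c : tables) {
      if (!c.uri.startsWith("hdfs://") || !c.encrypted) {
        continue; // non-HDFS and unencrypted locations are ignored
      }
      if (best == null || c.keyStrength > best.keyStrength) {
        best = c; // keep the location protected by the strongest key
      }
    }
    return best; // null when no encrypted HDFS table location exists
  }

  public static void main(String[] args) {
    List<Candidate> tables = new ArrayList<Candidate>();
    tables.add(new Candidate("s3://bucket/t1", true, 256));
    tables.add(new Candidate("hdfs://nn/warehouse/t2", false, 0));
    tables.add(new Candidate("hdfs://nn/warehouse/t3", true, 128));
    tables.add(new Candidate("hdfs://nn/warehouse/t4", true, 256));
    System.out.println(strongest(tables).uri); // hdfs://nn/warehouse/t4
  }
}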
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java Sat Mar 28 14:03:43 2015
@@ -104,9 +104,15 @@ public class StorageFormat {
     }
   }
 
-  protected void fillDefaultStorageFormat() throws SemanticException {
+  protected void fillDefaultStorageFormat(boolean isExternal) throws SemanticException {
     if ((inputFormat == null) && (storageHandler == null)) {
       String defaultFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT);
+      String defaultManagedFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTMANAGEDFILEFORMAT);
+
+      if (!isExternal && !"none".equals(defaultManagedFormat)) {
+	defaultFormat = defaultManagedFormat;
+      }
+
       if (StringUtils.isBlank(defaultFormat)) {
         inputFormat = IOConstants.TEXTFILE_INPUT;
         outputFormat = IOConstants.TEXTFILE_OUTPUT;
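
A small standalone sketch of the selection order the new fillDefaultStorageFormat(boolean) follows, assuming a HIVEDEFAULTMANAGEDFILEFORMAT value of "none" means no managed-table override; the format names in main() are example values only.

public class DefaultFormatSketch {
  // Mirrors the selection order above: managed (non-external) tables may get their
  // own default from HIVEDEFAULTMANAGEDFILEFORMAT unless it is "none"; external
  // tables keep the general default; a blank value falls back to TEXTFILE.
  static String pickDefaultFormat(boolean isExternal, String defaultFormat,
      String defaultManagedFormat) {
    if (!isExternal && !"none".equals(defaultManagedFormat)) {
      defaultFormat = defaultManagedFormat;
    }
    if (defaultFormat == null || defaultFormat.trim().isEmpty()) {
      return "TEXTFILE";
    }
    return defaultFormat;
  }

  public static void main(String[] args) {
    System.out.println(pickDefaultFormat(false, "TextFile", "ORC"));  // ORC (managed table)
    System.out.println(pickDefaultFormat(true,  "TextFile", "ORC"));  // TextFile (external table)
    System.out.println(pickDefaultFormat(false, "TextFile", "none")); // TextFile (override disabled)
  }
}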

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Sat Mar 28 14:03:43 2015
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import java.math.BigDecimal;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
@@ -34,6 +35,8 @@ import org.apache.commons.lang.StringUti
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
@@ -75,6 +78,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hive.common.util.DateUtils;
 
 import com.google.common.collect.Lists;
 
@@ -175,9 +179,18 @@ public class TypeCheckProcFactory {
         + HiveParser.KW_FALSE + "%"), tf.getBoolExprProcessor());
     opRules.put(new RuleRegExp("R5", HiveParser.TOK_DATELITERAL + "%|"
         + HiveParser.TOK_TIMESTAMPLITERAL + "%"), tf.getDateTimeExprProcessor());
-    opRules.put(new RuleRegExp("R6", HiveParser.TOK_TABLE_OR_COL + "%"),
+    opRules.put(new RuleRegExp("R6",
+        HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_YEAR_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_MONTH_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_DAY_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_HOUR_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_MINUTE_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_SECOND_LITERAL + "%"), tf.getIntervalExprProcessor());
+    opRules.put(new RuleRegExp("R7", HiveParser.TOK_TABLE_OR_COL + "%"),
         tf.getColumnExprProcessor());
-    opRules.put(new RuleRegExp("R7", HiveParser.TOK_SUBQUERY_OP + "%"),
+    opRules.put(new RuleRegExp("R8", HiveParser.TOK_SUBQUERY_OP + "%"),
         tf.getSubQueryExprProcessor());
 
     // The dispatcher fires the processor corresponding to the closest matching
@@ -472,6 +485,79 @@ public class TypeCheckProcFactory {
   }
 
   /**
+   * Processor for interval constants.
+   */
+  public static class IntervalExprProcessor implements NodeProcessor {
+
+    private static final BigDecimal NANOS_PER_SEC_BD = new BigDecimal(DateUtils.NANOS_PER_SEC);
+    @Override
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+        Object... nodeOutputs) throws SemanticException {
+
+      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
+      if (ctx.getError() != null) {
+        return null;
+      }
+
+      ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
+      if (desc != null) {
+        return desc;
+      }
+
+      ASTNode expr = (ASTNode) nd;
+      String intervalString = BaseSemanticAnalyzer.stripQuotes(expr.getText());
+
+      // Get the string value and convert to a Interval value.
+      try {
+        switch (expr.getType()) {
+          case HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
+                HiveIntervalYearMonth.valueOf(intervalString));
+          case HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
+                HiveIntervalDayTime.valueOf(intervalString));
+          case HiveParser.TOK_INTERVAL_YEAR_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
+                new HiveIntervalYearMonth(Integer.parseInt(intervalString), 0));
+          case HiveParser.TOK_INTERVAL_MONTH_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
+                new HiveIntervalYearMonth(0, Integer.parseInt(intervalString)));
+          case HiveParser.TOK_INTERVAL_DAY_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
+                new HiveIntervalDayTime(Integer.parseInt(intervalString), 0, 0, 0, 0));
+          case HiveParser.TOK_INTERVAL_HOUR_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
+                new HiveIntervalDayTime(0, Integer.parseInt(intervalString), 0, 0, 0));
+          case HiveParser.TOK_INTERVAL_MINUTE_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
+                new HiveIntervalDayTime(0, 0, Integer.parseInt(intervalString), 0, 0));
+          case HiveParser.TOK_INTERVAL_SECOND_LITERAL:
+            BigDecimal bd = new BigDecimal(intervalString);
+            BigDecimal bdSeconds = new BigDecimal(bd.toBigInteger());
+            BigDecimal bdNanos = bd.subtract(bdSeconds);
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
+                new HiveIntervalDayTime(0, 0, 0, bdSeconds.intValueExact(),
+                    bdNanos.multiply(NANOS_PER_SEC_BD).intValue()));
+          default:
+            throw new IllegalArgumentException("Invalid time literal type " + expr.getType());
+        }
+      } catch (Exception err) {
+        throw new SemanticException(
+            "Unable to convert interval literal '" + intervalString + "' to interval value.", err);
+      }
+    }
+  }
+
+  /**
+   * Factory method to get IntervalExprProcessor.
+   *
+   * @return IntervalExprProcessor.
+   */
+  public IntervalExprProcessor getIntervalExprProcessor() {
+    return new IntervalExprProcessor();
+  }
+
+  /**
    * Processor for table columns.
    */
   public static class ColumnExprProcessor implements NodeProcessor {
@@ -619,6 +705,10 @@ public class TypeCheckProcFactory {
           serdeConstants.DATE_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMP,
           serdeConstants.TIMESTAMP_TYPE_NAME);
+      conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_YEAR_MONTH,
+          serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
+      conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_DAY_TIME,
+          serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_DECIMAL,
           serdeConstants.DECIMAL_TYPE_NAME);
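
For the TOK_INTERVAL_SECOND_LITERAL branch above, the literal is split into whole seconds plus a fractional part expressed in nanoseconds. A self-contained sketch of that arithmetic, assuming DateUtils.NANOS_PER_SEC is one billion:

import java.math.BigDecimal;

public class IntervalSecondsSketch {
  // Assumption: DateUtils.NANOS_PER_SEC is 1,000,000,000.
  private static final BigDecimal NANOS_PER_SEC_BD = new BigDecimal(1000000000);

  // Mirrors the TOK_INTERVAL_SECOND_LITERAL case: whole seconds and nanoseconds.
  static int[] splitSeconds(String intervalString) {
    BigDecimal bd = new BigDecimal(intervalString);
    BigDecimal bdSeconds = new BigDecimal(bd.toBigInteger()); // whole seconds
    BigDecimal bdNanos = bd.subtract(bdSeconds);              // fractional seconds
    return new int[] {
        bdSeconds.intValueExact(),
        bdNanos.multiply(NANOS_PER_SEC_BD).intValue()
    };
  }

  public static void main(String[] args) {
    int[] parts = splitSeconds("2.5");
    System.out.println(parts[0] + " s, " + parts[1] + " ns"); // 2 s, 500000000 ns
  }
}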
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java Sat Mar 28 14:03:43 2015
@@ -54,11 +54,11 @@ public class AbstractOperatorDesc implem
     this.vectorMode = vm;
   }
   
-  public OpTraits getOpTraits() {
+  public OpTraits getTraits() {
     return opTraits;
   }
   
-  public void setOpTraits(OpTraits opTraits) {
+  public void setTraits(OpTraits opTraits) {
     this.opTraits = opTraits;
   }
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java Sat Mar 28 14:03:43 2015
@@ -24,24 +24,18 @@ import java.io.Serializable;
 public class CommonMergeJoinDesc extends MapJoinDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private int numBuckets;
-  private boolean isSubQuery;
   private int mapJoinConversionPos;
 
   CommonMergeJoinDesc() {
   }
 
-  public CommonMergeJoinDesc(int numBuckets, boolean isSubQuery, int mapJoinConversionPos,
+  public CommonMergeJoinDesc(int numBuckets, int mapJoinConversionPos,
       MapJoinDesc joinDesc) {
     super(joinDesc);
     this.numBuckets = numBuckets;
-    this.isSubQuery = isSubQuery;
     this.mapJoinConversionPos = mapJoinConversionPos;
   }
 
-  public boolean getCustomMerge() {
-    return isSubQuery;
-  }
-
   public int getNumBuckets() {
     return numBuckets;
   }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java Sat Mar 28 14:03:43 2015
@@ -52,9 +52,7 @@ public class MapJoinDesc extends JoinDes
   // TODO: should these rather be arrays?
   private Map<Integer, String> parentToInput = new HashMap<Integer, String>();
   private Map<Integer, Long> parentKeyCounts = new HashMap<Integer, Long>();
-
-  // for tez. used to remember which type of a Bucket Map Join this is.
-  private boolean customBucketMapJoin;
+  private Map<Integer, Long> parentDataSizes = new HashMap<Integer, Long>();
 
   // table alias (small) --> input file name (big) --> target file names (small)
   private Map<String, Map<String, List<String>>> aliasBucketFileNameMapping;
@@ -90,7 +88,7 @@ public class MapJoinDesc extends JoinDes
     this.dumpFilePrefix = clone.dumpFilePrefix;
     this.parentToInput = clone.parentToInput;
     this.parentKeyCounts = clone.parentKeyCounts;
-    this.customBucketMapJoin = clone.customBucketMapJoin;
+    this.parentDataSizes = clone.parentDataSizes;
   }
 
   public MapJoinDesc(final Map<Byte, List<ExprNodeDesc>> keys,
@@ -136,6 +134,10 @@ public class MapJoinDesc extends JoinDes
     return parentKeyCounts;
   }
 
+  public Map<Integer, Long> getParentDataSizes() {
+    return parentDataSizes;
+  }
+
   @Explain(displayName = "Estimated key counts", normalExplain = false)
   public String getKeyCountsExplainDesc() {
     StringBuilder result = null;
@@ -327,14 +329,7 @@ public class MapJoinDesc extends JoinDes
     return hashtableMemoryUsage;
   }
 
-  public void setCustomBucketMapJoin(boolean customBucketMapJoin) {
-    this.customBucketMapJoin = customBucketMapJoin;
-  }
-
-  public boolean getCustomBucketMapJoin() {
-    return this.customBucketMapJoin;
-  }
-
+  @Override
   public boolean isMapSideJoin() {
     return true;
   }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/OpTraits.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/OpTraits.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/OpTraits.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/OpTraits.java Sat Mar 28 14:03:43 2015
@@ -25,11 +25,14 @@ public class OpTraits {
   List<List<String>> bucketColNames;
   List<List<String>> sortColNames;
   int numBuckets;
+  int numReduceSinks;
 
-  public OpTraits(List<List<String>> bucketColNames, int numBuckets, List<List<String>> sortColNames) {
+  public OpTraits(List<List<String>> bucketColNames, int numBuckets,
+      List<List<String>> sortColNames, int numReduceSinks) {
     this.bucketColNames = bucketColNames;
     this.numBuckets = numBuckets;
     this.sortColNames = sortColNames;
+    this.numReduceSinks = numReduceSinks;
   }
 
   public List<List<String>> getBucketColNames() {
@@ -55,4 +58,12 @@ public class OpTraits {
   public List<List<String>> getSortCols() {
     return sortColNames;
   }
+
+  public void setNumReduceSinks(int numReduceSinks) {
+    this.numReduceSinks = numReduceSinks;
+  }
+
+  public int getNumReduceSinks() {
+    return this.numReduceSinks;
+  }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java Sat Mar 28 14:03:43 2015
@@ -25,7 +25,7 @@ public interface OperatorDesc extends Se
   public Object clone() throws CloneNotSupportedException;
   public Statistics getStatistics();
   public void setStatistics(Statistics statistics);
-  public OpTraits getOpTraits();
-  public void setOpTraits(OpTraits opTraits);
+  public OpTraits getTraits();
+  public void setTraits(OpTraits opTraits);
   public Map<String, String> getOpProps();
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java Sat Mar 28 14:03:43 2015
@@ -108,8 +108,6 @@ public class PredicateTransitivePropagat
       RowSchema parentRS, ExprNodeDesc filterExpr) {
     Operator<FilterDesc> filter = OperatorFactory.get(new FilterDesc(filterExpr, false),
         new RowSchema(parentRS.getSignature()));
-    filter.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
-    filter.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>());
     filter.getParentOperators().add(parent);
     filter.getChildOperators().add(target);
     parent.replaceChild(target, filter);
@@ -224,7 +222,7 @@ public class PredicateTransitivePropagat
 
   private static class Vectors {
 
-    private Set<Integer>[] vector;
+    private final Set<Integer>[] vector;
 
     @SuppressWarnings("unchecked")
     public Vectors(int length) {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java Sat Mar 28 14:03:43 2015
@@ -97,8 +97,6 @@ public class SyntheticJoinPredicate impl
       RowSchema parentRS, ExprNodeDesc filterExpr) {
     Operator<FilterDesc> filter = OperatorFactory.get(new FilterDesc(filterExpr, false),
         new RowSchema(parentRS.getSignature()));
-    filter.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
-    filter.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>());
     filter.getParentOperators().add(parent);
     filter.getChildOperators().add(target);
     parent.replaceChild(target, filter);

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java Sat Mar 28 14:03:43 2015
@@ -1448,9 +1448,13 @@ public class StatsUtils {
   }
 
   public static long getAvailableMemory(Configuration conf) {
-    int memory = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVETEZCONTAINERSIZE) > 0 ?
-        HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVETEZCONTAINERSIZE) :
-        conf.getInt(MRJobConfig.MAP_MEMORY_MB, MRJobConfig.DEFAULT_MAP_MEMORY_MB);
+    int memory = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVETEZCONTAINERSIZE);
+    if (memory <= 0) {
+      memory = conf.getInt(MRJobConfig.MAP_MEMORY_MB, MRJobConfig.DEFAULT_MAP_MEMORY_MB);
+      if (memory <= 0) {
+        memory = 1024;
+      }
+    }
     return memory;
   }
 

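The getAvailableMemory rewrite above replaces the old ternary with an explicit fallback chain: the Tez container size if positive, otherwise the MapReduce map memory setting, otherwise a hard floor of 1024 MB. A small self-contained sketch of the same chain, using an invented helper name and assuming hadoop-common and hadoop-mapreduce-client-core on the classpath:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class MemoryFallbackSketch {
      // Mirrors the patched fallback chain; "hive.tez.container.size" is the key
      // behind HiveConf.ConfVars.HIVETEZCONTAINERSIZE.
      static int availableMemoryMb(Configuration conf) {
        int memory = conf.getInt("hive.tez.container.size", -1);
        if (memory <= 0) {
          memory = conf.getInt(MRJobConfig.MAP_MEMORY_MB, MRJobConfig.DEFAULT_MAP_MEMORY_MB);
          if (memory <= 0) {
            memory = 1024;
          }
        }
        return memory;
      }

      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.setInt(MRJobConfig.MAP_MEMORY_MB, -1); // simulate a misconfigured negative value
        System.out.println(availableMemoryMb(conf)); // falls through to the 1024 MB floor
      }
    }

The practical effect is that a missing or misconfigured memory setting can no longer yield a zero or negative estimate to callers.
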
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java Sat Mar 28 14:03:43 2015
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.api.CompactionRequest;
 import org.apache.hadoop.hive.metastore.api.CompactionType;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
 import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
@@ -87,12 +88,28 @@ public class Initiator extends Compactor
             LOG.debug("Checking to see if we should compact " + ci.getFullPartitionName());
             try {
               Table t = resolveTable(ci);
+              if (t == null) {
+                // Most likely this means it's a temp table
+                LOG.debug("Can't find table " + ci.getFullTableName() + ", assuming it's a temp " +
+                    "table and moving on.");
+                continue;
+              }
+
               // check if no compaction set for this table
               if (noAutoCompactSet(t)) {
                 LOG.info("Table " + tableName(t) + " marked true so we will not compact it.");
                 continue;
               }
 
+              // Check to see if this is a table level request on a partitioned table.  If so,
+              // then it's a dynamic partitioning case and we shouldn't check the table itself.
+              if (t.getPartitionKeys() != null && t.getPartitionKeys().size() > 0 &&
+                  ci.partName  == null) {
+                LOG.debug("Skipping entry for " + ci.getFullTableName() + " as it is from dynamic" +
+                    " partitioning");
+                continue;
+              }
+
               // Check if we already have initiated or are working on a compaction for this partition
               // or table.  If so, skip it.  If we are just waiting on cleaning we can still check,
               // as it may be time to compact again even though we haven't cleaned.

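The two new guards in the Initiator loop skip entries whose table cannot be resolved (most likely a temp table) and table-level entries against partitioned tables, which the comment attributes to dynamic partitioning. A hedged sketch of the same conditions pulled out into a standalone predicate; the helper name is invented for illustration and only the Thrift Table API is assumed:

    import org.apache.hadoop.hive.metastore.api.Table;

    public class CompactionSkipSketch {
      // Invented helper mirroring the added checks: true means the Initiator
      // should move on to the next compaction candidate.
      static boolean shouldSkip(Table t, String partName) {
        if (t == null) {
          return true; // unresolvable table, most likely a temp table
        }
        // Table-level request on a partitioned table: dynamic partitioning case,
        // so the table itself should not be checked.
        return t.getPartitionKeys() != null
            && !t.getPartitionKeys().isEmpty()
            && partName == null;
      }
    }
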
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java Sat Mar 28 14:03:43 2015
@@ -69,6 +69,7 @@ public class UDFJson extends UDF {
   }
   private static final ObjectMapper MAPPER = new ObjectMapper(JSON_FACTORY);
   private static final JavaType MAP_TYPE = TypeFactory.fromClass(Map.class);
+  private static final JavaType LIST_TYPE = TypeFactory.fromClass(List.class);
 
   // An LRU cache using a linked hash map
   static class HashCache<K, V> extends LinkedHashMap<K, V> {
@@ -123,11 +124,25 @@ public class UDFJson extends UDF {
    */
   public Text evaluate(String jsonString, String pathString) {
 
-    if (jsonString == null || jsonString == "" || pathString == null
-        || pathString == "") {
+    if (jsonString == null || jsonString.isEmpty() || pathString == null
+        || pathString.isEmpty() || pathString.charAt(0) != '$') {
       return null;
     }
 
+    int pathExprStart = 1;
+    boolean isRootArray = false;
+
+    if (pathString.length() > 1) {
+      if (pathString.charAt(1) == '[') {
+        pathExprStart = 0;
+        isRootArray = true;
+      } else if (pathString.charAt(1) == '.') {
+        isRootArray = pathString.length() > 2 && pathString.charAt(2) == '[';
+      } else {
+        return null;
+      }
+    }
+
     // Cache pathExpr
     String[] pathExpr = pathExprCache.get(pathString);
     if (pathExpr == null) {
@@ -135,24 +150,22 @@ public class UDFJson extends UDF {
       pathExprCache.put(pathString, pathExpr);
     }
 
-    if (!pathExpr[0].equalsIgnoreCase("$")) {
-      return null;
-    }
     // Cache extractObject
     Object extractObject = extractObjectCache.get(jsonString);
     if (extractObject == null) {
+      JavaType javaType = isRootArray ? LIST_TYPE : MAP_TYPE;
       try {
-        extractObject = MAPPER.readValue(jsonString, MAP_TYPE);
+        extractObject = MAPPER.readValue(jsonString, javaType);
       } catch (Exception e) {
         return null;
       }
       extractObjectCache.put(jsonString, extractObject);
     }
-    for (int i = 1; i < pathExpr.length; i++) {
+    for (int i = pathExprStart; i < pathExpr.length; i++) {
       if (extractObject == null) {
           return null;
       }
-      extractObject = extract(extractObject, pathExpr[i]);
+      extractObject = extract(extractObject, pathExpr[i], i == pathExprStart && isRootArray);
     }
     if (extractObject instanceof Map || extractObject instanceof List) {
       try {
@@ -168,36 +181,37 @@ public class UDFJson extends UDF {
     return result;
   }
 
-  private Object extract(Object json, String path) {
-
-    // Cache patternkey.matcher(path).matches()
-    Matcher mKey = null;
-    Boolean mKeyMatches = mKeyMatchesCache.get(path);
-    if (mKeyMatches == null) {
-      mKey = patternKey.matcher(path);
-      mKeyMatches = mKey.matches() ? Boolean.TRUE : Boolean.FALSE;
-      mKeyMatchesCache.put(path, mKeyMatches);
-    }
-    if (!mKeyMatches.booleanValue()) {
-      return null;
-    }
-
-    // Cache mkey.group(1)
-    String mKeyGroup1 = mKeyGroup1Cache.get(path);
-    if (mKeyGroup1 == null) {
-      if (mKey == null) {
+  private Object extract(Object json, String path, boolean skipMapProc) {
+    // skip MAP processing for the first path element if root is array
+    if (!skipMapProc) {
+      // Cache patternkey.matcher(path).matches()
+      Matcher mKey = null;
+      Boolean mKeyMatches = mKeyMatchesCache.get(path);
+      if (mKeyMatches == null) {
         mKey = patternKey.matcher(path);
         mKeyMatches = mKey.matches() ? Boolean.TRUE : Boolean.FALSE;
         mKeyMatchesCache.put(path, mKeyMatches);
-        if (!mKeyMatches.booleanValue()) {
-          return null;
+      }
+      if (!mKeyMatches.booleanValue()) {
+        return null;
+      }
+
+      // Cache mkey.group(1)
+      String mKeyGroup1 = mKeyGroup1Cache.get(path);
+      if (mKeyGroup1 == null) {
+        if (mKey == null) {
+          mKey = patternKey.matcher(path);
+          mKeyMatches = mKey.matches() ? Boolean.TRUE : Boolean.FALSE;
+          mKeyMatchesCache.put(path, mKeyMatches);
+          if (!mKeyMatches.booleanValue()) {
+            return null;
+          }
         }
+        mKeyGroup1 = mKey.group(1);
+        mKeyGroup1Cache.put(path, mKeyGroup1);
       }
-      mKeyGroup1 = mKey.group(1);
-      mKeyGroup1Cache.put(path, mKeyGroup1);
+      json = extract_json_withkey(json, mKeyGroup1);
     }
-    json = extract_json_withkey(json, mKeyGroup1);
-
     // Cache indexList
     ArrayList<String> indexList = indexListCache.get(path);
     if (indexList == null) {

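The UDFJson change above lets get_json_object handle documents whose root is a JSON array: when the path starts with "$[" (or "$.[") the input is parsed as a List rather than a Map, and the first path element is treated as an index expression. A minimal usage sketch calling the UDF class directly, assuming hive-exec with this patch on the classpath; the expected outputs are inferred from the code, not taken from a test run:

    import org.apache.hadoop.hive.ql.udf.UDFJson;
    import org.apache.hadoop.io.Text;

    public class GetJsonObjectSketch {
      public static void main(String[] args) {
        UDFJson udf = new UDFJson();

        // Root-level array: previously this returned NULL because the document
        // was always deserialized as a Map.
        Text name = udf.evaluate("[{\"name\":\"alice\"},{\"name\":\"bob\"}]", "$[1].name");
        System.out.println(name); // expected: bob

        // Paths not starting with '$' are still rejected up front.
        System.out.println(udf.evaluate("{\"name\":\"alice\"}", "name")); // expected: null
      }
    }
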
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java Sat Mar 28 14:03:43 2015
@@ -138,15 +138,15 @@ public class GenericUDAFComputeStats ext
       } else {
         soi = (StructObjectInspector) parameters[0];
 
-        countTruesField = soi.getStructFieldRef("CountTrues");
+        countTruesField = soi.getStructFieldRef("counttrues");
         countTruesFieldOI = (WritableLongObjectInspector)
                                countTruesField.getFieldObjectInspector();
 
-        countFalsesField = soi.getStructFieldRef("CountFalses");
+        countFalsesField = soi.getStructFieldRef("countfalses");
         countFalsesFieldOI = (WritableLongObjectInspector)
                                 countFalsesField.getFieldObjectInspector();
 
-        countNullsField = soi.getStructFieldRef("CountNulls");
+        countNullsField = soi.getStructFieldRef("countnulls");
         countNullsFieldOI = (WritableLongObjectInspector) countNullsField.getFieldObjectInspector();
       }
 
@@ -158,10 +158,10 @@ public class GenericUDAFComputeStats ext
       foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
 
       List<String> fname = new ArrayList<String>();
-      fname.add("ColumnType");
-      fname.add("CountTrues");
-      fname.add("CountFalses");
-      fname.add("CountNulls");
+      fname.add("columntype");
+      fname.add("counttrues");
+      fname.add("countfalses");
+      fname.add("countnulls");
 
       partialResult = new Object[4];
       partialResult[0] = new Text();
@@ -320,13 +320,13 @@ public class GenericUDAFComputeStats ext
     protected transient OI maxFieldOI;
 
     protected transient StructField countNullsField;
-    protected transient WritableLongObjectInspector countNullsFieldOI;
+    protected transient LongObjectInspector countNullsFieldOI;
 
     protected transient StructField ndvField;
-    protected transient WritableStringObjectInspector ndvFieldOI;
+    protected transient StringObjectInspector ndvFieldOI;
 
     protected transient StructField numBitVectorsField;
-    protected transient WritableIntObjectInspector numBitVectorsFieldOI;
+    protected transient IntObjectInspector numBitVectorsFieldOI;
 
     /* Partial aggregation result returned by TerminatePartial. Partial result is a struct
      * containing a long field named "count".
@@ -352,20 +352,20 @@ public class GenericUDAFComputeStats ext
       } else {
         soi = (StructObjectInspector) parameters[0];
 
-        minField = soi.getStructFieldRef("Min");
+        minField = soi.getStructFieldRef("min");
         minFieldOI = (OI) minField.getFieldObjectInspector();
 
-        maxField = soi.getStructFieldRef("Max");
+        maxField = soi.getStructFieldRef("max");
         maxFieldOI = (OI) maxField.getFieldObjectInspector();
 
-        countNullsField = soi.getStructFieldRef("CountNulls");
-        countNullsFieldOI = (WritableLongObjectInspector) countNullsField.getFieldObjectInspector();
+        countNullsField = soi.getStructFieldRef("countnulls");
+        countNullsFieldOI = (LongObjectInspector) countNullsField.getFieldObjectInspector();
 
-        ndvField = soi.getStructFieldRef("BitVector");
-        ndvFieldOI = (WritableStringObjectInspector) ndvField.getFieldObjectInspector();
+        ndvField = soi.getStructFieldRef("bitvector");
+        ndvFieldOI = (StringObjectInspector) ndvField.getFieldObjectInspector();
 
-        numBitVectorsField = soi.getStructFieldRef("NumBitVectors");
-        numBitVectorsFieldOI = (WritableIntObjectInspector)
+        numBitVectorsField = soi.getStructFieldRef("numbitvectors");
+        numBitVectorsFieldOI = (IntObjectInspector)
             numBitVectorsField.getFieldObjectInspector();
       }
 
@@ -380,12 +380,12 @@ public class GenericUDAFComputeStats ext
         foi.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
 
         List<String> fname = new ArrayList<String>();
-        fname.add("ColumnType");
-        fname.add("Min");
-        fname.add("Max");
-        fname.add("CountNulls");
-        fname.add("BitVector");
-        fname.add("NumBitVectors");
+        fname.add("columnType");
+        fname.add("min");
+        fname.add("max");
+        fname.add("countnulls");
+        fname.add("bitvector");
+        fname.add("numbitvectors");
 
         partialResult = new Object[6];
         partialResult[0] = new Text();
@@ -404,11 +404,11 @@ public class GenericUDAFComputeStats ext
         foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
 
         List<String> fname = new ArrayList<String>();
-        fname.add("ColumnType");
-        fname.add("Min");
-        fname.add("Max");
-        fname.add("CountNulls");
-        fname.add("NumDistinctValues");
+        fname.add("columnType");
+        fname.add("min");
+        fname.add("max");
+        fname.add("countnulls");
+        fname.add("numdistinctvalues");
 
         result = new Object[5];
         result[0] = new Text();
@@ -706,22 +706,22 @@ public class GenericUDAFComputeStats ext
     private transient StructObjectInspector soi;
 
     private transient StructField maxLengthField;
-    private transient WritableLongObjectInspector maxLengthFieldOI;
+    private transient LongObjectInspector maxLengthFieldOI;
 
     private transient StructField sumLengthField;
-    private transient WritableLongObjectInspector sumLengthFieldOI;
+    private transient LongObjectInspector sumLengthFieldOI;
 
     private transient StructField countField;
-    private transient WritableLongObjectInspector countFieldOI;
+    private transient LongObjectInspector countFieldOI;
 
     private transient StructField countNullsField;
-    private transient WritableLongObjectInspector countNullsFieldOI;
+    private transient LongObjectInspector countNullsFieldOI;
 
     private transient StructField ndvField;
-    private transient WritableStringObjectInspector ndvFieldOI;
+    private transient StringObjectInspector ndvFieldOI;
 
     private transient StructField numBitVectorsField;
-    private transient WritableIntObjectInspector numBitVectorsFieldOI;
+    private transient IntObjectInspector numBitVectorsFieldOI;
 
     /* Output of final result of the aggregation
      */
@@ -738,23 +738,23 @@ public class GenericUDAFComputeStats ext
       } else {
         soi = (StructObjectInspector) parameters[0];
 
-        maxLengthField = soi.getStructFieldRef("MaxLength");
-        maxLengthFieldOI = (WritableLongObjectInspector) maxLengthField.getFieldObjectInspector();
+        maxLengthField = soi.getStructFieldRef("maxlength");
+        maxLengthFieldOI = (LongObjectInspector) maxLengthField.getFieldObjectInspector();
 
-        sumLengthField = soi.getStructFieldRef("SumLength");
-        sumLengthFieldOI = (WritableLongObjectInspector) sumLengthField.getFieldObjectInspector();
+        sumLengthField = soi.getStructFieldRef("sumlength");
+        sumLengthFieldOI = (LongObjectInspector) sumLengthField.getFieldObjectInspector();
 
-        countField = soi.getStructFieldRef("Count");
-        countFieldOI = (WritableLongObjectInspector) countField.getFieldObjectInspector();
+        countField = soi.getStructFieldRef("count");
+        countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
 
-        countNullsField = soi.getStructFieldRef("CountNulls");
-        countNullsFieldOI = (WritableLongObjectInspector) countNullsField.getFieldObjectInspector();
+        countNullsField = soi.getStructFieldRef("countnulls");
+        countNullsFieldOI = (LongObjectInspector) countNullsField.getFieldObjectInspector();
 
-        ndvField = soi.getStructFieldRef("BitVector");
-        ndvFieldOI = (WritableStringObjectInspector) ndvField.getFieldObjectInspector();
+        ndvField = soi.getStructFieldRef("bitvector");
+        ndvFieldOI = (StringObjectInspector) ndvField.getFieldObjectInspector();
 
-        numBitVectorsField = soi.getStructFieldRef("NumBitVectors");
-        numBitVectorsFieldOI = (WritableIntObjectInspector)
+        numBitVectorsField = soi.getStructFieldRef("numbitvectors");
+        numBitVectorsFieldOI = (IntObjectInspector)
                                   numBitVectorsField.getFieldObjectInspector();
       }
 
@@ -770,13 +770,13 @@ public class GenericUDAFComputeStats ext
         foi.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
 
         List<String> fname = new ArrayList<String>();
-        fname.add("ColumnType");
-        fname.add("MaxLength");
-        fname.add("SumLength");
-        fname.add("Count");
-        fname.add("CountNulls");
-        fname.add("BitVector");
-        fname.add("NumBitVectors");
+        fname.add("columntype");
+        fname.add("maxlength");
+        fname.add("sumlength");
+        fname.add("count");
+        fname.add("countnulls");
+        fname.add("bitvector");
+        fname.add("numbitvectors");
 
         partialResult = new Object[7];
         partialResult[0] = new Text();
@@ -798,11 +798,11 @@ public class GenericUDAFComputeStats ext
         foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
 
         List<String> fname = new ArrayList<String>();
-        fname.add("ColumnType");
-        fname.add("MaxLength");
-        fname.add("AvgLength");
-        fname.add("CountNulls");
-        fname.add("NumDistinctValues");
+        fname.add("columntype");
+        fname.add("maxlength");
+        fname.add("avglength");
+        fname.add("countnulls");
+        fname.add("numdistinctvalues");
 
         result = new Object[5];
         result[0] = new Text();
@@ -1030,16 +1030,16 @@ public class GenericUDAFComputeStats ext
     private transient StructObjectInspector soi;
 
     private transient StructField maxLengthField;
-    private transient WritableLongObjectInspector maxLengthFieldOI;
+    private transient LongObjectInspector maxLengthFieldOI;
 
     private transient StructField sumLengthField;
-    private transient WritableLongObjectInspector sumLengthFieldOI;
+    private transient LongObjectInspector sumLengthFieldOI;
 
     private transient StructField countField;
-    private transient WritableLongObjectInspector countFieldOI;
+    private transient LongObjectInspector countFieldOI;
 
     private transient StructField countNullsField;
-    private transient WritableLongObjectInspector countNullsFieldOI;
+    private transient LongObjectInspector countNullsFieldOI;
 
     /* Output of final result of the aggregation
      */
@@ -1055,17 +1055,17 @@ public class GenericUDAFComputeStats ext
       } else {
         soi = (StructObjectInspector) parameters[0];
 
-        maxLengthField = soi.getStructFieldRef("MaxLength");
-        maxLengthFieldOI = (WritableLongObjectInspector) maxLengthField.getFieldObjectInspector();
+        maxLengthField = soi.getStructFieldRef("maxlength");
+        maxLengthFieldOI = (LongObjectInspector) maxLengthField.getFieldObjectInspector();
 
-        sumLengthField = soi.getStructFieldRef("SumLength");
-        sumLengthFieldOI = (WritableLongObjectInspector) sumLengthField.getFieldObjectInspector();
+        sumLengthField = soi.getStructFieldRef("sumlength");
+        sumLengthFieldOI = (LongObjectInspector) sumLengthField.getFieldObjectInspector();
 
-        countField = soi.getStructFieldRef("Count");
-        countFieldOI = (WritableLongObjectInspector) countField.getFieldObjectInspector();
+        countField = soi.getStructFieldRef("count");
+        countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
 
-        countNullsField = soi.getStructFieldRef("CountNulls");
-        countNullsFieldOI = (WritableLongObjectInspector) countNullsField.getFieldObjectInspector();
+        countNullsField = soi.getStructFieldRef("countnulls");
+        countNullsFieldOI = (LongObjectInspector) countNullsField.getFieldObjectInspector();
 
       }
 
@@ -1079,11 +1079,11 @@ public class GenericUDAFComputeStats ext
         foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
 
         List<String> fname = new ArrayList<String>();
-        fname.add("ColumnType");
-        fname.add("MaxLength");
-        fname.add("SumLength");
-        fname.add("Count");
-        fname.add("CountNulls");
+        fname.add("columntype");
+        fname.add("maxlength");
+        fname.add("sumlength");
+        fname.add("count");
+        fname.add("countnulls");
 
         partialResult = new Object[5];
         partialResult[0] = new Text();
@@ -1102,10 +1102,10 @@ public class GenericUDAFComputeStats ext
         foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
 
         List<String> fname = new ArrayList<String>();
-        fname.add("ColumnType");
-        fname.add("MaxLength");
-        fname.add("AvgLength");
-        fname.add("CountNulls");
+        fname.add("columntype");
+        fname.add("maxlength");
+        fname.add("avglength");
+        fname.add("countnulls");
 
         result = new Object[4];
         result[0] = new Text();

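The GenericUDAFComputeStats changes above move the partial-result struct field names to lowercase (for example "CountNulls" becomes "countnulls") and widen the merge-side object inspectors from the Writable* concrete classes to the LongObjectInspector, StringObjectInspector and IntObjectInspector interfaces, so the evaluator accepts whichever concrete inspector the framework supplies. A small illustrative sketch of the naming, building a standard struct inspector shaped like the boolean-stats partial result and resolving a field by name; it assumes hive-serde on the classpath and is not part of the patch itself:

    import java.util.Arrays;

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class StatsStructSketch {
      public static void main(String[] args) {
        // Same shape as the boolean-stats partial result, with the lowercase names.
        StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("columntype", "counttrues", "countfalses", "countnulls"),
            Arrays.<ObjectInspector>asList(
                PrimitiveObjectInspectorFactory.writableStringObjectInspector,
                PrimitiveObjectInspectorFactory.writableLongObjectInspector,
                PrimitiveObjectInspectorFactory.writableLongObjectInspector,
                PrimitiveObjectInspectorFactory.writableLongObjectInspector));

        // Field lookup uses the same lowercase string the evaluator now passes
        // to getStructFieldRef.
        StructField countNulls = soi.getStructFieldRef("countnulls");
        System.out.println(countNulls.getFieldName());
      }
    }
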
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java Sat Mar 28 14:03:43 2015
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.exec.De
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -158,16 +159,16 @@ public class GenericUDAFContextNGrams im
    */
   public static class GenericUDAFContextNGramEvaluator extends GenericUDAFEvaluator {
     // For PARTIAL1 and COMPLETE: ObjectInspectors for original data
-    private transient StandardListObjectInspector outerInputOI;
+    private transient ListObjectInspector outerInputOI;
     private transient StandardListObjectInspector innerInputOI;
-    private transient StandardListObjectInspector contextListOI;
+    private transient ListObjectInspector contextListOI;
     private PrimitiveObjectInspector contextOI;
     private PrimitiveObjectInspector inputOI;
     private transient PrimitiveObjectInspector kOI;
     private transient PrimitiveObjectInspector pOI;
 
     // For PARTIAL2 and FINAL: ObjectInspectors for partial aggregations
-    private transient StandardListObjectInspector loi;
+    private transient ListObjectInspector loi;
 
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
@@ -175,7 +176,7 @@ public class GenericUDAFContextNGrams im
 
       // Init input object inspectors
       if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) {
-        outerInputOI = (StandardListObjectInspector) parameters[0];
+        outerInputOI = (ListObjectInspector) parameters[0];
         if(outerInputOI.getListElementObjectInspector().getCategory() ==
             ObjectInspector.Category.LIST) {
           // We're dealing with input that is an array of arrays of strings
@@ -186,7 +187,7 @@ public class GenericUDAFContextNGrams im
           inputOI = (PrimitiveObjectInspector) outerInputOI.getListElementObjectInspector();
           innerInputOI = null;
         }
-        contextListOI = (StandardListObjectInspector) parameters[1];
+        contextListOI = (ListObjectInspector) parameters[1];
         contextOI = (PrimitiveObjectInspector) contextListOI.getListElementObjectInspector();
         kOI = (PrimitiveObjectInspector) parameters[2];
         if(parameters.length == 4) {
@@ -196,7 +197,7 @@ public class GenericUDAFContextNGrams im
         }
       } else {
           // Init the list object inspector for handling partial aggregations
-          loi = (StandardListObjectInspector) parameters[0];
+          loi = (ListObjectInspector) parameters[0];
       }
 
       // Init output object inspectors.
@@ -229,10 +230,10 @@ public class GenericUDAFContextNGrams im
         return;
       }
       NGramAggBuf myagg = (NGramAggBuf) agg;
-      List<Text> partial = (List<Text>) loi.getList(obj);
+      List partial = (List) loi.getList(obj);
 
       // remove the context words from the end of the list
-      int contextSize = Integer.parseInt( ((Text)partial.get(partial.size()-1)).toString() );
+      int contextSize = Integer.parseInt( partial.get(partial.size()-1).toString() );
       partial.remove(partial.size()-1);
       if(myagg.context.size() > 0)  {
         if(contextSize != myagg.context.size()) {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java Sat Mar 28 14:03:43 2015
@@ -88,8 +88,9 @@ public class GenericUDAFCount implements
     public ObjectInspector init(Mode m, ObjectInspector[] parameters)
     throws HiveException {
       super.init(m, parameters);
-      partialCountAggOI =
-        PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+      if (mode == Mode.PARTIAL2 || mode == Mode.FINAL) {
+        partialCountAggOI = (LongObjectInspector)parameters[0];
+      }
       result = new LongWritable(0);
       return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
     }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java Sat Mar 28 14:03:43 2015
@@ -91,12 +91,12 @@ public class GenericUDAFEWAHBitmap exten
             .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
       } else if (m == Mode.PARTIAL2 || m == Mode.FINAL) {
         internalMergeOI = (StandardListObjectInspector) parameters[0];
-        inputOI = PrimitiveObjectInspectorFactory.writableByteObjectInspector;
+        inputOI = (PrimitiveObjectInspector)internalMergeOI.getListElementObjectInspector();
         loi = (StandardListObjectInspector) ObjectInspectorFactory
             .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
         return loi;
       } else { // Mode.COMPLETE, ie. no map-side aggregation, requires ordering
-        inputOI = PrimitiveObjectInspectorFactory.writableByteObjectInspector;
+        inputOI = (PrimitiveObjectInspector)parameters[0];
         loi = (StandardListObjectInspector) ObjectInspectorFactory
             .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
         return loi;


