hive-commits mailing list archives

From hashut...@apache.org
Subject svn commit: r1671979 [1/6] - in /hive/trunk: common/ common/src/java/org/apache/hadoop/hive/common/jsonexplain/ common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/ common/src/java/org/apache/hadoop/hive/conf/ itests/src/test/resources/ ql/sr...
Date Wed, 08 Apr 2015 00:05:56 GMT
Author: hashutosh
Date: Wed Apr  8 00:05:54 2015
New Revision: 1671979

URL: http://svn.apache.org/r1671979
Log:
HIVE-9780 : Add another level of explain for RDBMS audience (Pengcheng Xiong via John Pullokkaran & Ashutosh Chauhan)
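
In short, this commit adds a condensed, user-level rendering of EXPLAIN output for the Tez engine, gated by the new hive.explain.user flag (see the HiveConf hunk below). A minimal sketch of exercising it end to end over JDBC, assuming a reachable HiveServer2 instance with the Hive JDBC driver on the classpath; the URL, table, and query are illustrative, not part of this commit:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class UserLevelExplainDemo {
      public static void main(String[] args) throws Exception {
        // illustrative HiveServer2 URL; adjust host/port/database as needed
        try (Connection conn =
                 DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
          stmt.execute("set hive.execution.engine=tez");
          // enable the user-level explain introduced by this commit
          stmt.execute("set hive.explain.user=true");
          // each row of the result set is one line of the condensed plan
          try (ResultSet rs =
                   stmt.executeQuery("EXPLAIN SELECT key, count(*) FROM src GROUP BY key")) {
            while (rs.next()) {
              System.out.println(rs.getString(1));
            }
          }
        }
      }
    }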

Added:
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParserFactory.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Attr.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Connection.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java
    hive/trunk/ql/src/test/queries/clientpositive/explainuser_1.q
    hive/trunk/ql/src/test/queries/clientpositive/explainuser_2.q
    hive/trunk/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/explainuser_2.q.out
Modified:
    hive/trunk/common/pom.xml
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/itests/src/test/resources/testconfiguration.properties
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/CalciteSemanticException.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveProject.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterIndexDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DemuxDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DependencyCollectionWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DummyStoreDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DynamicPruningEventDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GrantRevokeRoleDDL.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LockDatabaseDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LockTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MergeJoinWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MuxDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/OrcFileMergeDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/RCFileMergeDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SMBJoinDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTblPropertiesDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsNoJobWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFQueryInputDef.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PartitionedTableFunctionDef.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java

Modified: hive/trunk/common/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/common/pom.xml?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/common/pom.xml (original)
+++ hive/trunk/common/pom.xml Wed Apr  8 00:05:54 2015
@@ -93,6 +93,11 @@
       <version>${junit.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.json</groupId>
+      <artifactId>json</artifactId>
+      <version>${json.version}</version>
+    </dependency>
   </dependencies>
 
   <profiles>

Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java?rev=1671979&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java Wed Apr  8 00:05:54 2015
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain;
+
+import java.io.PrintStream;
+
+import org.json.JSONObject;
+
+/**
+ * JsonParser is the interface for classes that print a JSONObject
+ * into an output stream.
+ */
+public interface JsonParser {
+  public void print(JSONObject inputObject, PrintStream outputStream) throws Exception;
+}
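
A minimal sketch of what an implementation of this interface can look like; it is illustrative only, not part of this commit, and simply pretty-prints the raw JSON plan:

    package org.apache.hadoop.hive.common.jsonexplain;

    import java.io.PrintStream;

    import org.json.JSONObject;

    // illustrative implementation: pretty-print the raw plan with a 2-space indent
    public class PrettyPrintJsonParser implements JsonParser {
      @Override
      public void print(JSONObject inputObject, PrintStream outputStream) throws Exception {
        outputStream.println(inputObject.toString(2));
      }
    }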

Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParserFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParserFactory.java?rev=1671979&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParserFactory.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParserFactory.java Wed Apr  8 00:05:54 2015
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain;
+
+import org.apache.hadoop.hive.common.jsonexplain.tez.TezJsonParser;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+public class JsonParserFactory {
+
+  private JsonParserFactory() {
+    // avoid instantiation
+  }
+
+  /**
+   * @param conf
+   * @return the appropriate JsonParser to print a JSONObject into an output
+   *         stream, or null if the execution engine is not tez.
+   */
+  public static JsonParser getParser(HiveConf conf) {
+    if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
+      return new TezJsonParser();
+    }
+    return null;
+  }
+}
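
For context, a sketch of how a caller uses the factory, mirroring what ExplainTask does later in this commit; it assumes Hive's jars and configuration are on the classpath, and the empty plan JSON is illustrative:

    import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
    import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.json.JSONObject;

    public class FactoryUsageSketch {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        conf.setVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE, "tez");
        JsonParser parser = JsonParserFactory.getParser(conf);
        // getParser returns null for engines other than tez, so callers must check
        if (parser != null) {
          JSONObject plan =
              new JSONObject("{\"STAGE DEPENDENCIES\":{},\"STAGE PLANS\":{}}");
          parser.print(plan, System.out);
        }
      }
    }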

Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Attr.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Attr.java?rev=1671979&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Attr.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Attr.java Wed Apr  8 00:05:54 2015
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+public class Attr implements Comparable<Attr> {
+  String name;
+  String value;
+
+  public Attr(String name, String value) {
+    super();
+    this.name = name;
+    this.value = value;
+  }
+
+  @Override
+  public int compareTo(Attr o) {
+    return this.name.compareToIgnoreCase(o.name);
+  }
+
+  @Override
+  public String toString() {
+    return this.name + this.value;
+  }
+}
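
Attr is a name/value pair ordered case-insensitively by name; this is what lets Op.print and Stage.print below emit attributes in a stable order via Collections.sort. A small sketch with illustrative attribute names:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    import org.apache.hadoop.hive.common.jsonexplain.tez.Attr;

    public class AttrSortSketch {
      public static void main(String[] args) {
        List<Attr> attrs = new ArrayList<>();
        attrs.add(new Attr("outputColumnNames:", " _col0"));
        attrs.add(new Attr("Statistics:", " Num rows: 500"));
        attrs.add(new Attr("expressions:", " key"));
        Collections.sort(attrs); // case-insensitive by attribute name
        for (Attr attr : attrs) {
          System.out.println(attr); // toString() is the name immediately followed by the value
        }
        // prints:
        // expressions: key
        // outputColumnNames: _col0
        // Statistics: Num rows: 500
      }
    }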

Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Connection.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Connection.java?rev=1671979&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Connection.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Connection.java Wed Apr  8 00:05:54 2015
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+public class Connection {
+  public String type;
+  public Vertex from;
+
+  public Connection(String type, Vertex from) {
+    super();
+    this.type = type;
+    this.from = from;
+  }
+}

Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java?rev=1671979&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java Wed Apr  8 00:05:54 2015
@@ -0,0 +1,226 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+public class Op {
+  String name;
+  String operatorId;
+  Op parent;
+  List<Op> children;
+  List<Attr> attrs;
+  // the jsonObject for this operator
+  JSONObject opObject;
+  // the vertex that this operator belongs to
+  Vertex vertex;
+  // the vertex that this operator outputs to, if this operator is a
+  // ReduceOutputOperator
+  String outputVertexName;
+
+  public Op(String name, String id, String outputVertexName, List<Op> children, List<Attr> attrs,
+      JSONObject opObject, Vertex vertex) throws JSONException {
+    super();
+    this.name = name;
+    this.operatorId = id;
+    this.outputVertexName = outputVertexName;
+    this.children = children;
+    this.attrs = attrs;
+    this.opObject = opObject;
+    this.vertex = vertex;
+  }
+
+  private void inlineJoinOp() throws Exception {
+    // inline map join operator
+    if (this.name.equals("Map Join Operator")) {
+      JSONObject mapjoinObj = opObject.getJSONObject("Map Join Operator");
+      // get the map for posToVertex
+      JSONObject verticeObj = mapjoinObj.getJSONObject("input vertices:");
+      Map<String, String> posToVertex = new HashMap<>();
+      for (String pos : JSONObject.getNames(verticeObj)) {
+        String vertexName = verticeObj.getString(pos);
+        posToVertex.put(pos, vertexName);
+        // update the connection
+        Connection c = null;
+        for (Connection connection : vertex.parentConnections) {
+          if (connection.from.name.equals(vertexName)) {
+            c = connection;
+            break;
+          }
+        }
+        if (c != null) {
+          TezJsonParser.addInline(this, c);
+        }
+      }
+      // update the attrs
+      removeAttr("input vertices:");
+      // update the keys to use vertex name
+      JSONObject keys = mapjoinObj.getJSONObject("keys:");
+      if (keys.length() != 0) {
+        JSONObject newKeys = new JSONObject();
+        for (String key : JSONObject.getNames(keys)) {
+          String vertexName = posToVertex.get(key);
+          if (vertexName != null) {
+            newKeys.put(vertexName, keys.get(key));
+          } else {
+            newKeys.put(this.vertex.name, keys.get(key));
+          }
+        }
+        // update the attrs
+        removeAttr("keys:");
+        this.attrs.add(new Attr("keys:", newKeys.toString()));
+      }
+    }
+    // inline merge join operator in a self-join
+    else if (this.name.equals("Merge Join Operator")) {
+      if (this.vertex != null) {
+        for (Vertex v : this.vertex.mergeJoinDummyVertexs) {
+          TezJsonParser.addInline(this, new Connection(null, v));
+        }
+      }
+    } else {
+      throw new Exception("Unknown join operator");
+    }
+  }
+
+  private String getNameWithOpId() {
+    if (operatorId != null) {
+      return this.name + " [" + operatorId + "]";
+    } else {
+      return this.name;
+    }
+  }
+
+  /**
+   * @param out
+   * @param indentFlag
+   * @param branchOfJoinOp
+   *          whether this operator is on a branch of a Join operator, which
+   *          decides the corresponding indent.
+   * @throws Exception
+   */
+  public void print(PrintStream out, List<Boolean> indentFlag, boolean branchOfJoinOp)
+      throws Exception {
+    // print name
+    if (TezJsonParser.printSet.contains(this)) {
+      out.println(TezJsonParser.prefixString(indentFlag) + " Please refer to the previous "
+          + this.getNameWithOpId());
+      return;
+    }
+    TezJsonParser.printSet.add(this);
+    if (!branchOfJoinOp) {
+      out.println(TezJsonParser.prefixString(indentFlag) + this.getNameWithOpId());
+    } else {
+      out.println(TezJsonParser.prefixString(indentFlag, "|<-") + this.getNameWithOpId());
+    }
+    branchOfJoinOp = false;
+    // if this operator is a join operator
+    if (this.name.contains("Join")) {
+      inlineJoinOp();
+      branchOfJoinOp = true;
+    }
+    // if this operator is the last operator, we summarize the non-inlined
+    // vertices
+    List<Connection> noninlined = new ArrayList<>();
+    if (this.parent == null) {
+      if (this.vertex != null) {
+        for (Connection connection : this.vertex.parentConnections) {
+          if (!TezJsonParser.isInline(connection.from)) {
+            noninlined.add(connection);
+          }
+        }
+      }
+    }
+    // print attr
+    List<Boolean> attFlag = new ArrayList<>();
+    attFlag.addAll(indentFlag);
+    // should print | if (1) it is branchOfJoinOp or (2) it is the last op and
+    // has a following non-inlined vertex
+    if (branchOfJoinOp || (this.parent == null && !noninlined.isEmpty())) {
+      attFlag.add(true);
+    } else {
+      attFlag.add(false);
+    }
+    Collections.sort(attrs);
+    for (Attr attr : attrs) {
+      out.println(TezJsonParser.prefixString(attFlag) + attr.toString());
+    }
+    // print inline vertex
+    if (TezJsonParser.inlineMap.containsKey(this)) {
+      for (int index = 0; index < TezJsonParser.inlineMap.get(this).size(); index++) {
+        Connection connection = TezJsonParser.inlineMap.get(this).get(index);
+        List<Boolean> vertexFlag = new ArrayList<>();
+        vertexFlag.addAll(indentFlag);
+        if (branchOfJoinOp) {
+          vertexFlag.add(true);
+        }
+        // if there is an inline vertex but the operator itself is not on a
+        // join branch, then the vertex was created from an operator tree,
+        // e.g., by a fetch operator
+        else {
+          vertexFlag.add(false);
+        }
+        connection.from.print(out, vertexFlag, connection.type, this.vertex);
+      }
+    }
+    // print parent op, i.e., where data comes from
+    if (this.parent != null) {
+      List<Boolean> parentFlag = new ArrayList<>();
+      parentFlag.addAll(indentFlag);
+      parentFlag.add(false);
+      this.parent.print(out, parentFlag, branchOfJoinOp);
+    }
+    // print next vertex
+    else {
+      for (int index = 0; index < noninlined.size(); index++) {
+        Vertex v = noninlined.get(index).from;
+        List<Boolean> vertexFlag = new ArrayList<>();
+        vertexFlag.addAll(indentFlag);
+        if (index != noninlined.size() - 1) {
+          vertexFlag.add(true);
+        } else {
+          vertexFlag.add(false);
+        }
+        v.print(out, vertexFlag, noninlined.get(index).type, this.vertex);
+      }
+    }
+  }
+
+  public void removeAttr(String name) {
+    int removeIndex = -1;
+    for (int index = 0; index < attrs.size(); index++) {
+      if (attrs.get(index).name.equals(name)) {
+        removeIndex = index;
+        break;
+      }
+    }
+    if (removeIndex != -1) {
+      attrs.remove(removeIndex);
+    }
+  }
+}

Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java?rev=1671979&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Stage.java Wed Apr  8 00:05:54 2015
@@ -0,0 +1,253 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+public class Stage {
+  String name;
+  // upstream stages, e.g., root stage
+  List<Stage> parentStages;
+  // downstream stages.
+  List<Stage> childStages;
+  Map<String, Vertex> vertexs;
+  List<Attr> attrs;
+  LinkedHashMap<Vertex, List<Connection>> tezStageDependency;
+  // some stages may contain only a single operator, e.g., the create table
+  // operator or the fetch operator.
+  Op op;
+
+  public Stage(String name) {
+    super();
+    this.name = name;
+    parentStages = new ArrayList<>();
+    childStages = new ArrayList<>();
+    attrs = new ArrayList<>();
+    vertexs = new LinkedHashMap<>();
+  }
+
+  public void addDependency(JSONObject object, Map<String, Stage> stages) throws JSONException {
+    if (!object.has("ROOT STAGE")) {
+      String names = object.getString("DEPENDENT STAGES");
+      for (String name : names.split(",")) {
+        Stage parent = stages.get(name.trim());
+        this.parentStages.add(parent);
+        parent.childStages.add(this);
+      }
+    }
+  }
+
+  /**
+   * If the stage object contains "Tez", we extract the vertices and edges;
+   * otherwise, we directly extract the operators and/or attributes.
+   *
+   * @param object
+   * @throws Exception
+   */
+  public void extractVertex(JSONObject object) throws Exception {
+    if (object.has("Tez")) {
+      this.tezStageDependency = new LinkedHashMap<>();
+      JSONObject tez = (JSONObject) object.get("Tez");
+      JSONObject vertices = tez.getJSONObject("Vertices:");
+      if (tez.has("Edges:")) {
+        JSONObject edges = tez.getJSONObject("Edges:");
+        // iterate for the first time to get all the vertices
+        for (String to : JSONObject.getNames(edges)) {
+          vertexs.put(to, new Vertex(to, vertices.getJSONObject(to)));
+        }
+        // iterate for the second time to get all the vertex dependencies
+        for (String to : JSONObject.getNames(edges)) {
+          Object o = edges.get(to);
+          Vertex v = vertexs.get(to);
+          // 1 to 1 mapping
+          if (o instanceof JSONObject) {
+            JSONObject obj = (JSONObject) o;
+            String parent = obj.getString("parent");
+            Vertex parentVertex = vertexs.get(parent);
+            if (parentVertex == null) {
+              parentVertex = new Vertex(parent, vertices.getJSONObject(parent));
+              vertexs.put(parent, parentVertex);
+            }
+            String type = obj.getString("type");
+            // for union vertex, we reverse the dependency relationship
+            if (!"CONTAINS".equals(type)) {
+              v.addDependency(new Connection(type, parentVertex));
+              parentVertex.children.add(v);
+            } else {
+              parentVertex.addDependency(new Connection(type, v));
+              v.children.add(parentVertex);
+            }
+            this.tezStageDependency.put(v, Arrays.asList(new Connection(type, parentVertex)));
+          } else {
+            // 1 to many mapping
+            JSONArray from = (JSONArray) o;
+            List<Connection> list = new ArrayList<>();
+            for (int index = 0; index < from.length(); index++) {
+              JSONObject obj = from.getJSONObject(index);
+              String parent = obj.getString("parent");
+              Vertex parentVertex = vertexs.get(parent);
+              if (parentVertex == null) {
+                parentVertex = new Vertex(parent, vertices.getJSONObject(parent));
+                vertexs.put(parent, parentVertex);
+              }
+              String type = obj.getString("type");
+              if (!"CONTAINS".equals(type)) {
+                v.addDependency(new Connection(type, parentVertex));
+                parentVertex.children.add(v);
+              } else {
+                parentVertex.addDependency(new Connection(type, v));
+                v.children.add(parentVertex);
+              }
+              list.add(new Connection(type, parentVertex));
+            }
+            this.tezStageDependency.put(v, list);
+          }
+        }
+      } else {
+        for (String vertexName : JSONObject.getNames(vertices)) {
+          vertexs.put(vertexName, new Vertex(vertexName, vertices.getJSONObject(vertexName)));
+        }
+      }
+      // extract the opTree in each vertex
+      for (Vertex v : vertexs.values()) {
+        if (!v.union) {
+          v.extractOpTree();
+          v.checkMultiReduceOperator();
+        }
+      }
+    } else {
+      String[] names = JSONObject.getNames(object);
+      for (String name : names) {
+        if (name.contains("Operator")) {
+          this.op = extractOp(name, object.getJSONObject(name));
+        } else {
+          attrs.add(new Attr(name, object.get(name).toString()));
+        }
+      }
+    }
+  }
+
+  /**
+   * This method handles stages that consist of a single operator, e.g., the
+   * create table operator or the fetch operator.
+   *
+   * @param opName
+   * @param opObj
+   * @return the extracted Op
+   * @throws JSONException
+   * @throws JsonParseException
+   * @throws JsonMappingException
+   * @throws IOException
+   * @throws Exception
+   */
+  Op extractOp(String opName, JSONObject opObj) throws JSONException, JsonParseException,
+      JsonMappingException, IOException, Exception {
+    List<Attr> attrs = new ArrayList<>();
+    Vertex v = null;
+    if (opObj.length() > 0) {
+      String[] names = JSONObject.getNames(opObj);
+      for (String name : names) {
+        Object o = opObj.get(name);
+        if (isPrintable(o)) {
+          attrs.add(new Attr(name, o.toString()));
+        } else if (o instanceof JSONObject) {
+          JSONObject attrObj = (JSONObject) o;
+          if (attrObj.length() > 0) {
+            if (name.equals("Processor Tree:")) {
+              JSONObject object = new JSONObject();
+              object.put(name, attrObj);
+              v = new Vertex(null, object);
+              v.extractOpTree();
+            } else {
+              for (String attrName : JSONObject.getNames(attrObj)) {
+                attrs.add(new Attr(attrName, attrObj.get(attrName).toString()));
+              }
+            }
+          }
+        } else {
+          throw new Exception("Unsupported object in " + this.name);
+        }
+      }
+    }
+    Op op = new Op(opName, null, null, null, attrs, null, v);
+    if (v != null) {
+      TezJsonParser.addInline(op, new Connection(null, v));
+    }
+    return op;
+  }
+
+  private boolean isPrintable(Object val) {
+    if (val instanceof Boolean || val instanceof String || val instanceof Integer
+        || val instanceof Long || val instanceof Byte || val instanceof Float
+        || val instanceof Double || val instanceof Path) {
+      return true;
+    }
+    if (val != null && val.getClass().isPrimitive()) {
+      return true;
+    }
+    return false;
+  }
+
+  public void print(PrintStream out, List<Boolean> indentFlag) throws JSONException, Exception {
+    // print stage name
+    if (TezJsonParser.printSet.contains(this)) {
+      out.println(TezJsonParser.prefixString(indentFlag) + " Please refer to the previous "
+          + this.name);
+      return;
+    }
+    TezJsonParser.printSet.add(this);
+    out.println(TezJsonParser.prefixString(indentFlag) + this.name);
+    // print vertexes
+    List<Boolean> nextIndentFlag = new ArrayList<>();
+    nextIndentFlag.addAll(indentFlag);
+    nextIndentFlag.add(false);
+    for (Vertex candidate : this.vertexs.values()) {
+      if (!TezJsonParser.isInline(candidate) && candidate.children.isEmpty()) {
+        candidate.print(out, nextIndentFlag, null, null);
+      }
+    }
+    if (!attrs.isEmpty()) {
+      Collections.sort(attrs);
+      for (Attr attr : attrs) {
+        out.println(TezJsonParser.prefixString(nextIndentFlag) + attr.toString());
+      }
+    }
+    if (op != null) {
+      op.print(out, nextIndentFlag, false);
+    }
+    nextIndentFlag.add(false);
+    // print dependent stages
+    for (Stage stage : this.parentStages) {
+      stage.print(out, nextIndentFlag);
+    }
+  }
+}
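
A small sketch of how addDependency wires up the stage graph from the "STAGE DEPENDENCIES" section of the plan JSON; the two-stage layout below is illustrative, not taken from this commit's test output:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.hive.common.jsonexplain.tez.Stage;
    import org.json.JSONObject;

    public class StageDependencySketch {
      public static void main(String[] args) throws Exception {
        Map<String, Stage> stages = new HashMap<>();
        stages.put("Stage-1", new Stage("Stage-1"));
        stages.put("Stage-0", new Stage("Stage-0"));
        // a root stage is left without parents
        stages.get("Stage-1").addDependency(
            new JSONObject("{\"ROOT STAGE\":\"TRUE\"}"), stages);
        // "Stage-0" gains "Stage-1" as a parent, and "Stage-1" gains
        // "Stage-0" as a child
        stages.get("Stage-0").addDependency(
            new JSONObject("{\"DEPENDENT STAGES\":\"Stage-1\"}"), stages);
      }
    }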

Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java?rev=1671979&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java Wed Apr  8 00:05:54 2015
@@ -0,0 +1,174 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+public class TezJsonParser implements JsonParser {
+  JSONObject inputObject;
+  Map<String, Stage> stages;
+  PrintStream outputStream;
+  protected final Log LOG;
+  // the objects that have been printed.
+  public static Set<Object> printSet = new HashSet<>();
+  // the vertices that should be inlined: <Operator, list of inlined Connections>
+  public static Map<Op, List<Connection>> inlineMap = new HashMap<>();
+
+  public TezJsonParser() {
+    super();
+    LOG = LogFactory.getLog(this.getClass().getName());
+  }
+
+  public void extractStagesAndPlans() throws JSONException, JsonParseException,
+      JsonMappingException, IOException, Exception {
+    // extract stages
+    this.stages = new HashMap<>();
+    JSONObject dependency = inputObject.getJSONObject("STAGE DEPENDENCIES");
+    if (dependency.length() > 0) {
+      // iterate for the first time to get all the names of stages.
+      for (String stageName : JSONObject.getNames(dependency)) {
+        this.stages.put(stageName, new Stage(stageName));
+      }
+      // iterate for the second time to get all the dependencies.
+      for (String stageName : JSONObject.getNames(dependency)) {
+        JSONObject dependentStageNames = dependency.getJSONObject(stageName);
+        this.stages.get(stageName).addDependency(dependentStageNames, this.stages);
+      }
+    }
+    // extract stage plans
+    JSONObject stagePlans = inputObject.getJSONObject("STAGE PLANS");
+    if (stagePlans.length() > 0) {
+      for (String stageName : JSONObject.getNames(stagePlans)) {
+        JSONObject stagePlan = stagePlans.getJSONObject(stageName);
+        this.stages.get(stageName).extractVertex(stagePlan);
+      }
+    }
+  }
+
+  /**
+   * @param indentFlag
+   *          helps to generate the correct indent
+   * @return the prefix string for the given indent flags
+   */
+  public static String prefixString(List<Boolean> indentFlag) {
+    StringBuilder sb = new StringBuilder();
+    for (int index = 0; index < indentFlag.size(); index++) {
+      if (indentFlag.get(index))
+        sb.append("|  ");
+      else
+        sb.append("   ");
+    }
+    return sb.toString();
+  }
+
+  /**
+   * @param indentFlag
+   *          helps to generate the correct indent
+   * @param tail
+   *          the specific tail that ends the prefix, e.g., "|<-"
+   * @return the prefix string ending with the given tail
+   */
+  public static String prefixString(List<Boolean> indentFlag, String tail) {
+    StringBuilder sb = new StringBuilder();
+    for (int index = 0; index < indentFlag.size(); index++) {
+      if (indentFlag.get(index))
+        sb.append("|  ");
+      else
+        sb.append("   ");
+    }
+    int len = sb.length();
+    return sb.replace(len - tail.length(), len, tail).toString();
+  }
+
+  @Override
+  public void print(JSONObject inputObject, PrintStream outputStream) throws Exception {
+    LOG.info("JsonParser is parsing\n" + inputObject.toString());
+    this.inputObject = inputObject;
+    this.outputStream = outputStream;
+    this.extractStagesAndPlans();
+    // print out the cbo info
+    if (inputObject.has("cboInfo")) {
+      outputStream.println(inputObject.getString("cboInfo"));
+      outputStream.println();
+    }
+    // print out the vertex dependency in root stage
+    for (Stage candidate : this.stages.values()) {
+      if (candidate.tezStageDependency != null && candidate.tezStageDependency.size() > 0) {
+        outputStream.println("Vertex dependency in root stage");
+        for (Entry<Vertex, List<Connection>> entry : candidate.tezStageDependency.entrySet()) {
+          StringBuilder sb = new StringBuilder();
+          sb.append(entry.getKey().name);
+          sb.append(" <- ");
+          boolean printcomma = false;
+          for (Connection connection : entry.getValue()) {
+            if (printcomma) {
+              sb.append(", ");
+            } else {
+              printcomma = true;
+            }
+            sb.append(connection.from.name + " (" + connection.type + ")");
+          }
+          outputStream.println(sb.toString());
+        }
+        outputStream.println();
+      }
+    }
+    List<Boolean> indentFlag = new ArrayList<>();
+    // print out all the stages that have no childStages.
+    for (Stage candidate : this.stages.values()) {
+      if (candidate.childStages.isEmpty()) {
+        candidate.print(outputStream, indentFlag);
+      }
+    }
+  }
+
+  public static void addInline(Op op, Connection connection) {
+    List<Connection> list = inlineMap.get(op);
+    if (list == null) {
+      list = new ArrayList<>();
+      list.add(connection);
+      inlineMap.put(op, list);
+    } else {
+      list.add(connection);
+    }
+  }
+
+  public static boolean isInline(Vertex v) {
+    for (List<Connection> list : inlineMap.values()) {
+      for (Connection connection : list) {
+        if (connection.from.equals(v)) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+}
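
The indentFlag lists threaded through the print methods drive prefixString: each boolean adds one three-character column, and the tail variant overwrites the end of the prefix with a marker such as "|<-". A quick sketch of the resulting strings:

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hive.common.jsonexplain.tez.TezJsonParser;

    public class PrefixStringSketch {
      public static void main(String[] args) {
        // each flag contributes "|  " if true or "   " if false
        List<Boolean> flags = Arrays.asList(true, false, true);
        System.out.println("[" + TezJsonParser.prefixString(flags) + "]");
        // prints: [|     |  ]
        System.out.println("[" + TezJsonParser.prefixString(flags, "|<-") + "]");
        // prints: [|     |<-]
      }
    }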

Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java?rev=1671979&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Vertex.java Wed Apr  8 00:05:54 2015
@@ -0,0 +1,248 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+public class Vertex {
+  public String name;
+  // vertex's parent connections.
+  public List<Connection> parentConnections;
+  // vertex's child vertices.
+  public List<Vertex> children;
+  // the jsonObject for this vertex
+  public JSONObject vertexObject;
+  // whether this vertex is a union vertex
+  public boolean union;
+  // whether this vertex is a dummy vertex (one that does not really exist but
+  // is created), e.g., a dummy vertex for a merge-join branch
+  public boolean dummy;
+  // the rootOps in this vertex
+  public List<Op> rootOps;
+  // we create a dummy vertex for each merge-join branch of a self join if
+  // this vertex is a merge join
+  public List<Vertex> mergeJoinDummyVertexs;
+  // whether this vertex has multiple reduce operators
+  boolean hasMultiReduceOp;
+
+  public Vertex(String name, JSONObject vertexObject) {
+    super();
+    this.name = name;
+    if (this.name != null && this.name.contains("Union")) {
+      this.union = true;
+    } else {
+      this.union = false;
+    }
+    this.dummy = false;
+    this.vertexObject = vertexObject;
+    this.parentConnections = new ArrayList<>();
+    this.children = new ArrayList<>();
+    this.rootOps = new ArrayList<>();
+    this.mergeJoinDummyVertexs = new ArrayList<>();
+    this.hasMultiReduceOp = false;
+  }
+
+  public void addDependency(Connection connection) throws JSONException {
+    this.parentConnections.add(connection);
+  }
+
+  /**
+   * We assume that there is a single top-level Map Operator Tree or a
+   * Reduce Operator Tree in a vertex.
+   *
+   * @throws JSONException
+   * @throws JsonParseException
+   * @throws JsonMappingException
+   * @throws IOException
+   * @throws Exception
+   */
+  public void extractOpTree() throws JSONException, JsonParseException, JsonMappingException,
+      IOException, Exception {
+    if (vertexObject.length() != 0) {
+      for (String key : JSONObject.getNames(vertexObject)) {
+        if (key.equals("Map Operator Tree:")) {
+          extractOp(vertexObject.getJSONArray(key).getJSONObject(0));
+        } else if (key.equals("Reduce Operator Tree:") || key.equals("Processor Tree:")) {
+          extractOp(vertexObject.getJSONObject(key));
+        }
+        // this is the case when we have a map-side SMB join;
+        // one input of the join is treated as a dummy vertex
+        else if (key.equals("Join:")) {
+          JSONArray array = vertexObject.getJSONArray(key);
+          for (int index = 0; index < array.length(); index++) {
+            JSONObject mpOpTree = array.getJSONObject(index);
+            Vertex v = new Vertex("", mpOpTree);
+            v.extractOpTree();
+            v.dummy = true;
+            mergeJoinDummyVertexs.add(v);
+          }
+        } else {
+          throw new Exception("unsupported operator tree in vertex " + this.name);
+        }
+      }
+    }
+  }
+
+  /**
+   * Assumption: each operator has only one parent but may have many children.
+   *
+   * @param operator
+   * @return the extracted Op
+   * @throws JSONException
+   * @throws JsonParseException
+   * @throws JsonMappingException
+   * @throws IOException
+   * @throws Exception
+   */
+  Op extractOp(JSONObject operator) throws JSONException, JsonParseException, JsonMappingException,
+      IOException, Exception {
+    String[] names = JSONObject.getNames(operator);
+    if (names.length != 1) {
+      throw new Exception("Expect only one operator in " + operator.toString());
+    } else {
+      String opName = names[0];
+      JSONObject attrObj = (JSONObject) operator.get(opName);
+      List<Attr> attrs = new ArrayList<>();
+      List<Op> children = new ArrayList<>();
+      String id = null;
+      String outputVertexName = null;
+      for (String attrName : JSONObject.getNames(attrObj)) {
+        if (attrName.equals("children")) {
+          Object childrenObj = attrObj.get(attrName);
+          if (childrenObj instanceof JSONObject) {
+            if (((JSONObject) childrenObj).length() != 0) {
+              children.add(extractOp((JSONObject) childrenObj));
+            }
+          } else if (childrenObj instanceof JSONArray) {
+            if (((JSONArray) childrenObj).length() != 0) {
+              JSONArray array = ((JSONArray) childrenObj);
+              for (int index = 0; index < array.length(); index++) {
+                children.add(extractOp(array.getJSONObject(index)));
+              }
+            }
+          } else {
+            throw new Exception("Unsupported operator " + this.name
+                + "'s children operator is neither a jsonobject nor a jsonarray");
+          }
+        } else {
+          if (attrName.equals("OperatorId:")) {
+            id = attrObj.get(attrName).toString();
+          } else if (attrName.equals("outputname:")) {
+            outputVertexName = attrObj.get(attrName).toString();
+          } else {
+            attrs.add(new Attr(attrName, attrObj.get(attrName).toString()));
+          }
+        }
+      }
+      Op op = new Op(opName, id, outputVertexName, children, attrs, operator, this);
+      if (!children.isEmpty()) {
+        for (Op child : children) {
+          child.parent = op;
+        }
+      } else {
+        this.rootOps.add(op);
+      }
+      return op;
+    }
+  }
+
+  public void print(PrintStream out, List<Boolean> indentFlag, String type, Vertex callingVertex)
+      throws JSONException, Exception {
+    // print vertex name
+    if (TezJsonParser.printSet.contains(this) && !hasMultiReduceOp) {
+      if (type != null) {
+        out.println(TezJsonParser.prefixString(indentFlag, "|<-")
+            + " Please refer to the previous " + this.name + " [" + type + "]");
+      } else {
+        out.println(TezJsonParser.prefixString(indentFlag, "|<-")
+            + " Please refer to the previous " + this.name);
+      }
+      return;
+    }
+    TezJsonParser.printSet.add(this);
+    if (type != null) {
+      out.println(TezJsonParser.prefixString(indentFlag, "|<-") + this.name + " [" + type + "]");
+    } else if (this.name != null) {
+      out.println(TezJsonParser.prefixString(indentFlag) + this.name);
+    }
+    // print operators
+    if (hasMultiReduceOp) {
+      // find the right op
+      Op choose = null;
+      for (Op op : this.rootOps) {
+        if (op.outputVertexName.equals(callingVertex.name)) {
+          choose = op;
+        }
+      }
+      if (choose != null) {
+        choose.print(out, indentFlag, false);
+      } else {
+        throw new Exception("Can not find the right reduce output operator for vertex " + this.name);
+      }
+    } else {
+      for (Op op : this.rootOps) {
+        // dummy vertex is treated as a branch of a join operator
+        if (this.dummy) {
+          op.print(out, indentFlag, true);
+        } else {
+          op.print(out, indentFlag, false);
+        }
+      }
+    }
+    if (this.union) {
+      // print dependent vertices
+      for (int index = 0; index < this.parentConnections.size(); index++) {
+        Connection connection = this.parentConnections.get(index);
+        List<Boolean> unionFlag = new ArrayList<>();
+        unionFlag.addAll(indentFlag);
+        if (index != this.parentConnections.size() - 1) {
+          unionFlag.add(true);
+        } else {
+          unionFlag.add(false);
+        }
+        connection.from.print(out, unionFlag, connection.type, this);
+      }
+    }
+  }
+
+  /**
+   * We check if a vertex has multiple reduce operators.
+   */
+  public void checkMultiReduceOperator() {
+    // check if it is a reduce vertex with more than one root operator
+    if (!this.name.contains("Reduce") || this.rootOps.size() < 2) {
+      return;
+    }
+    // check if all the root ops are reduce output operators
+    for (Op op : this.rootOps) {
+      if (!op.name.contains("Reduce"))
+        return;
+    }
+    this.hasMultiReduceOp = true;
+  }
+}

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed Apr  8 00:05:54 2015
@@ -1620,6 +1620,9 @@ public class HiveConf extends Configurat
     HIVE_LOG_EXPLAIN_OUTPUT("hive.log.explain.output", false,
         "Whether to log explain output for every query.\n" +
         "When enabled, will log EXPLAIN EXTENDED output for the query at INFO log4j log level."),
+    HIVE_EXPLAIN_USER("hive.explain.user", false,
+        "Whether to show the explain result at user level.\n" +
+        "When enabled, will log EXPLAIN output for the query at user level."),
 
     // prefix used to auto generated column aliases (this should be started with '_')
     HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL("hive.autogen.columnalias.prefix.label", "_c",
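
A minimal sketch of toggling the new flag programmatically; it assumes Hive's jars and default configuration are on the classpath:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class ExplainUserToggleSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // the flag defaults to false, so classic explain output is unchanged
        System.out.println(conf.getBoolVar(HiveConf.ConfVars.HIVE_EXPLAIN_USER));
        // enabling it switches EXPLAIN to the condensed, user-level rendering
        conf.setBoolVar(HiveConf.ConfVars.HIVE_EXPLAIN_USER, true);
        System.out.println(conf.getBoolVar(HiveConf.ConfVars.HIVE_EXPLAIN_USER));
      }
    }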

Modified: hive/trunk/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/trunk/itests/src/test/resources/testconfiguration.properties?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/trunk/itests/src/test/resources/testconfiguration.properties Wed Apr  8 00:05:54 2015
@@ -288,6 +288,8 @@ minitez.query.files=bucket_map_join_tez1
   bucket_map_join_tez2.q,\
   dynamic_partition_pruning.q,\
   dynamic_partition_pruning_2.q,\
+  explainuser_1.q,\
+  explainuser_2.q,\
   hybridhashjoin.q,\
   mapjoin_decimal.q,\
   lvj_mapjoin.q, \

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java Wed Apr  8 00:05:54 2015
@@ -23,6 +23,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.text.SimpleDateFormat;
+import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
@@ -84,6 +85,7 @@ public class Context {
   private final Configuration conf;
   protected int pathid = 10000;
   protected boolean explain = false;
+  protected String cboInfo;
   protected boolean explainLogical = false;
   protected String cmd = "";
   // number of previous attempts
@@ -695,4 +697,13 @@ public class Context {
   public AcidUtils.Operation getAcidOperation() {
     return acidOperation;
   }
+
+  public String getCboInfo() {
+    return cboInfo;
+  }
+
+  public void setCboInfo(String cboInfo) {
+    this.cboInfo = cboInfo;
+  }
+
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Wed Apr  8 00:05:54 2015
@@ -40,6 +40,8 @@ import java.util.Set;
 import java.util.TreeMap;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
+import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -47,7 +49,9 @@ import org.apache.hadoop.hive.ql.hooks.R
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -288,9 +292,24 @@ public class ExplainTask extends Task<Ex
           JSONObject jsonDependencies = getJSONDependencies(work);
           out.print(jsonDependencies);
         } else {
-          JSONObject jsonPlan = getJSONPlan(out, work);
-          if (work.isFormatted()) {
-            out.print(jsonPlan);
+          if (work.isUserLevelExplain()) {
+            JsonParser jsonParser = JsonParserFactory.getParser(conf);
+            if (jsonParser != null) {
+              work.setFormatted(true);
+              JSONObject jsonPlan = getJSONPlan(out, work);
+              if (work.getCboInfo() != null) {
+                jsonPlan.put("cboInfo", work.getCboInfo());
+              }
+              jsonParser.print(jsonPlan, out);
+            } else {
+              throw new SemanticException(
+                  "User-level explain is only supported by the Tez engine right now.");
+            }
+          } else {
+            JSONObject jsonPlan = getJSONPlan(out, work);
+            if (work.isFormatted()) {
+              out.print(jsonPlan);
+            }
           }
         }
       }
@@ -566,7 +585,17 @@ public class ExplainTask extends Task<Ex
 
     if (note instanceof Explain) {
       Explain xpl_note = (Explain) note;
-      if (extended || xpl_note.normalExplain()) {
+      boolean invokeFlag = false;
+      if (this.work.isUserLevelExplain()) {
+        invokeFlag = Level.USER.in(xpl_note.explainLevels());
+      } else {
+        if (extended) {
+          invokeFlag = Level.EXTENDED.in(xpl_note.explainLevels());
+        } else {
+          invokeFlag = Level.DEFAULT.in(xpl_note.explainLevels());
+        }
+      }
+      if (invokeFlag) {
         keyJSONObject = xpl_note.displayName();
         if (out != null) {
           out.print(indentString(indent));
@@ -589,6 +618,12 @@ public class ExplainTask extends Task<Ex
         String appender = isLogical ? " (" + operator.getOperatorId() + ")" : "";
         JSONObject jsonOut = outputPlan(operator.getConf(), out, extended,
             jsonOutput, jsonOutput ? 0 : indent, appender);
+        if (this.work.isUserLevelExplain()) {
+          if (jsonOut != null && jsonOut.length() > 0) {
+            ((JSONObject) jsonOut.get(JSONObject.getNames(jsonOut)[0])).put("OperatorId:",
+                operator.getOperatorId());
+          }
+        }
         if (jsonOutput) {
             json = jsonOut;
         }
@@ -623,8 +658,17 @@ public class ExplainTask extends Task<Ex
 
       if (note instanceof Explain) {
         Explain xpl_note = (Explain) note;
-
-        if (extended || xpl_note.normalExplain()) {
+        boolean invokeFlag = false;
+        if (this.work.isUserLevelExplain()) {
+          invokeFlag = Level.USER.in(xpl_note.explainLevels());
+        } else {
+          if (extended) {
+            invokeFlag = Level.EXTENDED.in(xpl_note.explainLevels());
+          } else {
+            invokeFlag = Level.DEFAULT.in(xpl_note.explainLevels());
+          }
+        }
+        if (invokeFlag) {
 
           Object val = null;
           try {
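
The invokeFlag computation above leans on an Explain.Level enum with an
in(...) membership helper; the enum itself lives in Explain.java, which is
modified in a later part of this multi-part commit mail. A hedged sketch of
the contract ExplainTask assumes here:

    // Sketch only: the shape of Explain.Level implied by Level.USER.in(...).
    public enum Level {
      USER, DEFAULT, EXTENDED;

      // True when this level appears in the annotation's explainLevels array.
      public boolean in(Level[] levels) {
        for (Level level : levels) {
          if (level == this) {
            return true;
          }
        }
        return false;
      }
    }

With that contract, user-level explain walks exactly the plan nodes annotated
with Level.USER, while the extended and default paths keep their old behavior
through the other two branches.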

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java Wed Apr  8 00:05:54 2015
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.ql.plan.Li
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.mapred.InputFormat;
 
 import java.io.IOException;
@@ -42,7 +43,7 @@ import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
 
-@Explain(displayName = "Merge File Operator")
+@Explain(displayName = "Merge File Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class MergeFileWork extends MapWork {
 
   private static final Log LOG = LogFactory.getLog(MergeFileWork.class);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java Wed Apr  8 00:05:54 2015
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.io.rcfi
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.mapred.Mapper;
 
 /**
@@ -88,7 +89,7 @@ public class PartialScanWork extends Map
   /**
    * @return the aggKey
    */
-  @Explain(displayName = "Stats Aggregation Key Prefix", normalExplain = false)
+  @Explain(displayName = "Stats Aggregation Key Prefix", explainLevels = { Level.EXTENDED })
   public String getAggKey() {
     return aggKey;
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java Wed Apr  8 00:05:54 2015
@@ -30,9 +30,10 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.mapred.Mapper;
 
-@Explain(displayName = "Column Truncate")
+@Explain(displayName = "Column Truncate", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ColumnTruncateWork extends MapWork implements Serializable {
 
   private static final long serialVersionUID = 1L;
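
The three work classes above illustrate the annotation migration this patch
applies throughout the plan classes: the boolean normalExplain attribute gives
way to an explainLevels array, with Level.USER opting a class into the new
user-level output. In sketch form (MyWork and "My Detail" are hypothetical):

    // Before this patch: shown in default and extended explain ...
    @Explain(displayName = "My Work")
    // ... or, with normalExplain = false, in extended explain only.
    @Explain(displayName = "My Detail", normalExplain = false)

    // After this patch: each level is listed explicitly.
    @Explain(displayName = "My Work",
        explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
    @Explain(displayName = "My Detail", explainLevels = { Level.EXTENDED })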

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/CalciteSemanticException.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/CalciteSemanticException.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/CalciteSemanticException.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/CalciteSemanticException.java Wed Apr  8 00:05:54 2015
@@ -29,6 +29,16 @@ public class CalciteSemanticException ex
 
   private static final long serialVersionUID = 1L;
 
+  public enum UnsupportedFeature {
+    Distinct_without_an_aggregation, Duplicates_in_RR, Filter_expression_with_non_boolean_return_type,
+    Having_clause_without_any_groupby, Hint, Invalid_column_reference, Invalid_decimal, 
+    Less_than_equal_greater_than, Multi_insert, Others, Same_name_in_multiple_expressions, 
+    Schema_less_table, Select_alias_in_having_clause, Select_transform, Subquery, 
+    Table_sample_clauses, UDTF, Union_type, Unique_join
+  };
+
+  private UnsupportedFeature unsupportedFeature;
+
   public CalciteSemanticException() {
     super();
   }
@@ -37,6 +47,11 @@ public class CalciteSemanticException ex
     super(message);
   }
 
+  public CalciteSemanticException(String message, UnsupportedFeature feature) {
+    super(message);
+    this.setUnsupportedFeature(feature);
+  }
+
   public CalciteSemanticException(Throwable cause) {
     super(cause);
   }
@@ -48,4 +63,13 @@ public class CalciteSemanticException ex
   public CalciteSemanticException(ErrorMsg errorMsg, String... msgArgs) {
     super(errorMsg, msgArgs);
   }
+
+  public UnsupportedFeature getUnsupportedFeature() {
+    return unsupportedFeature;
+  }
+
+  public void setUnsupportedFeature(UnsupportedFeature unsupportedFeature) {
+    this.unsupportedFeature = unsupportedFeature;
+  }
+
 }
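
The feature tag written here is read back in CalcitePlanner's catch block
further down in this patch; the round trip, in sketch form:

    // Sketch: a CBO bail-out site tags the exception with the offending
    // feature, and the planner turns that tag into a user-visible header.
    try {
      throw new CalciteSemanticException("Union type is not supported",
          UnsupportedFeature.Union_type);
    } catch (CalciteSemanticException e) {
      UnsupportedFeature feature = e.getUnsupportedFeature();   // may be null
      String cboInfo = (feature != null)
          ? "Plan not optimized by CBO due to missing feature [" + feature + "]."
          : "Plan not optimized by CBO.";
      // CalcitePlanner stores this string on the Context; ExplainTask later
      // attaches it to the JSON plan under the "cboInfo" key.
    }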

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveProject.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveProject.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveProject.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveProject.java Wed Apr  8 00:05:54 2015
@@ -40,6 +40,7 @@ import org.apache.calcite.util.mapping.M
 import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
 import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil;
 import org.apache.hadoop.hive.ql.optimizer.calcite.TraitsUtil;
+import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;
 import org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveCost;
 
 import com.google.common.collect.ImmutableList;
@@ -87,7 +88,7 @@ public class HiveProject extends Project
     // 1 Ensure columnNames are unique - CALCITE-411
     if (fieldNames != null && !Util.isDistinct(fieldNames)) {
       String msg = "Select list contains multiple expressions with the same name." + fieldNames;
-      throw new CalciteSemanticException(msg);
+      throw new CalciteSemanticException(msg, UnsupportedFeature.Same_name_in_multiple_expressions);
     }
     RelDataType rowType = RexUtil.createStructType(cluster.getTypeFactory(), exps, fieldNames);
     return create(cluster, child, exps, rowType, Collections.<RelCollation> emptyList());

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java Wed Apr  8 00:05:54 2015
@@ -53,6 +53,7 @@ import org.apache.hadoop.hive.common.typ
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
+import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -148,7 +149,7 @@ public class RexNodeConverter {
       // This may happen for schema-less tables, where columns are dynamically
       // supplied by serdes.
       throw new CalciteSemanticException("Unexpected rexnode : "
-          + rexNode.getClass().getCanonicalName());
+          + rexNode.getClass().getCanonicalName(), UnsupportedFeature.Schema_less_table);
     }
   }
 
@@ -352,7 +353,7 @@ public class RexNodeConverter {
         // For now, we will not run CBO in the presence of invalid decimal
         // literals.
         throw new CalciteSemanticException("Expression " + literal.getExprString()
-            + " is not a valid decimal");
+            + " is not a valid decimal", UnsupportedFeature.Invalid_decimal);
         // TODO: return createNullLiteral(literal);
       }
       BigDecimal bd = (BigDecimal) value;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java Wed Apr  8 00:05:54 2015
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.ql.exec.Fu
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
+import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
@@ -382,7 +383,7 @@ public class SqlFunctionConverter {
       // We can create Calcite IS_DISTINCT_FROM operator for this. But since our
       // join reordering algo cant handle this anyway there is no advantage of
       // this.So, bail out for now.
-      throw new CalciteSemanticException("<=> is not yet supported for cbo.");
+      throw new CalciteSemanticException("<=> is not yet supported for cbo.", UnsupportedFeature.Less_than_equal_greater_than);
     }
     SqlOperator calciteOp = hiveToCalcite.get(hiveUdfName);
     if (calciteOp == null) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java Wed Apr  8 00:05:54 2015
@@ -34,6 +34,7 @@ import org.apache.calcite.sql.type.SqlTy
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
+import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;
 import org.apache.hadoop.hive.ql.optimizer.calcite.translator.SqlFunctionConverter.HiveToken;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
@@ -228,7 +229,7 @@ public class TypeConverter {
   public static RelDataType convert(UnionTypeInfo unionType, RelDataTypeFactory dtFactory)
     throws CalciteSemanticException{
     // Union type is not supported in Calcite.
-    throw new CalciteSemanticException("Union type is not supported");
+    throw new CalciteSemanticException("Union type is not supported", UnsupportedFeature.Union_type);
   }
 
   public static TypeInfo convert(RelDataType rType) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java Wed Apr  8 00:05:54 2015
@@ -26,8 +26,10 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-@Explain(displayName = "Alter Table Partition Merge Files")
+
+@Explain(displayName = "Alter Table Partition Merge Files", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class AlterTablePartMergeFilesDesc {
 
   private String tableName;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java Wed Apr  8 00:05:54 2015
@@ -121,6 +121,7 @@ import org.apache.hadoop.hive.ql.optimiz
 import org.apache.hadoop.hive.ql.optimizer.calcite.HiveTypeSystemImpl;
 import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable;
 import org.apache.hadoop.hive.ql.optimizer.calcite.TraitsUtil;
+import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;
 import org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveVolcanoPlanner;
 import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate;
 import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter;
@@ -249,13 +250,29 @@ public class CalcitePlanner extends Sema
           disableJoinMerge = false;
           sinkOp = genPlan(getQB());
           LOG.info("CBO Succeeded; optimized logical plan.");
+          this.ctx.setCboInfo("Plan optimized by CBO.");
           LOG.debug(newAST.dump());
         } catch (Exception e) {
           boolean isMissingStats = noColsMissingStats.get() > 0;
           if (isMissingStats) {
             LOG.error("CBO failed due to missing column stats (see previous errors), skipping CBO");
+            this.ctx
+                .setCboInfo("Plan not optimized by CBO due to missing statistics. Please check log for more details.");
           } else {
             LOG.error("CBO failed, skipping CBO. ", e);
+            if (e instanceof CalciteSemanticException) {
+              CalciteSemanticException calciteSemanticException = (CalciteSemanticException) e;
+              UnsupportedFeature unsupportedFeature = calciteSemanticException
+                  .getUnsupportedFeature();
+              if (unsupportedFeature != null) {
+                this.ctx.setCboInfo("Plan not optimized by CBO due to missing feature ["
+                    + unsupportedFeature + "].");
+              } else {
+                this.ctx.setCboInfo("Plan not optimized by CBO.");
+              }
+            } else {
+              this.ctx.setCboInfo("Plan not optimized by CBO.");
+            }
           }
           if (!conf.getBoolVar(ConfVars.HIVE_IN_TEST) || isMissingStats
               || e instanceof CalciteSemanticException) {
@@ -280,6 +297,7 @@ public class CalcitePlanner extends Sema
           }
         }
       } else {
+        this.ctx.setCboInfo("Plan not optimized by CBO.");
         skipCalcitePlan = true;
       }
     }
@@ -1081,7 +1099,7 @@ public class CalcitePlanner extends Sema
         String msg = String.format("UNIQUE JOIN is currently not supported in CBO,"
             + " turn off cbo to use UNIQUE JOIN.");
         LOG.debug(msg);
-        throw new CalciteSemanticException(msg);
+        throw new CalciteSemanticException(msg, UnsupportedFeature.Unique_join);
       }
 
       // 1. Determine Join Type
@@ -1164,7 +1182,7 @@ public class CalcitePlanner extends Sema
               + " Currently we don't support Table Sample clauses in CBO,"
               + " turn off cbo for queries on tableSamples.", tableAlias);
           LOG.debug(msg);
-          throw new CalciteSemanticException(msg);
+          throw new CalciteSemanticException(msg, UnsupportedFeature.Table_sample_clauses);
         }
 
         // 2. Get Table Metadata
@@ -1261,7 +1279,8 @@ public class CalcitePlanner extends Sema
         // fail on compile time
         // for such queries, its an arcane corner case, not worth of adding that
         // complexity.
-        throw new CalciteSemanticException("Filter expression with non-boolean return type.");
+        throw new CalciteSemanticException("Filter expression with non-boolean return type.",
+            UnsupportedFeature.Filter_expression_with_non_boolean_return_type);
       }
       ImmutableMap<String, Integer> hiveColNameCalcitePosMap = this.relToHiveColNameCalcitePosMap
           .get(srcRel);
@@ -1772,7 +1791,8 @@ public class CalcitePlanner extends Sema
                 grpbyExpr, new TypeCheckCtx(groupByInputRowResolver));
             ExprNodeDesc grpbyExprNDesc = astToExprNDescMap.get(grpbyExpr);
             if (grpbyExprNDesc == null)
-              throw new CalciteSemanticException("Invalid Column Reference: " + grpbyExpr.dump());
+              throw new CalciteSemanticException("Invalid Column Reference: " + grpbyExpr.dump(),
+                  UnsupportedFeature.Invalid_column_reference);
 
             addToGBExpr(groupByOutputRowResolver, groupByInputRowResolver, grpbyExpr,
                 grpbyExprNDesc, gbExprNDescLst, outputColumnNames);
@@ -1960,7 +1980,8 @@ public class CalcitePlanner extends Sema
           RowResolver obSyntheticProjectRR = new RowResolver();
           if (!RowResolver.add(obSyntheticProjectRR, inputRR)) {
             throw new CalciteSemanticException(
-                "Duplicates detected when adding columns to RR: see previous message");
+                "Duplicates detected when adding columns to RR: see previous message",
+                UnsupportedFeature.Duplicates_in_RR);
           }
           int vcolPos = inputRR.getRowSchema().getSignature().size();
           for (Pair<ASTNode, TypeInfo> astTypePair : vcASTTypePairs) {
@@ -1975,20 +1996,23 @@ public class CalcitePlanner extends Sema
           if (outermostOB) {
             if (!RowResolver.add(outputRR, inputRR)) {
               throw new CalciteSemanticException(
-                  "Duplicates detected when adding columns to RR: see previous message");
+                  "Duplicates detected when adding columns to RR: see previous message",
+                  UnsupportedFeature.Duplicates_in_RR);
             }
 
           } else {
             if (!RowResolver.add(outputRR, obSyntheticProjectRR)) {
               throw new CalciteSemanticException(
-                  "Duplicates detected when adding columns to RR: see previous message");
+                  "Duplicates detected when adding columns to RR: see previous message",
+                  UnsupportedFeature.Duplicates_in_RR);
             }
             originalOBChild = srcRel;
           }
         } else {
           if (!RowResolver.add(outputRR, inputRR)) {
             throw new CalciteSemanticException(
-                "Duplicates detected when adding columns to RR: see previous message");
+                "Duplicates detected when adding columns to RR: see previous message",
+                UnsupportedFeature.Duplicates_in_RR);
           }
         }
 
@@ -2026,7 +2050,8 @@ public class CalcitePlanner extends Sema
         RowResolver outputRR = new RowResolver();
         if (!RowResolver.add(outputRR, relToHiveRR.get(srcRel))) {
           throw new CalciteSemanticException(
-              "Duplicates detected when adding columns to RR: see previous message");
+              "Duplicates detected when adding columns to RR: see previous message",
+              UnsupportedFeature.Duplicates_in_RR);
         }
         ImmutableMap<String, Integer> hiveColNameCalcitePosMap = buildHiveToCalciteColumnMap(
             outputRR, sortRel);
@@ -2317,7 +2342,7 @@ public class CalcitePlanner extends Sema
         String msg = String.format("Hint specified for %s."
             + " Currently we don't support hints in CBO, turn off cbo to use hints.", hint);
         LOG.debug(msg);
-        throw new CalciteSemanticException(msg);
+        throw new CalciteSemanticException(msg, UnsupportedFeature.Hint);
       }
 
       // 4. Bailout if select involves Transform
@@ -2326,7 +2351,7 @@ public class CalcitePlanner extends Sema
         String msg = String.format("SELECT TRANSFORM is currently not supported in CBO,"
             + " turn off cbo to use TRANSFORM.");
         LOG.debug(msg);
-        throw new CalciteSemanticException(msg);
+        throw new CalciteSemanticException(msg, UnsupportedFeature.Select_transform);
       }
 
       // 5. Bailout if select involves UDTF
@@ -2339,7 +2364,7 @@ public class CalcitePlanner extends Sema
           String msg = String.format("UDTF " + funcName + " is currently not supported in CBO,"
               + " turn off cbo to use UDTF " + funcName);
           LOG.debug(msg);
-          throw new CalciteSemanticException(msg);
+          throw new CalciteSemanticException(msg, UnsupportedFeature.UDTF);
         }
       }
 
@@ -2408,7 +2433,8 @@ public class CalcitePlanner extends Sema
         } else if (expr.toStringTree().contains("TOK_FUNCTIONDI")
             && !(srcRel instanceof HiveAggregate)) {
           // Likely a malformed query eg, select hash(distinct c1) from t1;
-          throw new CalciteSemanticException("Distinct without an aggreggation.");
+          throw new CalciteSemanticException("Distinct without an aggreggation.",
+              UnsupportedFeature.Distinct_without_an_aggreggation);
         } else {
           // Case when this is an expression
           TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR);
@@ -2427,7 +2453,8 @@ public class CalcitePlanner extends Sema
               .isSkewedCol() : false);
           if (!out_rwsch.putWithCheck(tabAlias, colAlias, null, colInfo)) {
             throw new CalciteSemanticException("Cannot add column to RR: " + tabAlias + "."
-                + colAlias + " => " + colInfo + " due to duplication, see previous warnings");
+                + colAlias + " => " + colInfo + " due to duplication, see previous warnings",
+                UnsupportedFeature.Duplicates_in_RR);
           }
 
           if (exp instanceof ExprNodeColumnDesc) {
@@ -2491,7 +2518,7 @@ public class CalcitePlanner extends Sema
         if (LOG.isDebugEnabled()) {
           LOG.debug(msg + " because it: " + reason);
         }
-        throw new CalciteSemanticException(msg);
+        throw new CalciteSemanticException(msg, UnsupportedFeature.Subquery);
       }
 
       // 1. Build Rel For Src (SubQuery, TS, Join)
@@ -2520,7 +2547,7 @@ public class CalcitePlanner extends Sema
         // table
         // So, for now lets just disable this. Anyway there is nothing much to
         // optimize in such cases.
-        throw new CalciteSemanticException("Unsupported");
+        throw new CalciteSemanticException("Unsupported", UnsupportedFeature.Others);
 
       }
       // 1.3 process join
@@ -2628,7 +2655,8 @@ public class CalcitePlanner extends Sema
       if (havingClause != null) {
         if (!(srcRel instanceof HiveAggregate)) {
           // ill-formed query like select * from t1 having c1 > 0;
-          throw new CalciteSemanticException("Having clause without any group-by.");
+          throw new CalciteSemanticException("Having clause without any group-by.",
+              UnsupportedFeature.Having_clause_without_any_groupby);
         }
         validateNoHavingReferenceToAlias(qb, (ASTNode) havingClause.getChild(0));
         gbFilter = genFilterRelNode(qb, (ASTNode) havingClause.getChild(0), srcRel, aliasToRel,
@@ -2690,7 +2718,7 @@ public class CalcitePlanner extends Sema
               + " This non standard behavior is not supported with cbo on."
               + " Turn off cbo for these queries.", aliasToCheck, havingClause);
           LOG.debug(msg);
-          throw new CalciteSemanticException(msg);
+          throw new CalciteSemanticException(msg, UnsupportedFeature.Select_alias_in_having_clause);
         }
       }
 
@@ -2726,7 +2754,7 @@ public class CalcitePlanner extends Sema
         String msg = String.format("Multi Insert is currently not supported in CBO,"
             + " turn off cbo to use Multi Insert.");
         LOG.debug(msg);
-        throw new CalciteSemanticException(msg);
+        throw new CalciteSemanticException(msg, UnsupportedFeature.Multi_insert);
       }
       return qbp;
     }
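
With hive.explain.user enabled, the cboInfo string chosen in these branches is
what ExplainTask attaches to the JSON plan and TezJsonParser renders at the
top of the output. For a query tripping, say, the UNIQUE JOIN bail-out, the
explain would be headed by a line like (illustrative):

    Plan not optimized by CBO due to missing feature [Unique_join].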

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Wed Apr  8 00:05:54 2015
@@ -91,6 +91,8 @@ public class ExplainSemanticAnalyzer ext
       pCtx = ((SemanticAnalyzer)sem).getParseContext();
     }
 
+    boolean userLevelExplain = !extended && !formatted && !dependency && !logical && !authorize
+        && HiveConf.getBoolVar(ctx.getConf(), HiveConf.ConfVars.HIVE_EXPLAIN_USER);
     ExplainWork work = new ExplainWork(ctx.getResFile(),
         pCtx,
         tasks,
@@ -101,7 +103,9 @@ public class ExplainSemanticAnalyzer ext
         formatted,
         dependency,
         logical,
-        authorize);
+        authorize,
+        userLevelExplain,
+        ctx.getCboInfo());
 
     work.setAppendTaskType(
         HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES));
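
Note the precedence userLevelExplain encodes: any explicit EXPLAIN option
always wins over the configuration flag. Illustratively, with
hive.explain.user=true on the Tez engine:

    // EXPLAIN SELECT ...           -> new user-level output
    // EXPLAIN EXTENDED SELECT ...  -> classic extended output (flag ignored)
    // EXPLAIN FORMATTED SELECT ... -> classic JSON output (flag ignored)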

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1671979&r1=1671978&r2=1671979&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Apr  8 00:05:54 2015
@@ -115,6 +115,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.optimizer.Optimizer;
 import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
+import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;
 import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec.SpecType;
 import org.apache.hadoop.hive.ql.parse.CalcitePlanner.ASTSearcher;
@@ -2995,7 +2996,8 @@ public class SemanticAnalyzer extends Ba
         if (ensureUniqueCols) {
           if (!output.putWithCheck(tmp[0], tmp[1], null, oColInfo)) {
             throw new CalciteSemanticException("Cannot add column to RR: " + tmp[0] + "." + tmp[1]
-                + " => " + oColInfo + " due to duplication, see previous warnings");
+                + " => " + oColInfo + " due to duplication, see previous warnings",
+                UnsupportedFeature.Duplicates_in_RR);
           }
         } else {
           output.put(tmp[0], tmp[1], oColInfo);


