From: cws@apache.org
To: commits@hive.apache.org
Reply-To: hive-dev@hive.apache.org
Subject: svn commit: r1214636 - in /hive/trunk: jdbc/src/test/org/apache/hadoop/hive/jdbc/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/
Date: Thu, 15 Dec 2011 07:03:01 -0000
Message-Id: <20111215070301.5760723888E7@eris.apache.org>

Author: cws
Date: Thu Dec 15 07:03:00 2011
New Revision: 1214636

URL: http://svn.apache.org/viewvc?rev=1214636&view=rev
Log:
HIVE-727. Hive Server getSchema() returns wrong schema for 'Explain' queries (Prasad Mujumdar via cws)

Modified:
    hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java

Modified: hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1214636&r1=1214635&r2=1214636&view=diff
==============================================================================
--- hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Thu Dec 15 07:03:00 2011
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.jdbc;
 
+import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
 import static org.apache.hadoop.hive.ql.processors.SetProcessor.SET_COLUMN_NAME;
 
 import java.sql.Connection;
@@ -198,6 +199,24 @@ public class TestJdbcDriver extends Test
         expectedException);
   }
 
+  /**
+   * verify 'explain ...' resultset
+   * @throws SQLException
+   */
+  public void testExplainStmt() throws SQLException {
+    Statement stmt = con.createStatement();
+
+    ResultSet res = stmt.executeQuery(
+        "explain select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " +
+        "c1*2, sentences(null, null, null) as b from " + dataTypeTableName + " limit 1");
+
+    ResultSetMetaData md = res.getMetaData();
+    assertEquals(md.getColumnCount(), 1); // only one result column
+    assertEquals(md.getColumnLabel(1), EXPL_COLUMN_NAME); // verify the column name
+    //verify that there is data in the resultset
+    assertTrue("Nothing returned explain", res.next());
+  }
+
   public void testPrepareStatement() {
 
     String sql = "from (select count(1) from "
@@ -1016,22 +1035,22 @@ public class TestJdbcDriver extends Test
    * validate schema generated by "set" command
    * @throws SQLException
    */
-public void testSetCommand() throws SQLException {
-  // execute set command
-  String sql = "set -v";
-  Statement stmt = con.createStatement();
-  ResultSet res = stmt.executeQuery(sql);
-
-  // Validate resultset columns
-  ResultSetMetaData md = res.getMetaData() ;
-  assertEquals(1, md.getColumnCount());
-  assertEquals(SET_COLUMN_NAME, md.getColumnLabel(1));
+  public void testSetCommand() throws SQLException {
+    // execute set command
+    String sql = "set -v";
+    Statement stmt = con.createStatement();
+    ResultSet res = stmt.executeQuery(sql);
+
+    // Validate resultset columns
+    ResultSetMetaData md = res.getMetaData() ;
+    assertEquals(1, md.getColumnCount());
+    assertEquals(SET_COLUMN_NAME, md.getColumnLabel(1));
 
-  //check if there is data in the resultset
-  assertTrue("Nothing returned by set -v", res.next());
+    //check if there is data in the resultset
+    assertTrue("Nothing returned by set -v", res.next());
 
-  res.close();
-  stmt.close();
+    res.close();
+    stmt.close();
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1214636&r1=1214635&r2=1214636&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Thu Dec 15 07:03:00 2011
@@ -18,21 +18,25 @@
 package org.apache.hadoop.hive.ql.exec;
 
+import static org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME;
+
 import java.io.OutputStream;
 import java.io.PrintStream;
 import java.io.Serializable;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Method;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeMap;
-import java.util.Map.Entry;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.plan.Explain;
@@ -50,7 +54,7 @@ import java.lang.reflect.InvocationTarge
  **/
 public class ExplainTask extends Task<ExplainWork> implements Serializable {
   private static final long serialVersionUID = 1L;
-
+  public static final String EXPL_COLUMN_NAME = "Explain";
   public ExplainTask() {
     super();
   }
@@ -647,4 +651,14 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable
+  public List<FieldSchema> getResultSchema() {
+    FieldSchema tmpFieldSchema = new FieldSchema();
+    List<FieldSchema> colList = new ArrayList<FieldSchema>();
+
+    tmpFieldSchema.setName(EXPL_COLUMN_NAME);
+    tmpFieldSchema.setType(STRING_TYPE_NAME);
+
+    colList.add(tmpFieldSchema);
+    return colList;
+  }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java?rev=1214636&r1=1214635&r2=1214636&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java Thu Dec 15 07:03:00 2011
@@ -29,6 +29,7 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -507,4 +508,8 @@ public abstract class Task<T extends Serializable>
+
+  public List<FieldSchema> getResultSchema() {
+    return null;
+  }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=1214636&r1=1214635&r2=1214636&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Thu Dec 15 07:03:00 2011
@@ -24,6 +24,7 @@ import java.util.List;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
@@ -33,6 +34,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
  *
  */
 public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
+  List<FieldSchema> fieldList;
 
   public ExplainSemanticAnalyzer(HiveConf conf) throws SemanticException {
     super(conf);
@@ -47,7 +49,7 @@ public class ExplainSemanticAnalyzer ext
         .getChild(0));
     sem.analyze((ASTNode) ast.getChild(0), ctx);
     sem.validate();
-    
+
     boolean extended = false;
     boolean formatted = false;
     if (ast.getChildCount() == 2) {
@@ -68,12 +70,20 @@ public class ExplainSemanticAnalyzer ext
       tasks.add(fetchTask);
     }
 
-    rootTasks.add(
-      TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
-        tasks,
-        ((ASTNode) ast.getChild(0)).toStringTree(),
+    Task<? extends Serializable> explTask =
+        TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
+        tasks,
+        ((ASTNode) ast.getChild(0)).toStringTree(),
         extended,
-        formatted),
-      conf));
+        formatted),
+        conf);
+
+    fieldList = explTask.getResultSchema();
+    rootTasks.add(explTask);
+  }
+
+  @Override
+  public List<FieldSchema> getResultSchema() {
+    return fieldList;
   }
 }
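
For reference, a minimal sketch of how a JDBC client can observe the corrected EXPLAIN result schema after this change. It is illustrative only: the driver class is the pre-HiveServer2 driver from the hive-jdbc module, while the connection URL (jdbc:hive://localhost:10000/default) and the table name (src) are assumptions about a local setup, not part of this commit. The expectations mirror the testExplainStmt() test added above: with ExplainTask.getResultSchema() in place, the schema reported for an EXPLAIN statement is a single string column named by EXPL_COLUMN_NAME.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;

public class ExplainSchemaCheck {
  public static void main(String[] args) throws Exception {
    // Pre-HiveServer2 JDBC driver class shipped with the hive-jdbc module.
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");

    // Assumed server location, database, and table; adjust for the local setup.
    Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("explain select * from src");

    // After HIVE-727 the EXPLAIN result schema is one string column named "Explain"
    // (ExplainTask.EXPL_COLUMN_NAME) instead of an incorrect schema.
    ResultSetMetaData md = res.getMetaData();
    System.out.println("columns: " + md.getColumnCount());   // expected: 1
    System.out.println("label:   " + md.getColumnLabel(1));  // expected: Explain

    // The plan text comes back one line per row.
    while (res.next()) {
      System.out.println(res.getString(1));
    }

    res.close();
    stmt.close();
    con.close();
  }
}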