Subject: svn commit: r696736 [1/7] - in /hadoop/core/trunk: ./ src/contrib/hive/ src/contrib/hive/ql/ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ src/contrib/hive/ql/src/java/org/apache/ha...
Date: Thu, 18 Sep 2008 17:38:04 -0000
To: core-commits@hadoop.apache.org
From: dhruba@apache.org
X-Mailer: svnmailer-1.0.8
Message-Id: <20080918173812.172122388A1E@eris.apache.org>

Author: dhruba
Date: Thu Sep 18 10:37:59 2008
New Revision: 696736

URL: http://svn.apache.org/viewvc?rev=696736&view=rev
Log:
HADOOP-4084. Add explain plan capabilities to Hive Query Language.
(Ashish Thusoo via dhruba)
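[Editor's note for readers of the archive: this patch lets any HiveQL statement be prefixed with EXPLAIN, or EXPLAIN EXTENDED, in which case the wrapped query is analyzed but not run and the plan is written to the query's result file. A hypothetical session; the table name is illustrative, not part of this patch:

    EXPLAIN
    SELECT key, count(1) FROM src GROUP BY key;

    EXPLAIN EXTENDED
    SELECT key, count(1) FROM src GROUP BY key;

The EXTENDED form additionally prints plan attributes whose explain annotation sets normalExplain() to false; see ExplainTask below.]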
Added:
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explain.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explainWork.java
Removed:
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFRegistry.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFRegistry.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionInfo.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeRegistry.java
Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/contrib/hive/build-common.xml
    hadoop/core/trunk/src/contrib/hive/ql/build.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeInfo.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/collectDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/copyWork.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explosionDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestJEXL.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/cast1.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby1.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby2.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby3.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby4.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby5.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input1.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input10.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input11.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input12.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input13.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input14.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input15.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input2.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input4.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input5.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input6.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input7.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input8.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input9.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testsequencefile.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl1.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl2.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl3.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join1.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join2.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join3.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join4.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join5.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join6.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join7.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join8.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample1.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample2.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample4.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample5.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample6.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample7.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/subq.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf1.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/union.q
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input10.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input12.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input13.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input14.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input15.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input2.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input3.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input4.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input5.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input6.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input7.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input8.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input9.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input_part1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input_testsequencefile.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input_testxpath.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl2.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl3.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join2.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join3.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join4.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join5.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join6.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join7.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/join8.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample2.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample4.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample5.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample6.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/sample7.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/subq.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/udf1.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/union.q.out
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/cast1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby2.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby3.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby4.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby5.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/groupby6.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input2.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input3.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input4.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input5.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input6.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input7.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input8.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input9.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input_part1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/input_testxpath.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join2.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join3.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join4.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join5.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join6.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join7.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/join8.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample2.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample3.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample4.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample5.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample6.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/sample7.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/subq.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/udf1.q.xml
    hadoop/core/trunk/src/contrib/hive/ql/src/test/results/compiler/plan/union.q.xml
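[Editor's note before the diffs: the new ExplainTask (added below) writes three sections to the result file -- the parsed AST, a stage dependency listing, and per-stage plans. Shape only, reconstructed from the heading strings in the code; actual content varies by query:

    ABSTRACT SYNTAX TREE:
      (TOK_QUERY ...)

    STAGE DEPENDENCIES:
      Stage-1 is a root stage
      Stage-0 depends on stages: Stage-1

    STAGE PLANS:
      Stage: Stage-1
        ...

The stage names come from the TaskFactory change further down, which stamps every task with "Stage-" plus a per-query counter.]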
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Sep 18 10:37:59 2008
@@ -625,6 +625,9 @@
     HADOOP-4200. Fix a bug in the test-patch.sh script.  (Ramya R via nigel)
 
+    HADOOP-4084. Add explain plan capabilities to Hive Query Language.
+    (Ashish Thusoo via dhruba)
+
 Release 0.18.1 - 2008-09-17
 
   IMPROVEMENTS

Modified: hadoop/core/trunk/src/contrib/hive/build-common.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/build-common.xml?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/build-common.xml (original)
+++ hadoop/core/trunk/src/contrib/hive/build-common.xml Thu Sep 18 10:37:59 2008
@@ -205,7 +205,7 @@
+          errorProperty="tests.failed" failureProperty="tests.failed" filtertrace="off">

Modified: hadoop/core/trunk/src/contrib/hive/ql/build.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/build.xml?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/build.xml (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/build.xml Thu Sep 18 10:37:59 2008
@@ -61,7 +61,8 @@
              templatePath="${ql.test.template.dir}" template="TestCliDriver.vm"
              queryDirectory="${ql.test.query.dir}/clientpositive"
              queryFile="${qfile}"
-             resultsDirectory="${ql.test.results.dir}/clientpositive" className="TestCliDriver"/>
+             resultsDirectory="${ql.test.results.dir}/clientpositive" className="TestCliDriver"
+             logFile="${test.log.dir}/testclidrivergen.log"/>

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Thu Sep 18 10:37:59 2008
@@ -18,12 +18,9 @@
 package org.apache.hadoop.hive.ql;
 
-import java.io.File;
 import java.io.InputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
+import java.io.Serializable;
 import java.util.*;
-
 import org.antlr.runtime.tree.CommonTree;
 import org.apache.commons.lang.StringUtils;
 
@@ -37,6 +34,7 @@
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.MapRedTask;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.ExecDriver;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -65,15 +63,15 @@
     return terminator;
   }
 
-  public int countJobs(Collection tasks) {
+  public int countJobs(List<Task<? extends Serializable>> tasks) {
     if (tasks == null)
       return 0;
     int jobs = 0;
-    for (Object task: tasks) {
+    for (Task<? extends Serializable> task: tasks) {
       if ((task instanceof ExecDriver) || (task instanceof MapRedTask)) {
         jobs++;
       }
-      jobs += countJobs(((Task) task).getChildTasks());
+      jobs += countJobs(task.getChildTasks());
     }
     return jobs;
   }
@@ -104,11 +102,13 @@
     conf.setVar(HiveConf.ConfVars.HIVEQUERYID, command);
 
     try {
+
+      TaskFactory.resetId();
+
       BaseSemanticAnalyzer sem;
       LOG.info("Starting command: " + command);
 
-      if (resStream != null)
-      {
+      if (resStream != null) {
         resStream.close();
         resStream = null;
       }
@@ -126,21 +126,32 @@
       // Do semantic analysis and plan generation
       sem.analyze(tree, ctx);
       LOG.info("Semantic Analysis Completed");
-      for(Task rootTask: sem.getRootTasks()) {
-        rootTask.initialize(conf);
-      }
 
       jobs = countJobs(sem.getRootTasks());
       if (jobs > 0) {
console.printInfo("Total MapReduce jobs = " + jobs); } + + + String jobname = Utilities.abbreviate(command, maxlen - 6); + int curJob = 0; + for(Task rootTask: sem.getRootTasks()) { + // assumption that only top level tasks are map-reduce tasks + if ((rootTask instanceof ExecDriver) || (rootTask instanceof MapRedTask)) { + curJob ++; + if(noName) { + conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, jobname + "(" + curJob + "/" + jobs + ")"); + } + } + rootTask.initialize(conf); + } // A very simple runtime that keeps putting runnable takss // on a list and when a job completes, it puts the children at the back of the list // while taking the job to run from the front of the list - Queue runnable = new LinkedList(); + Queue> runnable = new LinkedList>(); - for(Task rootTask:sem.getRootTasks()) { + for(Task rootTask:sem.getRootTasks()) { if (runnable.offer(rootTask) == false) { LOG.error("Could not insert the first task into the queue"); return (1); @@ -148,11 +159,7 @@ } while(runnable.peek() != null) { - Task tsk = runnable.remove(); - - if(noName) { - conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, Utilities.abbreviate(command, maxlen)); - } + Task tsk = runnable.remove(); int exitVal = tsk.execute(); if (exitVal != 0) { @@ -166,13 +173,13 @@ continue; } - for(Object child: tsk.getChildTasks()) { + for(Task child: tsk.getChildTasks()) { // Check if the child is runnable - if (!((Task)child).isRunnable()) { + if (!child.isRunnable()) { continue; } - if (runnable.offer((Task)child) == false) { + if (runnable.offer(child) == false) { LOG.error("Could not add child task to queue"); } } Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java?rev=696736&r1=696735&r2=696736&view=diff ============================================================================== --- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java (original) +++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java Thu Sep 18 10:37:59 2008 @@ -24,8 +24,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.plan.copyWork; import org.apache.hadoop.hive.ql.parse.LoadSemanticAnalyzer; import org.apache.hadoop.util.StringUtils; Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=696736&r1=696735&r2=696736&view=diff ============================================================================== --- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original) +++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Thu Sep 18 10:37:59 2008 @@ -47,8 +47,6 @@ import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.hive.ql.metadata.InvalidTableException; -import org.apache.hadoop.util.StringUtils; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.facebook.thrift.TException; Modified: 
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Thu Sep 18 10:37:59 2008
@@ -214,7 +214,7 @@
     }
     return (returnVal);
   }
-  
+
   private static void printUsage() {
     System.out.println("ExecDriver -plan <plan-file> [-jobconf k1=v1 [-jobconf k2=v2] ...]");
     System.exit(1);

Added: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=696736&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Thu Sep 18 10:37:59 2008
@@ -0,0 +1,330 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+import java.io.Serializable;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Method;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.hive.ql.plan.explain;
+import org.apache.hadoop.hive.ql.plan.explainWork;
+import org.apache.hadoop.util.StringUtils;
+
+
+/**
+ * ExplainTask implementation
+ *
+ **/
+public class ExplainTask extends Task<explainWork> implements Serializable {
+  private static final long serialVersionUID = 1L;
+
+  public int execute() {
+
+    try {
+      // If this is an explain plan then return from here
+      PrintStream out = new PrintStream(new FileOutputStream(work.getResFile()));
+
+      // Print out the parse AST
+      outputAST(work.getAstStringTree(), out, 0);
+      out.println();
+
+      outputDependencies(out, work.getRootTasks(), 0);
+      out.println();
+
+      // Go over all the tasks and dump out the plans
+      outputStagePlans(out, work.getRootTasks(), 0);
+
+      return (0);
+    }
+    catch (Exception e) {
+      console.printError("Failed with exception " + e.getMessage(), "\n" + StringUtils.stringifyException(e));
+      return (1);
+    }
+  }
+
+  private String indentString(int indent) {
+    StringBuilder sb = new StringBuilder();
+    for(int i = 0; i < indent; ++i) {
+      sb.append(" ");
+    }
+
+    return sb.toString();
+  }
+
+  private void outputMap(Map<?, ?> mp, String header,
+                         PrintStream out, boolean extended, int indent)
+    throws Exception {
+
+    boolean first_el = true;
+    for(Entry<?, ?> ent: mp.entrySet()) {
+      if (first_el) {
+        out.println(header);
+      }
+      first_el = false;
+
+      // Print the key
+      out.print(indentString(indent));
+      out.printf("%s ", ent.getKey().toString());
+      // Print the value
+      if (isPrintable(ent.getValue())) {
+        out.print(ent.getValue());
+        out.println();
+      }
+      else if (ent.getValue() instanceof Serializable) {
+        out.println();
+        outputPlan((Serializable)ent.getValue(), out, extended, indent+2);
+      }
+    }
+  }
+
+  private void outputList(List<?> l, String header,
+                          PrintStream out, boolean extended, int indent)
+    throws Exception {
+
+    boolean first_el = true;
+    boolean nl = false;
+    for(Object o: l) {
+      if (first_el) {
+        out.print(header);
+      }
+
+      if (isPrintable(o)) {
+        if (!first_el) {
+          out.print(", ");
+        } else {
+          out.print(" ");
+        }
+
+        out.print(o);
+        nl = true;
+      }
+      else if (o instanceof Serializable) {
+        if (first_el) {
+          out.println();
+        }
+        outputPlan((Serializable)o, out, extended, indent+2);
+      }
+
+      first_el = false;
+    }
+
+    if (nl) {
+      out.println();
+    }
+  }
+
+  private boolean isPrintable(Object val) {
+    if (val instanceof String ||
+        val instanceof Integer ||
+        val instanceof Byte ||
+        val instanceof Float ||
+        val instanceof Double) {
+      return true;
+    }
+
+    if (val.getClass().isPrimitive()) {
+      return true;
+    }
+
+    return false;
+  }
+
+  private void outputPlan(Serializable work, PrintStream out, boolean extended, int indent)
+    throws Exception {
+    // Check if work has an explain annotation
+    Annotation note = work.getClass().getAnnotation(explain.class);
+
+    if (note instanceof explain) {
+      explain xpl_note = (explain)note;
+      if (extended || xpl_note.normalExplain()) {
+        out.print(indentString(indent));
+        out.println(xpl_note.displayName());
+      }
+    }
+
+    // If this is an operator then we need to call the plan generation on the conf and then
+    // the children
+    if (work instanceof Operator) {
+      Operator<? extends Serializable> operator = (Operator<? extends Serializable>) work;
+      if (operator.getConf() != null) {
+        outputPlan(operator.getConf(), out, extended, indent);
+      }
+      if (operator.getChildOperators() != null) {
+        for(Operator<? extends Serializable> op: operator.getChildOperators()) {
+          outputPlan(op, out, extended, indent+2);
+        }
+      }
+      return;
+    }
+
+    // We look at all methods that generate values for explain
+    for(Method m: work.getClass().getMethods()) {
+      int prop_indents = indent+2;
+      note = m.getAnnotation(explain.class);
+
+      if (note instanceof explain) {
+        explain xpl_note = (explain)note;
+
+        if (extended || xpl_note.normalExplain()) {
+
+          Object val = m.invoke(work);
+
+          if (val == null) {
+            continue;
+          }
+
+          String header = null;
+          if (!xpl_note.displayName().equals("")){
+            header = indentString(prop_indents) + xpl_note.displayName() +":";
+          } else {
+            prop_indents = indent;
+            header = indentString(prop_indents);
+          }
+
+          if (isPrintable(val)) {
+
+            out.printf("%s ", header);
+            out.println(val);
+            continue;
+          }
+          // Try this as a map
+          try {
+            // Go through the map and print out the stuff
+            Map<?, ?> mp = (Map<?, ?>)val;
+            outputMap(mp, header, out, extended, prop_indents+2);
+            continue;
+          }
+          catch (ClassCastException ce) {
+            // Ignore - all this means is that this is not a map
+          }
+
+          // Try this as a list
+          try {
+            List<?> l = (List<?>)val;
+            outputList(l, header, out, extended, prop_indents+2);
+
+            continue;
+          }
+          catch (ClassCastException ce) {
+            // Ignore
+          }
+
+
+          // Finally check if it is serializable
+          try {
+            Serializable s = (Serializable)val;
+            out.println(header);
+            outputPlan(s, out, extended, prop_indents+2);
+
+            continue;
+          }
+          catch (ClassCastException ce) {
+            // Ignore
+          }
+        }
+      }
+    }
+  }
+
+  private void outputPlan(Task<? extends Serializable> task, PrintStream out,
+                          boolean extended, HashSet<Task<? extends Serializable>> displayedSet,
+                          int indent)
+    throws Exception {
+
+    if (displayedSet.contains(task)) {
+      return;
+    }
+    displayedSet.add(task);
+
+    out.print(indentString(indent));
+    out.printf("Stage: %s\n", task.getId());
+    // Start by getting the work part of the task and call the output plan for the work
+    outputPlan(task.getWork(), out, extended, indent+2);
+    out.println();
+    if (task.getChildTasks() != null) {
+      for(Task<? extends Serializable> child: task.getChildTasks()) {
+        outputPlan(child, out, extended, displayedSet, indent);
+      }
+    }
+  }
+
+  private void outputDependencies(Task<? extends Serializable> task, PrintStream out, int indent)
+    throws Exception {
+
+    out.print(indentString(indent));
+    out.printf("%s", task.getId());
+    if (task.getParentTasks() == null || task.getParentTasks().isEmpty()) {
+      out.print(" is a root stage");
+    }
+    else {
+      out.print(" depends on stages: ");
+      boolean first = true;
+      for(Task<? extends Serializable> parent: task.getParentTasks()) {
+        if (!first) {
+          out.print(", ");
+        }
+        first = false;
+        out.print(parent.getId());
+      }
+    }
+    out.println();
+
+    if (task.getChildTasks() != null) {
+      for(Task<? extends Serializable> child: task.getChildTasks()) {
+        outputDependencies(child, out, indent);
+      }
+    }
+  }
+
+  public void outputAST(String treeString, PrintStream out, int indent) {
+    out.print(indentString(indent));
+    out.println("ABSTRACT SYNTAX TREE:");
+    out.print(indentString(indent+2));
+    out.println(treeString);
+  }
+
+  public void outputDependencies(PrintStream out,
+                                 List<Task<? extends Serializable>> rootTasks,
+                                 int indent)
+    throws Exception {
+    out.print(indentString(indent));
+    out.println("STAGE DEPENDENCIES:");
+    for(Task<? extends Serializable> rootTask: rootTasks) {
+      outputDependencies(rootTask, out, indent+2);
+    }
+  }
+
+  public void outputStagePlans(PrintStream out,
+                               List<Task<? extends Serializable>> rootTasks,
+                               int indent)
+    throws Exception {
+    out.print(indentString(indent));
+    out.println("STAGE PLANS:");
+    for(Task<? extends Serializable> rootTask: rootTasks) {
+      outputPlan(rootTask, out, work.getExtended(),
+                 new HashSet<Task<? extends Serializable>>(), indent+2);
+    }
+  }
+}
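[Editor's note: ExplainTask discovers what to print via reflection -- any getter annotated with the patch's plan.explain annotation is invoked, its displayName() becomes the label, and normalExplain() gates whether the field appears without EXTENDED. A minimal self-contained sketch of that pattern; the annotation and bean below are invented stand-ins mirroring the mechanism, not the real Hive classes:

    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.reflect.Method;

    // Mirrors the shape of org.apache.hadoop.hive.ql.plan.explain.
    @Retention(RetentionPolicy.RUNTIME)
    @interface Explain {
        String displayName() default "";
        boolean normalExplain() default true;
    }

    class FilterDescDemo {
        @Explain(displayName = "predicate")
        public String getPredicateString() { return "(key < 100)"; }

        @Explain(displayName = "internal id", normalExplain = false)  // EXTENDED only
        public int getId() { return 42; }
    }

    class ExplainDemo {
        // Same walk as ExplainTask.outputPlan: annotated getters become labeled lines.
        static void dump(Object work, boolean extended) throws Exception {
            for (Method m : work.getClass().getMethods()) {
                Explain note = m.getAnnotation(Explain.class);
                if (note != null && (extended || note.normalExplain())) {
                    System.out.println(note.displayName() + ": " + m.invoke(work));
                }
            }
        }

        public static void main(String[] args) throws Exception {
            dump(new FilterDescDemo(), false); // prints only "predicate: (key < 100)"
            dump(new FilterDescDemo(), true);  // also prints "internal id: 42"
        }
    }
]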
Added: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java?rev=696736&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java Thu Sep 18 10:37:59 2008
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+public class FunctionInfo {
+
+  private String displayName;
+
+  private OperatorType opType;
+
+  private boolean isOperator;
+
+  private Class udfClass;
+
+  private Class udafClass;
+
+  public static enum OperatorType { NO_OP, PREFIX, INFIX, POSTFIX };
+
+  public FunctionInfo(String displayName, Class udfClass, Class udafClass) {
+    assert(udfClass == null || udafClass == null);
+    this.displayName = displayName;
+    opType = OperatorType.NO_OP;
+    isOperator = false;
+    this.udfClass = udfClass;
+    this.udafClass = udafClass;
+  }
+
+  public FunctionInfo(String displayName, OperatorType opType, Class udfClass) {
+    this.displayName = displayName;
+    this.opType = opType;
+    this.udfClass = udfClass;
+    this.udafClass = null;
+  }
+
+  public boolean isAggFunction() {
+    return (udafClass != null && udfClass == null);
+  }
+
+  public boolean isOperator() {
+    return isOperator;
+  }
+
+  public void setIsOperator(boolean val) {
+    isOperator = val;
+  }
+
+  public void setOpType(OperatorType opt) {
+    opType = opt;
+  }
+
+  public OperatorType getOpType() {
+    return opType;
+  }
+
+  public Class getUDFClass() {
+    return udfClass;
+  }
+
+  public Class getUDAFClass() {
+    return udafClass;
+  }
+
+  public String getDisplayName() {
+    return displayName;
+  }
+}

Added: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=696736&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Thu Sep 18 10:37:59 2008
@@ -0,0 +1,322 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import java.lang.reflect.Method;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.lang.Void;
+
+import org.apache.hadoop.hive.ql.exec.FunctionInfo.OperatorType;
+import org.apache.hadoop.hive.ql.parse.TypeInfo;
+import org.apache.hadoop.hive.ql.udf.*;
+
+public class FunctionRegistry {
+
+  private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.exec.FunctionRegistry");
+
+  /**
+   * The mapping from expression function names to expression classes.
+   */
+  static HashMap<String, FunctionInfo> mFunctions;
+  static {
+    mFunctions = new HashMap<String, FunctionInfo>();
+    registerUDF("default_sample_hashfn", UDFDefaultSampleHashFn.class,
+                OperatorType.PREFIX, false);
+    registerUDF("concat", UDFConcat.class, OperatorType.PREFIX, false);
+    registerUDF("substr", UDFSubstr.class, OperatorType.PREFIX, false);
+    registerUDF("str_eq", UDFStrEq.class, OperatorType.PREFIX, false);
+    registerUDF("str_ne", UDFStrNe.class, OperatorType.PREFIX, false);
+    registerUDF("str_gt", UDFStrGt.class, OperatorType.PREFIX, false);
+    registerUDF("str_lt", UDFStrLt.class, OperatorType.PREFIX, false);
+    registerUDF("str_ge", UDFStrGe.class, OperatorType.PREFIX, false);
+    registerUDF("str_le", UDFStrLe.class, OperatorType.PREFIX, false);
+
+    registerUDF("upper", UDFUpper.class, OperatorType.PREFIX, false);
+    registerUDF("lower", UDFLower.class, OperatorType.PREFIX, false);
+    registerUDF("ucase", UDFUpper.class, OperatorType.PREFIX, false);
+    registerUDF("lcase", UDFLower.class, OperatorType.PREFIX, false);
+    registerUDF("trim", UDFTrim.class, OperatorType.PREFIX, false);
+    registerUDF("ltrim", UDFLTrim.class, OperatorType.PREFIX, false);
+    registerUDF("rtrim", UDFRTrim.class, OperatorType.PREFIX, false);
+
+    registerUDF("like", UDFLike.class, OperatorType.INFIX, true);
+    registerUDF("rlike", UDFRegExp.class, OperatorType.INFIX, true);
+    registerUDF("regexp", UDFRegExp.class, OperatorType.INFIX, true);
+    registerUDF("regexp_replace", UDFRegExpReplace.class, OperatorType.PREFIX, false);
+
+    registerUDF("+", UDFOPPlus.class, OperatorType.INFIX, true);
+    registerUDF("-", UDFOPMinus.class, OperatorType.INFIX, true);
+    registerUDF("*", UDFOPMultiply.class, OperatorType.INFIX, true);
+    registerUDF("/", UDFOPDivide.class, OperatorType.INFIX, true);
+    registerUDF("%", UDFOPMod.class, OperatorType.INFIX, true);
+
+    registerUDF("&", UDFOPBitAnd.class, OperatorType.INFIX, true);
+    registerUDF("|", UDFOPBitOr.class, OperatorType.INFIX, true);
+    registerUDF("^", UDFOPBitXor.class, OperatorType.INFIX, true);
+    registerUDF("~", UDFOPBitNot.class, OperatorType.INFIX, true);
+
+    registerUDF("=", UDFOPEqual.class, OperatorType.INFIX, true);
+    registerUDF("==", UDFOPEqual.class, OperatorType.INFIX, true, "=");
+    registerUDF("<>", UDFOPNotEqual.class, OperatorType.INFIX, true);
+    registerUDF("<", UDFOPLessThan.class, OperatorType.INFIX, true);
+    registerUDF("<=", UDFOPEqualOrLessThan.class, OperatorType.INFIX, true);
+    registerUDF(">", UDFOPGreaterThan.class, OperatorType.INFIX, true);
+    registerUDF(">=", UDFOPEqualOrGreaterThan.class, OperatorType.INFIX, true);
+
+    registerUDF("and", UDFOPAnd.class, OperatorType.INFIX, true);
+    registerUDF("&&", UDFOPAnd.class, OperatorType.INFIX, true, "and");
+    registerUDF("or", UDFOPOr.class, OperatorType.INFIX, true);
+    registerUDF("||", UDFOPOr.class, OperatorType.INFIX, true, "or");
+    registerUDF("not", UDFOPNot.class, OperatorType.INFIX, true);
+    registerUDF("!", UDFOPNot.class, OperatorType.INFIX, true, "not");
+
+    registerUDF("isnull", UDFOPNull.class, OperatorType.POSTFIX, true, "is null");
+    registerUDF("isnotnull", UDFOPNotNull.class, OperatorType.POSTFIX, true, "is not null");
+
+    // Aliases for Java Class Names
+    // These are used in getImplicitConvertUDFMethod
+    registerUDF(Boolean.class.getName(), UDFToBoolean.class, OperatorType.PREFIX, false,
+                UDFToBoolean.class.getSimpleName());
+    registerUDF(Byte.class.getName(), UDFToByte.class, OperatorType.PREFIX, false,
+                UDFToByte.class.getSimpleName());
+    registerUDF(Integer.class.getName(), UDFToInteger.class, OperatorType.PREFIX, false,
+                UDFToInteger.class.getSimpleName());
+    registerUDF(Long.class.getName(), UDFToLong.class, OperatorType.PREFIX, false,
+                UDFToLong.class.getSimpleName());
+    registerUDF(Float.class.getName(), UDFToFloat.class, OperatorType.PREFIX, false,
+                UDFToFloat.class.getSimpleName());
+    registerUDF(Double.class.getName(), UDFToDouble.class, OperatorType.PREFIX, false,
+                UDFToDouble.class.getSimpleName());
+    registerUDF(String.class.getName(), UDFToString.class, OperatorType.PREFIX, false,
+                UDFToString.class.getSimpleName());
+    registerUDF(java.sql.Date.class.getName(), UDFToDate.class, OperatorType.PREFIX, false,
+                UDFToDate.class.getSimpleName());
+
+    // Aggregate functions
+    registerUDAF("sum", UDAFSum.class);
+    registerUDAF("count", UDAFCount.class);
+    registerUDAF("max", UDAFMax.class);
+    registerUDAF("min", UDAFMin.class);
+    registerUDAF("avg", UDAFAvg.class);
+  }
+
+  public static FunctionInfo getInfo(Class fClass) {
+    for(Map.Entry<String, FunctionInfo> ent: mFunctions.entrySet()) {
+      FunctionInfo val = ent.getValue();
+      if (val.getUDFClass() == fClass ||
+          val.getUDAFClass() == fClass) {
+        return val;
+      }
+    }
+
+    return null;
+  }
+
+  public static void registerUDF(String functionName, Class UDFClass,
+                                 FunctionInfo.OperatorType opt, boolean isOperator) {
+    if (UDF.class.isAssignableFrom(UDFClass)) {
+      FunctionInfo fI = new FunctionInfo(functionName.toLowerCase(), UDFClass, null);
+      fI.setIsOperator(isOperator);
+      fI.setOpType(opt);
+      mFunctions.put(functionName.toLowerCase(), fI);
+    } else {
+      throw new RuntimeException("Registering UDF Class " + UDFClass + " which does not extends " + UDF.class);
+    }
+  }
+
+  public static void registerUDF(String functionName, Class UDFClass,
+                                 FunctionInfo.OperatorType opt, boolean isOperator,
+                                 String displayName) {
+    if (UDF.class.isAssignableFrom(UDFClass)) {
+      FunctionInfo fI = new FunctionInfo(displayName, UDFClass, null);
+      fI.setIsOperator(isOperator);
+      fI.setOpType(opt);
+      mFunctions.put(functionName.toLowerCase(), fI);
+    } else {
+      throw new RuntimeException("Registering UDF Class " + UDFClass + " which does not extends " + UDF.class);
+    }
+  }
+
+  public static Class getUDFClass(String functionName) {
+    LOG.debug("Looking up: " + functionName);
+    FunctionInfo finfo = mFunctions.get(functionName.toLowerCase());
+    if (finfo == null) {
{ + return null; + } + Class result = finfo.getUDFClass(); + return result; + } + + static Map, Integer> numericTypes; + static { + numericTypes = new HashMap, Integer>(); + numericTypes.put(Byte.class, 1); + numericTypes.put(Integer.class, 2); + numericTypes.put(Long.class, 3); + numericTypes.put(Float.class, 4); + numericTypes.put(Double.class, 5); + numericTypes.put(String.class, 6); + } + + /** + * Find a common class that objects of both Class a and Class b can convert to. + * @return null if no common class could be found. + */ + public static Class getCommonClass(Class a, Class b) { + // Equal + if (a.equals(b)) return a; + // Java class inheritance hierarchy + if (a.isAssignableFrom(b)) return a; + if (b.isAssignableFrom(a)) return b; + // Prefer String to Number conversion before implicit conversions + if (Number.class.isAssignableFrom(a) && b.equals(String.class)) return Double.class; + if (Number.class.isAssignableFrom(b) && a.equals(String.class)) return Double.class; + // implicit conversions + if (FunctionRegistry.implicitConvertable(a, b)) return b; + if (FunctionRegistry.implicitConvertable(b, a)) return a; + return null; + } + + /** Returns whether it is possible to implicitly convert an object of Class from to Class to. + */ + public static boolean implicitConvertable(Class from, Class to) { + assert(!from.equals(to)); + // Allow implicit String to Double conversion + if (from.equals(String.class) && to.equals(Double.class)) { + return true; + } + if (from.equals(String.class) && to.equals(java.sql.Date.class)) { + return true; + } + if (from.equals(java.sql.Date.class) && to.equals(String.class)) { + return true; + } + // Allow implicit conversion from Byte -> Integer -> Long -> Float -> Double -> String + Integer f = numericTypes.get(from); + Integer t = numericTypes.get(to); + if (f == null || t == null) return false; + if (f.intValue() > t.intValue()) return false; + return true; + } + + /** + * Get the UDF method for the name and argumentClasses. + * @param name the name of the UDF + * @param argumentClasses + * @param exact if true, we don't allow implicit type conversions. + * @return + */ + public static Method getUDFMethod(String name, boolean exact, List> argumentClasses) { + Class udf = getUDFClass(name); + if (udf == null) return null; + return getMethodInternal(udf, "evaluate", exact, argumentClasses); + } + + /** + * This method is shared between UDFRegistry and UDAFRegistry. + * methodName will be "evaluate" for UDFRegistry, and "aggregate" for UDAFRegistry. + */ + public static Method getMethodInternal(Class udfClass, String methodName, boolean exact, List> argumentClasses) { + int leastImplicitConversions = Integer.MAX_VALUE; + Method udfMethod = null; + + for(Method m: Arrays.asList(udfClass.getMethods())) { + if (m.getName().equals(methodName)) { + + Class[] argumentTypeInfos = m.getParameterTypes(); + + boolean match = (argumentTypeInfos.length == argumentClasses.size()); + int implicitConversions = 0; + + for(int i=0; i accepted = TypeInfo.generalizePrimitive(argumentTypeInfos[i]); + if (accepted.isAssignableFrom(argumentClasses.get(i))) { + // do nothing if match + } else if (!exact && implicitConvertable(argumentClasses.get(i), accepted)) { + implicitConversions ++; + } else { + match = false; + } + } + + if (match) { + // Always choose the function with least implicit conversions. 
+          if (implicitConversions < leastImplicitConversions) {
+            udfMethod = m;
+            leastImplicitConversions = implicitConversions;
+            // Found an exact match
+            if (leastImplicitConversions == 0) break;
+          } else if (implicitConversions == leastImplicitConversions){
+            // Ambiguous call: two methods with the same number of implicit conversions
+            udfMethod = null;
+          } else {
+            // do nothing if implicitConversions > leastImplicitConversions
+          }
+        }
+      }
+    }
+    return udfMethod;
+  }
+
+  public static Method getUDFMethod(String name, boolean exact, Class ... argumentClasses) {
+    return getUDFMethod(name, exact, Arrays.asList(argumentClasses));
+  }
+
+  public static void registerUDAF(String functionName, Class UDAFClass) {
+
+    if (UDAF.class.isAssignableFrom(UDAFClass)) {
+      mFunctions.put(functionName.toLowerCase(), new FunctionInfo(functionName
+          .toLowerCase(), null, UDAFClass));
+    } else {
+      throw new RuntimeException("Registering UDAF Class " + UDAFClass
+          + " which does not extends " + UDAF.class);
+    }
+    mFunctions.put(functionName.toLowerCase(), new FunctionInfo(functionName
+        .toLowerCase(), null, UDAFClass));
+  }
+
+  public static Class getUDAF(String functionName) {
+    LOG.debug("Looking up UDAF: " + functionName);
+    FunctionInfo finfo = mFunctions.get(functionName.toLowerCase());
+    if (finfo == null) {
+      return null;
+    }
+    Class result = finfo.getUDAFClass();
+    return result;
+  }
+
+  public static Method getUDAFMethod(String name, List<Class<?>> argumentClasses) {
+    Class udaf = getUDAF(name);
+    if (udaf == null)
+      return null;
+    return FunctionRegistry.getMethodInternal(udaf, "aggregate", false,
+        argumentClasses);
+  }
+
+  public static Method getUDAFMethod(String name, Class... argumentClasses) {
+    return getUDAFMethod(name, Arrays.asList(argumentClasses));
+  }
+}
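[Editor's note: the registry resolves overloaded evaluate() methods by counting implicit conversions along the Byte -> Integer -> Long -> Float -> Double -> String chain and picking the candidate with the fewest; two candidates tied on conversion count null out the result as ambiguous. A hypothetical read-only use of this API, assuming UDFConcat exposes an evaluate(String, String) overload:

    import java.lang.reflect.Method;

    import org.apache.hadoop.hive.ql.exec.FunctionRegistry;

    class RegistryDemo {
        public static void main(String[] args) {
            // "concat" is registered by the static initializer above. With
            // exact=false, an Integer argument can still match a String
            // parameter via the implicit numeric-to-String chain.
            Method m = FunctionRegistry.getUDFMethod("concat", false, String.class, Integer.class);
            System.out.println(m);

            // getCommonClass applies the same ordering when unifying two types:
            System.out.println(FunctionRegistry.getCommonClass(Integer.class, Long.class));  // Long
            System.out.println(FunctionRegistry.getCommonClass(String.class, Integer.class)); // Double
        }
    }
]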
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java Thu Sep 18 10:37:59 2008
@@ -28,7 +28,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
+import org.apache.hadoop.hive.ql.plan.explain;
 
 /**
  * Base operator implementation
@@ -38,6 +38,7 @@
   // Bean methods
 
   private static final long serialVersionUID = 1L;
+  protected List<Operator<? extends Serializable>> childOperators;
 
   public Operator() {}
 
@@ -57,6 +58,7 @@
     this.conf = conf;
   }
 
+  @explain
   public T getConf() {
     return conf;
   }
@@ -96,7 +98,7 @@
     if(childOperators == null)
       return;
 
-    for(Operator op: childOperators) {
+    for(Operator<? extends Serializable> op: childOperators) {
       op.setOutputCollector(out);
     }
   }
@@ -148,9 +150,9 @@
 
-  public Map getStats() {
-    HashMap ret = new HashMap ();
-    for(Enum one: statsMap.keySet()) {
+  public Map<Enum<?>, Long> getStats() {
+    HashMap<Enum<?>, Long> ret = new HashMap<Enum<?>, Long> ();
+    for(Enum<?> one: statsMap.keySet()) {
       ret.put(one, Long.valueOf(statsMap.get(one).get()));
     }
     return(ret);
@@ -226,7 +228,7 @@
   }
 
   public void resetStats() {
-    for(Enum e: statsMap.keySet()) {
+    for(Enum<?> e: statsMap.keySet()) {
       statsMap.get(e).set(0L);
     }
   }
@@ -246,7 +248,7 @@
   }
 
   public void logStats () {
-    for(Enum e: statsMap.keySet()) {
+    for(Enum<?> e: statsMap.keySet()) {
       l4j.info(e.toString() + ":" + statsMap.get(e).toString());
     }
   }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java Thu Sep 18 10:37:59 2008
@@ -41,22 +41,32 @@
   public static ArrayList<taskTuple<? extends Serializable>> taskvec;
   static {
+    id = 0;
     taskvec = new ArrayList<taskTuple<? extends Serializable>>();
     taskvec.add(new taskTuple(moveWork.class, MoveTask.class));
     taskvec.add(new taskTuple(copyWork.class, CopyTask.class));
     taskvec.add(new taskTuple(DDLWork.class, DDLTask.class));
+    taskvec.add(new taskTuple(explainWork.class, ExplainTask.class));
     // we are taking this out to allow us to instantiate either MapRedTask or
     // ExecDriver dynamically at run time based on configuration
     // taskvec.add(new taskTuple(mapredWork.class, ExecDriver.class));
   }
 
+  private static int id;
+
+  public static void resetId() {
+    id = 0;
+  }
+
   @SuppressWarnings("unchecked")
   public static <T extends Serializable> Task<T> get(Class<T> workClass, HiveConf conf) {
 
     for(taskTuple t: taskvec) {
       if(t.workClass == workClass) {
         try {
-          return (Task<T>)t.taskClass.newInstance();
+          Task<T> ret = (Task<T>)t.taskClass.newInstance();
+          ret.setId("Stage-" + Integer.toString(id++));
+          return ret;
         } catch (Exception e) {
           throw new RuntimeException(e);
         }
@@ -71,12 +81,15 @@
       // in local mode - or if otherwise so configured - always submit
       // jobs via separate jvm
+      Task<T> ret = null;
       if(conf.getVar(HiveConf.ConfVars.HADOOPJT).equals("local") ||
          viachild.equals("true")) {
-        return (Task<T>)MapRedTask.class.newInstance();
+        ret = (Task<T>)MapRedTask.class.newInstance();
       } else {
-        return (Task<T>)ExecDriver.class.newInstance();
+        ret = (Task<T>)ExecDriver.class.newInstance();
      }
+      ret.setId("Stage-" + Integer.toString(id++));
+      return ret;
    } catch (Exception e) {
      throw new RuntimeException (e.getMessage(), e);
    }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Thu Sep 18 10:37:59 2008
@@ -34,6 +34,7 @@
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.plan.*;
 import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -90,6 +91,19 @@
     }
   }
 
+  public static List<String> getFieldSchemaString(List<FieldSchema> fl) {
+    if (fl == null) {
+      return null;
+    }
+
+    ArrayList<String> ret = new ArrayList<String>();
+    for(FieldSchema f: fl) {
+      ret.add(f.getName() + " " + f.getType() +
+              (f.getComment() != null ? (" " + f.getComment()) : ""));
+    }
+    return ret;
+  }
+
   /**
    * Java 1.5 workaround.
   * From http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=5015403

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Thu Sep 18 10:37:59 2008
@@ -30,6 +30,7 @@
 import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
 public abstract class BaseSemanticAnalyzer {
@@ -44,6 +45,7 @@
   protected final LogHelper console;
   protected Context ctx;
 
+
   public BaseSemanticAnalyzer(HiveConf conf) throws SemanticException {
     try {
       this.conf = conf;
@@ -62,7 +64,7 @@
   }
 
   public abstract void analyze(CommonTree ast, Context ctx) throws SemanticException;
-
+
   public List<Task<? extends Serializable>> getRootTasks() {
     return rootTasks;
   }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Thu Sep 18 10:37:59 2008
@@ -65,6 +65,7 @@
     super(conf);
   }
 
+  @Override
   public void analyze(CommonTree ast, Context ctx) throws SemanticException {
     this.ctx = ctx;
     if (ast.getToken().getType() == HiveParser.TOK_CREATETABLE)
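[Editor's note: the TaskFactory change above is what gives EXPLAIN its stable stage names -- every task minted through the factory gets "Stage-" plus a counter, and Driver calls resetId() once per query so numbering restarts at Stage-0. A standalone sketch of that scheme; DemoTask and DemoTaskFactory are invented placeholders mirroring the patch's logic:

    class DemoTask {
        private String id;
        void setId(String id) { this.id = id; }
        String getId() { return id; }
    }

    class DemoTaskFactory {
        private static int id = 0;

        static void resetId() { id = 0; }   // Driver calls this once per query

        static DemoTask get() {
            DemoTask t = new DemoTask();
            t.setId("Stage-" + Integer.toString(id++));  // Stage-0, Stage-1, ...
            return t;
        }

        public static void main(String[] args) {
            resetId();
            System.out.println(get().getId()); // Stage-0
            System.out.println(get().getId()); // Stage-1 -- the ids EXPLAIN prints
        }
    }
]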
Added: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=696736&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Thu Sep 18 10:37:59 2008
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import java.io.File;
+
+import org.antlr.runtime.tree.CommonTree;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.plan.explainWork;
+
+public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
+
+
+  public ExplainSemanticAnalyzer(HiveConf conf) throws SemanticException {
+    super(conf);
+  }
+
+  public void analyze(CommonTree ast, Context ctx) throws SemanticException {
+
+    // Create a semantic analyzer for the query
+    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, (CommonTree)ast.getChild(0));
+    sem.analyze((CommonTree)ast.getChild(0), ctx);
+
+    boolean extended = false;
+    if (ast.getChildCount() > 1) {
+      extended = true;
+    }
+
+    ctx.setResFile(new File(getTmpFileName()));
+
+    rootTasks.add(TaskFactory.get(new explainWork(ctx.getResFile(),
+                                                  sem.getRootTasks(),
+                                                  ((CommonTree)ast.getChild(0)).toStringTree(),
+                                                  extended), this.conf));
+  }
+}
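A standalone sketch of what the analyzer above assembles before handing off to ExplainTask. The class and field names below are illustrative stand-ins, not the Hive explainWork class itself; the four fields are the four constructor arguments visible in the diff, and the child-count test is the same one the analyzer uses to detect EXTENDED.

    import java.io.File;
    import java.util.List;

    public class ExplainWorkSketch {
      final File resFile;          // where the explain task writes the rendered plan
      final List<?> rootTasks;     // root tasks of the query being explained
      final String astStringTree;  // toStringTree() of the wrapped statement
      final boolean extended;      // true when EXPLAIN EXTENDED was used

      ExplainWorkSketch(File resFile, List<?> rootTasks,
                        String astStringTree, boolean extended) {
        this.resFile = resFile;
        this.rootTasks = rootTasks;
        this.astStringTree = astStringTree;
        this.extended = extended;
      }

      public static void main(String[] args) {
        // "EXPLAIN EXTENDED <query>" parses to ^(TOK_EXPLAIN <query> EXTENDED),
        // so a child count greater than 1 is exactly the EXTENDED case.
        int childCount = 2; // stand-in for ast.getChildCount()
        ExplainWorkSketch w = new ExplainWorkSketch(
            new File("/tmp/explain.out"), null, "(TOK_QUERY ...)", childCount > 1);
        System.out.println("extended = " + w.extended);
      }
    }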
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Thu Sep 18 10:37:59 2008
@@ -97,6 +97,7 @@
 TOK_TABLELOCATION;
 TOK_TABLESAMPLE;
 TOK_TMP_FILE;
+TOK_EXPLAIN;
 }

@@ -115,9 +116,18 @@
 // starting rule
 statement
-	: queryStatementExpression EOF
-	| loadStatement EOF
-	| ddlStatement EOF
+	: explainStatement EOF
+	| execStatement EOF
+	;
+
+explainStatement
+	: KW_EXPLAIN (isExtended=KW_EXTENDED)? execStatement -> ^(TOK_EXPLAIN execStatement $isExtended?)
+	;
+
+execStatement
+	: queryStatementExpression
+	| loadStatement
+	| ddlStatement
 	;

 loadStatement
@@ -766,6 +776,8 @@
 KW_COLUMNS: 'COLUMNS';
 KW_RLIKE: 'RLIKE';
 KW_REGEXP: 'REGEXP';
+KW_EXPLAIN: 'EXPLAIN';
+KW_EXTENDED: 'EXTENDED';

 // Operators

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java Thu Sep 18 10:37:59 2008
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.parse;

 import java.io.IOException;
+import java.io.Serializable;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
@@ -28,8 +29,6 @@
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -156,6 +155,7 @@
     }
   }

+  @Override
   public void analyze(CommonTree ast, Context ctx) throws SemanticException {
     isLocal = isOverWrite = false;
     Tree from_t = ast.getChild(0);
@@ -191,7 +191,7 @@
     // make sure the arguments make sense
     applyConstraints(fromURI, toURI, from_t, isLocal);

-    Task rTask = null;
+    Task<? extends Serializable> rTask = null;

     // create copy work
     if(isLocal) {

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java Thu Sep 18 10:37:59 2008
@@ -29,14 +29,11 @@
 import java.util.*;

 import org.antlr.runtime.tree.*;
-import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
 import org.apache.hadoop.hive.ql.exec.HiveObject;
 import org.apache.hadoop.hive.ql.exec.LabeledCompositeHiveObject;
 import org.apache.hadoop.hive.ql.exec.PrimitiveHiveObject;
-import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.ql.exec.UDFRegistry;
 import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Thu Sep 18 10:37:59 2008
@@ -143,12 +143,12 @@
   public String toString() {
     StringBuffer sb = new StringBuffer();
-    for(Map.Entry e: rslvMap.entrySet()) {
+    for(Map.Entry<String, HashMap<String, ColumnInfo>> e: rslvMap.entrySet()) {
       String tab = (String)e.getKey();
       sb.append(tab + "{");
       HashMap<String, ColumnInfo> f_map = (HashMap<String, ColumnInfo>)e.getValue();
       if (f_map != null)
-        for(Map.Entry entry: f_map.entrySet()) {
+        for(Map.Entry<String, ColumnInfo> entry: f_map.entrySet()) {
           sb.append("(" + (String)entry.getKey() + "," + entry.getValue().toString() + ")");
         }
       sb.append("} ");

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Sep 18 10:37:59 2008
@@ -134,7 +134,7 @@
     assert (expressionTree.getChildCount() != 0);
     assert (expressionTree.getChild(0).getType() == HiveParser.Identifier);
     String functionName = expressionTree.getChild(0).getText();
-    if (UDAFRegistry.getUDAF(functionName) != null) {
+    if (FunctionRegistry.getUDAF(functionName) != null) {
       aggregations.put(expressionTree.toStringTree(), expressionTree);
       return;
     }
@@ -987,7 +987,7 @@
     for (Map.Entry<String, CommonTree> entry : aggregationTrees.entrySet()) {
       CommonTree value = entry.getValue();
       String aggName = value.getChild(0).getText();
-      Class aggClass = UDAFRegistry.getUDAF(aggName);
+      Class aggClass = FunctionRegistry.getUDAF(aggName);
       assert (aggClass != null);
       ArrayList aggParameters = new ArrayList();
       ArrayList<Class<?>> aggClasses = new ArrayList<Class<?>>();
@@ -1006,7 +1006,7 @@
         aggClasses.add(paraExprInfo.getType().getPrimitiveClass());
       }

-      if (null == UDAFRegistry.getUDAFMethod(aggName, aggClasses)) {
+      if (null == FunctionRegistry.getUDAFMethod(aggName, aggClasses)) {
        String reason = "Looking for UDAF \"" + aggName + "\" with parameters " + aggClasses;
        throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE.getMsg((CommonTree)value.getChild(0), reason));
       }
@@ -1059,7 +1059,7 @@
     for (Map.Entry<String, CommonTree> entry : aggregationTrees.entrySet()) {
       CommonTree value = entry.getValue();
       String aggName = value.getChild(0).getText();
-      Class aggClass = UDAFRegistry.getUDAF(aggName);
+      Class aggClass = FunctionRegistry.getUDAF(aggName);
       assert (aggClass != null);
       ArrayList aggParameters = new ArrayList();
       ArrayList<Class<?>> aggClasses = new ArrayList<Class<?>>();
@@ -1078,7 +1078,7 @@
         aggClasses.add(paraExprInfo.getType().getPrimitiveClass());
       }

-      if (null == UDAFRegistry.getUDAFMethod(aggName, aggClasses)) {
+      if (null == FunctionRegistry.getUDAFMethod(aggName, aggClasses)) {
        String reason = "Looking for UDAF \"" + aggName + "\" with parameters " + aggClasses;
        throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE.getMsg((CommonTree)value.getChild(0), reason));
       }
@@ -1288,8 +1288,7 @@
     int inputField = reduceKeys.size();
     HashMap<String, CommonTree> aggregationTrees = parseInfo
         .getAggregationExprsForClause(dest);
-    for (Map.Entry entry : aggregationTrees.entrySet()) {
-      String key = (String)entry.getKey();
+    for (Map.Entry<String, CommonTree> entry : aggregationTrees.entrySet()) {
       reduceValues.add(new exprNodeColumnDesc(TypeInfo.getPrimitiveTypeInfo(String.class),
                                              (Integer.valueOf(inputField)).toString()));
       inputField++;
@@ -1336,7 +1335,7 @@
     for (Map.Entry<String, CommonTree> entry : aggregationTrees.entrySet()) {
       CommonTree value = entry.getValue();
       String aggName = value.getChild(0).getText();
-      Class aggClass = UDAFRegistry.getUDAF(aggName);
+      Class aggClass = FunctionRegistry.getUDAF(aggName);
       assert (aggClass != null);
       ArrayList aggParameters = new ArrayList();
       String text = entry.getKey();
@@ -1535,7 +1534,7 @@
     // We have the table object here - go over the row resolver
     // and check all the types are the same
-    Vector srcOpns = input.get(0).getRowResolver().getColumnInfos();
+    input.get(0).getRowResolver().getColumnInfos();

     Vector insOpns = new Vector();
     for (SerDeField field : dest_tab.getFields(null)) {
@@ -1836,7 +1835,7 @@
     for(int i=1; i<keys.size(); i++) {
       for(int k=0; k<keys.get(i).size(); k++) {
         Class<?> a = commonClass;
         Class<?> b = keys.get(i).get(k).getTypeInfo().getPrimitiveClass();
-        commonClass = UDFRegistry.getCommonClass(a, b);
+        commonClass = FunctionRegistry.getCommonClass(a, b);
         if (commonClass == null) {
           throw new SemanticException("Cannot do equality join on different types: " + a.getClass() + " and " + b.getClass());
         }
@@ -2693,7 +2692,7 @@
       assert(childTypeInfo != null);
       argumentClasses.add(childTypeInfo.getPrimitiveClass());
     }
-    Method udfMethod = UDFRegistry.getUDFMethod(udfName, false, argumentClasses);
+    Method udfMethod = FunctionRegistry.getUDFMethod(udfName, false, argumentClasses);
     if (udfMethod == null) return null;

     ArrayList ch = new ArrayList();
@@ -2713,10 +2712,10 @@
         // must be implicit type conversion
         Class from = argumentClasses.get(i);
         Class to = pType;
-        assert(UDFRegistry.implicitConvertable(from, to));
-        Method m = UDFRegistry.getUDFMethod(to.getName(), true, from);
+        assert(FunctionRegistry.implicitConvertable(from, to));
+        Method m = FunctionRegistry.getUDFMethod(to.getName(), true, from);
         assert(m != null);
-        Class c = UDFRegistry.getUDFClass(to.getName());
+        Class c = FunctionRegistry.getUDFClass(to.getName());
         assert(c != null);

         // get the conversion method
@@ -2730,7 +2729,7 @@
     exprNodeFuncDesc desc = new exprNodeFuncDesc(
       TypeInfo.getPrimitiveTypeInfo(TypeInfo.generalizePrimitive(udfMethod.getReturnType())),
-      UDFRegistry.getUDFClass(udfName),
+      FunctionRegistry.getUDFClass(udfName),
       udfMethod, ch);
     return desc;
   }
@@ -2895,7 +2894,7 @@
       desc = new exprNodeIndexDesc(t, children.get(0), children.get(1));
     } else {
       // other operators or functions
-      Class udf = UDFRegistry.getUDFClass(funcText);
+      Class udf = FunctionRegistry.getUDFClass(funcText);
       if (udf == null) {
        throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg((CommonTree)expr.getChild(0)));
       }
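The SemanticAnalyzer hunks above are the caller side of this commit's registry consolidation: UDFRegistry and UDAFRegistry (both removed) are replaced by a single FunctionRegistry. The sketch below shows the general shape of such a merged registry; it is illustrative only, assuming a name-keyed map of FunctionInfo entries, and is not Hive's actual FunctionRegistry code. The case-insensitive lookup is a design guess.

    import java.util.HashMap;
    import java.util.Map;

    public class FunctionRegistrySketch {
      static class FunctionInfo {
        final String displayName;
        final Class<?> udfClass;   // null if this entry is an aggregate
        final Class<?> udafClass;  // null if this entry is a plain UDF
        FunctionInfo(String displayName, Class<?> udfClass, Class<?> udafClass) {
          this.displayName = displayName;
          this.udfClass = udfClass;
          this.udafClass = udafClass;
        }
      }

      private static final Map<String, FunctionInfo> registry =
          new HashMap<String, FunctionInfo>();

      static void registerUDF(String name, Class<?> udfClass) {
        registry.put(name.toLowerCase(), new FunctionInfo(name, udfClass, null));
      }

      static void registerUDAF(String name, Class<?> udafClass) {
        registry.put(name.toLowerCase(), new FunctionInfo(name, null, udafClass));
      }

      // one lookup path for both kinds of functions, as the renamed
      // getUDAF()/getUDFClass() call sites above suggest
      static Class<?> getUDAF(String name) {
        FunctionInfo fi = registry.get(name.toLowerCase());
        return fi == null ? null : fi.udafClass;
      }

      public static void main(String[] args) {
        registerUDAF("count", Object.class); // stand-in class object
        System.out.println(getUDAF("COUNT") != null); // true
      }
    }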
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Thu Sep 18 10:37:59 2008
@@ -29,6 +29,7 @@
       throw new RuntimeException ("Empty Syntax Tree");
     } else {
       switch (tree.getToken().getType()) {
+      case HiveParser.TOK_EXPLAIN: return new ExplainSemanticAnalyzer(conf);
       case HiveParser.TOK_LOAD: return new LoadSemanticAnalyzer(conf);
       case HiveParser.TOK_CREATETABLE:
       case HiveParser.TOK_CREATEEXTTABLE:

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeInfo.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeInfo.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeInfo.java Thu Sep 18 10:37:59 2008
@@ -232,7 +232,7 @@
   public static Class generalizePrimitive(Class primitiveClass) {
-    if (primitiveClass == Boolean.TYPE) primitiveClass = Boolean.class;
+    if (primitiveClass == Boolean.TYPE) primitiveClass = Boolean.class;
     if (primitiveClass == Byte.TYPE) primitiveClass = Byte.class;
     if (primitiveClass == Character.TYPE) primitiveClass = Character.class;
     if (primitiveClass == Short.TYPE) primitiveClass = Short.class;
@@ -244,4 +244,8 @@
     return primitiveClass;
   }

+  public String getTypeString() {
+    // TODO: Change this to print out a better user visible string
+    return toString();
+  }
 }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Thu Sep 18 10:37:59 2008
@@ -68,6 +68,7 @@
   /**
    * @return the createTblDesc
    */
+  @explain(displayName="Create Table Operator")
   public createTableDesc getCreateTblDesc() {
     return createTblDesc;
   }
@@ -82,6 +83,7 @@
   /**
    * @return the dropTblDesc
    */
+  @explain(displayName="Drop Table Operator")
   public dropTableDesc getDropTblDesc() {
     return dropTblDesc;
   }
@@ -96,6 +98,7 @@
   /**
    * @return the alterTblDesc
    */
+  @explain(displayName="Alter Table Operator")
   public alterTableDesc getAlterTblDesc() {
     return alterTblDesc;
   }
@@ -110,6 +113,7 @@
   /**
    * @return the showTblsDesc
    */
+  @explain(displayName="Show Table Operator")
   public showTablesDesc getShowTblsDesc() {
     return showTblsDesc;
   }
@@ -124,6 +128,7 @@
   /**
    * @return the descTblDesc
    */
+  @explain(displayName="Describe Table Operator")
   public descTableDesc getDescTblDesc() {
     return descTblDesc;
   }
@@ -133,5 +138,6 @@
    */
   public void setDescTblDesc(descTableDesc descTblDesc) {
     this.descTblDesc = descTblDesc;
-  }
+  }
+
 }
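The source of the new plan/explain.java annotation added by this commit is not in this first part of the mail, so the following is only a guess at its shape, inferred from how it is applied in the DDLWork hunk above and the plan classes below (@explain(displayName="...") on both classes and getters). Runtime retention is assumed here, since ExplainTask would presumably read the annotation reflectively when rendering a plan.

    import java.lang.annotation.ElementType;
    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.annotation.Target;

    // hypothetical reconstruction of org.apache.hadoop.hive.ql.plan.explain
    @Retention(RetentionPolicy.RUNTIME)
    @Target({ElementType.TYPE, ElementType.METHOD})
    @interface explain {
      // label printed for the annotated class or getter in EXPLAIN output
      String displayName() default "";
    }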
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java Thu Sep 18 10:37:59 2008
@@ -18,6 +18,8 @@

 package org.apache.hadoop.hive.ql.plan;

+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.UDAF;

 public class aggregationDesc implements java.io.Serializable {
@@ -52,4 +54,27 @@
   public void setDistinct(final boolean distinct) {
     this.distinct = distinct;
   }
+
+  @explain(displayName="expr")
+  public String getExprString() {
+    FunctionInfo fI = FunctionRegistry.getInfo(aggregationClass);
+    StringBuilder sb = new StringBuilder();
+    sb.append(fI.getDisplayName());
+    sb.append("(");
+    if (distinct) {
+      sb.append("DISTINCT ");
+    }
+    boolean first = true;
+    for(exprNodeDesc exp: parameters) {
+      if (!first) {
+        sb.append(", ");
+      }
+
+      sb.append(exp.getExprString());
+      first = false;
+    }
+    sb.append(")");
+
+    return sb.toString();
+  }
 }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java Thu Sep 18 10:37:59 2008
@@ -19,10 +19,13 @@
 package org.apache.hadoop.hive.ql.plan;

 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.List;

 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.exec.Utilities;

+@explain(displayName="Alter Table")
 public class alterTableDesc extends ddlDesc implements Serializable {
   private static final long serialVersionUID = 1L;
@@ -56,6 +59,7 @@
   /**
    * @return the old name of the table
    */
+  @explain(displayName="old name")
   public String getOldName() {
     return oldName;
   }
@@ -70,6 +74,7 @@
   /**
    * @return the newName
    */
+  @explain(displayName="new name")
   public String getNewName() {
     return newName;
   }
@@ -88,6 +93,17 @@
     return op;
   }

+  @explain(displayName="type")
+  public String getAlterTableTypeString() {
+    switch(op) {
+    case RENAME:
+      return "rename";
+    case ADDCOLS:
+      return "add columns";
+    }
+
+    return "unknown";
+  }
   /**
    * @param op the op to set
    */
@@ -102,6 +118,10 @@
     return newCols;
   }

+  @explain(displayName="new columns")
+  public List<String> getNewColsString() {
+    return Utilities.getFieldSchemaString(getNewCols());
+  }
   /**
    * @param newCols the newCols to set
    */

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/collectDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/collectDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/collectDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/collectDesc.java Thu Sep 18 10:37:59 2008
@@ -20,6 +20,7 @@

 import java.io.Serializable;

+@explain(displayName="Collect")
 public class collectDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   Integer bufferSize;
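A quick standalone rendering of what aggregationDesc.getExprString() above produces for the EXPLAIN output: the UDAF display name, an optional DISTINCT marker, and the comma-separated parameter expressions. The render() helper below re-implements just that string assembly; the sample inputs are made-up stand-ins for FunctionInfo and exprNodeDesc values.

    import java.util.Arrays;
    import java.util.List;

    public class ExprStringSketch {
      static String render(String displayName, boolean distinct, List<String> params) {
        StringBuilder sb = new StringBuilder();
        sb.append(displayName);
        sb.append("(");
        if (distinct) {
          sb.append("DISTINCT ");
        }
        boolean first = true;
        for (String p : params) {
          if (!first) {
            sb.append(", ");
          }
          sb.append(p);
          first = false;
        }
        sb.append(")");
        return sb.toString();
      }

      public static void main(String[] args) {
        // e.g. SELECT count(DISTINCT key, value) ... -> "count(DISTINCT key, value)"
        System.out.println(render("count", true, Arrays.asList("key", "value")));
      }
    }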
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/copyWork.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/copyWork.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/copyWork.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/copyWork.java Thu Sep 18 10:37:59 2008
@@ -20,7 +20,7 @@

 import java.io.Serializable;

-
+@explain(displayName="Copy")
 public class copyWork implements Serializable {
   private static final long serialVersionUID = 1L;
   private String fromPath;
@@ -33,12 +33,16 @@
     this.fromPath = fromPath;
     this.toPath = toPath;
   }
+
+  @explain(displayName="source")
   public String getFromPath() {
     return this.fromPath;
   }
   public void setFromPath(final String fromPath) {
     this.fromPath = fromPath;
   }
+
+  @explain(displayName="destination")
   public String getToPath() {
     return this.toPath;
   }