Subject: svn commit: r1629344 - in /hive/branches/spark/ql/src: java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/exec/mr/ java/org/apache/hadoop/hive/ql/exec/spark/ java/org/apache/hadoop/hive/ql/io/ test/org/apache/hadoop/hive/ql/exec/ test/...
Date: Sat, 04 Oct 2014 01:03:32 -0000
To: commits@hive.apache.org
From: xuefu@apache.org

Author: xuefu
Date: Sat Oct 4 01:03:32 2014
New Revision: 1629344

URL: http://svn.apache.org/r1629344
Log:
HIVE-8331: HIVE-8303 followup, investigate result diff [Spark Branch] (Chao via Xuefu)

Modified:
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out
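The substantive change is the new IOContext.get(Configuration) overload in IOContext.java below. When hive.execution.engine is "spark", the lookup no longer goes through the static map keyed by Utilities.INPUT_NAME (a key the Spark code paths stop setting, see SparkMapRecordHandler and SparkPlanGenerator); it falls back to the pre-existing no-arg get() instead. A minimal sketch of that dispatch pattern, assuming simplified stand-ins for Configuration, HiveConf, and IOContext itself (every name below is illustrative, not Hive's, and the no-arg fallback is collapsed to one shared instance):

    import java.util.HashMap;
    import java.util.Map;

    class IOContextSketch {
      // One context per input name, as used on the MapReduce path.
      private static final Map<String, IOContextSketch> byInputName = new HashMap<>();
      // Stand-in for the no-arg get() fallback the patch routes Spark to.
      private static final IOContextSketch sparkFallback = new IOContextSketch();

      static IOContextSketch get(Map<String, String> conf) {
        if ("spark".equals(conf.get("hive.execution.engine"))) {
          // Spark: ignore the input name, which may be unset or stale here.
          return sparkFallback;
        }
        // MapReduce: create or reuse a context keyed by the input name.
        return byInputName.computeIfAbsent(
            conf.get("iocontext.input.name"), k -> new IOContextSketch());
      }
    }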
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java?rev=1629344&r1=1629343&r2=1629344&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java Sat Oct 4 01:03:32 2014
@@ -76,7 +76,7 @@ public class FilterOperator extends Oper
       statsMap.put(Counter.FILTERED, filtered_count);
       statsMap.put(Counter.PASSED, passed_count);
       conditionInspector = null;
-      ioContext = IOContext.get(hconf.get(Utilities.INPUT_NAME));
+      ioContext = IOContext.get(hconf);
     } catch (Throwable e) {
       throw new HiveException(e);
     }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=1629344&r1=1629343&r2=1629344&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Sat Oct 4 01:03:32 2014
@@ -339,7 +339,7 @@ public class MapOperator extends Operato
   }

   public void setChildren(Configuration hconf) throws HiveException {

-    Path fpath = IOContext.get(hconf.get(Utilities.INPUT_NAME)).getInputPath();
+    Path fpath = IOContext.get(hconf).getInputPath();

     boolean schemeless = fpath.toUri().getScheme() == null;

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java?rev=1629344&r1=1629343&r2=1629344&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java Sat Oct 4 01:03:32 2014
@@ -63,7 +63,7 @@ public class ExecMapperContext {

   public ExecMapperContext(JobConf jc) {
     this.jc = jc;
-    ioCxt = IOContext.get(jc.get(Utilities.INPUT_NAME));
+    ioCxt = IOContext.get(jc);
   }

   public void clear() {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java?rev=1629344&r1=1629343&r2=1629344&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java Sat Oct 4 01:03:32 2014
@@ -88,7 +88,6 @@ public class SparkMapRecordHandler exten
       }
       mo.setConf(mrwork);
       l4j.info("Main input name is " + mrwork.getName());
-      jc.set(Utilities.INPUT_NAME, mrwork.getName());
       // initialize map operator
       mo.setChildren(job);
       l4j.info(mo.dump(0));

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java?rev=1629344&r1=1629343&r2=1629344&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java Sat Oct 4 01:03:32 2014
@@ -197,7 +197,6 @@ public class SparkPlanGenerator {
     }
     if (work instanceof MapWork) {
       List<Path> inputPaths = Utilities.getInputPaths(cloned, (MapWork) work, scratchDir, context, false);
-      cloned.set(Utilities.INPUT_NAME, work.getName());
      Utilities.setInputPaths(cloned, inputPaths);
      Utilities.setMapWork(cloned, (MapWork) work, scratchDir, false);
      Utilities.createTmpDirs(cloned, (MapWork) work);
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java?rev=1629344&r1=1629343&r2=1629344&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java Sat Oct 4 01:03:32 2014
@@ -161,7 +161,7 @@ public abstract class HiveContextAwareRe
   }

   public IOContext getIOContext() {
-    return IOContext.get(jobConf.get(Utilities.INPUT_NAME));
+    return IOContext.get(jobConf);
   }

   private void initIOContext(long startPos, boolean isBlockPointer,

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java?rev=1629344&r1=1629343&r2=1629344&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/IOContext.java Sat Oct 4 01:03:32 2014
@@ -21,7 +21,11 @@ package org.apache.hadoop.hive.ql.io;
 import java.util.HashMap;
 import java.util.Map;

+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.mapred.JobConf;


 /**
@@ -49,13 +53,17 @@ public class IOContext {
     return inputNameIOContextMap;
   }

-  public static IOContext get(String inputName) {
-    if (inputNameIOContextMap.containsKey(inputName) == false) {
-      IOContext ioContext = new IOContext();
-      inputNameIOContextMap.put(inputName, ioContext);
+  public static IOContext get(Configuration conf) {
+    if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("spark")) {
+      return get();
+    } else {
+      String inputName = conf.get(Utilities.INPUT_NAME);
+      if (inputNameIOContextMap.containsKey(inputName) == false) {
+        IOContext ioContext = new IOContext();
+        inputNameIOContextMap.put(inputName, ioContext);
+      }
+      return inputNameIOContextMap.get(inputName);
     }
-
-    return inputNameIOContextMap.get(inputName);
   }

   public static void clear() {

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=1629344&r1=1629343&r2=1629344&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Sat Oct 4 01:03:32 2014
@@ -331,7 +331,7 @@ public class TestOperators extends TestC
       Configuration hconf = new JobConf(TestOperators.class);
       HiveConf.setVar(hconf, HiveConf.ConfVars.HADOOPMAPFILENAME,
           "hdfs:///testDir/testFile");
-      IOContext.get(hconf.get(Utilities.INPUT_NAME)).setInputPath(
+      IOContext.get(hconf).setInputPath(
          new Path("hdfs:///testDir/testFile"));

      // initialize pathToAliases
Path("hdfs:///testDir/testFile")); // initialize pathToAliases Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java?rev=1629344&r1=1629343&r2=1629344&view=diff ============================================================================== --- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java (original) +++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java Sat Oct 4 01:03:32 2014 @@ -116,7 +116,7 @@ public class TestHiveBinarySearchRecordR private void resetIOContext() { conf.set(Utilities.INPUT_NAME, "TestHiveBinarySearchRecordReader"); - ioContext = IOContext.get(conf.get(Utilities.INPUT_NAME)); + ioContext = IOContext.get(conf); ioContext.setUseSorted(false); ioContext.setIsBinarySearching(false); ioContext.setEndBinarySearch(false); Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out?rev=1629344&r1=1629343&r2=1629344&view=diff ============================================================================== --- hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out (original) +++ hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_18.q.out Sat Oct 4 01:03:32 2014 @@ -306,15 +306,15 @@ POSTHOOK: Input: default@outputtbl1@ds=1 POSTHOOK: Input: default@outputtbl1@ds=18 POSTHOOK: Input: default@outputtbl1@ds=28 #### A masked pattern was here #### -1 1 13 -1 1 13 -2 1 13 -2 1 13 +1 1 11 +1 1 11 +2 1 12 +2 1 12 3 1 13 3 1 13 -7 1 13 -7 1 13 -8 1 28 -8 1 28 +7 1 17 +7 1 17 +8 1 18 +8 1 18 8 1 28 8 1 28