hive-commits mailing list archives

From: amareshw...@apache.org
Subject: svn commit: r1233262 - in /hive/branches/branch-0.8-r2: ./ shims/ shims/src/0.20S/java/org/apache/hadoop/hive/shims/ shims/src/0.20S/java/org/apache/hadoop/hive/thrift/ shims/src/0.20S/java/org/apache/hadoop/hive/thrift/client/ shims/src/0.20S/java/org...
Date: Thu, 19 Jan 2012 09:43:39 GMT
Author: amareshwari
Date: Thu Jan 19 09:43:38 2012
New Revision: 1233262

URL: http://svn.apache.org/viewvc?rev=1233262&view=rev
Log:
HIVE-2629. Merge -r 1230390:1230391 from trunk.

Added:
    hive/branches/branch-0.8-r2/shims/src/common-secure/
      - copied from r1230391, hive/trunk/shims/src/common-secure/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/org/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/shims/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
      - copied unchanged from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java
      - copied unchanged from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
      - copied unchanged from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector.java
      - copied unchanged from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector.java
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
      - copied unchanged from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
      - copied unchanged from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
      - copied unchanged from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
      - copied unchanged from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/client/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/client/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/client/TUGIAssumingTransport.java
      - copied unchanged from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/client/TUGIAssumingTransport.java
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/security/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/security/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/security/token/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/security/token/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/security/token/delegation/
      - copied from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/security/token/delegation/
    hive/branches/branch-0.8-r2/shims/src/common-secure/java/org/apache/hadoop/security/token/delegation/HiveDelegationTokenSupport.java
      - copied unchanged from r1230391, hive/trunk/shims/src/common-secure/java/org/apache/hadoop/security/token/delegation/HiveDelegationTokenSupport.java
Removed:
    hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java
    hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
    hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector.java
    hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
    hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
    hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
    hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
    hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/client/TUGIAssumingTransport.java
    hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/security/token/delegation/HiveDelegationTokenSupport.java
    hive/branches/branch-0.8-r2/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier23.java
    hive/branches/branch-0.8-r2/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector23.java
Modified:
    hive/branches/branch-0.8-r2/build-common.xml
    hive/branches/branch-0.8-r2/build.properties
    hive/branches/branch-0.8-r2/shims/build.xml
    hive/branches/branch-0.8-r2/shims/ivy.xml
    hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/branches/branch-0.8-r2/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/branches/branch-0.8-r2/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java

Modified: hive/branches/branch-0.8-r2/build-common.xml
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/build-common.xml?rev=1233262&r1=1233261&r2=1233262&view=diff
==============================================================================
--- hive/branches/branch-0.8-r2/build-common.xml (original)
+++ hive/branches/branch-0.8-r2/build-common.xml Thu Jan 19 09:43:38 2012
@@ -115,14 +115,14 @@
       log="${ivyresolvelog}"/>
   </target>
 
-
-  <target name="ivy-retrieve-hadoop-source" depends="ivy-init-settings"
+  <target name="ivy-retrieve-hadoop-source"
     description="Retrieve Ivy-managed Hadoop source artifacts" unless="ivy.skip">
     <echo message="Project: ${ant.project.name}"/>
-    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+  	<echo message="hadoop.version.ant-internal: ${hadoop.version.ant-internal}"/>
+  	<ivy:settings id="${ant.project.name}-${hadoop.version.ant-internal}.ivy.settings" file="${ivysettings.xml}"/>
+  	<ivy:retrieve settingsRef="${ant.project.name}-${hadoop.version.ant-internal}.ivy.settings"
       pattern="${build.dir.hadoop}/[artifact]-[revision].[ext]"/>
   </target>
-
   
   <available property="hadoopcore.${hadoop.version.ant-internal}.install.done"
     file="${build.dir.hadoop}/hadoop-${hadoop.version.ant-internal}.installed"/>

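The build-common.xml change above lets ivy-retrieve-hadoop-source be antcall'd once per Hadoop version: instead of depending on the shared settings created by ivy-init-settings, the target now registers an <ivy:settings> whose id embeds ${hadoop.version.ant-internal}, presumably so repeated resolves against different Hadoop versions stay independent of each other. A stripped-down sketch of the same pattern, using illustrative project and property names rather than the actual Hive targets:

  <project name="demo" xmlns:ivy="antlib:org.apache.ivy.ant">
    <target name="retrieve-for-version">
      <!-- one settings id per version, so successive calls do not reuse another version's settings -->
      <ivy:settings id="demo-${dep.version}.ivy.settings" file="ivysettings.xml"/>
      <ivy:retrieve settingsRef="demo-${dep.version}.ivy.settings"
                    pattern="build/hadoop/[artifact]-[revision].[ext]"/>
    </target>
  </project>
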
Modified: hive/branches/branch-0.8-r2/build.properties
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/build.properties?rev=1233262&r1=1233261&r2=1233262&view=diff
==============================================================================
--- hive/branches/branch-0.8-r2/build.properties (original)
+++ hive/branches/branch-0.8-r2/build.properties Thu Jan 19 09:43:38 2012
@@ -10,9 +10,11 @@ javac.deprecation=off
 javac.args=
 javac.args.warnings=
 
-hadoop.version=0.20.1
-hadoop.security.version=0.20.3-CDH3-SNAPSHOT
-hadoop.security.version.prefix=0.20S
+hadoop-0.20.version=0.20.1
+hadoop-0.20S.version=0.20.3-CDH3-SNAPSHOT
+hadoop-0.23.version=0.23.0
+hadoop.version=${hadoop-0.20.version}
+hadoop.security.version=${hadoop-0.20S.version}
 hadoop.mirror=http://mirror.facebook.net/facebook/hive-deps
 hadoop.mirror2=http://archive.cloudera.com/hive-deps
 

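With the Hadoop versions broken out per shim (hadoop-0.20.version, hadoop-0.20S.version, hadoop-0.23.version) and the old hadoop.version / hadoop.security.version names kept as aliases, a single shim's Hadoop dependency can be bumped without touching the others. A hypothetical local edit, with an illustrative version number only:

  # build.properties -- hypothetical override; 0.23.1 is just an example value
  hadoop-0.23.version=0.23.1
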
Modified: hive/branches/branch-0.8-r2/shims/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/shims/build.xml?rev=1233262&r1=1233261&r2=1233262&view=diff
==============================================================================
--- hive/branches/branch-0.8-r2/shims/build.xml (original)
+++ hive/branches/branch-0.8-r2/shims/build.xml Thu Jan 19 09:43:38 2012
@@ -51,50 +51,49 @@ to call at top-level: ant deploy-contrib
     <path refid="common-classpath"/>
   </path>
 
-  <target name="build_shims" depends="install-hadoopcore-internal, ivy-retrieve-hadoop-source"
+  <!-- default list of shims to build -->
+  <property name="shims.include" value="0.20,0.20S,0.23"/>
+  <property name="shims.common.sources" value="${basedir}/src/common/java"/>	
+  <property name="shims.common.secure.sources" value="${basedir}/src/common/java;${basedir}/src/common-secure/java"/>
+  <!-- sources and hadoop version for each shim -->
+  <property name="shims.0.20.sources" value="${shims.common.sources};${basedir}/src/0.20/java" />	
+  <property name="shims.0.20.version" value="${hadoop-0.20.version}" />	
+  <property name="shims.0.20S.sources" value="${shims.common.secure.sources};${basedir}/src/0.20S/java" />	
+  <property name="shims.0.20S.version" value="${hadoop-0.20S.version}" />	
+  <property name="shims.0.23.sources" value="${shims.common.secure.sources};${basedir}/src/0.23/java" />	
+  <property name="shims.0.23.version" value="${hadoop-0.23.version}" />	
+	
+  <target name="build_shims" depends="install-hadoopcore-internal"
           description="Build shims against a particular hadoop version">
     <echo message="Project: ${ant.project.name}"/>
-    <getversionpref property="hadoop.version.ant-internal.prefix" input="${hadoop.version.ant-internal}" />
-    <echo message="Compiling shims against hadoop ${hadoop.version.ant-internal} (${hadoop.root})"/>
+    <echo message="Compiling ${sources} against hadoop ${hadoop.version.ant-internal} (${hadoop.root})"/>
     <javac
      encoding="${build.encoding}"
      includes="**/*.java"
-     excludes="**/Proxy*.java"
      destdir="${build.classes}"
      debug="${javac.debug}"
      deprecation="${javac.deprecation}"
+     srcdir="${sources}"
      includeantruntime="false">
       <compilerarg line="${javac.args} ${javac.args.warnings}" />
       <classpath refid="classpath"/>
-      <src path="${basedir}/src/${hadoop.version.ant-internal.prefix}/java" />
-      <src path="${basedir}/src/common/java" />
     </javac>
   </target>
-
+	
   <target name="compile" depends="init,ivy-retrieve">
-    <echo message="Project: ${ant.project.name}"/>
-    <antcall target="build_shims" inheritRefs="false" inheritAll="false">
-      <param name="hadoop.version.ant-internal" value="${hadoop.version}" />
-    </antcall>
-    <antcall target="build_shims" inheritRefs="false" inheritAll="false">
-      <param name="hadoop.version.ant-internal" value="${hadoop.security.version}" />
-      <param name="hadoop.version.ant-internal.prefix" value="${hadoop.security.version.prefix}" />
-    </antcall>
-    <getversionpref property="hadoop.version.ant-internal.prefix" input="${hadoop.version}" />
-    <javac
-     encoding="${build.encoding}"
-     includes="**/Proxy*.java"
-     destdir="${build.classes}"
-     debug="${javac.debug}"
-     deprecation="${javac.deprecation}"
-     includeantruntime="false">
-      <compilerarg line="${javac.args} ${javac.args.warnings}" />
-      <classpath refid="classpath"/>
-      <src path="${basedir}/src/common/java" />
-    </javac>
+  		<echo message="Project: ${ant.project.name}"/>
+		<for param="shimName" list="${shims.include}">
+		  <sequential>
+		    <echo>Building shims @{shimName}</echo>
+		  	<antcall target="build_shims" inheritRefs="false" inheritAll="false">
+		      <param name="hadoop.version.ant-internal" value="${shims.@{shimName}.version}" />
+		      <param name="sources" value="${shims.@{shimName}.sources}" />
+		    </antcall>
+		  </sequential>
+	  	</for>  	
   </target>
   
-  <target name="compile_secure_test" depends="install-hadoopcore-internal, ivy-retrieve-hadoop-source" 
+  <target name="compile_secure_test" depends="install-hadoopcore-internal" 
           description="Test shims against a particular hadoop version">
     <echo message="Project: ${ant.project.name}"/>
     <getversionpref property="hadoop.version.ant-internal.prefix" input="${hadoop.version.ant-internal}" />
@@ -133,9 +132,8 @@ to call at top-level: ant deploy-contrib
   <target name="compile-test" depends="compile">
     <echo message="Project: ${ant.project.name}"/>
     <!-- TODO: move tests to version directory -->
-    <!--antcall target="compile_secure_test" inheritRefs="false" inheritAll="false">
+    <antcall target="compile_secure_test" inheritRefs="false" inheritAll="false">
       <param name="hadoop.version.ant-internal" value="${hadoop.security.version}" />
-      <param name="hadoop.version.ant-internal.prefix" value="${hadoop.security.version.prefix}" />
-    </antcall-->
+    </antcall>
   </target>
 </project>

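The reworked compile target loops over ${shims.include} and antcalls build_shims once per shim, handing it that shim's source path list and Hadoop version; the <for>/<sequential> construct comes from the ant-contrib task library rather than core Ant. Since shims.include is declared with <property>, it should be overridable on the command line in the usual Ant way, for example to compile only the secure 0.20 shim from within the shims module:

  ant compile -Dshims.include=0.20S

(an illustrative invocation; whether building a subset of shims works end to end depends on the rest of the build.)
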
Modified: hive/branches/branch-0.8-r2/shims/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/shims/ivy.xml?rev=1233262&r1=1233261&r2=1233262&view=diff
==============================================================================
--- hive/branches/branch-0.8-r2/shims/ivy.xml (original)
+++ hive/branches/branch-0.8-r2/shims/ivy.xml Thu Jan 19 09:43:38 2012
@@ -27,10 +27,7 @@
     <include file="${ivy.conf.dir}/common-configurations.xml"/>
   </configurations>
   <dependencies>
-    <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop.version}">
-      <artifact name="hadoop" type="source" ext="tar.gz"/>
-    </dependency> 
-    <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop.security.version}">
+    <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop.version.ant-internal}">
       <artifact name="hadoop" type="source" ext="tar.gz"/>
     </dependency>
     <dependency org="org.apache.zookeeper" name="zookeeper"

Modified: hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1233262&r1=1233261&r2=1233262&view=diff
==============================================================================
--- hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/branches/branch-0.8-r2/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Thu Jan 19 09:43:38 2012
@@ -17,510 +17,17 @@
  */
 package org.apache.hadoop.hive.shims;
 
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
-import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.mapred.ClusterStatus;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.InputFormat;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobContext;
-import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.OutputCommitter;
-import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.RunningJob;
-import org.apache.hadoop.mapred.TaskAttemptContext;
-import org.apache.hadoop.mapred.TaskCompletionEvent;
-import org.apache.hadoop.mapred.TaskID;
-import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
-import org.apache.hadoop.mapred.lib.CombineFileSplit;
-import org.apache.hadoop.mapred.lib.NullOutputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.security.token.TokenSelector;
-import org.apache.hadoop.tools.HadoopArchives;
 import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.ToolRunner;
 
 /**
- * Implemention of shims against Hadoop 0.20.0.
+ * Implemention of shims against Hadoop 0.20 with Security.
  */
-public class Hadoop20SShims implements HadoopShims {
-  public boolean usesJobShell() {
-    return false;
-  }
-
-  public boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
-      throws IOException {
-
-    return fs.deleteOnExit(path);
-  }
-
-  public void inputFormatValidateInput(InputFormat fmt, JobConf conf)
-      throws IOException {
-    // gone in 0.18+
-  }
-
-  public boolean isJobPreparing(RunningJob job) throws IOException {
-    return job.getJobState() == JobStatus.PREP;
-  }
-  /**
-   * Workaround for hadoop-17 - jobclient only looks at commandlineconfig.
-   */
-  public void setTmpFiles(String prop, String files) {
-    // gone in 20+
-  }
-
-  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
-      int numDataNodes,
-      boolean format,
-      String[] racks) throws IOException {
-    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
-  }
-
-  /**
-   * MiniDFSShim.
-   *
-   */
-  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
-    private final MiniDFSCluster cluster;
-
-    public MiniDFSShim(MiniDFSCluster cluster) {
-      this.cluster = cluster;
-    }
-
-    public FileSystem getFileSystem() throws IOException {
-      return cluster.getFileSystem();
-    }
-
-    public void shutdown() {
-      cluster.shutdown();
-    }
-  }
-
-  /**
-   * We define this function here to make the code compatible between
-   * hadoop 0.17 and hadoop 0.20.
-   *
-   * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
-   * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
-   * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
-   * references that class, which is not available in hadoop 0.17.
-   */
-  public int compareText(Text a, Text b) {
-    return a.compareTo(b);
-  }
-
-  @Override
-  public long getAccessTime(FileStatus file) {
-    return file.getAccessTime();
-  }
-
-  public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
-    return new CombineFileInputFormatShim() {
-      @Override
-      public RecordReader getRecordReader(InputSplit split,
-          JobConf job, Reporter reporter) throws IOException {
-        throw new IOException("CombineFileInputFormat.getRecordReader not needed.");
-      }
-    };
-  }
-
-  public static class InputSplitShim extends CombineFileSplit implements HadoopShims.InputSplitShim {
-    long shrinkedLength;
-    boolean _isShrinked;
-    public InputSplitShim() {
-      super();
-      _isShrinked = false;
-    }
-
-    public InputSplitShim(CombineFileSplit old) throws IOException {
-      super(old);
-      _isShrinked = false;
-    }
-
-    @Override
-    public void shrinkSplit(long length) {
-      _isShrinked = true;
-      shrinkedLength = length;
-    }
-
-    public boolean isShrinked() {
-      return _isShrinked;
-    }
-
-    public long getShrinkedLength() {
-      return shrinkedLength;
-    }
-
-    @Override
-    public void readFields(DataInput in) throws IOException {
-      super.readFields(in);
-      _isShrinked = in.readBoolean();
-      if (_isShrinked) {
-        shrinkedLength = in.readLong();
-      }
-    }
-
-    @Override
-    public void write(DataOutput out) throws IOException {
-      super.write(out);
-      out.writeBoolean(_isShrinked);
-      if (_isShrinked) {
-        out.writeLong(shrinkedLength);
-      }
-    }
-  }
-
-  /* This class should be replaced with org.apache.hadoop.mapred.lib.CombineFileRecordReader class, once
-   * https://issues.apache.org/jira/browse/MAPREDUCE-955 is fixed. This code should be removed - it is a copy
-   * of org.apache.hadoop.mapred.lib.CombineFileRecordReader
-   */
-  public static class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
-
-    static final Class[] constructorSignature = new Class[] {
-        InputSplit.class,
-        Configuration.class,
-        Reporter.class,
-        Integer.class
-        };
-
-    protected CombineFileSplit split;
-    protected JobConf jc;
-    protected Reporter reporter;
-    protected Class<RecordReader<K, V>> rrClass;
-    protected Constructor<RecordReader<K, V>> rrConstructor;
-    protected FileSystem fs;
-
-    protected int idx;
-    protected long progress;
-    protected RecordReader<K, V> curReader;
-    protected boolean isShrinked;
-    protected long shrinkedLength;
-
-    public boolean next(K key, V value) throws IOException {
-
-      while ((curReader == null)
-          || !doNextWithExceptionHandler((K) ((CombineHiveKey) key).getKey(),
-              value)) {
-        if (!initNextRecordReader(key)) {
-          return false;
-        }
-      }
-      return true;
-    }
-
-    public K createKey() {
-      K newKey = curReader.createKey();
-      return (K)(new CombineHiveKey(newKey));
-    }
-
-    public V createValue() {
-      return curReader.createValue();
-    }
-
-    /**
-     * Return the amount of data processed.
-     */
-    public long getPos() throws IOException {
-      return progress;
-    }
-
-    public void close() throws IOException {
-      if (curReader != null) {
-        curReader.close();
-        curReader = null;
-      }
-    }
-
-    /**
-     * Return progress based on the amount of data processed so far.
-     */
-    public float getProgress() throws IOException {
-      return Math.min(1.0f, progress / (float) (split.getLength()));
-    }
-
-    /**
-     * A generic RecordReader that can hand out different recordReaders
-     * for each chunk in the CombineFileSplit.
-     */
-    public CombineFileRecordReader(JobConf job, CombineFileSplit split,
-        Reporter reporter,
-        Class<RecordReader<K, V>> rrClass)
-        throws IOException {
-      this.split = split;
-      this.jc = job;
-      this.rrClass = rrClass;
-      this.reporter = reporter;
-      this.idx = 0;
-      this.curReader = null;
-      this.progress = 0;
-
-      isShrinked = false;
-
-      assert (split instanceof Hadoop20Shims.InputSplitShim);
-      if (((InputSplitShim) split).isShrinked()) {
-        isShrinked = true;
-        shrinkedLength = ((InputSplitShim) split).getShrinkedLength();
-      }      
-      
-      try {
-        rrConstructor = rrClass.getDeclaredConstructor(constructorSignature);
-        rrConstructor.setAccessible(true);
-      } catch (Exception e) {
-        throw new RuntimeException(rrClass.getName() +
-            " does not have valid constructor", e);
-      }
-      initNextRecordReader(null);
-    }
-    
-    /**
-     * do next and handle exception inside it. 
-     * @param key
-     * @param value
-     * @return
-     * @throws IOException
-     */
-    private boolean doNextWithExceptionHandler(K key, V value) throws IOException {
-      try {
-        return curReader.next(key, value);
-      } catch (Exception e) {
-        return HiveIOExceptionHandlerUtil
-            .handleRecordReaderNextException(e, jc);
-      }
-    }
-
-    /**
-     * Get the record reader for the next chunk in this CombineFileSplit.
-     */
-    protected boolean initNextRecordReader(K key) throws IOException {
-
-      if (curReader != null) {
-        curReader.close();
-        curReader = null;
-        if (idx > 0) {
-          progress += split.getLength(idx - 1); // done processing so far
-        }
-      }
-
-      // if all chunks have been processed, nothing more to do.
-      if (idx == split.getNumPaths() || (isShrinked && progress > shrinkedLength)) {
-        return false;
-      }
-
-      // get a record reader for the idx-th chunk
-      try {
-        curReader = rrConstructor.newInstance(new Object[]
-            {split, jc, reporter, Integer.valueOf(idx)});
-
-        // change the key if need be
-        if (key != null) {
-          K newKey = curReader.createKey();
-          ((CombineHiveKey)key).setKey(newKey);
-        }
-
-        // setup some helper config variables.
-        jc.set("map.input.file", split.getPath(idx).toString());
-        jc.setLong("map.input.start", split.getOffset(idx));
-        jc.setLong("map.input.length", split.getLength(idx));
-      } catch (Exception e) {
-        curReader = HiveIOExceptionHandlerUtil.handleRecordReaderCreationException(
-            e, jc);
-      }
-      idx++;
-      return true;
-    }
-  }
-
-  public abstract static class CombineFileInputFormatShim<K, V> extends
-      CombineFileInputFormat<K, V>
-      implements HadoopShims.CombineFileInputFormatShim<K, V> {
-
-    public Path[] getInputPathsShim(JobConf conf) {
-      try {
-        return FileInputFormat.getInputPaths(conf);
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
-    }
-
-    @Override
-    public void createPool(JobConf conf, PathFilter... filters) {
-      super.createPool(conf, filters);
-    }
-
-    @Override
-    public InputSplitShim[] getSplits(JobConf job, int numSplits) throws IOException {
-      long minSize = job.getLong("mapred.min.split.size", 0);
-
-      // For backward compatibility, let the above parameter be used
-      if (job.getLong("mapred.min.split.size.per.node", 0) == 0) {
-        super.setMinSplitSizeNode(minSize);
-      }
-
-      if (job.getLong("mapred.min.split.size.per.rack", 0) == 0) {
-        super.setMinSplitSizeRack(minSize);
-      }
-
-      if (job.getLong("mapred.max.split.size", 0) == 0) {
-        super.setMaxSplitSize(minSize);
-      }
-
-      CombineFileSplit[] splits = (CombineFileSplit[]) super.getSplits(job, numSplits);
-
-      InputSplitShim[] isplits = new InputSplitShim[splits.length];
-      for (int pos = 0; pos < splits.length; pos++) {
-        isplits[pos] = new InputSplitShim(splits[pos]);
-      }
-
-      return isplits;
-    }
-
-    public InputSplitShim getInputSplitShim() throws IOException {
-      return new InputSplitShim();
-    }
-
-    public RecordReader getRecordReader(JobConf job, HadoopShims.InputSplitShim split,
-        Reporter reporter,
-        Class<RecordReader<K, V>> rrClass)
-        throws IOException {
-      CombineFileSplit cfSplit = (CombineFileSplit) split;
-      return new CombineFileRecordReader(job, cfSplit, reporter, rrClass);
-    }
-
-  }
-
-  public String getInputFormatClassName() {
-    return "org.apache.hadoop.hive.ql.io.CombineHiveInputFormat";
-  }
-
-  String[] ret = new String[2];
-
-  @Override
-  public String[] getTaskJobIDs(TaskCompletionEvent t) {
-    TaskID tid = t.getTaskAttemptId().getTaskID();
-    ret[0] = tid.toString();
-    ret[1] = tid.getJobID().toString();
-    return ret;
-  }
-
-  public void setFloatConf(Configuration conf, String varName, float val) {
-    conf.setFloat(varName, val);
-  }
-
-  @Override
-  public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
-      String archiveName) throws Exception {
-
-    HadoopArchives har = new HadoopArchives(conf);
-    List<String> args = new ArrayList<String>();
-
-    if (conf.get("hive.archive.har.parentdir.settable") == null) {
-      throw new RuntimeException("hive.archive.har.parentdir.settable is not set");
-    }
-    boolean parentSettable =
-      conf.getBoolean("hive.archive.har.parentdir.settable", false);
-
-    if (parentSettable) {
-      args.add("-archiveName");
-      args.add(archiveName);
-      args.add("-p");
-      args.add(sourceDir.toString());
-      args.add(destDir.toString());
-    } else {
-      args.add("-archiveName");
-      args.add(archiveName);
-      args.add(sourceDir.toString());
-      args.add(destDir.toString());
-    }
-
-    return ToolRunner.run(har, args.toArray(new String[0]));
-  }
-
-  public static class NullOutputCommitter extends OutputCommitter {
-    @Override
-    public void setupJob(JobContext jobContext) { }
-    @Override
-    public void cleanupJob(JobContext jobContext) { }
-
-    @Override
-    public void setupTask(TaskAttemptContext taskContext) { }
-    @Override
-    public boolean needsTaskCommit(TaskAttemptContext taskContext) {
-      return false;
-    }
-    @Override
-    public void commitTask(TaskAttemptContext taskContext) { }
-    @Override
-    public void abortTask(TaskAttemptContext taskContext) { }
-  }
-
-  public void setNullOutputFormat(JobConf conf) {
-    conf.setOutputFormat(NullOutputFormat.class);
-    conf.setOutputCommitter(Hadoop20Shims.NullOutputCommitter.class);
-
-    // option to bypass job setup and cleanup was introduced in hadoop-21 (MAPREDUCE-463)
-    // but can be backported. So we disable setup/cleanup in all versions >= 0.19
-    conf.setBoolean("mapred.committer.job.setup.cleanup.needed", false);
-
-    // option to bypass task cleanup task was introduced in hadoop-23 (MAPREDUCE-2206)
-    // but can be backported. So we disable setup/cleanup in all versions >= 0.19
-    conf.setBoolean("mapreduce.job.committer.task.cleanup.needed", false);
-  }
-
-  @Override
-  public UserGroupInformation getUGIForConf(Configuration conf) throws IOException {
-    return UserGroupInformation.getCurrentUser();
-  }
-  
-  @Override
-  public boolean isSecureShimImpl() {
-    return true;
-  }
-  
-  @Override
-  public String getShortUserName(UserGroupInformation ugi) {
-    return ugi.getShortUserName();
-  }
-
-  @Override
-  public String getTokenStrForm(String tokenSignature) throws IOException {
-    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-    TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector();
-
-    Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
-        tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
-    return token != null ? token.encodeToUrlString() : null;
-  }
-  
-  @Override
-  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws IOException, InterruptedException {
-    ugi.doAs(pvea);
-  }
-
-  @Override
-  public UserGroupInformation createRemoteUser(String userName, List<String> groupNames) {
-    return UserGroupInformation.createRemoteUser(userName);
-  }
+public class Hadoop20SShims extends HadoopShimsSecure {
 
   @Override
   public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
@@ -535,7 +42,7 @@ public class Hadoop20SShims implements H
       throw new Exception(errorMsg);
     }
   }
-  
+
   @Override
   public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) {
     return new org.apache.hadoop.mapreduce.TaskAttemptContext(conf, new TaskAttemptID()) {

Modified: hive/branches/branch-0.8-r2/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1233262&r1=1233261&r2=1233262&view=diff
==============================================================================
--- hive/branches/branch-0.8-r2/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/branch-0.8-r2/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Thu Jan 19 09:43:38 2012
@@ -17,505 +17,22 @@
  */
 package org.apache.hadoop.hive.shims;
 
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.security.auth.login.LoginException;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain;
-import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
 import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
-import org.apache.hadoop.hive.thrift.DelegationTokenSelector23;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.mapred.ClusterStatus;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.InputFormat;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobContext;
-import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.OutputCommitter;
-import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.RunningJob;
-import org.apache.hadoop.mapred.TaskAttemptContext;
-import org.apache.hadoop.mapred.TaskCompletionEvent;
-import org.apache.hadoop.mapred.TaskID;
-import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
-import org.apache.hadoop.mapred.lib.CombineFileSplit;
-import org.apache.hadoop.mapred.lib.NullOutputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.security.token.TokenSelector;
-import org.apache.hadoop.tools.HadoopArchives;
 import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.ToolRunner;
 
 /**
  * Implemention of shims against Hadoop 0.23.0.
  */
-public class Hadoop23Shims implements HadoopShims {
-  public boolean usesJobShell() {
-    return false;
-  }
-
-  public boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
-      throws IOException {
-
-    return fs.deleteOnExit(path);
-  }
-
-  public void inputFormatValidateInput(InputFormat fmt, JobConf conf)
-      throws IOException {
-    // gone in 0.18+
-  }
-
-  public boolean isJobPreparing(RunningJob job) throws IOException {
-    return job.getJobState() == JobStatus.PREP;
-  }
-  /**
-   * Workaround for hadoop-17 - jobclient only looks at commandlineconfig.
-   */
-  public void setTmpFiles(String prop, String files) {
-    // gone in 20+
-  }
-
-  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
-      int numDataNodes,
-      boolean format,
-      String[] racks) throws IOException {
-    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
-  }
-
-  /**
-   * MiniDFSShim.
-   *
-   */
-  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
-    private final MiniDFSCluster cluster;
-
-    public MiniDFSShim(MiniDFSCluster cluster) {
-      this.cluster = cluster;
-    }
-
-    public FileSystem getFileSystem() throws IOException {
-      return cluster.getFileSystem();
-    }
-
-    public void shutdown() {
-      cluster.shutdown();
-    }
-  }
-
-  /**
-   * We define this function here to make the code compatible between
-   * hadoop 0.17 and hadoop 0.20.
-   *
-   * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
-   * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
-   * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
-   * references that class, which is not available in hadoop 0.17.
-   */
-  public int compareText(Text a, Text b) {
-    return a.compareTo(b);
-  }
-
-  @Override
-  public long getAccessTime(FileStatus file) {
-    return file.getAccessTime();
-  }
-
-  public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
-    return new CombineFileInputFormatShim() {
-      @Override
-      public RecordReader getRecordReader(InputSplit split,
-          JobConf job, Reporter reporter) throws IOException {
-        throw new IOException("CombineFileInputFormat.getRecordReader not needed.");
-      }
-    };
-  }
-
-  public static class InputSplitShim extends CombineFileSplit implements HadoopShims.InputSplitShim {
-    long shrinkedLength;
-    boolean _isShrinked;
-    public InputSplitShim() {
-      super();
-      _isShrinked = false;
-    }
-
-    public InputSplitShim(CombineFileSplit old) throws IOException {
-      super(old);
-      _isShrinked = false;
-    }
-
-    @Override
-    public void shrinkSplit(long length) {
-      _isShrinked = true;
-      shrinkedLength = length;
-    }
-
-    public boolean isShrinked() {
-      return _isShrinked;
-    }
-
-    public long getShrinkedLength() {
-      return shrinkedLength;
-    }
-
-    @Override
-    public void readFields(DataInput in) throws IOException {
-      super.readFields(in);
-      _isShrinked = in.readBoolean();
-      if (_isShrinked) {
-        shrinkedLength = in.readLong();
-      }
-    }
-
-    @Override
-    public void write(DataOutput out) throws IOException {
-      super.write(out);
-      out.writeBoolean(_isShrinked);
-      if (_isShrinked) {
-        out.writeLong(shrinkedLength);
-      }
-    }
-  }
-
-  /* This class should be replaced with org.apache.hadoop.mapred.lib.CombineFileRecordReader class, once
-   * https://issues.apache.org/jira/browse/MAPREDUCE-955 is fixed. This code should be removed - it is a copy
-   * of org.apache.hadoop.mapred.lib.CombineFileRecordReader
-   */
-  public static class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
-
-    static final Class[] constructorSignature = new Class[] {
-        InputSplit.class,
-        Configuration.class,
-        Reporter.class,
-        Integer.class
-        };
-
-    protected CombineFileSplit split;
-    protected JobConf jc;
-    protected Reporter reporter;
-    protected Class<RecordReader<K, V>> rrClass;
-    protected Constructor<RecordReader<K, V>> rrConstructor;
-    protected FileSystem fs;
-
-    protected int idx;
-    protected long progress;
-    protected RecordReader<K, V> curReader;
-    protected boolean isShrinked;
-    protected long shrinkedLength;
-    
-    public boolean next(K key, V value) throws IOException {
-
-      while ((curReader == null)
-          || !doNextWithExceptionHandler((K) ((CombineHiveKey) key).getKey(),
-              value)) {
-        if (!initNextRecordReader(key)) {
-          return false;
-        }
-      }
-      return true;
-    }
-
-    public K createKey() {
-      K newKey = curReader.createKey();
-      return (K)(new CombineHiveKey(newKey));
-    }
-
-    public V createValue() {
-      return curReader.createValue();
-    }
-
-    /**
-     * Return the amount of data processed.
-     */
-    public long getPos() throws IOException {
-      return progress;
-    }
-
-    public void close() throws IOException {
-      if (curReader != null) {
-        curReader.close();
-        curReader = null;
-      }
-    }
-
-    /**
-     * Return progress based on the amount of data processed so far.
-     */
-    public float getProgress() throws IOException {
-      return Math.min(1.0f, progress / (float) (split.getLength()));
-    }
-
-    /**
-     * A generic RecordReader that can hand out different recordReaders
-     * for each chunk in the CombineFileSplit.
-     */
-    public CombineFileRecordReader(JobConf job, CombineFileSplit split,
-        Reporter reporter,
-        Class<RecordReader<K, V>> rrClass)
-        throws IOException {
-      this.split = split;
-      this.jc = job;
-      this.rrClass = rrClass;
-      this.reporter = reporter;
-      this.idx = 0;
-      this.curReader = null;
-      this.progress = 0;
-
-      isShrinked = false;
-
-      assert (split instanceof InputSplitShim);
-      if (((InputSplitShim) split).isShrinked()) {
-        isShrinked = true;
-        shrinkedLength = ((InputSplitShim) split).getShrinkedLength();
-      }
-
-      try {
-        rrConstructor = rrClass.getDeclaredConstructor(constructorSignature);
-        rrConstructor.setAccessible(true);
-      } catch (Exception e) {
-        throw new RuntimeException(rrClass.getName() +
-            " does not have valid constructor", e);
-      }
-      initNextRecordReader(null);
-    }
-    
-    /**
-     * do next and handle exception inside it. 
-     * @param key
-     * @param value
-     * @return
-     * @throws IOException
-     */
-    private boolean doNextWithExceptionHandler(K key, V value) throws IOException {
-      try {
-        return curReader.next(key, value);
-      } catch (Exception e) {
-        return HiveIOExceptionHandlerUtil.handleRecordReaderNextException(e, jc);
-      }
-    }
-
-    /**
-     * Get the record reader for the next chunk in this CombineFileSplit.
-     */
-    protected boolean initNextRecordReader(K key) throws IOException {
-
-      if (curReader != null) {
-        curReader.close();
-        curReader = null;
-        if (idx > 0) {
-          progress += split.getLength(idx - 1); // done processing so far
-        }
-      }
-
-      // if all chunks have been processed or reached the length, nothing more to do.
-      if (idx == split.getNumPaths() || (isShrinked && progress > shrinkedLength)) {
-        return false;
-      }
-
-      // get a record reader for the idx-th chunk
-      try {
-        curReader = rrConstructor.newInstance(new Object[]
-            {split, jc, reporter, Integer.valueOf(idx)});
-
-        // change the key if need be
-        if (key != null) {
-          K newKey = curReader.createKey();
-          ((CombineHiveKey)key).setKey(newKey);
-        }
-
-        // setup some helper config variables.
-        jc.set("map.input.file", split.getPath(idx).toString());
-        jc.setLong("map.input.start", split.getOffset(idx));
-        jc.setLong("map.input.length", split.getLength(idx));
-      } catch (Exception e) {
-        curReader=HiveIOExceptionHandlerUtil.handleRecordReaderCreationException(e, jc);
-      }
-      idx++;
-      return true;
-    }
-  }
-
-  public abstract static class CombineFileInputFormatShim<K, V> extends
-      CombineFileInputFormat<K, V>
-      implements HadoopShims.CombineFileInputFormatShim<K, V> {
-
-    public Path[] getInputPathsShim(JobConf conf) {
-      try {
-        return FileInputFormat.getInputPaths(conf);
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
-    }
-
-    @Override
-    public void createPool(JobConf conf, PathFilter... filters) {
-      super.createPool(conf, filters);
-    }
-
-    @Override
-    public InputSplitShim[] getSplits(JobConf job, int numSplits) throws IOException {
-      long minSize = job.getLong("mapred.min.split.size", 0);
-
-      // For backward compatibility, let the above parameter be used
-      if (job.getLong("mapred.min.split.size.per.node", 0) == 0) {
-        super.setMinSplitSizeNode(minSize);
-      }
-
-      if (job.getLong("mapred.min.split.size.per.rack", 0) == 0) {
-        super.setMinSplitSizeRack(minSize);
-      }
-
-      if (job.getLong("mapred.max.split.size", 0) == 0) {
-        super.setMaxSplitSize(minSize);
-      }
-
-      InputSplit[] splits = super.getSplits(job, numSplits);
-
-      InputSplitShim[] isplits = new InputSplitShim[splits.length];
-      for (int pos = 0; pos < splits.length; pos++) {
-        isplits[pos] = new InputSplitShim((CombineFileSplit) splits[pos]);
-      }
-
-      return isplits;
-    }
-
-    public InputSplitShim getInputSplitShim() throws IOException {
-      return new InputSplitShim();
-    }
-
-    public RecordReader getRecordReader(JobConf job, HadoopShims.InputSplitShim split,
-        Reporter reporter,
-        Class<RecordReader<K, V>> rrClass)
-        throws IOException {
-      CombineFileSplit cfSplit = (CombineFileSplit) split;
-      return new CombineFileRecordReader(job, cfSplit, reporter, rrClass);
-    }
-
-  }
-
-  public String getInputFormatClassName() {
-    return "org.apache.hadoop.hive.ql.io.CombineHiveInputFormat";
-  }
-
-  String[] ret = new String[2];
-
-  @Override
-  public String[] getTaskJobIDs(TaskCompletionEvent t) {
-    TaskID tid = t.getTaskAttemptId().getTaskID();
-    ret[0] = tid.toString();
-    ret[1] = tid.getJobID().toString();
-    return ret;
-  }
-
-  public void setFloatConf(Configuration conf, String varName, float val) {
-    conf.setFloat(varName, val);
-  }
+public class Hadoop23Shims extends HadoopShimsSecure {
 
   @Override
-  public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
-      String archiveName) throws Exception {
-
-    HadoopArchives har = new HadoopArchives(conf);
-    List<String> args = new ArrayList<String>();
-
-    if (conf.get("hive.archive.har.parentdir.settable") == null) {
-      throw new RuntimeException("hive.archive.har.parentdir.settable is not set");
-    }
-    boolean parentSettable =
-      conf.getBoolean("hive.archive.har.parentdir.settable", false);
-
-    if (parentSettable) {
-      args.add("-archiveName");
-      args.add(archiveName);
-      args.add("-p");
-      args.add(sourceDir.toString());
-      args.add(destDir.toString());
-    } else {
-      args.add("-archiveName");
-      args.add(archiveName);
-      args.add(sourceDir.toString());
-      args.add(destDir.toString());
-    }
-
-    return ToolRunner.run(har, args.toArray(new String[0]));
-  }
-
-  public static class NullOutputCommitter extends OutputCommitter {
-    @Override
-    public void setupJob(JobContext jobContext) { }
-    @Override
-    public void cleanupJob(JobContext jobContext) { }
-
-    @Override
-    public void setupTask(TaskAttemptContext taskContext) { }
-    @Override
-    public boolean needsTaskCommit(TaskAttemptContext taskContext) {
-      return false;
-    }
-    @Override
-    public void commitTask(TaskAttemptContext taskContext) { }
-    @Override
-    public void abortTask(TaskAttemptContext taskContext) { }
-  }
-
-  public void setNullOutputFormat(JobConf conf) {
-    conf.setOutputFormat(NullOutputFormat.class);
-    conf.setOutputCommitter(Hadoop23Shims.NullOutputCommitter.class);
-
-    // option to bypass job setup and cleanup was introduced in hadoop-21 (MAPREDUCE-463)
-    // but can be backported. So we disable setup/cleanup in all versions >= 0.19
-    conf.setBoolean("mapred.committer.job.setup.cleanup.needed", false);
-
-    // option to bypass task cleanup task was introduced in hadoop-23 (MAPREDUCE-2206)
-    // but can be backported. So we disable setup/cleanup in all versions >= 0.19
-    conf.setBoolean("mapreduce.job.committer.task.cleanup.needed", false);
-  }
-
-  @Override
-  public UserGroupInformation getUGIForConf(Configuration conf) throws IOException {
-    return UserGroupInformation.getCurrentUser();
-  }
-
-  @Override
-  public boolean isSecureShimImpl() {
-    return true;
-  }
-
-  @Override
-  public String getShortUserName(UserGroupInformation ugi) {
-    return ugi.getShortUserName();
-  }
-
-  @Override
-  public String getTokenStrForm(String tokenSignature) throws IOException {
-    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-    TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector23();
-
-    Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
-        tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
-    return token != null ? token.encodeToUrlString() : null;
-  }
-  
-  @Override
   public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
     JobTrackerState state;
     switch (clusterStatus.getJobTrackerStatus()) {
@@ -528,7 +45,7 @@ public class Hadoop23Shims implements Ha
       throw new Exception(errorMsg);
     }
   }
-  
+
   @Override
   public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) {
     return new TaskAttemptContextImpl(conf, new TaskAttemptID()) {

Modified: hive/branches/branch-0.8-r2/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java?rev=1233262&r1=1233261&r2=1233262&view=diff
==============================================================================
--- hive/branches/branch-0.8-r2/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java (original)
+++ hive/branches/branch-0.8-r2/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java Thu Jan 19 09:43:38 2012
@@ -79,7 +79,7 @@ public abstract class ShimLoader {
   }
 
   public static synchronized HadoopThriftAuthBridge getHadoopThriftAuthBridge() {
-        if ("0.20S".equals(getMajorVersion())) {
+      if (getHadoopShims().isSecureShimImpl()) {
           return createShim("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge20S",
                             HadoopThriftAuthBridge.class);
         } else {
@@ -87,7 +87,6 @@ public abstract class ShimLoader {
         }
       }
 
-  @SuppressWarnings("unchecked")
   private static <T> T loadShims(Map<String, String> classMap, Class<T> xface) {
     String vers = getMajorVersion();
     String className = classMap.get(vers);
@@ -96,7 +95,7 @@ public abstract class ShimLoader {
 
     private static <T> T createShim(String className, Class<T> xface) {
     try {
-      Class clazz = Class.forName(className);
+      Class<?> clazz = Class.forName(className);
       return xface.cast(clazz.newInstance());
     } catch (Exception e) {
       throw new RuntimeException("Could not load shims in class " +

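The ShimLoader changes are mechanical: the raw Class reference becomes Class<?>, the now-unneeded @SuppressWarnings goes away, and the secure HadoopThriftAuthBridge20S is selected via isSecureShimImpl() instead of a hard-coded "0.20S" version check. The load-by-name-and-cast pattern itself is unchanged; a self-contained sketch of it, with made-up class names rather than Hive's real shims:

  public class ShimLoadDemo {
    // the interface every "shim" implements
    interface Greeter {
      String greet();
    }

    // one concrete implementation, picked by name at runtime
    public static class EnglishGreeter implements Greeter {
      public String greet() {
        return "hello";
      }
    }

    // mirrors ShimLoader.createShim: load the class reflectively and
    // cast the new instance through the requested interface
    static <T> T createShim(String className, Class<T> xface) {
      try {
        Class<?> clazz = Class.forName(className);
        return xface.cast(clazz.newInstance());
      } catch (Exception e) {
        throw new RuntimeException("Could not load shim class " + className, e);
      }
    }

    public static void main(String[] args) {
      Greeter g = createShim("ShimLoadDemo$EnglishGreeter", Greeter.class);
      System.out.println(g.greet()); // prints "hello"
    }
  }
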

