hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hashut...@apache.org
Subject svn commit: r1495462 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/io/ test/org/apache/hadoop/hive/ql/io/
Date Fri, 21 Jun 2013 14:56:09 GMT
Author: hashutosh
Date: Fri Jun 21 14:56:08 2013
New Revision: 1495462

URL: http://svn.apache.org/r1495462
Log:
HIVE-4743 : Improve test coverage of package org.apache.hadoop.hive.ql.io (Ivan Veselovsky
via Ashutosh Chauhan)

Added:
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestPerformTestRCFileAndSeqFile.java
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java?rev=1495462&r1=1495461&r2=1495462&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java Fri Jun
21 14:56:08 2013
@@ -118,6 +118,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final boolean readBoolean() throws IOException {
     int temp = in.read();
     if (temp < 0) {
@@ -135,6 +136,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final byte readByte() throws IOException {
     int temp = in.read();
     if (temp < 0) {
@@ -165,6 +167,7 @@ public class NonSyncDataInputBuffer exte
     return offset;
   }
 
+  @Override
   public final char readChar() throws IOException {
     if (readToBuff(2) < 0) {
       throw new EOFException();
@@ -182,6 +185,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final double readDouble() throws IOException {
     return Double.longBitsToDouble(readLong());
   }
@@ -195,6 +199,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final float readFloat() throws IOException {
     return Float.intBitsToFloat(readInt());
   }
@@ -211,6 +216,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final void readFully(byte[] buffer) throws IOException {
     readFully(buffer, 0, buffer.length);
   }
@@ -233,6 +239,7 @@ public class NonSyncDataInputBuffer exte
    *           if reaches the end of the stream before enough bytes have been
    *           read
    */
+  @Override
   public final void readFully(byte[] buffer, int offset, int length)
       throws IOException {
     if (length < 0) {
@@ -267,6 +274,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final int readInt() throws IOException {
     if (readToBuff(4) < 0) {
       throw new EOFException();
@@ -292,6 +300,7 @@ public class NonSyncDataInputBuffer exte
    * @deprecated Use BufferedReader
    */
   @Deprecated
+  @Override
   public final String readLine() throws IOException {
     StringBuilder line = new StringBuilder(80); // Typical line length
     boolean foundTerminator = false;
@@ -335,6 +344,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final long readLong() throws IOException {
     if (readToBuff(8) < 0) {
       throw new EOFException();
@@ -356,6 +366,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final short readShort() throws IOException {
     if (readToBuff(2) < 0) {
       throw new EOFException();
@@ -373,6 +384,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final int readUnsignedByte() throws IOException {
     int temp = in.read();
     if (temp < 0) {
@@ -391,6 +403,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final int readUnsignedShort() throws IOException {
     if (readToBuff(2) < 0) {
       throw new EOFException();
@@ -407,6 +420,7 @@ public class NonSyncDataInputBuffer exte
    *           If a problem occurs reading from this DataInputStream.
    * 
    */
+  @Override
   public final String readUTF() throws IOException {
     return decodeUTF(readUnsignedShort());
   }
@@ -450,6 +464,7 @@ public class NonSyncDataInputBuffer exte
    * @throws IOException
    *           If the stream is already closed or another IOException occurs.
    */
+  @Override
   public final int skipBytes(int count) throws IOException {
     int skipped = 0;
     long skip;

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java?rev=1495462&r1=1495461&r2=1495462&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java Fri
Jun 21 14:56:08 2013
@@ -20,8 +20,6 @@ package org.apache.hadoop.hive.ql.io;
 import java.io.IOException;
 import java.util.Random;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -34,12 +32,13 @@ import org.apache.hadoop.io.SequenceFile
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
+import static org.junit.Assert.*;
 
 /**
  * PerformTestRCFileAndSeqFile.
  *
  */
-public class PerformTestRCFileAndSeqFile extends TestCase {
+public class PerformTestRCFileAndSeqFile {
 
   private final Configuration conf = new Configuration();
 

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java?rev=1495462&r1=1495461&r2=1495462&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java Fri
Jun 21 14:56:08 2013
@@ -17,7 +17,13 @@
  */
 package org.apache.hadoop.hive.ql.io;
 
+import static org.junit.Assert.assertArrayEquals;
+
+import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Random;
+
+import org.junit.Test;
 
 import junit.framework.TestCase;
 
@@ -27,17 +33,196 @@ import junit.framework.TestCase;
  */
 public class TestHiveInputOutputBuffer extends TestCase {
 
+  private static final int numCases = 14; 
+  
+  private static final String asciiLine1 = "Foo 12345 moo";
+  private static final String asciiLine2 = "Line two";
+  private static final String asciiString = asciiLine1 + "\n" + asciiLine2 + "\r\n";
+
   public void testReadAndWrite() throws IOException {
     String testString = "test_hive_input_output_number_0";
     byte[] string_bytes = testString.getBytes();
     NonSyncDataInputBuffer inBuffer = new NonSyncDataInputBuffer();
     NonSyncDataOutputBuffer outBuffer = new NonSyncDataOutputBuffer();
-    outBuffer.write(string_bytes);
-    inBuffer.reset(outBuffer.getData(), 0, outBuffer.getLength());
-    byte[] readBytes = new byte[string_bytes.length];
-    inBuffer.read(readBytes);
-    String readString = new String(readBytes);
-    assertEquals("Field testReadAndWrite()", readString, testString);
+    try {
+      outBuffer.write(string_bytes);
+      inBuffer.reset(outBuffer.getData(), 0, outBuffer.getLength());
+      byte[] readBytes = new byte[string_bytes.length];
+      inBuffer.read(readBytes);
+      String readString = new String(readBytes);
+      assertEquals("Field testReadAndWrite()", readString, testString);
+    } finally {
+      inBuffer.close();
+      outBuffer.close();
+    }
   }
 
+  @SuppressWarnings("deprecation")
+  private static void readJunk(NonSyncDataInputBuffer in, Random r, long seed, int iter)
+      throws IOException {
+    r.setSeed(seed);
+    for (int i = 0; i < iter; ++i) {
+      switch (r.nextInt(numCases)) {
+        case 0:
+          assertEquals((byte)(r.nextInt() & 0xFF), in.readByte()); break;
+        case 1:
+          assertEquals((short)(r.nextInt() & 0xFFFF), in.readShort()); break;
+        case 2:
+          assertEquals(r.nextInt(), in.readInt()); break;
+        case 3:
+          assertEquals(r.nextLong(), in.readLong()); break;
+        case 4:
+          assertEquals(Double.doubleToLongBits(r.nextDouble()),
+                       Double.doubleToLongBits(in.readDouble())); break;
+        case 5:
+          assertEquals(Float.floatToIntBits(r.nextFloat()),
+                       Float.floatToIntBits(in.readFloat())); break;
+        case 6:
+          int len = r.nextInt(1024);
+          // 1 (test #readFully(3)):
+          final byte[] vb = new byte[len];
+          r.nextBytes(vb);
+          final byte[] b = new byte[len];
+          in.readFully(b, 0, len);
+          assertArrayEquals(vb, b);
+          // 2 (test #read(3)):
+          r.nextBytes(vb);
+          in.read(b, 0, len);
+          assertArrayEquals(vb, b);
+          // 3 (test #readFully(1)):
+          r.nextBytes(vb);
+          in.readFully(b);
+          assertArrayEquals(vb, b);
+          break;
+        case 7:
+          assertEquals(r.nextBoolean(), in.readBoolean());
+          break;
+        case 8:
+          assertEquals((char)r.nextInt(), in.readChar());
+          break;
+        case 9:
+          int actualUB = in.readUnsignedByte();
+          assertTrue(actualUB >= 0);
+          assertTrue(actualUB <= 255);
+          assertEquals(r.nextInt() & 0xFF, actualUB);
+          break;
+        case 10:
+          int actualUS = in.readUnsignedShort();
+          assertTrue(actualUS >= 0);
+          assertTrue(actualUS <= 0xFFFF);
+          assertEquals(r.nextInt() & 0xFFFF, actualUS);
+          break;
+        case 11:
+          String expectedString1 = composeString(1024, r);
+          assertEquals(expectedString1, in.readUTF());
+          String expectedString2 = composeString(1024, r);
+          assertEquals(expectedString2, NonSyncDataInputBuffer.readUTF(in));
+          break;
+        case 12:
+          assertEquals(asciiLine1, in.readLine());
+          assertEquals(asciiLine2, in.readLine());
+          break;
+        case 13:
+          in.skipBytes(8);
+          r.nextLong(); // ignore
+          assertEquals(r.nextLong(), in.readLong());
+          break;
+      }
+    }
+  }
+  
+  private static void writeJunk(DataOutput out, Random r, long seed, int iter)
+      throws IOException  {
+    r.setSeed(seed);
+    for (int i = 0; i < iter; ++i) {
+      switch (r.nextInt(numCases)) {
+        case 0: out.writeByte(r.nextInt()); break;
+        case 1: out.writeShort((short)(r.nextInt() & 0xFFFF)); break;
+        case 2: out.writeInt(r.nextInt()); break;
+        case 3: out.writeLong(r.nextLong()); break;
+        case 4: out.writeDouble(r.nextDouble()); break;
+        case 5: out.writeFloat(r.nextFloat()); break;
+        case 6:
+          byte[] b = new byte[r.nextInt(1024)];
+          // 1:
+          r.nextBytes(b);
+          out.write(b);
+          // 2:
+          r.nextBytes(b);
+          out.write(b);
+          // 3:
+          r.nextBytes(b);
+          out.write(b);
+          break;
+        case 7:
+          out.writeBoolean(r.nextBoolean());
+          break;
+        case 8:
+          out.writeChar((char)r.nextInt());
+          break;
+        case 9:
+          out.writeByte((byte)r.nextInt());
+          break;
+        case 10:
+          out.writeShort((short)r.nextInt());
+          break;
+        case 11:
+          String string = composeString(1024, r);
+          out.writeUTF(string);
+          String string2 = composeString(1024, r);
+          out.writeUTF(string2);
+          break;
+        case 12:
+          byte[] bb = asciiString.getBytes("UTF-8");
+          out.write(bb);
+          break;
+        case 13:
+          out.writeLong(r.nextLong());
+          out.writeLong(r.nextLong());
+          break;
+      }
+    }
+  }
+
+  private static String composeString(int len, Random r) {
+    char[] cc = new char[len];
+    char ch;
+    for (int i = 0; i<len; i++) {
+      do {
+        ch = (char)r.nextInt();
+      } while (!Character.isDefined(ch) 
+          || Character.isHighSurrogate(ch)
+          || Character.isLowSurrogate(ch));
+      cc[i] = ch;
+    }
+    return new String(cc);
+  }
+  
+  /**
+   * Tests methods of {@link NonSyncDataInputBuffer}.
+   * @throws IOException
+   */
+  @Test
+  public void testBaseBuffers() throws IOException {
+    NonSyncDataOutputBuffer dob = new NonSyncDataOutputBuffer();
+    final Random r = new Random();
+    final long seed = 0x0123456789ABCDEFL; // hardcoded for reproducibility.
+    r.setSeed(seed);
+    System.out.println("SEED: " + seed);
+    
+    writeJunk(dob, r, seed, 1000);
+    NonSyncDataInputBuffer dib = new NonSyncDataInputBuffer();
+    dib.reset(dob.getData(), 0, dob.getLength());
+    assertEquals(0, dib.getPosition());
+    assertEquals(dob.getLength(), dib.getLength());
+    readJunk(dib, r, seed, 1000);
+
+    dob.reset();
+    writeJunk(dob, r, seed, 1000);
+    dib.reset(dob.getData(), dob.getLength());
+    assertEquals(0, dib.getPosition());
+    assertEquals(dob.getLength(), dib.getLength());
+    readJunk(dib, r, seed, 1000);
+  }
+  
 }

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestPerformTestRCFileAndSeqFile.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestPerformTestRCFileAndSeqFile.java?rev=1495462&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestPerformTestRCFileAndSeqFile.java
(added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestPerformTestRCFileAndSeqFile.java
Fri Jun 21 14:56:08 2013
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import org.junit.Test;
+
+public class TestPerformTestRCFileAndSeqFile {
+  
+  /**
+   * Runs {@link PerformTestRCFileAndSeqFile} with empty arguments.
+   * @throws Exception
+   */
+  @Test
+  public void testPerformTestRCFileAndSeqFileNoArgs() throws Exception {
+    PerformTestRCFileAndSeqFile.main(new String[0]);
+  }
+}

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java?rev=1495462&r1=1495461&r2=1495462&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java Fri Jun 21 14:56:08
2013
@@ -23,11 +23,13 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.RandomAccessFile;
 import java.io.UnsupportedEncodingException;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 import java.util.Random;
 
 import junit.framework.TestCase;
+import static org.junit.Assert.*;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -219,6 +221,94 @@ public class TestRCFile extends TestCase
 
     reader.close();
   }
+  
+  /**
+   * Tests {@link RCFile.Reader#getColumn(int, BytesRefArrayWritable) } method.
+   * @throws IOException
+   */
+  public void testGetColumn() throws IOException {
+    fs.delete(file, true);
+
+    RCFileOutputFormat.setColumnNumber(conf, expectedFieldsData.length);
+    RCFile.Writer writer =
+      new RCFile.Writer(fs, conf, file, null,
+                        RCFile.createMetadata(new Text("apple"),
+                                              new Text("block"),
+                                              new Text("cat"),
+                                              new Text("dog")),
+                        new DefaultCodec());
+    
+    byte[][] record_1 = {
+        "123".getBytes("UTF-8"), 
+        "456".getBytes("UTF-8"),
+        "789".getBytes("UTF-8"), 
+        "1000".getBytes("UTF-8"),
+        "5.3".getBytes("UTF-8"), 
+        "hive and hadoop".getBytes("UTF-8"),
+        new byte[0], 
+        "NULL".getBytes("UTF-8") };
+    byte[][] record_2 = {
+        "100".getBytes("UTF-8"), 
+        "200".getBytes("UTF-8"),
+        "123".getBytes("UTF-8"), 
+        "1000".getBytes("UTF-8"),
+        "5.3".getBytes("UTF-8"), 
+        "hive and hadoop".getBytes("UTF-8"),
+        new byte[0], 
+        "NULL".getBytes("UTF-8")};
+    
+    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
+    for (int i = 0; i < record_1.length; i++) {
+      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
+          record_1[i].length);
+      bytes.set(i, cu);
+    }
+    writer.append(bytes);
+    bytes.clear();
+    for (int i = 0; i < record_2.length; i++) {
+      BytesRefWritable cu = new BytesRefWritable(record_2[i], 0,
+          record_2[i].length);
+      bytes.set(i, cu);
+    }
+    writer.append(bytes);
+    writer.close();
+
+    RCFile.Reader reader = new RCFile.Reader(fs, file, conf);
+    
+    LongWritable rowID = new LongWritable();
+    assertTrue(reader.next(rowID));
+    assertEquals(rowID.get(), 0L);
+    
+    assertTrue(reader.next(rowID));
+    assertEquals(rowID.get(), 1L);
+    
+    BytesRefArrayWritable result = null;
+    BytesRefWritable brw;
+    for (int col=0; col < 8; col++) {
+      BytesRefArrayWritable result2 = reader.getColumn(col, result);
+      if (result == null) {
+        assertNotNull(result2);
+        result = result2;
+      } else {
+        // #getColumn(2) should return the instance passed in: 
+        assertSame(result2, result);
+      }
+      // each column has height of 2: 
+      assertEquals(2, result.size());
+      for (int row=0; row<result.size(); row++) {
+        brw = result.get(row);
+        int start = brw.getStart();
+        int len = brw.getLength();
+        byte[] actualData = Arrays.copyOfRange(brw.getData(), start, start + len);
+        byte[] expectedData = (row == 0) ? record_1[col] : record_2[col];
+        assertArrayEquals("col="+col+" : row="+row,  expectedData, actualData);
+      }
+      
+      result.clear();
+    }
+    
+    reader.close();
+  }
 
   public void testReadCorruptFile() throws IOException, SerDeException {
     fs.delete(file, true);

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java?rev=1495462&r1=1495461&r2=1495462&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java Fri
Jun 21 14:56:08 2013
@@ -20,9 +20,6 @@ package org.apache.hadoop.hive.ql.io;
 import java.io.File;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
-import java.io.Serializable;
-import java.net.URL;
-import java.net.URLClassLoader;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -35,20 +32,12 @@ import org.apache.hadoop.fs.ContentSumma
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStore;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.ExecDriver;
 import org.apache.hadoop.hive.ql.exec.MapRedTask;
-import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
@@ -184,6 +173,9 @@ public class TestSymlinkTextInputFormat 
       
       CombineHiveInputFormat combineInputFormat = ReflectionUtils.newInstance(
           CombineHiveInputFormat.class, newJob);
+      
+      combineInputFormat.validateInput(newJob);
+      
       InputSplit[] retSplits = combineInputFormat.getSplits(newJob, 1);
       assertEquals(1, retSplits.length);
     } catch (Exception e) {



Mime
View raw message