hadoop-common-commits mailing list archives

From aengin...@apache.org
Subject [31/50] [abbrv] hadoop git commit: HADOOP-12564. Upgrade JUnit3 TestCase to JUnit 4 in org.apache.hadoop.io package. Contributed by Dustin Cote.
Date Sat, 21 Nov 2015 07:56:35 GMT
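
The patch below applies the same mechanical JUnit 3 to JUnit 4 conversion to each test class: drop "extends TestCase", replace the inherited setUp/tearDown overrides with @Before/@After, and annotate every test method with @Test, since JUnit 4 discovers tests by annotation rather than by the test* naming convention. A minimal before/after sketch of the pattern (the class FooTest is illustrative, not from the patch):

JUnit 3 style, as removed by this patch:

    import junit.framework.TestCase;

    public class FooTest extends TestCase {
      @Override
      public void setUp() { /* runs before every test* method */ }
      @Override
      public void tearDown() { /* runs after every test* method */ }
      public void testSomething() { assertTrue(true); } // discovered by its test* name
    }

JUnit 4 style, as introduced by this patch:

    import static org.junit.Assert.assertTrue;

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    public class FooTest {
      @Before
      public void setUp() { /* runs before every @Test method */ }
      @After
      public void tearDown() { /* runs after every @Test method */ }
      @Test
      public void testSomething() { assertTrue(true); } // discovered by annotation
    }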
http://git-wip-us.apache.org/repos/asf/hadoop/blob/989b9e3e/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
index 2f7f2de..235e5e4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
 
+import org.junit.After;
 import org.junit.Assert;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -29,8 +29,10 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.file.tfile.TFile.Reader;
 import org.apache.hadoop.io.file.tfile.TFile.Writer;
 import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner;
+import org.junit.Before;
+import org.junit.Test;
 
-public class TestTFileUnsortedByteArrays extends TestCase {
+public class TestTFileUnsortedByteArrays {
   private static String ROOT =
       System.getProperty("test.build.data", "/tmp/tfile-test");
 
@@ -61,7 +63,7 @@ public class TestTFileUnsortedByteArrays extends TestCase {
     this.records2ndBlock = numRecords2ndBlock;
   }
 
-  @Override
+  @Before
   public void setUp() throws IOException {
     conf = new Configuration();
     path = new Path(ROOT, outputFile);
@@ -75,12 +77,13 @@ public class TestTFileUnsortedByteArrays extends TestCase {
     closeOutput();
   }
 
-  @Override
+  @After
   public void tearDown() throws IOException {
     fs.delete(path, true);
   }
 
   // we still can scan records in an unsorted TFile
+  @Test
   public void testFailureScannerWithKeys() throws IOException {
     Reader reader =
         new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -101,6 +104,7 @@ public class TestTFileUnsortedByteArrays extends TestCase {
   }
 
   // we still can scan records in an unsorted TFile
+  @Test
   public void testScan() throws IOException {
     Reader reader =
         new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -142,6 +146,7 @@ public class TestTFileUnsortedByteArrays extends TestCase {
   }
 
   // we still can scan records in an unsorted TFile
+  @Test
   public void testScanRange() throws IOException {
     Reader reader =
         new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -182,6 +187,7 @@ public class TestTFileUnsortedByteArrays extends TestCase {
     }
   }
 
+  @Test
   public void testFailureSeek() throws IOException {
     Reader reader =
         new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

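One detail worth noting in the hunks above: JUnit 4 runs only methods annotated with @Test, so a migrated method that keeps its test* name but misses the annotation is silently skipped rather than failing. A quick sanity check for a migrated class is to run it with the JUnit 4 runner and confirm the run count matches the number of @Test methods; the driver class below is a hypothetical sketch, not part of the patch:

    import org.junit.runner.JUnitCore;
    import org.junit.runner.Result;

    public class RunMigratedTest {
      public static void main(String[] args) {
        // Run the migrated class with the JUnit 4 runner; the run count
        // should equal the number of methods annotated with @Test.
        Result r = JUnitCore.runClasses(
            org.apache.hadoop.io.file.tfile.TestTFileUnsortedByteArrays.class);
        System.out.println(r.getRunCount() + " run, "
            + r.getFailureCount() + " failed");
      }
    }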
http://git-wip-us.apache.org/repos/asf/hadoop/blob/989b9e3e/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
index a2bb219..9efd271 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
@@ -21,16 +21,18 @@ package org.apache.hadoop.io.file.tfile;
 import java.io.IOException;
 import java.util.Random;
 
+import org.junit.After;
 import org.junit.Assert;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.junit.Before;
+import org.junit.Test;
 
-public class TestVLong extends TestCase {
+public class TestVLong {
   private static String ROOT =
       System.getProperty("test.build.data", "/tmp/tfile-test");
   private Configuration conf;
@@ -38,7 +40,7 @@ public class TestVLong extends TestCase {
   private Path path;
   private String outputFile = "TestVLong";
 
-  @Override
+  @Before
   public void setUp() throws IOException {
     conf = new Configuration();
     path = new Path(ROOT, outputFile);
@@ -48,13 +50,14 @@ public class TestVLong extends TestCase {
     }
   }
 
-  @Override
+  @After
   public void tearDown() throws IOException {
     if (fs.exists(path)) {
       fs.delete(path, false);
     }
   }
 
+  @Test
   public void testVLongByte() throws IOException {
     FSDataOutputStream out = fs.create(path);
     for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; ++i) {
@@ -90,7 +93,8 @@ public class TestVLong extends TestCase {
     fs.delete(path, false);
     return ret;
   }
-  
+
+  @Test
   public void testVLongShort() throws IOException {
     long size = writeAndVerify(0);
     Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 2
@@ -98,18 +102,21 @@ public class TestVLong extends TestCase {
         * (1 << Byte.SIZE) - 128 - 32, size);
   }
 
+  @Test
   public void testVLong3Bytes() throws IOException {
     long size = writeAndVerify(Byte.SIZE);
     Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 3
         + ((1 << Byte.SIZE) - 32) * (1 << Byte.SIZE) - 40 - 1, size);
   }
 
+  @Test
   public void testVLong4Bytes() throws IOException {
     long size = writeAndVerify(Byte.SIZE * 2);
     Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 4
         + ((1 << Byte.SIZE) - 16) * (1 << Byte.SIZE) - 32 - 2, size);
   }
 
+  @Test
   public void testVLong5Bytes() throws IOException {
     long size = writeAndVerify(Byte.SIZE * 3);
      Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 6 - 256
@@ -121,18 +128,23 @@ public class TestVLong extends TestCase {
     Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE)
         * (bytes + 1) - 256 - bytes + 1, size);
   }
+
+  @Test
   public void testVLong6Bytes() throws IOException {
     verifySixOrMoreBytes(6);
   }
-  
+
+  @Test
   public void testVLong7Bytes() throws IOException {
     verifySixOrMoreBytes(7);
   }
 
+  @Test
   public void testVLong8Bytes() throws IOException {
     verifySixOrMoreBytes(8);
   }
 
+  @Test
   public void testVLongRandom() throws IOException {
     int count = 1024 * 1024;
     long data[] = new long[count];

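Because @Before and @After wrap each @Test method individually, the migrated TestVLong still gets a fresh test path created and deleted around every test, matching the per-test semantics of JUnit 3's inherited setUp/tearDown. A minimal sketch of that ordering (hypothetical class, not in the patch):

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    public class LifecycleOrderExample {
      @Before public void setUp()    { System.out.println("setUp"); }
      @After  public void tearDown() { System.out.println("tearDown"); }
      @Test   public void first()    { System.out.println("first"); }
      @Test   public void second()   { System.out.println("second"); }
      // Prints setUp/first/tearDown, then setUp/second/tearDown
      // (JUnit 4 does not guarantee the order of test methods).
    }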
http://git-wip-us.apache.org/repos/asf/hadoop/blob/989b9e3e/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
index 1926ec5..b2d2a8d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
@@ -18,16 +18,19 @@
 
 package org.apache.hadoop.io.serializer.avro;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.SerializationTestUtil;
+import org.junit.Test;
 
-public class TestAvroSerialization extends TestCase {
+public class TestAvroSerialization {
 
   private static final Configuration conf = new Configuration();
 
+  @Test
   public void testSpecific() throws Exception {
     AvroRecord before = new AvroRecord();
     before.intField = 5;
@@ -35,6 +38,7 @@ public class TestAvroSerialization extends TestCase {
     assertEquals(before, after);
   }
 
+  @Test
   public void testReflectPkg() throws Exception {
     Record before = new Record();
     before.x = 10;
@@ -44,12 +48,14 @@ public class TestAvroSerialization extends TestCase {
     assertEquals(before, after);
   }
 
+  @Test
   public void testAcceptHandlingPrimitivesAndArrays() throws Exception {
     SerializationFactory factory = new SerializationFactory(conf);
     assertNull(factory.getSerializer(byte[].class));
     assertNull(factory.getSerializer(byte.class));
   }
 
+  @Test
   public void testReflectInnerClass() throws Exception {
     InnerRecord before = new InnerRecord();
     before.x = 10;
@@ -59,6 +65,7 @@ public class TestAvroSerialization extends TestCase {
     assertEquals(before, after);
   }
 
+  @Test
   public void testReflect() throws Exception {
     RefSerializable before = new RefSerializable();
     before.x = 10;

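The three files also illustrate both assertion styles the migration allows: TestVLong and TestTFileUnsortedByteArrays keep qualified org.junit.Assert calls, while TestAvroSerialization switches to static imports of assertEquals and assertNull so the call sites read exactly as they did under JUnit 3. The two styles are interchangeable; a sketch with an illustrative class name:

    import static org.junit.Assert.assertEquals;

    import org.junit.Assert;
    import org.junit.Test;

    public class AssertStyleExample {
      @Test
      public void stylesAreEquivalent() {
        Assert.assertEquals("qualified, as in TestVLong", 4, 2 + 2);
        assertEquals("static import, as in TestAvroSerialization", 4, 2 + 2);
      }
    }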
