drill-commits mailing list archives

From j...@apache.org
Subject [1/2] drill git commit: DRILL-2923: Ensure all unit tests pass without assertions enabled Modified a number of tests not to use assert, but to instead use one of junit's assertTrue(), assertFalse(), or some other form. Modified test support code that use
Date Thu, 28 May 2015 00:41:26 GMT
Repository: drill
Updated Branches:
  refs/heads/master 8e0f7039f -> 6f54223e4

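For context, the pattern applied throughout this patch is to replace bare Java assert statements, which are silently skipped unless the JVM is started with -ea, with JUnit assertions that always run. Below is a minimal sketch of the before/after as a hypothetical test class (the class and method names are illustrative; only the valuesChecked check mirrors ParquetResultListener further down):

    import static org.junit.Assert.assertTrue;

    import java.util.HashMap;
    import org.junit.Test;

    public class AssertMigrationExample {
      private final HashMap<String, Integer> valuesChecked = new HashMap<>();

      @Test
      public void columnsAreCheckedEvenWithoutEa() {
        valuesChecked.put("col", 1);
        // Before (the style removed by this patch): a no-op unless the JVM runs with -ea
        // assert valuesChecked.keySet().size() > 0;
        // After: always evaluated, failing the test with a descriptive message
        assertTrue("expected at least one column to be checked",
            valuesChecked.keySet().size() > 0);
      }
    }

JUnit's assertTrue, assertFalse, and assertEquals accept an optional message as the first argument, so the explanatory text that used to follow the colon in an assert statement is preserved in the test failure output.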

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetResultListener.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetResultListener.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetResultListener.java
index 6326478..df74f7a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetResultListener.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetResultListener.java
@@ -17,7 +17,8 @@
  ******************************************************************************/
 package org.apache.drill.exec.store.parquet;
 
-import static junit.framework.Assert.assertEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.UnsupportedEncodingException;
 import java.util.Arrays;
@@ -43,18 +44,19 @@ import com.google.common.util.concurrent.SettableFuture;
 public class ParquetResultListener implements UserResultsListener {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParquetResultListener.class);
 
-  private SettableFuture<Void> future = SettableFuture.create();
+  private final SettableFuture<Void> future = SettableFuture.create();
   int count = 0;
   int totalRecords;
 
-  boolean testValues;
-  BufferAllocator allocator;
+  private boolean testValues;
+  private final BufferAllocator allocator;
 
   int batchCounter = 1;
-  HashMap<String, Integer> valuesChecked = new HashMap<>();
-  ParquetTestProperties props;
+  private final HashMap<String, Integer> valuesChecked = new HashMap<>();
+  private final ParquetTestProperties props;
 
-  ParquetResultListener(BufferAllocator allocator, ParquetTestProperties props, int numberOfTimesRead, boolean testValues){
+  ParquetResultListener(BufferAllocator allocator, ParquetTestProperties props,
+      int numberOfTimesRead, boolean testValues) {
     this.allocator = allocator;
     this.props = props;
     this.totalRecords = props.recordsPerRowGroup * props.numberRowGroups * numberOfTimesRead;
@@ -72,31 +74,31 @@ public class ParquetResultListener implements UserResultsListener {
     checkLastChunk();
   }
 
-  private <T> void assertField(ValueVector valueVector, int index, TypeProtos.MinorType expectedMinorType, Object value, String name) {
+  private <T> void assertField(ValueVector valueVector, int index,
+      TypeProtos.MinorType expectedMinorType, Object value, String name) {
     assertField(valueVector, index, expectedMinorType, value, name, 0);
   }
 
   @SuppressWarnings("unchecked")
-  private <T> void assertField(ValueVector valueVector, int index, TypeProtos.MinorType expectedMinorType, T value, String name, int parentFieldId) {
+  private <T> void assertField(ValueVector valueVector, int index,
+      TypeProtos.MinorType expectedMinorType, T value, String name, int parentFieldId) {
 
     if (expectedMinorType == TypeProtos.MinorType.MAP) {
       return;
     }
 
-    T val;
+    final T val;
     try {
-    val = (T) valueVector.getAccessor().getObject(index);
-    if (val instanceof byte[]) {
-      assert(Arrays.equals((byte[]) value, (byte[]) val));
+      val = (T) valueVector.getAccessor().getObject(index);
+    } catch (Throwable ex) {
+      throw ex;
     }
-    else if (val instanceof String) {
-      assert(val.equals(value));
+
+    if (val instanceof byte[]) {
+      assertTrue(Arrays.equals((byte[]) value, (byte[]) val));
     } else {
       assertEquals(value, val);
     }
-    } catch (Throwable ex) {
-      throw ex;
-    }
   }
 
   @Override
@@ -106,7 +108,7 @@ public class ParquetResultListener implements UserResultsListener {
     FieldInfo currentField;
     count += result.getHeader().getRowCount();
     boolean schemaChanged = false;
-    RecordBatchLoader batchLoader = new RecordBatchLoader(allocator);
+    final RecordBatchLoader batchLoader = new RecordBatchLoader(allocator);
     try {
       schemaChanged = batchLoader.load(result.getHeader().getDef(), result.getData());
       // TODO:  Clean:  DRILL-2933:  That load(...) no longer throws
@@ -122,10 +124,10 @@ public class ParquetResultListener implements UserResultsListener {
     if (schemaChanged) {
     } // do not believe any change is needed for when the schema changes, with the current mock scan use case
 
-    for (VectorWrapper vw : batchLoader) {
-      ValueVector vv = vw.getValueVector();
+    for (final VectorWrapper vw : batchLoader) {
+      final ValueVector vv = vw.getValueVector();
       currentField = props.fields.get(vv.getField().getPath().getRootSegment().getPath());
-      if ( ! valuesChecked.containsKey(vv.getField().getPath().getRootSegment().getPath())){
+      if (!valuesChecked.containsKey(vv.getField().getPath().getRootSegment().getPath())) {
         valuesChecked.put(vv.getField().getPath().getRootSegment().getPath(), 0);
         columnValCounter = 0;
       } else {
@@ -133,7 +135,7 @@ public class ParquetResultListener implements UserResultsListener {
       }
       printColumnMajor(vv);
 
-      if (testValues){
+      if (testValues) {
         for (int j = 0; j < vv.getAccessor().getValueCount(); j++) {
           assertField(vv, j, currentField.type,
               currentField.values[columnValCounter % 3], currentField.name + "/");
@@ -163,7 +165,7 @@ public class ParquetResultListener implements UserResultsListener {
     if (testValues) {
       assertEquals( "Unexpected number of output columns from parquet scan.", props.fields.keySet().size(), valuesChecked.keySet().size() );
     }
-    for (String s : valuesChecked.keySet()) {
+    for (final String s : valuesChecked.keySet()) {
       try {
         if (recordsInBatch == -1 ){
           recordsInBatch = valuesChecked.get(s);
@@ -176,7 +178,7 @@ public class ParquetResultListener implements UserResultsListener {
       }
     }
 
-    assert valuesChecked.keySet().size() > 0;
+    assertTrue(valuesChecked.keySet().size() > 0);
     future.set(null);
   }
 
@@ -198,14 +200,14 @@ public class ParquetResultListener implements UserResultsListener {
         System.out.print(", " + (j % 25 == 0 ? "\n batch:" + batchCounter + " v:" + j + " - " : ""));
       }
     }
-    if (ParquetRecordReaderTest.VERBOSE_DEBUG){
+    if (ParquetRecordReaderTest.VERBOSE_DEBUG) {
       System.out.println("\n" + vv.getAccessor().getValueCount());
     }
   }
 
   public void printRowMajor(RecordBatchLoader batchLoader) {
     for (int i = 0; i < batchLoader.getRecordCount(); i++) {
-      if (i % 50 == 0){
+      if (i % 50 == 0) {
         System.out.println();
         for (VectorWrapper vw : batchLoader) {
           ValueVector v = vw.getValueVector();
@@ -216,8 +218,8 @@ public class ParquetResultListener implements UserResultsListener {
         System.out.println();
       }
 
-      for (VectorWrapper vw : batchLoader) {
-        ValueVector v = vw.getValueVector();
+      for (final VectorWrapper vw : batchLoader) {
+        final ValueVector v = vw.getValueVector();
         Object o = v.getAccessor().getObject(i);
         if (o instanceof byte[]) {
           try {
@@ -244,10 +246,10 @@ public class ParquetResultListener implements UserResultsListener {
     }
   }
 
-  public void getResults() throws RpcException{
-    try{
+  public void getResults() throws RpcException {
+    try {
       future.get();
-    }catch(Throwable t){
+    } catch(Throwable t) {
       throw RpcException.mapException(t);
     }
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/fn/TestJsonReaderWithSparseFiles.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/fn/TestJsonReaderWithSparseFiles.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/fn/TestJsonReaderWithSparseFiles.java
index d674d47..544b962 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/fn/TestJsonReaderWithSparseFiles.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/fn/TestJsonReaderWithSparseFiles.java
@@ -17,9 +17,9 @@
  */
 package org.apache.drill.exec.vector.complex.fn;
 
+import static org.junit.Assert.assertEquals;
 
 import java.util.List;
-import java.util.Objects;
 
 import org.apache.drill.BaseTestQuery;
 import org.apache.drill.exec.record.RecordBatchLoader;
@@ -30,24 +30,20 @@ import org.apache.drill.exec.vector.ValueVector;
 import org.junit.Test;
 
 public class TestJsonReaderWithSparseFiles extends BaseTestQuery {
-
-  static interface Function<T> {
+  private static interface Function<T> {
     void apply(T param);
   }
 
-  static class TypeConverter {
-
+  private static class TypeConverter {
     public Object convert(Object obj) {
       if (obj instanceof JsonStringArrayList || obj instanceof JsonStringHashMap) {
         return obj.toString();
       }
       return obj;
     }
-
   }
 
-  static class Verifier implements Function<RecordBatchLoader> {
-
+  private static class Verifier implements Function<RecordBatchLoader> {
     private final int count;
     private final Object[][] values;
     private final TypeConverter converter = new TypeConverter();
@@ -59,38 +55,36 @@ public class TestJsonReaderWithSparseFiles extends BaseTestQuery {
 
     @Override
     public void apply(RecordBatchLoader loader) {
-      assert loader.getRecordCount() == count : "invalid record count returned";
-
-      Object[] row;
-      Object expected;
-      Object actual;
-      for (int r=0;r<values.length;r++) {
-        row = values[r];
-        for (int c=0; c<values[r].length; c++) {
-          expected = row[c];
-          actual = loader.getValueAccessorById(ValueVector.class, c).getValueVector().getAccessor().getObject(r);
-          actual = converter.convert(actual);
-          assert Objects.equals(actual, expected) : String.format("row:%d - col:%d - expected:%s[%s] - actual:%s[%s]",
-              r, c,
-              expected,
-              expected==null?"null":expected.getClass().getSimpleName(),
-              actual,
-              actual==null?"null":actual.getClass().getSimpleName());
+      assertEquals("invalid record count returned", count, loader.getRecordCount());
+
+      for (int r = 0; r < values.length; r++) {
+        final Object[] row = values[r];
+        for (int c = 0; c<values[r].length; c++) {
+          final Object expected = row[c];
+          final Object unconverted = loader.getValueAccessorById(ValueVector.class, c)
+              .getValueVector().getAccessor().getObject(r);
+          final Object actual = converter.convert(unconverted);
+          assertEquals(String.format("row:%d - col:%d - expected:%s[%s] - actual:%s[%s]",
+                r, c, expected,
+                expected == null ? "null" : expected.getClass().getSimpleName(),
+                actual,
+                actual == null ? "null" : actual.getClass().getSimpleName()),
+              actual, expected);
         }
       }
     }
   }
 
   protected void query(final String query, final Function<RecordBatchLoader> testBody) throws Exception {
-    List<QueryDataBatch> batches = testSqlWithResults(query);
-    RecordBatchLoader loader = new RecordBatchLoader(client.getAllocator());
+    final List<QueryDataBatch> batches = testSqlWithResults(query);
+    final RecordBatchLoader loader = new RecordBatchLoader(client.getAllocator());
     try {
       // first batch at index 0 is empty and used for fast schema return. Load the second one for the tests
-      QueryDataBatch batch = batches.get(0);
+      final QueryDataBatch batch = batches.get(0);
       loader.load(batch.getHeader().getDef(), batch.getData());
       testBody.apply(loader);
     } finally {
-      for (QueryDataBatch batch:batches) {
+      for (final QueryDataBatch batch:batches) {
         batch.release();
       }
       loader.clear();
@@ -132,5 +126,4 @@ public class TestJsonReaderWithSparseFiles extends BaseTestQuery {
     };
     query(sql, new Verifier(1, values));
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestRepeated.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestRepeated.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestRepeated.java
index 6e2a2b5..5640c8e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestRepeated.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestRepeated.java
@@ -39,22 +39,22 @@ import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.base.Charsets;
 
 public class TestRepeated {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestRepeated.class);
+  // private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestRepeated.class);
 
   private static BufferAllocator allocator;
 
   @BeforeClass
-  public static void setupAllocator(){
+  public static void setupAllocator() {
     allocator = new TopLevelAllocator();
   }
 
   @AfterClass
-  public static void destroyAllocator(){
+  public static void destroyAllocator() {
     allocator.close();
   }
 //
 //  @Test
-//  public void repeatedMap(){
+//  public void repeatedMap() {
 //
 //    /**
 //     * We're going to try to create an object that looks like:
@@ -105,14 +105,14 @@ public class TestRepeated {
 //    map.end();
 //
 //
-//    assert writer.ok();
+//    assertTrue(writer.ok());
 //
 //    System.out.println(v.getAccessor().getObject(0));
 //
 //  }
 
   @Test
-  public void listOfList() throws IOException{
+  public void listOfList() throws IOException {
     /**
      * We're going to try to create an object that looks like:
      *
@@ -130,21 +130,21 @@ public class TestRepeated {
      *
      */
 
-    MapVector v = new MapVector("", allocator, null);
-    ComplexWriterImpl writer = new ComplexWriterImpl("col", v);
+    final MapVector v = new MapVector("", allocator, null);
+    final ComplexWriterImpl writer = new ComplexWriterImpl("col", v);
     writer.allocate();
 
     {
-      MapWriter map = writer.rootAsMap();
-      ListWriter list = map.list("a");
+      final MapWriter map = writer.rootAsMap();
+      final ListWriter list = map.list("a");
       list.start();
 
-      ListWriter innerList = list.list();
-      IntWriter innerInt = innerList.integer();
+      final ListWriter innerList = list.list();
+      final IntWriter innerInt = innerList.integer();
 
       innerList.start();
 
-      IntHolder holder = new IntHolder();
+      final IntHolder holder = new IntHolder();
 
       holder.value = 1;
       innerInt.write(holder);
@@ -164,11 +164,11 @@ public class TestRepeated {
       innerList.end();
       list.end();
 
-      IntWriter numCol = map.integer("nums");
+      final IntWriter numCol = map.integer("nums");
       holder.value = 14;
       numCol.write(holder);
 
-      MapWriter repeatedMap = map.list("b").map();
+      final MapWriter repeatedMap = map.list("b").map();
       repeatedMap.start();
       holder.value = 1;
       repeatedMap.integer("c").write(holder);
@@ -177,7 +177,7 @@ public class TestRepeated {
       repeatedMap.start();
       holder.value = 2;
       repeatedMap.integer("c").write(holder);
-      BigIntHolder h = new BigIntHolder();
+      final BigIntHolder h = new BigIntHolder();
       h.value = 15;
       repeatedMap.bigInt("x").write(h);
       repeatedMap.end();
@@ -188,16 +188,16 @@ public class TestRepeated {
     {
       writer.setPosition(1);
 
-      MapWriter map = writer.rootAsMap();
-      ListWriter list = map.list("a");
+      final MapWriter map = writer.rootAsMap();
+      final ListWriter list = map.list("a");
       list.start();
 
-      ListWriter innerList = list.list();
-      IntWriter innerInt = innerList.integer();
+      final ListWriter innerList = list.list();
+      final IntWriter innerInt = innerList.integer();
 
       innerList.start();
 
-      IntHolder holder = new IntHolder();
+      final IntHolder holder = new IntHolder();
 
       holder.value = -1;
       innerInt.write(holder);
@@ -217,11 +217,11 @@ public class TestRepeated {
       innerList.end();
       list.end();
 
-      IntWriter numCol = map.integer("nums");
+      final IntWriter numCol = map.integer("nums");
       holder.value = -28;
       numCol.write(holder);
 
-      MapWriter repeatedMap = map.list("b").map();
+      final MapWriter repeatedMap = map.list("b").map();
       repeatedMap.start();
       holder.value = -1;
       repeatedMap.integer("c").write(holder);
@@ -230,7 +230,7 @@ public class TestRepeated {
       repeatedMap.start();
       holder.value = -2;
       repeatedMap.integer("c").write(holder);
-      BigIntHolder h = new BigIntHolder();
+      final BigIntHolder h = new BigIntHolder();
       h.value = -30;
       repeatedMap.bigInt("x").write(h);
       repeatedMap.end();
@@ -238,16 +238,14 @@ public class TestRepeated {
       map.end();
     }
 
-
-    ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
+    final ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
 
     System.out.println("Map of Object[0]: " + ow.writeValueAsString(v.getAccessor().getObject(0)));
     System.out.println("Map of Object[1]: " + ow.writeValueAsString(v.getAccessor().getObject(1)));
 
-
-    ByteArrayOutputStream stream = new ByteArrayOutputStream();
-    JsonWriter jsonWriter = new JsonWriter(stream, true, true);
-    FieldReader reader = v.getChild("col", MapVector.class).getReader();
+    final ByteArrayOutputStream stream = new ByteArrayOutputStream();
+    final JsonWriter jsonWriter = new JsonWriter(stream, true, true);
+    final FieldReader reader = v.getChild("col", MapVector.class).getReader();
     reader.setPosition(0);
     jsonWriter.write(reader);
     reader.setPosition(1);
@@ -256,7 +254,5 @@ public class TestRepeated {
     System.out.println(new String(stream.toByteArray(), Charsets.UTF_8));
 
     writer.clear();
-
-
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
index a6c2da8..a4fccf6 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
@@ -18,7 +18,9 @@
 package org.apache.drill.jdbc;
 
 import static org.junit.Assert.fail;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
 import static org.hamcrest.CoreMatchers.*;
 
 import org.apache.drill.jdbc.Driver;
@@ -153,15 +155,11 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
                                      final String columnName ) throws SQLException
   {
     System.out.println( "(Setting up row for " + tableOrViewName + "." + columnName + ".)");
-    assert null != dbMetadata
-        : "dbMetadata is null; must be set before calling setUpRow(...)";
+    assertNotNull("dbMetadata is null; must be set before calling setUpRow(...)", dbMetadata);
     final ResultSet testRow =
         dbMetadata.getColumns( "DRILL", schemaName, tableOrViewName, columnName );
-    if ( ! testRow.next() ) {
-      assert false
-          : "Test setup error:  No row for column DRILL . `" + schemaName + "` . `"
-            + tableOrViewName + "` . `" + columnName + "`";
-    }
+    assertTrue("Test setup error:  No row for column DRILL . `" + schemaName + "` . `"
+            + tableOrViewName + "` . `" + columnName + "`", testRow.next());
     return testRow;
   }
 
@@ -174,7 +172,7 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
     // class uses some objects across methods.)
     connection = new Driver().connect( "jdbc:drill:zk=local", JdbcAssert.getDefaultProperties());
     dbMetadata = connection.getMetaData();
-    Statement stmt = connection.createStatement();
+    final Statement stmt = connection.createStatement();
 
     ResultSet util;
 
@@ -200,9 +198,8 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
     } else if ( 17 == hiveTestColumnRowCount ) {
       // Hive data seems to exist already--skip recreating it.
     } else {
-      assert false
-          : "Expected 17 Hive test columns see " + hiveTestColumnRowCount + "."
-            + "  Test code is out of date or Hive data is corrupted.";
+      fail("Expected 17 Hive test columns see " + hiveTestColumnRowCount + "."
+            + "  Test code is out of date or Hive data is corrupted.");
     }
     TODO(end) */
 
@@ -210,9 +207,8 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
 
     // Create temporary test-columns view:
     util = stmt.executeQuery( "USE dfs_test.tmp" );
-    assert util.next();
-    assert util.getBoolean( 1 )
-        : "Error setting schema for test: " + util.getString( 2 );
+    assertTrue( util.next() );
+    assertTrue( "Error setting schema for test: " + util.getString( 2 ), util.getBoolean( 1 ) );
     util = stmt.executeQuery(
         ""
         +   "CREATE OR REPLACE VIEW " + VIEW_NAME + " AS SELECT  "
@@ -245,10 +241,9 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
         + "\n  '' "
         + "\nFROM INFORMATION_SCHEMA.COLUMNS "
         + "\nLIMIT 1 " );
-    assert util.next();
-    assert util.getBoolean( 1 )
-        : "Error creating temporary test-columns view " + VIEW_NAME + ": "
-          + util.getString( 2 );
+    assertTrue( util.next() );
+    assertTrue("Error creating temporary test-columns view " + VIEW_NAME + ": "
+          + util.getString( 2 ), util.getBoolean( 1 ) );
 
     // Set up result rows for temporary test view and Hivetest columns:
 
@@ -298,14 +293,12 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
 
   @AfterClass
   public static void tearDownConnection() throws SQLException {
-
-    ResultSet util =
+    final ResultSet util =
         connection.createStatement().executeQuery( "DROP VIEW " + VIEW_NAME + "" );
-    assert util.next();
-    // DRILL-2439:  assert util.getBoolean( 1 ) : ...;
-    assert util.getBoolean( 1 )
-       : "Error dropping temporary test-columns view " + VIEW_NAME + ": "
-         + util.getString( 2 );
+    assertTrue( util.next() );
+    // DRILL-2439:  assertTrue( ..., util.getBoolean( 1 ) );
+    assertTrue("Error dropping temporary test-columns view " + VIEW_NAME + ": "
+         + util.getString( 2 ), util.getBoolean( 1 ) );
     connection.close();
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/jdbc/src/test/java/org/apache/drill/jdbc/ResultSetGetMethodConversionsTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/ResultSetGetMethodConversionsTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/ResultSetGetMethodConversionsTest.java
index 4ad80d1..1c528f9 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/ResultSetGetMethodConversionsTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/ResultSetGetMethodConversionsTest.java
@@ -17,8 +17,8 @@
  */
 package org.apache.drill.jdbc;
 
-import static org.junit.Assert.fail;
 import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
 import static org.hamcrest.CoreMatchers.*;
 
 import org.junit.AfterClass;
@@ -28,7 +28,6 @@ import org.junit.Test;
 
 import java.math.BigDecimal;
 import java.sql.Connection;
-import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
@@ -73,7 +72,7 @@ public class ResultSetGetMethodConversionsTest extends JdbcTestBase {
     connection = new Driver().connect( "jdbc:drill:zk=local", null );
 
     // Set up result row with values of various types.
-    Statement stmt = connection.createStatement();
+    final Statement stmt = connection.createStatement();
     testDataRow = stmt.executeQuery(
         ""
         +   "SELECT  "
@@ -98,7 +97,7 @@ public class ResultSetGetMethodConversionsTest extends JdbcTestBase {
         + "\nFROM INFORMATION_SCHEMA.CATALOGS "
         + "\nLIMIT 1 " );
     // Note: Assertions must be enabled (as they have been so far in tests).
-    assert testDataRow.next();
+    assertTrue( testDataRow.next() );
   }
 
   @AfterClass

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/jdbc/src/test/java/org/apache/drill/jdbc/proxy/TracingProxyDriverTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/proxy/TracingProxyDriverTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/proxy/TracingProxyDriverTest.java
index 389cbac..6e8a17c 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/proxy/TracingProxyDriverTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/proxy/TracingProxyDriverTest.java
@@ -20,10 +20,7 @@ package org.apache.drill.jdbc.proxy;
 import org.apache.drill.test.DrillTest;
 
 import java.io.ByteArrayOutputStream;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
 import java.io.PrintStream;
-import java.io.StringWriter;
 import java.nio.charset.StandardCharsets;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
@@ -35,11 +32,10 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.Properties;
 
-import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -64,10 +60,6 @@ public class TracingProxyDriverTest extends DrillTest {
         DriverManager.getConnection( "jdbc:proxy::jdbc:drill:zk=local" );
   }
 
-  @AfterClass
-  public static void tearDownTestCase() {
-  }
-
   @Test
   public void testBasicProxying() throws SQLException {
     try ( final Statement stmt = proxyConnection.createStatement() ) {
@@ -89,24 +81,23 @@ public class TracingProxyDriverTest extends DrillTest {
     }
 
     void redirect() {
-      assert ! redirected;
+      assertFalse( redirected );
       redirected = true;
       System.setErr( capturingStream );
     }
 
     void unredirect() {
-      assert redirected;
+      assertTrue( redirected );
       redirected = false;
       System.setErr( savedStdErr );
     }
 
     String getOutput() {
-      assert ! redirected;
+      assertFalse( redirected );
       return new String( buffer.toByteArray(), StandardCharsets.UTF_8 );
     }
   }
 
-
   @Test
   public void testBasicReturnTrace() throws SQLException {
     final StdErrCapturer nameThis = new StdErrCapturer();
@@ -223,7 +214,6 @@ public class TracingProxyDriverTest extends DrillTest {
       // expected
     }
 
-
     final ResultSet catalogsResultSet = dbMetaData.getCatalogs();
     assertThat( catalogsResultSet, notNullValue() );
     assertThat( catalogsResultSet, instanceOf( ResultSet.class ) );
@@ -247,11 +237,9 @@ public class TracingProxyDriverTest extends DrillTest {
       catch ( SQLException e ) {
         // expected;
       }
-
     }
 
     assertThat( proxyConnection.getMetaData(), sameInstance( dbMetaData ) );
     assertThat( catalogsResultSet.getMetaData(), sameInstance( rsMetaData ) );
   }
-
 } // class ProxyDriverTest

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2128GetColumnsDataTypeNotTypeCodeIntBugsTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2128GetColumnsDataTypeNotTypeCodeIntBugsTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2128GetColumnsDataTypeNotTypeCodeIntBugsTest.java
index 4203c4a..f1b1e4a 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2128GetColumnsDataTypeNotTypeCodeIntBugsTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2128GetColumnsDataTypeNotTypeCodeIntBugsTest.java
@@ -18,6 +18,7 @@
 package org.apache.drill.jdbc.test;
 
 import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.hamcrest.CoreMatchers.*;
 
@@ -49,7 +50,7 @@ public class Drill2128GetColumnsDataTypeNotTypeCodeIntBugsTest extends JdbcTestB
   private static DatabaseMetaData dbMetadata;
 
   @Rule
-  public TestRule TIMEOUT = TestTools.getTimeoutRule( 120_000 /* ms */ );
+  public final TestRule TIMEOUT = TestTools.getTimeoutRule( 120_000 /* ms */ );
 
   @BeforeClass
   public static void setUpConnection() throws Exception {
@@ -78,8 +79,7 @@ public class Drill2128GetColumnsDataTypeNotTypeCodeIntBugsTest extends JdbcTestB
   public void testColumn_DATA_TYPE_isInteger() throws Exception {
     // Get metadata for some column(s).
     final ResultSet columns = dbMetadata.getColumns( null, null, null, null );
-    final boolean hasRow = columns.next();
-    assert hasRow : "DatabaseMetaData.getColumns(...) returned no rows";
+    assertTrue( "DatabaseMetaData.getColumns(...) returned no rows", columns.next() );
 
     do {
       // DATA_TYPE should be INTEGER, so getInt( "DATA_TYPE" ) should succeed:
@@ -152,8 +152,7 @@ public class Drill2128GetColumnsDataTypeNotTypeCodeIntBugsTest extends JdbcTestB
     final ResultSet columns =
         dbMetadata.getColumns( null, "INFORMATION_SCHEMA", "COLUMNS",
                                "ORDINAL_POSITION" );
-    final boolean hasRow = columns.next();
-    assert hasRow : "DatabaseMetaData.getColumns(...) returned no rows";
+    assertTrue( "DatabaseMetaData.getColumns(...) returned no rows", columns.next() );
 
     // TYPE_NAME should be character string for type name "INTEGER", so
     // getString( "TYPE_NAME" ) should succeed and getInt( "TYPE_NAME" ) should
@@ -179,5 +178,4 @@ public class Drill2128GetColumnsDataTypeNotTypeCodeIntBugsTest extends JdbcTestB
     assertThat( "getString( 6 ) (expected to be same as getString( \"TYPE_NAME\" ))",
                   typeName2, equalTo( typeName1 ) );
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2461IntervalsBreakInfoSchemaBugTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2461IntervalsBreakInfoSchemaBugTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2461IntervalsBreakInfoSchemaBugTest.java
index f0a9eb0..4e27ba7 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2461IntervalsBreakInfoSchemaBugTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2461IntervalsBreakInfoSchemaBugTest.java
@@ -17,24 +17,16 @@
  */
 package org.apache.drill.jdbc.test;
 
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertThat;
-import static org.hamcrest.CoreMatchers.*;
+import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.common.util.TestTools;
-import org.apache.drill.jdbc.Driver;
 import org.apache.drill.jdbc.JdbcTestBase;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.TestRule;
 
 import java.sql.Connection;
-import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.Statement;
-import java.sql.SQLException;
 
 
 public class Drill2461IntervalsBreakInfoSchemaBugTest extends JdbcTestBase {
@@ -63,29 +55,25 @@ public class Drill2461IntervalsBreakInfoSchemaBugTest extends JdbcTestBase {
 
     // Create a view using an INTERVAL type:
     util = stmt.executeQuery( "USE dfs_test.tmp" );
-    assert util.next();
-    assert util.getBoolean( 1 )
-        : "Error setting schema to dfs_test.tmp: " + util.getString( 2 );
+    assertTrue( util.next() );
+    assertTrue( "Error setting schema to dfs_test.tmp: " + util.getString( 2 ), util.getBoolean( 1 ) );
     util = stmt.executeQuery(
         "CREATE OR REPLACE VIEW " + VIEW_NAME + " AS "
       + "\n  SELECT CAST( NULL AS INTERVAL HOUR(4) TO MINUTE ) AS optINTERVAL_HM "
       + "\n  FROM INFORMATION_SCHEMA.CATALOGS "
       + "\n  LIMIT 1 " );
-    assert util.next();
-    assert util.getBoolean( 1 )
-        : "Error creating temporary test-columns view " + VIEW_NAME + ": "
-          + util.getString( 2 );
+    assertTrue( util.next() );
+    assertTrue( "Error creating temporary test-columns view " + VIEW_NAME + ": "
+          + util.getString( 2 ), util.getBoolean( 1 ) );
 
     // Test whether query INFORMATION_SCHEMA.COLUMNS works (doesn't crash):
     util = stmt.executeQuery( "SELECT * FROM INFORMATION_SCHEMA.COLUMNS" );
-    assert util.next();
+    assertTrue( util.next() );
 
     // Clean up the test view:
     util = connection.createStatement().executeQuery( "DROP VIEW " + VIEW_NAME );
-    assert util.next();
-    assert util.getBoolean( 1 )
-       : "Error dropping temporary test-columns view " + VIEW_NAME + ": "
-         + util.getString( 2 );
+    assertTrue( util.next() );
+    assertTrue( "Error dropping temporary test-columns view " + VIEW_NAME + ": "
+         + util.getString( 2 ), util.getBoolean( 1 ) );
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2463GetNullsFailedWithAssertionsBugTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2463GetNullsFailedWithAssertionsBugTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2463GetNullsFailedWithAssertionsBugTest.java
index c355142..0f8b15d 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2463GetNullsFailedWithAssertionsBugTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2463GetNullsFailedWithAssertionsBugTest.java
@@ -17,13 +17,12 @@
  */
 package org.apache.drill.jdbc.test;
 
-import static org.junit.Assert.fail;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertThat;
 import static org.hamcrest.CoreMatchers.*;
 
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.apache.drill.jdbc.Driver;
 import org.apache.drill.jdbc.JdbcTestBase;
@@ -46,12 +45,6 @@ public class Drill2463GetNullsFailedWithAssertionsBugTest extends JdbcTestBase {
     // class uses some objects across methods.)
     connection = new Driver().connect( "jdbc:drill:zk=local", JdbcAssert.getDefaultProperties() );
     statement = connection.createStatement();
-
-    boolean assertionsEnabled = false;
-    assert assertionsEnabled = true;
-    if ( ! assertionsEnabled ) {
-      throw new RuntimeException( "Assertions need to be enabled but are not." );
-    }
   }
 
   @AfterClass
@@ -63,69 +56,66 @@ public class Drill2463GetNullsFailedWithAssertionsBugTest extends JdbcTestBase {
 
   @Test
   public void testGetPrimitiveTypeNullAsOwnType() throws Exception {
-    ResultSet rs = statement.executeQuery(
+    final ResultSet rs = statement.executeQuery(
         "SELECT CAST( NULL AS INTEGER ) FROM INFORMATION_SCHEMA.CATALOGS" );
-    assert rs.next();
+    assertTrue( rs.next() );
     assertThat( "getInt(...) for NULL", rs.getInt( 1 ), equalTo( 0 ) );
     assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
   }
 
   @Test
   public void testGetPrimitiveTypeNullAsObject() throws Exception {
-    ResultSet rs = statement.executeQuery(
+    final ResultSet rs = statement.executeQuery(
         "SELECT CAST( NULL AS INTEGER ) FROM INFORMATION_SCHEMA.CATALOGS" );
-    assert rs.next();
+    assertTrue( rs.next() );
     assertThat( "getObject(...) for NULL", rs.getObject( 1 ), nullValue() );
     assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
   }
 
   @Test
   public void testGetNonprimitiveTypeNullAsOwnType() throws Exception {
-    ResultSet rs = statement.executeQuery(
+    final ResultSet rs = statement.executeQuery(
         "SELECT CAST( NULL AS VARCHAR ) FROM INFORMATION_SCHEMA.CATALOGS" );
-    assert rs.next();
+    assertTrue( rs.next() );
     assertThat( "getString(...) for NULL", rs.getString( 1 ), nullValue() );
     assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
   }
 
-
   // Test a few specifics
 
   @Test
   public void testGetBooleanNullAsOwnType() throws Exception {
-    ResultSet rs = statement.executeQuery(
+    final ResultSet rs = statement.executeQuery(
         "SELECT CAST( NULL AS BOOLEAN ) FROM INFORMATION_SCHEMA.CATALOGS" );
-    assert rs.next();
+    assertTrue( rs.next() );
     assertThat( "getBoolean(...) for NULL", rs.getBoolean( 1 ), equalTo( false ) );
     assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
   }
 
   @Test
   public void testGetBooleanNullAsObject() throws Exception {
-    ResultSet rs = statement.executeQuery(
+    final ResultSet rs = statement.executeQuery(
         "SELECT CAST( NULL AS BOOLEAN ) FROM INFORMATION_SCHEMA.CATALOGS" );
-    assert rs.next();
+    assertTrue( rs.next() );
     assertThat( "getObject(...) for NULL", rs.getObject( 1 ), nullValue() );
     assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
   }
 
   @Test
   public void testGetIntegerNullAsOwnType() throws Exception {
-    ResultSet rs = statement.executeQuery(
+    final ResultSet rs = statement.executeQuery(
         "SELECT CAST( NULL AS INTEGER ) FROM INFORMATION_SCHEMA.CATALOGS" );
-    assert rs.next();
+    assertTrue( rs.next() );
     assertThat( "getInt(...) for NULL", rs.getInt( 1 ), equalTo( 0 ) );
     assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
   }
 
   @Test
   public void testGetIntegerNullAsObject() throws Exception {
-    ResultSet rs = statement.executeQuery(
+    final ResultSet rs = statement.executeQuery(
         "SELECT CAST( NULL AS INTEGER ) FROM INFORMATION_SCHEMA.CATALOGS" );
-    assert rs.next();
+    assertTrue( rs.next() );
     assertThat( "getObject(...) for NULL", rs.getObject( 1 ), nullValue() );
     assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
   }
-
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
index 4081696..926530d 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
@@ -50,7 +50,6 @@ public class TestJdbcQuery extends JdbcTestQueryBase {
     testQuery("select * from cp.`employee.json`");
   }
 
-
   @Test
   public void testCast() throws Exception{
     testQuery(String.format("select R_REGIONKEY, cast(R_NAME as varchar(15)) as region, cast(R_COMMENT as varchar(255)) as comment from dfs_test.`%s/../../sample-data/region.parquet`", WORKING_PATH));
@@ -199,26 +198,27 @@ public class TestJdbcQuery extends JdbcTestQueryBase {
     JdbcAssert.withNoDefaultSchema().withConnection(new Function<Connection, Void>() {
       public Void apply(Connection connection) {
         try {
-          Statement statement = connection.createStatement();
+          final Statement statement = connection.createStatement();
 
           // show tables on view
-          ResultSet resultSet = statement.executeQuery("select date '2008-2-23', time '12:23:34', timestamp '2008-2-23 12:23:34.456', " +
-                                                       "interval '1' year, interval '2' day, " +
-                                                       "date_add(date '2008-2-23', interval '1 10:20:30' day to second), " +
-                                                       "date_add(date '2010-2-23', 1) " +
-                                                       "from cp.`employee.json` limit 1");
+          final ResultSet resultSet = statement.executeQuery(
+              "select date '2008-2-23', time '12:23:34', timestamp '2008-2-23 12:23:34.456', " +
+              "interval '1' year, interval '2' day, " +
+              "date_add(date '2008-2-23', interval '1 10:20:30' day to second), " +
+              "date_add(date '2010-2-23', 1) " +
+              "from cp.`employee.json` limit 1");
 
           resultSet.next();
-          java.sql.Date date = resultSet.getDate(1);
-          java.sql.Time time = resultSet.getTime(2);
-          java.sql.Timestamp ts = resultSet.getTimestamp(3);
-          String intervalYear = resultSet.getString(4);
-          String intervalDay  = resultSet.getString(5);
-          java.sql.Timestamp ts1 = resultSet.getTimestamp(6);
-          java.sql.Date date1 = resultSet.getDate(7);
-
-          java.sql.Timestamp result = java.sql.Timestamp.valueOf("2008-2-24 10:20:30");
-          java.sql.Date result1 = java.sql.Date.valueOf("2010-2-24");
+          final java.sql.Date date = resultSet.getDate(1);
+          final java.sql.Time time = resultSet.getTime(2);
+          final java.sql.Timestamp ts = resultSet.getTimestamp(3);
+          final String intervalYear = resultSet.getString(4);
+          final String intervalDay  = resultSet.getString(5);
+          final java.sql.Timestamp ts1 = resultSet.getTimestamp(6);
+          final java.sql.Date date1 = resultSet.getDate(7);
+
+          final java.sql.Timestamp result = java.sql.Timestamp.valueOf("2008-2-24 10:20:30");
+          final java.sql.Date result1 = java.sql.Date.valueOf("2010-2-24");
           assertEquals(ts1, result);
           assertEquals(date1, result1);
 
@@ -243,13 +243,14 @@ public class TestJdbcQuery extends JdbcTestQueryBase {
     JdbcAssert.withNoDefaultSchema().withConnection(new Function<Connection, Void>()
{
       public Void apply(Connection connection) {
         try {
-          Statement statement = connection.createStatement();
+          final Statement statement = connection.createStatement();
 
           // show files
-          ResultSet resultSet = statement.executeQuery("select timestamp '2008-2-23 12:23:23', date '2001-01-01' from cp.`employee.json` limit 1");
+          final ResultSet resultSet = statement.executeQuery(
+              "select timestamp '2008-2-23 12:23:23', date '2001-01-01' from cp.`employee.json` limit 1");
 
-          assert (resultSet.getMetaData().getColumnType(1) == Types.TIMESTAMP);
-          assert (resultSet.getMetaData().getColumnType(2) == Types.DATE);
+          assertEquals( Types.TIMESTAMP, resultSet.getMetaData().getColumnType(1) );
+          assertEquals( Types.DATE, resultSet.getMetaData().getColumnType(2) );
 
           System.out.println(JdbcAssert.toString(resultSet));
           resultSet.close();

