kudu-commits mailing list archives

From granthe...@apache.org
Subject kudu git commit: [Java] Retry tests that don’t inherit from BaseKuduTest
Date Wed, 25 Jul 2018 00:22:38 GMT
Repository: kudu
Updated Branches:
  refs/heads/master d6ee0047c -> 524485ebc


[Java] Retry tests that don’t inherit from BaseKuduTest

The Scala tests and a couple of special-case tests
don't inherit from BaseKuduTest but may still
be flaky.

To fix this, I moved the retry configuration logic into
the RetryRule and added the rule to a few tests directly:
   - TestSecurity
   - TestKuduMetastorePlugin

Additionally, I updated the Scala TestContext trait,
now called KuduTestSuite, to extend JUnitSuite and
include the RetryRule so that all of the Scala tests
are retried as well. This required a few syntax
changes but has the added benefit of not needing
`@RunWith(classOf[JUnitRunner])` at the top of each
test class, and it brings the syntax in line with the
rest of our tests.
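
As an illustration of the new pattern (a minimal sketch; the class and
method names below are hypothetical, but the structure mirrors the
changes to TestKuduBackup and KuduRDDTest in the diff), a converted
Scala test now looks roughly like this:

    import org.apache.kudu.spark.kudu.KuduTestSuite
    import org.junit.Assert._
    import org.junit.Test

    // No @RunWith(classOf[JUnitRunner]) is needed: KuduTestSuite extends
    // JUnitSuite, so JUnit discovers @Test methods directly and applies
    // the RetryRule inherited from the suite to every test.
    class ExampleKuduTest extends KuduTestSuite {

      @Test
      def testInsertAndCollect() {
        // table, kuduContext, ss, tableName and insertRows all come from
        // the KuduTestSuite trait, which sets up the mini cluster.
        insertRows(table, 100)
        val rdd = kuduContext.kuduRDD(ss.sparkContext, tableName, List("key"))
        assertEquals(100, rdd.collect.length)
      }
    }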

Change-Id: I596fde91a13f7e644fd1b1814e9a672f96fa0e4b
Reviewed-on: http://gerrit.cloudera.org:8080/11037
Reviewed-by: Adar Dembo <adar@cloudera.com>
Tested-by: Kudu Jenkins


Project: http://git-wip-us.apache.org/repos/asf/kudu/repo
Commit: http://git-wip-us.apache.org/repos/asf/kudu/commit/524485eb
Tree: http://git-wip-us.apache.org/repos/asf/kudu/tree/524485eb
Diff: http://git-wip-us.apache.org/repos/asf/kudu/diff/524485eb

Branch: refs/heads/master
Commit: 524485ebc797bf3e7e53d5b931cc0193e140ed41
Parents: d6ee004
Author: Grant Henke <granthenke@apache.org>
Authored: Tue Jul 24 15:29:54 2018 -0500
Committer: Grant Henke <granthenke@apache.org>
Committed: Wed Jul 25 00:22:21 2018 +0000

----------------------------------------------------------------------
 .../org/apache/kudu/backup/TestKuduBackup.scala |  16 +-
 .../org/apache/kudu/client/BaseKuduTest.java    |  10 +-
 .../org/apache/kudu/client/TestSecurity.java    |   7 +
 .../java/org/apache/kudu/junit/RetryRule.java   |  12 +-
 java/kudu-hive/build.gradle                     |   1 +
 java/kudu-hive/pom.xml                          |   7 +
 .../hive/metastore/TestKuduMetastorePlugin.java |   5 +
 .../kudu/spark/tools/ITBigLinkedListTest.scala  |  12 +-
 .../spark/tools/TestImportExportFiles.scala     |  10 +-
 .../kudu/spark/kudu/DefaultSourceTest.scala     | 169 ++++++++++-------
 .../kudu/spark/kudu/KuduContextTest.scala       |  17 +-
 .../apache/kudu/spark/kudu/KuduRDDTest.scala    |  10 +-
 .../apache/kudu/spark/kudu/KuduTestSuite.scala  | 181 +++++++++++++++++++
 .../apache/kudu/spark/kudu/TestContext.scala    | 172 ------------------
 14 files changed, 350 insertions(+), 279 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-backup/src/test/scala/org/apache/kudu/backup/TestKuduBackup.scala
----------------------------------------------------------------------
diff --git a/java/kudu-backup/src/test/scala/org/apache/kudu/backup/TestKuduBackup.scala b/java/kudu-backup/src/test/scala/org/apache/kudu/backup/TestKuduBackup.scala
index 1c166bd..b077b31 100644
--- a/java/kudu-backup/src/test/scala/org/apache/kudu/backup/TestKuduBackup.scala
+++ b/java/kudu-backup/src/test/scala/org/apache/kudu/backup/TestKuduBackup.scala
@@ -29,19 +29,20 @@ import org.apache.kudu.{ColumnSchema, Schema, Type}
 import org.apache.kudu.spark.kudu._
 import org.apache.kudu.util.DecimalUtil
 import org.junit.Assert._
+import org.junit.Test
 import org.junit.runner.RunWith
 import org.scalatest.junit.JUnitRunner
-import org.scalatest.{FunSuite, Matchers}
 import org.slf4j.{Logger, LoggerFactory}
 
 import scala.collection.JavaConverters._
 import scala.util.Random
 
 @RunWith(classOf[JUnitRunner])
-class TestKuduBackup extends FunSuite with TestContext with Matchers {
+class TestKuduBackup extends KuduTestSuite {
   val log: Logger = LoggerFactory.getLogger(getClass)
 
-  test("Simple Backup and Restore") {
+  @Test
+  def testSimpleBackupAndRestore() {
     insertRows(table, 100) // Insert data into the default test table.
 
     backupAndRestore(tableName)
@@ -56,7 +57,8 @@ class TestKuduBackup extends FunSuite with TestContext with Matchers {
     assertTrue(partitionSchemasMatch(tA.getPartitionSchema, tB.getPartitionSchema))
   }
 
-  test("Simple Backup and Restore Table Name With Special Characters") {
+  @Test
+  def testSimpleBackupAndRestoreWithSpecialCharacters() {
     // Use an Impala-style table name to verify url encoding/decoding of the table name works.
     val impalaTableName = "impala::default.test"
 
@@ -73,7 +75,8 @@ class TestKuduBackup extends FunSuite with TestContext with Matchers {
     assert(rdd.isEmpty())
   }
 
-  test("Random Backup and Restore") {
+  @Test
+  def testRandomBackupAndRestore() {
     Random.javaRandomToRandom(TestUtils.getRandom)
 
     val table = createRandomTable()
@@ -93,7 +96,8 @@ class TestKuduBackup extends FunSuite with TestContext with Matchers {
     assertTrue(partitionSchemasMatch(tA.getPartitionSchema, tB.getPartitionSchema))
   }
 
-  test("Backup and Restore Multiple Tables") {
+  @Test
+  def testBackupAndRestoreMultipleTables() {
     val numRows = 1
     val table1Name = "table1"
     val table2Name = "table2"

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-client/src/test/java/org/apache/kudu/client/BaseKuduTest.java
----------------------------------------------------------------------
diff --git a/java/kudu-client/src/test/java/org/apache/kudu/client/BaseKuduTest.java b/java/kudu-client/src/test/java/org/apache/kudu/client/BaseKuduTest.java
index 8d91e01..eeabece 100644
--- a/java/kudu-client/src/test/java/org/apache/kudu/client/BaseKuduTest.java
+++ b/java/kudu-client/src/test/java/org/apache/kudu/client/BaseKuduTest.java
@@ -66,15 +66,9 @@ public class BaseKuduTest {
   protected static final Schema allTypesSchema = getSchemaWithAllTypes();
 
   // Add a rule to rerun tests. We use this with Gradle because it doesn't support
-  // Surefire/Failsafe rerunFailingTestsCount like Maven does. We use the system
-  // property rerunFailingTestsCount to mimic the maven arguments closely.
-  private static final String RETRY_PROPERTY_NAME = "rerunFailingTestsCount";
-  private static final int DEFAULT_RETRY_COUNT = 0;
-  private static final String retryPropVal = System.getProperty(RETRY_PROPERTY_NAME);
-  private static final int retryCount =
-      retryPropVal != null ? Integer.parseInt(retryPropVal) : DEFAULT_RETRY_COUNT;
+  // Surefire/Failsafe rerunFailingTestsCount like Maven does.
   @Rule
-  public RetryRule retryRule = new RetryRule(retryCount);
+  public RetryRule retryRule = new RetryRule();
 
   @Before
   public void setUpBase() throws Exception {

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-client/src/test/java/org/apache/kudu/client/TestSecurity.java
----------------------------------------------------------------------
diff --git a/java/kudu-client/src/test/java/org/apache/kudu/client/TestSecurity.java b/java/kudu-client/src/test/java/org/apache/kudu/client/TestSecurity.java
index f6345c1..40e4d7d 100644
--- a/java/kudu-client/src/test/java/org/apache/kudu/client/TestSecurity.java
+++ b/java/kudu-client/src/test/java/org/apache/kudu/client/TestSecurity.java
@@ -30,6 +30,7 @@ import javax.security.auth.Subject;
 
 import org.apache.kudu.client.Client.AuthenticationCredentialsPB;
 import org.apache.kudu.client.MiniKuduCluster.MiniKuduClusterBuilder;
+import org.apache.kudu.junit.RetryRule;
 import org.apache.kudu.master.Master.ConnectToMasterResponsePB;
 import org.apache.kudu.util.AssertHelpers;
 import org.apache.kudu.util.AssertHelpers.BooleanExpression;
@@ -40,6 +41,7 @@ import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
 
 import com.google.common.base.Preconditions;
@@ -87,6 +89,11 @@ public class TestSecurity {
     client.listTabletServers();
   }
 
+  // Add a rule to rerun tests. We use this with Gradle because it doesn't support
+  // Surefire/Failsafe rerunFailingTestsCount like Maven does.
+  @Rule
+  public RetryRule retryRule = new RetryRule();
+
   @Before
   public void setUp() {
     FakeDNS.getInstance().install();

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-client/src/test/java/org/apache/kudu/junit/RetryRule.java
----------------------------------------------------------------------
diff --git a/java/kudu-client/src/test/java/org/apache/kudu/junit/RetryRule.java b/java/kudu-client/src/test/java/org/apache/kudu/junit/RetryRule.java
index 24defed..89bd84f 100644
--- a/java/kudu-client/src/test/java/org/apache/kudu/junit/RetryRule.java
+++ b/java/kudu-client/src/test/java/org/apache/kudu/junit/RetryRule.java
@@ -22,17 +22,19 @@ import org.junit.runners.model.Statement;
 
 /**
  * A JUnit rule to retry failed tests.
+ * We use this with Gradle because it doesn't support
+ * Surefire/Failsafe rerunFailingTestsCount like Maven does. We use the system
+ * property rerunFailingTestsCount to mimic the maven arguments closely.
  */
 public class RetryRule implements TestRule {
-  private int retryCount;
 
-  public RetryRule (int retryCount) {
-    this.retryCount = retryCount;
-  }
+  private static final int RETRY_COUNT = Integer.getInteger("rerunFailingTestsCount", 0);
+
+  public RetryRule () {}
 
   @Override
   public Statement apply(Statement base, Description description) {
-    return new RetryStatement(base, description, retryCount);
+    return new RetryStatement(base, description, RETRY_COUNT);
   }
 
   private static class RetryStatement extends Statement {

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-hive/build.gradle
----------------------------------------------------------------------
diff --git a/java/kudu-hive/build.gradle b/java/kudu-hive/build.gradle
index 239d5d5..56d0fbc 100644
--- a/java/kudu-hive/build.gradle
+++ b/java/kudu-hive/build.gradle
@@ -26,6 +26,7 @@ dependencies {
   provided libs.hadoopCommon
   provided libs.hadoopMRClientCommon
 
+  testCompile project(path: ":kudu-client", configuration: "shadowTest")
   testCompile libs.hiveMetastoreTest
   testCompile libs.junit
   testCompile libs.log4j

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-hive/pom.xml
----------------------------------------------------------------------
diff --git a/java/kudu-hive/pom.xml b/java/kudu-hive/pom.xml
index 0e6244f..a82f294 100644
--- a/java/kudu-hive/pom.xml
+++ b/java/kudu-hive/pom.xml
@@ -60,6 +60,13 @@
         </dependency>
 
         <dependency>
+            <groupId>org.apache.kudu</groupId>
+            <artifactId>kudu-client</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
             <groupId>org.apache.hive</groupId>
             <artifactId>hive-metastore</artifactId>
             <version>${hive.version}</version>

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-hive/src/test/java/org/apache/kudu/hive/metastore/TestKuduMetastorePlugin.java
----------------------------------------------------------------------
diff --git a/java/kudu-hive/src/test/java/org/apache/kudu/hive/metastore/TestKuduMetastorePlugin.java b/java/kudu-hive/src/test/java/org/apache/kudu/hive/metastore/TestKuduMetastorePlugin.java
index 47e05f5..0634aa2 100644
--- a/java/kudu-hive/src/test/java/org/apache/kudu/hive/metastore/TestKuduMetastorePlugin.java
+++ b/java/kudu-hive/src/test/java/org/apache/kudu/hive/metastore/TestKuduMetastorePlugin.java
@@ -39,9 +39,11 @@ import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.kudu.junit.RetryRule;
 import org.apache.thrift.TException;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -56,6 +58,9 @@ public class TestKuduMetastorePlugin {
     return new EnvironmentContext(ImmutableMap.of(KuduMetastorePlugin.KUDU_MASTER_EVENT, "true"));
   }
 
+  @Rule
+  public RetryRule retryRule = new RetryRule();
+
   @Before
   public void setUp() throws Exception {
     HiveConf metastoreConf = new HiveConf();

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-spark-tools/src/test/scala/org/apache/kudu/spark/tools/ITBigLinkedListTest.scala
----------------------------------------------------------------------
diff --git a/java/kudu-spark-tools/src/test/scala/org/apache/kudu/spark/tools/ITBigLinkedListTest.scala b/java/kudu-spark-tools/src/test/scala/org/apache/kudu/spark/tools/ITBigLinkedListTest.scala
index c194f0e..8caf2c2 100644
--- a/java/kudu-spark-tools/src/test/scala/org/apache/kudu/spark/tools/ITBigLinkedListTest.scala
+++ b/java/kudu-spark-tools/src/test/scala/org/apache/kudu/spark/tools/ITBigLinkedListTest.scala
@@ -19,18 +19,16 @@ package org.apache.kudu.spark.tools
 
 import org.apache.kudu.client.SessionConfiguration.FlushMode
 import org.apache.kudu.mapreduce.tools.BigLinkedListCommon._
-import org.apache.kudu.spark.kudu.TestContext
+import org.apache.kudu.spark.kudu.KuduTestSuite
 import org.junit.Assert._
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{FunSuite, Matchers}
+import org.junit.Test
 
 import scala.collection.JavaConverters._
 
-@RunWith(classOf[JUnitRunner])
-class ITBigLinkedListTest extends FunSuite with TestContext with Matchers {
+class ITBigLinkedListTest extends KuduTestSuite {
 
-  test("Spark ITBLL") {
+  @Test
+  def testSparkITBLL() {
     Generator.testMain(Array("--tasks=2",
                              "--lists=2",
                              "--nodes=10000",

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-spark-tools/src/test/scala/org/apache/kudu/spark/tools/TestImportExportFiles.scala
----------------------------------------------------------------------
diff --git a/java/kudu-spark-tools/src/test/scala/org/apache/kudu/spark/tools/TestImportExportFiles.scala b/java/kudu-spark-tools/src/test/scala/org/apache/kudu/spark/tools/TestImportExportFiles.scala
index ceca8d1..1bde85d 100644
--- a/java/kudu-spark-tools/src/test/scala/org/apache/kudu/spark/tools/TestImportExportFiles.scala
+++ b/java/kudu-spark-tools/src/test/scala/org/apache/kudu/spark/tools/TestImportExportFiles.scala
@@ -24,20 +24,18 @@ import org.apache.kudu.{Schema, Type}
 import org.apache.kudu.client.CreateTableOptions
 import org.apache.kudu.spark.kudu._
 import org.junit.Assert._
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{FunSuite, Matchers}
+import org.junit.Test
 import org.spark_project.guava.collect.ImmutableList
 
 import scala.collection.JavaConverters._
 
-@RunWith(classOf[JUnitRunner])
-class TestImportExportFiles  extends FunSuite with TestContext with  Matchers {
+class TestImportExportFiles extends KuduTestSuite {
 
   private val TABLE_NAME: String = "TestImportExportFiles"
   private val TABLE_DATA_PATH: String = "/TestImportExportFiles.csv"
 
-  test("Spark Import Export") {
+  @Test
+  def testSparkImportExport() {
     val schema: Schema = {
       val columns = ImmutableList.of(
         new ColumnSchemaBuilder("key", Type.STRING).key(true).build(),

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/DefaultSourceTest.scala
----------------------------------------------------------------------
diff --git a/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/DefaultSourceTest.scala b/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/DefaultSourceTest.scala
index f01a211..fc8576e 100644
--- a/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/DefaultSourceTest.scala
+++ b/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/DefaultSourceTest.scala
@@ -23,25 +23,22 @@ import org.apache.spark.sql.{DataFrame, SQLContext}
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.types.{DataTypes, StructField, StructType}
 import org.junit.Assert._
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{BeforeAndAfterEach, FunSuite, Matchers}
+import org.scalatest.Matchers
 import org.apache.kudu.ColumnSchema.ColumnSchemaBuilder
 import org.apache.kudu.client.CreateTableOptions
 import org.apache.kudu.{Schema, Type}
 import org.apache.spark.sql.execution.datasources.LogicalRelation
+import org.junit.{Before, Test}
 
-@RunWith(classOf[JUnitRunner])
-class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEach with Matchers {
+class DefaultSourceTest extends KuduTestSuite with Matchers {
 
   val rowCount = 10
   var sqlContext : SQLContext = _
   var rows : IndexedSeq[(Int, Int, String, Long)] = _
   var kuduOptions : Map[String, String] = _
 
-  override def beforeEach(): Unit = {
-    super.beforeEach()
-
+  @Before
+  def setUp(): Unit = {
     rows = insertRows(table, rowCount)
 
     sqlContext = ss.sqlContext
@@ -53,7 +50,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     sqlContext.read.options(kuduOptions).kudu.createOrReplaceTempView(tableName)
   }
 
-  test("table creation") {
+  @Test
+  def testTableCreation() {
     val tableName = "testcreatetable"
     if (kuduContext.tableExists(tableName)) {
       kuduContext.deleteTable(tableName)
@@ -78,7 +76,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assertFalse(kuduContext.tableExists(tableName))
   }
 
-  test("table creation with partitioning") {
+  @Test
+  def testTableCreationWithPartitioning() {
     val tableName = "testcreatepartitionedtable"
     if (kuduContext.tableExists(tableName)) {
       kuduContext.deleteTable(tableName)
@@ -113,7 +112,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assertFalse(kuduContext.tableExists(tableName))
   }
 
-  test("insertion") {
+  @Test
+  def testInsertion() {
     val df = sqlContext.read.options(kuduOptions).kudu
     val changedDF = df.limit(1).withColumn("key", df("key").plus(100)).withColumn("c2_s", lit("abc"))
     kuduContext.insertRows(changedDF, tableName)
@@ -125,7 +125,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     deleteRow(100)
   }
 
-  test("insertion multiple") {
+  @Test
+  def testInsertionMultiple() {
     val df = sqlContext.read.options(kuduOptions).kudu
     val changedDF = df.limit(2).withColumn("key", df("key").plus(100)).withColumn("c2_s", lit("abc"))
     kuduContext.insertRows(changedDF, tableName)
@@ -141,7 +142,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     deleteRow(101)
   }
 
-  test("insert ignore rows") {
+  @Test
+  def testInsertionIgnoreRows() {
     val df = sqlContext.read.options(kuduOptions).kudu
     val baseDF = df.limit(1) // filter down to just the first row
 
@@ -166,7 +168,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     deleteRow(100)
   }
 
-  test("insert ignore rows using DefaultSource") {
+  @Test
+  def testInsertIgnoreRowsUsingDefaultSource() {
     val df = sqlContext.read.options(kuduOptions).kudu
     val baseDF = df.limit(1) // filter down to just the first row
 
@@ -194,7 +197,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     deleteRow(100)
   }
 
-  test("insert ignore rows using DefaultSource with 'kudu.operation' = 'insert-ignore'") {
+  @Test
+  def testInsertIgnoreRowsWriteOption() {
     val df = sqlContext.read.options(kuduOptions).kudu
     val baseDF = df.limit(1) // filter down to just the first row
 
@@ -221,7 +225,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     deleteRow(100)
   }
 
-  test("insert ignore rows with insertIgnoreRows(deprecated)") {
+  @Test
+  def testInsertIgnoreRowsMethod() {
     val df = sqlContext.read.options(kuduOptions).kudu
     val baseDF = df.limit(1) // filter down to just the first row
 
@@ -244,7 +249,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     deleteRow(100)
   }
 
-  test("upsert rows") {
+  @Test
+  def testUpsertRows() {
     val df = sqlContext.read.options(kuduOptions).kudu
     val baseDF = df.limit(1) // filter down to just the first row
 
@@ -268,7 +274,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     deleteRow(100)
   }
 
-  test("upsert rows ignore nulls") {
+  @Test
+  def testUpsertRowsIgnoreNulls() {
     val nonNullDF = sqlContext.createDataFrame(Seq((0, "foo"))).toDF("key", "val")
     kuduContext.insertRows(nonNullDF, simpleTableName)
 
@@ -294,7 +301,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     kuduContext.deleteRows(deleteDF, simpleTableName)
   }
 
-  test("upsert rows ignore nulls using DefaultSource") {
+  @Test
+  def testUpsertRowsIgnoreNullsUsingDefaultSource() {
     val nonNullDF = sqlContext.createDataFrame(Seq((0, "foo"))).toDF("key", "val")
     kuduContext.insertRows(nonNullDF, simpleTableName)
 
@@ -320,7 +328,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     kuduContext.deleteRows(deleteDF, simpleTableName)
   }
 
-  test("delete rows") {
+  @Test
+  def testDeleteRows() {
     val df = sqlContext.read.options(kuduOptions).kudu
     val deleteDF = df.filter("key = 0").select("key")
     kuduContext.deleteRows(deleteDF, tableName)
@@ -335,13 +344,15 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     kuduContext.insertRows(insertDF, tableName)
   }
 
-  test("out of order selection") {
+  @Test
+  def testOutOfOrderSelection() {
     val df = sqlContext.read.options(kuduOptions).kudu.select( "c2_s", "c1_i", "key")
     val collected = df.collect()
     assert(collected(0).getString(0).equals("0"))
   }
 
-  test("table non fault tolerant scan") {
+  @Test
+  def testTableNonFaultTolerantScan() {
     val results = sqlContext.sql(s"SELECT * FROM $tableName").collectAsList()
     assert(results.size() == rowCount)
 
@@ -349,7 +360,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assert(results.get(1).isNullAt(2))
   }
 
-  test("table fault tolerant scan") {
+  @Test
+  def testTableFaultTolerantScan() {
     kuduOptions = Map(
       "kudu.table" -> tableName,
       "kudu.master" -> miniCluster.getMasterAddresses,
@@ -364,48 +376,63 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assert(results.get(1).isNullAt(2))
   }
 
-  test("table scan with projection") {
+  @Test
+  def testTableScanWithProjection() {
     assertEquals(10, sqlContext.sql(s"""SELECT key FROM $tableName""").count())
   }
 
-  test("table scan with projection and predicate double") {
+  @Test
+  def testTableScanWithProjectionAndPredicateDouble() {
     assertEquals(rows.count { case (key, i, s, ts) => i > 5 },
                  sqlContext.sql(s"""SELECT key, c3_double FROM $tableName where c3_double > "5.0"""").count())
   }
 
-  test("table scan with projection and predicate long") {
+  @Test
+  def testTableScanWithProjectionAndPredicateLong() {
     assertEquals(rows.count { case (key, i, s, ts) => i > 5 },
                  sqlContext.sql(s"""SELECT key, c4_long FROM $tableName where c4_long > "5"""").count())
-
   }
-  test("table scan with projection and predicate bool") {
+
+  @Test
+  def testTableScanWithProjectionAndPredicateBool() {
     assertEquals(rows.count { case (key, i, s, ts) => i % 2==0 },
                  sqlContext.sql(s"""SELECT key, c5_bool FROM $tableName where c5_bool = true""").count())
-
   }
-  test("table scan with projection and predicate short") {
+
+  @Test
+  def testTableScanWithProjectionAndPredicateShort() {
     assertEquals(rows.count { case (key, i, s, ts) => i > 5},
                  sqlContext.sql(s"""SELECT key, c6_short FROM $tableName where c6_short > 5""").count())
 
   }
-  test("table scan with projection and predicate float") {
+
+  @Test
+  def testTableScanWithProjectionAndPredicateFloat() {
     assertEquals(rows.count { case (key, i, s, ts) => i > 5},
                  sqlContext.sql(s"""SELECT key, c7_float FROM $tableName where c7_float > 5""").count())
 
   }
-  test("table scan with projection and predicate decimal32") {
+
+  @Test
+  def testTableScanWithProjectionAndPredicateDecimal32() {
     assertEquals(rows.count { case (key, i, s, ts) => i > 5},
       sqlContext.sql(s"""SELECT key, c11_decimal32 FROM $tableName where c11_decimal32 > 5""").count())
   }
-  test("table scan with projection and predicate decimal64") {
+
+  @Test
+  def testTableScanWithProjectionAndPredicateDecimal64() {
     assertEquals(rows.count { case (key, i, s, ts) => i > 5},
       sqlContext.sql(s"""SELECT key, c12_decimal64 FROM $tableName where c12_decimal64 > 5""").count())
   }
-  test("table scan with projection and predicate decimal128") {
+
+  @Test
+  def testTableScanWithProjectionAndPredicateDecimal128() {
     assertEquals(rows.count { case (key, i, s, ts) => i > 5},
       sqlContext.sql(s"""SELECT key, c13_decimal128 FROM $tableName where c13_decimal128 > 5""").count())
   }
-  test("table scan with projection and predicate ") {
+
+  @Test
+  def testTableScanWithProjectionAndPredicate() {
     assertEquals(rows.count { case (key, i, s, ts) => s != null && s > "5" },
       sqlContext.sql(s"""SELECT key FROM $tableName where c2_s > "5"""").count())
 
@@ -413,8 +440,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
       sqlContext.sql(s"""SELECT key, c2_s FROM $tableName where c2_s IS NOT NULL""").count())
   }
 
-
-  test("Test basic SparkSQL") {
+  @Test
+  def testBasicSparkSQL() {
     val results = sqlContext.sql("SELECT * FROM " + tableName).collectAsList()
     assert(results.size() == rowCount)
 
@@ -422,14 +449,16 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assert(!results.get(0).isNullAt(2))
   }
 
-  test("Test basic SparkSQL projection") {
+  @Test
+  def testBasicSparkSQLWithProjection() {
     val results = sqlContext.sql("SELECT key FROM " + tableName).collectAsList()
     assert(results.size() == rowCount)
     assert(results.get(0).size.equals(1))
     assert(results.get(0).getInt(0).equals(0))
   }
 
-  test("Test basic SparkSQL with predicate") {
+  @Test
+  def testBasicSparkSQLWithPredicate() {
     val results = sqlContext.sql("SELECT key FROM " + tableName + " where key=1").collectAsList()
     assert(results.size() == 1)
     assert(results.get(0).size.equals(1))
@@ -437,14 +466,16 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
 
   }
 
-  test("Test basic SparkSQL with two predicates") {
+  @Test
+  def testBasicSparkSQLWithTwoPredicates() {
     val results = sqlContext.sql("SELECT key FROM " + tableName + " where key=2 and c2_s='2'").collectAsList()
     assert(results.size() == 1)
     assert(results.get(0).size.equals(1))
     assert(results.get(0).getInt(0).equals(2))
   }
 
-  test("Test basic SparkSQL with in list predicate") {
+  @Test
+  def testBasicSparkSQLWithInListPredicate() {
     val keys = Array(1, 5, 7)
     val results = sqlContext.sql(s"SELECT key FROM $tableName where key in (${keys.mkString(", ")})").collectAsList()
     assert(results.size() == keys.length)
@@ -454,7 +485,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     }
   }
 
-  test("Test basic SparkSQL with in list predicate on string") {
+  @Test
+  def testBasicSparkSQLWithInListPredicateOnString() {
     val keys = Array(1, 4, 6)
     val results = sqlContext.sql(s"SELECT key FROM $tableName where c2_s in (${keys.mkString("'", "', '", "'")})").collectAsList()
     assert(results.size() == keys.count(_ % 2 == 0))
@@ -464,7 +496,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     }
   }
 
-  test("Test basic SparkSQL with in list and comparison predicate") {
+  @Test
+  def testBasicSparkSQLWithInListAndComparisonPredicate() {
     val keys = Array(1, 5, 7)
     val results = sqlContext.sql(s"SELECT key FROM $tableName where key>2 and key in (${keys.mkString(", ")})").collectAsList()
     assert(results.size() == keys.count(_>2))
@@ -474,19 +507,22 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     }
   }
 
-  test("Test basic SparkSQL with two predicates negative") {
+  @Test
+  def testBasicSparkSQLWithTwoPredicatesNegative() {
     val results = sqlContext.sql("SELECT key FROM " + tableName + " where key=1 and c2_s='2'").collectAsList()
     assert(results.size() == 0)
   }
 
-  test("Test basic SparkSQL with two predicates including string") {
+  @Test
+  def testBasicSparkSQLWithTwoPredicatesIncludingString() {
     val results = sqlContext.sql("SELECT key FROM " + tableName + " where c2_s='2'").collectAsList()
     assert(results.size() == 1)
     assert(results.get(0).size.equals(1))
     assert(results.get(0).getInt(0).equals(2))
   }
 
-  test("Test basic SparkSQL with two predicates and projection") {
+  @Test
+  def testBasicSparkSQLWithTwoPredicatesAndProjection() {
     val results = sqlContext.sql("SELECT key, c2_s FROM " + tableName + " where c2_s='2'").collectAsList()
     assert(results.size() == 1)
     assert(results.get(0).size.equals(2))
@@ -494,7 +530,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assert(results.get(0).getString(1).equals("2"))
   }
 
-  test("Test basic SparkSQL with two predicates greater than") {
+  @Test
+  def testBasicSparkSQLWithTwoPredicatesGreaterThan() {
     val results = sqlContext.sql("SELECT key, c2_s FROM " + tableName + " where c2_s>='2'").collectAsList()
     assert(results.size() == 4)
     assert(results.get(0).size.equals(2))
@@ -502,7 +539,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assert(results.get(0).getString(1).equals("2"))
   }
 
-  test("Test SparkSQL StringStartsWith filters") {
+  @Test
+  def testSparkSQLStringStartsWithFilters() {
     // This test requires a special table.
     val testTableName = "startswith"
     val schema = new Schema(List(
@@ -542,7 +580,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     }
   }
 
-  test("Test SparkSQL IS NULL predicate") {
+  @Test
+  def testSparkSQLIsNullPredicate() {
     var results = sqlContext.sql("SELECT key FROM " + tableName + " where c2_s IS NULL").collectAsList()
     assert(results.size() == 5)
 
@@ -550,7 +589,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assert(results.isEmpty())
   }
 
-  test("Test SparkSQL IS NOT NULL predicate") {
+  @Test
+  def testSparkSQLIsNotNullPredicate() {
     var results = sqlContext.sql("SELECT key FROM " + tableName + " where c2_s IS NOT NULL").collectAsList()
     assert(results.size() == 5)
 
@@ -558,7 +598,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assert(results.size() == 10)
   }
 
-  test("Test SQL: insert into") {
+  @Test
+  def testSQLInsertInto() {
     val insertTable = "insertintotest"
 
     // read 0 rows just to get the schema
@@ -577,7 +618,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assertEquals(10, results.size())
   }
 
-  test("Test SQL: insert overwrite unsupported") {
+  @Test
+  def testSQLInsertOverwriteUnsupported() {
     val insertTable = "insertoverwritetest"
 
     // read 0 rows just to get the schema
@@ -601,7 +643,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     }
   }
 
-  test("Test write using DefaultSource") {
+  @Test
+  def testWriteUsingDefaultSource() {
     val df = sqlContext.read.options(kuduOptions).kudu
 
     val newTable = "testwritedatasourcetable"
@@ -620,8 +663,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assert(checkDf.count == 10)
   }
 
-  test("create relation with schema") {
-
+  @Test
+  def testCreateRelationWithSchema() {
     // user-supplied schema that is compatible with actual schema, but with the key at the end
     val userSchema: StructType = StructType(List(
       StructField("c4_long", DataTypes.LongType),
@@ -638,8 +681,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     assertTrue(dfWithUserSchema.columns.deep == Array("c4_long", "key").deep)
   }
 
-  test("create relation with invalid schema") {
-
+  @Test
+  def testCreateRelationWithInvalidSchema() {
     // user-supplied schema that is NOT compatible with actual schema
     val userSchema: StructType = StructType(List(
       StructField("foo", DataTypes.LongType),
@@ -651,7 +694,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     }.getMessage should include ("Unknown column: foo")
   }
 
-  test("scan locality") {
+  @Test
+  def testScanLocality() {
     kuduOptions = Map(
       "kudu.table" -> tableName,
       "kudu.master" -> miniCluster.getMasterAddresses,
@@ -668,7 +712,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
 
   // Verify that the propagated timestamp is properly updated inside
   // the same client.
-  test("timestamp propagation") {
+  @Test
+  def testTimestampPropagation() {
     val df = sqlContext.read.options(kuduOptions).kudu
     val insertDF = df.limit(1)
                       .withColumn("key", df("key")
@@ -725,7 +770,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     * DefaultSource and makes it into the KuduRelation as a configuration
     * parameter.
     */
-  test("scan request timeout propagation") {
+  @Test
+  def testScanRequestTimeoutPropagation() {
     kuduOptions = Map(
       "kudu.table" -> tableName,
       "kudu.master" -> miniCluster.getMasterAddresses,
@@ -740,7 +786,8 @@ class DefaultSourceTest extends FunSuite with TestContext with BeforeAndAfterEac
     * DefaultSource and makes it into the KuduRelation as a configuration
     * parameter.
     */
-  test("socket read timeout propagation") {
+  @Test
+  def testSocketReadTimeoutPropagation() {
     kuduOptions = Map(
       "kudu.table" -> tableName,
       "kudu.master" -> miniCluster.getMasterAddresses,

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduContextTest.scala
----------------------------------------------------------------------
diff --git a/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduContextTest.scala b/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduContextTest.scala
index 47d4519..2329eff 100644
--- a/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduContextTest.scala
+++ b/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduContextTest.scala
@@ -22,12 +22,10 @@ import java.sql.Timestamp
 
 import org.apache.kudu.util.TimestampUtil
 import org.apache.spark.sql.functions.decode
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{FunSuite, Matchers}
+import org.junit.Test
+import org.scalatest.Matchers
 
-@RunWith(classOf[JUnitRunner])
-class KuduContextTest extends FunSuite with TestContext with Matchers {
+class KuduContextTest extends KuduTestSuite with Matchers {
   val rowCount = 10
 
   private def serialize(value: Any): Array[Byte] = {
@@ -50,7 +48,8 @@ class KuduContextTest extends FunSuite with TestContext with Matchers {
     }
   }
 
-  test("Test KuduContext serialization") {
+  @Test
+  def testKuduContextSerialization() {
     val serialized = serialize(kuduContext)
     KuduClientCache.clearCacheForTests()
     val deserialized = deserialize(serialized).asInstanceOf[KuduContext]
@@ -59,7 +58,8 @@ class KuduContextTest extends FunSuite with TestContext with Matchers {
     deserialized.tableExists("foo")
   }
 
-  test("Test basic kuduRDD") {
+  @Test
+  def testBasicKuduRDD() {
     val rows = insertRows(table, rowCount)
     val scanList = kuduContext.kuduRDD(ss.sparkContext, "test", Seq("key", "c1_i", "c2_s", "c3_double",
         "c4_long", "c5_bool", "c6_short", "c7_float", "c8_binary", "c9_unixtime_micros", "c10_byte",
@@ -86,7 +86,8 @@ class KuduContextTest extends FunSuite with TestContext with Matchers {
     })
   }
 
-  test("Test kudu-spark DataFrame") {
+  @Test
+  def testKuduSparkDataFrame() {
     insertRows(table, rowCount)
     val sqlContext = ss.sqlContext
     val dataDF = sqlContext.read.options(Map("kudu.master" -> miniCluster.getMasterAddresses,

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduRDDTest.scala
----------------------------------------------------------------------
diff --git a/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduRDDTest.scala b/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduRDDTest.scala
index 56ff412..f0fb4a0 100644
--- a/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduRDDTest.scala
+++ b/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduRDDTest.scala
@@ -17,14 +17,12 @@
 
 package org.apache.kudu.spark.kudu
 
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.junit.Test
 
-@RunWith(classOf[JUnitRunner])
-class KuduRDDTest extends FunSuite with TestContext with BeforeAndAfter {
+class KuduRDDTest extends KuduTestSuite {
 
-  test("collect rows") {
+  @Test
+  def testCollectRows() {
     insertRows(table, 100)
     val rdd = kuduContext.kuduRDD(ss.sparkContext, tableName, List("key"))
     assert(rdd.collect.length == 100)

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduTestSuite.scala
----------------------------------------------------------------------
diff --git a/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduTestSuite.scala b/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduTestSuite.scala
new file mode 100644
index 0000000..65f8215
--- /dev/null
+++ b/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduTestSuite.scala
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kudu.spark.kudu
+
+import java.math.BigDecimal
+import java.util.Date
+
+import scala.collection.JavaConverters._
+import scala.collection.immutable.IndexedSeq
+import org.apache.spark.SparkConf
+import org.apache.kudu.ColumnSchema.ColumnSchemaBuilder
+import org.apache.kudu.ColumnTypeAttributes.ColumnTypeAttributesBuilder
+import org.apache.kudu.client.KuduClient.KuduClientBuilder
+import org.apache.kudu.client.MiniKuduCluster.MiniKuduClusterBuilder
+import org.apache.kudu.client.{CreateTableOptions, KuduClient, KuduTable, MiniKuduCluster}
+import org.apache.kudu.junit.RetryRule
+import org.apache.kudu.{Schema, Type}
+import org.apache.kudu.util.DecimalUtil
+import org.apache.spark.sql.SparkSession
+import org.junit.{After, Before, Rule}
+import org.scalatest.junit.JUnitSuite
+
+// TODO (grant): Use BaseKuduTest for most of this.
+trait KuduTestSuite extends JUnitSuite {
+  var ss: SparkSession = _
+  var miniCluster: MiniKuduCluster = _
+  var kuduClient: KuduClient = _
+  var table: KuduTable = _
+  var kuduContext: KuduContext = _
+
+  val tableName: String = "test"
+  val simpleTableName: String = "simple-test"
+
+  lazy val schema: Schema = {
+    val columns = List(
+      new ColumnSchemaBuilder("key", Type.INT32).key(true).build(),
+      new ColumnSchemaBuilder("c1_i", Type.INT32).build(),
+      new ColumnSchemaBuilder("c2_s", Type.STRING).nullable(true).build(),
+      new ColumnSchemaBuilder("c3_double", Type.DOUBLE).build(),
+      new ColumnSchemaBuilder("c4_long", Type.INT64).build(),
+      new ColumnSchemaBuilder("c5_bool", Type.BOOL).build(),
+      new ColumnSchemaBuilder("c6_short", Type.INT16).build(),
+      new ColumnSchemaBuilder("c7_float", Type.FLOAT).build(),
+      new ColumnSchemaBuilder("c8_binary", Type.BINARY).build(),
+      new ColumnSchemaBuilder("c9_unixtime_micros", Type.UNIXTIME_MICROS).build(),
+      new ColumnSchemaBuilder("c10_byte", Type.INT8).build(),
+      new ColumnSchemaBuilder("c11_decimal32", Type.DECIMAL)
+        .typeAttributes(
+          new ColumnTypeAttributesBuilder().precision(DecimalUtil.MAX_DECIMAL32_PRECISION).build()
+        ).build(),
+      new ColumnSchemaBuilder("c12_decimal64", Type.DECIMAL)
+        .typeAttributes(
+          new ColumnTypeAttributesBuilder().precision(DecimalUtil.MAX_DECIMAL64_PRECISION).build()
+        ).build(),
+      new ColumnSchemaBuilder("c13_decimal128", Type.DECIMAL)
+        .typeAttributes(
+          new ColumnTypeAttributesBuilder().precision(DecimalUtil.MAX_DECIMAL128_PRECISION).build()
+        ).build())
+      new Schema(columns.asJava)
+  }
+
+  lazy val simpleSchema: Schema = {
+    val columns = List(
+      new ColumnSchemaBuilder("key", Type.INT32).key(true).build(),
+      new ColumnSchemaBuilder("val", Type.STRING).nullable(true).build()).asJava
+    new Schema(columns)
+  }
+
+  val tableOptions: CreateTableOptions = {
+    val bottom = schema.newPartialRow() // Unbounded.
+    val middle = schema.newPartialRow()
+    middle.addInt("key", 50)
+    val top = schema.newPartialRow() // Unbounded.
+
+    new CreateTableOptions()
+      .setRangePartitionColumns(List("key").asJava)
+      .addRangePartition(bottom, middle)
+      .addRangePartition(middle, top)
+      .setNumReplicas(1)
+  }
+
+  val appID: String = new Date().toString + math.floor(math.random * 10E4).toLong.toString
+
+  val conf: SparkConf = new SparkConf().
+    setMaster("local[*]").
+    setAppName("test").
+    set("spark.ui.enabled", "false").
+    set("spark.app.id", appID)
+
+  // Add a rule to rerun tests. We use this with Gradle because it doesn't support
+  // Surefire/Failsafe rerunFailingTestsCount like Maven does.
+  @Rule
+  def retryRule = new RetryRule()
+
+  @Before
+  def setUpBase(): Unit = {
+    miniCluster = new MiniKuduClusterBuilder()
+      .numMasters(1)
+      .numTservers(1)
+      .build()
+
+    ss = SparkSession.builder().config(conf).getOrCreate()
+
+    kuduClient = new KuduClientBuilder(miniCluster.getMasterAddresses).build()
+
+    kuduContext = new KuduContext(miniCluster.getMasterAddresses, ss.sparkContext)
+
+    table = kuduClient.createTable(tableName, schema, tableOptions)
+
+
+    val simpleTableOptions = new CreateTableOptions()
+      .setRangePartitionColumns(List("key").asJava)
+      .setNumReplicas(1)
+
+    kuduClient.createTable(simpleTableName, simpleSchema, simpleTableOptions)
+  }
+
+  @After
+  def tearDownBase() {
+    if (kuduClient != null) kuduClient.shutdown()
+    if (miniCluster != null) miniCluster.shutdown()
+    if (ss != null) ss.stop()
+  }
+
+  def deleteRow(key: Int): Unit = {
+    val kuduSession = kuduClient.newSession()
+    val delete = table.newDelete()
+    delete.getRow.addInt(0, key)
+    kuduSession.apply(delete)
+  }
+
+  def insertRows(targetTable: KuduTable, rowCount: Integer): IndexedSeq[(Int, Int, String, Long)] = {
+    val kuduSession = kuduClient.newSession()
+
+    val rows = Range(0, rowCount).map { i =>
+      val insert = targetTable.newInsert
+      val row = insert.getRow
+      row.addInt(0, i)
+      row.addInt(1, i)
+      row.addDouble(3, i.toDouble)
+      row.addLong(4, i.toLong)
+      row.addBoolean(5, i%2==1)
+      row.addShort(6, i.toShort)
+      row.addFloat(7, i.toFloat)
+      row.addBinary(8, s"bytes $i".getBytes())
+      val ts = System.currentTimeMillis() * 1000
+      row.addLong(9, ts)
+      row.addByte(10, i.toByte)
+      row.addDecimal(11, BigDecimal.valueOf(i))
+      row.addDecimal(12, BigDecimal.valueOf(i))
+      row.addDecimal(13, BigDecimal.valueOf(i))
+
+      // Sprinkling some nulls so that queries see them.
+      val s = if (i % 2 == 0) {
+        row.addString(2, i.toString)
+        i.toString
+      } else {
+        row.setNull(2)
+        null
+      }
+
+      kuduSession.apply(insert)
+      (i, i, s, ts)
+    }
+    rows
+  }
+}

http://git-wip-us.apache.org/repos/asf/kudu/blob/524485eb/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/TestContext.scala
----------------------------------------------------------------------
diff --git a/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/TestContext.scala b/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/TestContext.scala
deleted file mode 100644
index a3247da..0000000
--- a/java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/TestContext.scala
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.kudu.spark.kudu
-
-import java.math.BigDecimal
-import java.util.Date
-
-import scala.collection.JavaConverters._
-import scala.collection.immutable.IndexedSeq
-import org.apache.spark.SparkConf
-import org.scalatest.{BeforeAndAfterEach, Suite}
-import org.apache.kudu.ColumnSchema.ColumnSchemaBuilder
-import org.apache.kudu.ColumnTypeAttributes.ColumnTypeAttributesBuilder
-import org.apache.kudu.client.KuduClient.KuduClientBuilder
-import org.apache.kudu.client.MiniKuduCluster.MiniKuduClusterBuilder
-import org.apache.kudu.client.{CreateTableOptions, KuduClient, KuduTable, MiniKuduCluster}
-import org.apache.kudu.{Schema, Type}
-import org.apache.kudu.util.DecimalUtil
-import org.apache.spark.sql.SparkSession
-
-trait TestContext extends BeforeAndAfterEach { self: Suite =>
-
-  var ss: SparkSession = _
-  var miniCluster: MiniKuduCluster = _
-  var kuduClient: KuduClient = _
-  var table: KuduTable = _
-  var kuduContext: KuduContext = _
-
-  val tableName: String = "test"
-  val simpleTableName: String = "simple-test"
-
-  lazy val schema: Schema = {
-    val columns = List(
-      new ColumnSchemaBuilder("key", Type.INT32).key(true).build(),
-      new ColumnSchemaBuilder("c1_i", Type.INT32).build(),
-      new ColumnSchemaBuilder("c2_s", Type.STRING).nullable(true).build(),
-      new ColumnSchemaBuilder("c3_double", Type.DOUBLE).build(),
-      new ColumnSchemaBuilder("c4_long", Type.INT64).build(),
-      new ColumnSchemaBuilder("c5_bool", Type.BOOL).build(),
-      new ColumnSchemaBuilder("c6_short", Type.INT16).build(),
-      new ColumnSchemaBuilder("c7_float", Type.FLOAT).build(),
-      new ColumnSchemaBuilder("c8_binary", Type.BINARY).build(),
-      new ColumnSchemaBuilder("c9_unixtime_micros", Type.UNIXTIME_MICROS).build(),
-      new ColumnSchemaBuilder("c10_byte", Type.INT8).build(),
-      new ColumnSchemaBuilder("c11_decimal32", Type.DECIMAL)
-        .typeAttributes(
-          new ColumnTypeAttributesBuilder().precision(DecimalUtil.MAX_DECIMAL32_PRECISION).build()
-        ).build(),
-      new ColumnSchemaBuilder("c12_decimal64", Type.DECIMAL)
-        .typeAttributes(
-          new ColumnTypeAttributesBuilder().precision(DecimalUtil.MAX_DECIMAL64_PRECISION).build()
-        ).build(),
-      new ColumnSchemaBuilder("c13_decimal128", Type.DECIMAL)
-        .typeAttributes(
-          new ColumnTypeAttributesBuilder().precision(DecimalUtil.MAX_DECIMAL128_PRECISION).build()
-        ).build())
-      new Schema(columns.asJava)
-  }
-
-  lazy val simpleSchema: Schema = {
-    val columns = List(
-      new ColumnSchemaBuilder("key", Type.INT32).key(true).build(),
-      new ColumnSchemaBuilder("val", Type.STRING).nullable(true).build()).asJava
-    new Schema(columns)
-  }
-
-  val appID: String = new Date().toString + math.floor(math.random * 10E4).toLong.toString
-
-  val conf: SparkConf = new SparkConf().
-    setMaster("local[*]").
-    setAppName("test").
-    set("spark.ui.enabled", "false").
-    set("spark.app.id", appID)
-
-  override def beforeEach() {
-    miniCluster = new MiniKuduClusterBuilder()
-      .numMasters(1)
-      .numTservers(1)
-      .build()
-
-    ss = SparkSession.builder().config(conf).getOrCreate()
-
-    kuduClient = new KuduClientBuilder(miniCluster.getMasterAddresses).build()
-
-    kuduContext = new KuduContext(miniCluster.getMasterAddresses, ss.sparkContext)
-
-    table = kuduClient.createTable(tableName, schema, tableOptions)
-
-
-    val simpleTableOptions = new CreateTableOptions()
-      .setRangePartitionColumns(List("key").asJava)
-      .setNumReplicas(1)
-
-    kuduClient.createTable(simpleTableName, simpleSchema, simpleTableOptions)
-  }
-
-  val tableOptions: CreateTableOptions = {
-    val bottom = schema.newPartialRow() // Unbounded.
-    val middle = schema.newPartialRow()
-    middle.addInt("key", 50)
-    val top = schema.newPartialRow() // Unbounded.
-
-    new CreateTableOptions()
-      .setRangePartitionColumns(List("key").asJava)
-      .addRangePartition(bottom, middle)
-      .addRangePartition(middle, top)
-      .setNumReplicas(1)
-  }
-
-  override def afterEach() {
-    if (kuduClient != null) kuduClient.shutdown()
-    if (miniCluster != null) miniCluster.shutdown()
-    if (ss != null) ss.stop()
-  }
-
-  def deleteRow(key: Int): Unit = {
-    val kuduSession = kuduClient.newSession()
-    val delete = table.newDelete()
-    delete.getRow.addInt(0, key)
-    kuduSession.apply(delete)
-  }
-
-  def insertRows(targetTable: KuduTable, rowCount: Integer): IndexedSeq[(Int, Int, String, Long)] = {
-    val kuduSession = kuduClient.newSession()
-
-    val rows = Range(0, rowCount).map { i =>
-      val insert = targetTable.newInsert
-      val row = insert.getRow
-      row.addInt(0, i)
-      row.addInt(1, i)
-      row.addDouble(3, i.toDouble)
-      row.addLong(4, i.toLong)
-      row.addBoolean(5, i%2==1)
-      row.addShort(6, i.toShort)
-      row.addFloat(7, i.toFloat)
-      row.addBinary(8, s"bytes $i".getBytes())
-      val ts = System.currentTimeMillis() * 1000
-      row.addLong(9, ts)
-      row.addByte(10, i.toByte)
-      row.addDecimal(11, BigDecimal.valueOf(i))
-      row.addDecimal(12, BigDecimal.valueOf(i))
-      row.addDecimal(13, BigDecimal.valueOf(i))
-
-      // Sprinkling some nulls so that queries see them.
-      val s = if (i % 2 == 0) {
-        row.addString(2, i.toString)
-        i.toString
-      } else {
-        row.setNull(2)
-        null
-      }
-
-      kuduSession.apply(insert)
-      (i, i, s, ts)
-    }
-    rows
-  }
-}

