drill-commits mailing list archives

From prog...@apache.org
Subject [10/22] drill git commit: DRILL-5783, DRILL-5841, DRILL-5894: Rationalize test temp directories
Date Wed, 15 Nov 2017 01:46:56 GMT
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/control/TestCustomTunnel.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/control/TestCustomTunnel.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/control/TestCustomTunnel.java
index 9770a7e..bc9ba9d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/control/TestCustomTunnel.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/control/TestCustomTunnel.java
@@ -27,7 +27,7 @@ import io.netty.util.internal.ThreadLocalRandom;
 import java.util.Arrays;
 import java.util.Random;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
 import org.apache.drill.exec.proto.UserBitShared.QueryId;
 import org.apache.drill.exec.rpc.UserRpcException;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/data/TestBitBitKerberos.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/data/TestBitBitKerberos.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/data/TestBitBitKerberos.java
index 12cd823..81b027f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/data/TestBitBitKerberos.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/data/TestBitBitKerberos.java
@@ -26,7 +26,7 @@ import mockit.Injectable;
 import mockit.Mock;
 import mockit.MockUp;
 import mockit.NonStrictExpectations;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SecurityTest;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.DrillProperties;
@@ -96,7 +96,7 @@ public class TestBitBitKerberos extends BaseTestQuery {
 
     final Config config = DrillConfig.create(cloneDefaultTestConfigProperties());
     krbHelper = new KerberosHelper(TestBitBitKerberos.class.getSimpleName());
-    krbHelper.setupKdc();
+    krbHelper.setupKdc(dirTestWatcher.getTmpDir());
 
     newConfig = new DrillConfig(
         config.withValue(ExecConstants.AUTHENTICATION_MECHANISMS,

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/security/KerberosHelper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/security/KerberosHelper.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/security/KerberosHelper.java
index 3320cef..451e0aa 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/security/KerberosHelper.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/security/KerberosHelper.java
@@ -26,8 +26,6 @@ import java.io.IOException;
 import java.net.ServerSocket;
 import java.nio.file.Files;
 
-import static org.apache.drill.exec.ExecTest.getTempDir;
-
 public class KerberosHelper {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(KerberosHelper.class);
 
@@ -59,9 +57,9 @@ public class KerberosHelper {
     this.testName = testName;
   }
 
-  public void setupKdc() throws Exception {
+  public void setupKdc(File workspace) throws Exception {
+    this.workspace = workspace;
     kdc = new SimpleKdcServer();
-    workspace = new File(getTempDir("kerberos_target"));
 
     kdcDir = new File(workspace, testName);
     if(!kdcDir.mkdirs()) {
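
Note: the KDC workspace is now supplied by the caller instead of being derived inside KerberosHelper from ExecTest.getTempDir(). A minimal sketch of how a Kerberos test wires this up, assuming the static dirTestWatcher and its getTmpDir() accessor exposed by the updated BaseTestQuery, as used elsewhere in this commit; the class name is hypothetical:

    // Hypothetical test class, shown only to illustrate the new wiring.
    public class ExampleKerberosTest extends BaseTestQuery {
      private static KerberosHelper krbHelper;

      @BeforeClass
      public static void setupTest() throws Exception {
        krbHelper = new KerberosHelper(ExampleKerberosTest.class.getSimpleName());
        // the KDC working directory now comes from the test's dirTestWatcher
        krbHelper.setupKdc(dirTestWatcher.getTmpDir());
      }
    }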

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/TemporaryTablesAutomaticDropTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/TemporaryTablesAutomaticDropTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/TemporaryTablesAutomaticDropTest.java
index 9b551ce..bbe3537 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/TemporaryTablesAutomaticDropTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/TemporaryTablesAutomaticDropTest.java
@@ -19,31 +19,27 @@ package org.apache.drill.exec.rpc.user;
 
 import mockit.Mock;
 import mockit.MockUp;
-import mockit.integration.junit4.JMockit;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.store.StoragePluginRegistry;
-import org.apache.drill.exec.util.TestUtilities;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
+import org.apache.drill.exec.util.StoragePluginTestUtils;
+import org.apache.drill.test.DirTestWatcher;
 import org.junit.Before;
 import org.junit.Test;
-import org.junit.runner.RunWith;
 
+import java.io.File;
 import java.util.Properties;
 import java.util.UUID;
 
+import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_TMP_SCHEMA;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-@RunWith(JMockit.class)
 public class TemporaryTablesAutomaticDropTest extends BaseTestQuery {
 
   private static final String session_id = "sessionId";
 
-  private FileSystem fs;
-
   @Before
   public void setup() throws Exception {
     new MockUp<UUID>() {
@@ -52,53 +48,56 @@ public class TemporaryTablesAutomaticDropTest extends BaseTestQuery {
         return UUID.nameUUIDFromBytes(session_id.getBytes());
       }
     };
+
     Properties testConfigurations = cloneDefaultTestConfigProperties();
-    testConfigurations.put(ExecConstants.DEFAULT_TEMPORARY_WORKSPACE, TEMP_SCHEMA);
+    testConfigurations.put(ExecConstants.DEFAULT_TEMPORARY_WORKSPACE, DFS_TMP_SCHEMA);
     updateTestCluster(1, DrillConfig.create(testConfigurations));
-
-    fs = getLocalFileSystem();
   }
 
   @Test
   public void testAutomaticDropWhenClientIsClosed() throws Exception {
-    Path sessionTemporaryLocation = createAndCheckSessionTemporaryLocation("client_closed",
-            getDfsTestTmpSchemaLocation());
+    final File sessionTemporaryLocation =
+      createAndCheckSessionTemporaryLocation("client_closed", dirTestWatcher.getDfsTestTmpDir());
+
     updateClient("new_client");
-    assertFalse("Session temporary location should be absent", fs.exists(sessionTemporaryLocation));
+    assertFalse("Session temporary location should be absent", sessionTemporaryLocation.exists());
   }
 
   @Test
   public void testAutomaticDropWhenDrillbitIsClosed() throws Exception {
-    Path sessionTemporaryLocation = createAndCheckSessionTemporaryLocation("drillbit_closed",
-            getDfsTestTmpSchemaLocation());
+    final File sessionTemporaryLocation =
+      createAndCheckSessionTemporaryLocation("drillbit_closed", dirTestWatcher.getDfsTestTmpDir());
     bits[0].close();
-    assertFalse("Session temporary location should be absent", fs.exists(sessionTemporaryLocation));
+    assertFalse("Session temporary location should be absent", sessionTemporaryLocation.exists());
   }
 
   @Test
   public void testAutomaticDropOfSeveralSessionTemporaryLocations() throws Exception {
-    Path firstSessionTemporaryLocation = createAndCheckSessionTemporaryLocation("first_location",
-            getDfsTestTmpSchemaLocation());
-    StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
-    String tempDir = TestUtilities.createTempDir();
+    final File firstSessionTemporaryLocation =
+      createAndCheckSessionTemporaryLocation("first_location", dirTestWatcher.getDfsTestTmpDir());
+    final StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
+    final File tempDir = DirTestWatcher.createTempDir(dirTestWatcher.getDir());
+
     try {
-      TestUtilities.updateDfsTestTmpSchemaLocation(pluginRegistry, tempDir);
-      Path secondSessionTemporaryLocation = createAndCheckSessionTemporaryLocation("second_location", tempDir);
+      StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, tempDir);
+      final File secondSessionTemporaryLocation = createAndCheckSessionTemporaryLocation("second_location", tempDir);
       updateClient("new_client");
-      assertFalse("First session temporary location should be absent", fs.exists(firstSessionTemporaryLocation));
-      assertFalse("Second session temporary location should be absent", fs.exists(secondSessionTemporaryLocation));
+      assertFalse("First session temporary location should be absent", firstSessionTemporaryLocation.exists());
+      assertFalse("Second session temporary location should be absent", secondSessionTemporaryLocation.exists());
     } finally {
-      TestUtilities.updateDfsTestTmpSchemaLocation(pluginRegistry, getDfsTestTmpSchemaLocation());
+      StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, dirTestWatcher.getDfsTestTmpDir());
     }
   }
 
-  private Path createAndCheckSessionTemporaryLocation(String suffix, String schemaLocation) throws Exception {
-    String temporaryTableName = "temporary_table_automatic_drop_" + suffix;
-    test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", TEMP_SCHEMA, temporaryTableName);
-    Path sessionTemporaryLocation = new Path(schemaLocation,
-            UUID.nameUUIDFromBytes(session_id.getBytes()).toString());
-    assertTrue("Session temporary location should exist", fs.exists(sessionTemporaryLocation));
+  private File createAndCheckSessionTemporaryLocation(String suffix, File schemaLocation) throws Exception {
+    final String temporaryTableName = "temporary_table_automatic_drop_" + suffix;
+    final File sessionTemporaryLocation = schemaLocation
+      .toPath()
+      .resolve(UUID.nameUUIDFromBytes(session_id.getBytes()).toString())
+      .toFile();
+
+    test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
+    assertTrue("Session temporary location should exist", sessionTemporaryLocation.exists());
     return sessionTemporaryLocation;
   }
-
 }
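
Note: with the Hadoop FileSystem/Path pair removed, session temporary locations are checked directly as java.io.File objects rooted in the watcher-managed dfs.tmp directory. A condensed sketch of the pattern introduced above; pluginRegistry and the "sessionId" value are illustrative, the helpers are the ones added by this commit:

    // resolve the expected per-session directory under the dfs.tmp workspace root
    File sessionDir = dirTestWatcher.getDfsTestTmpDir()
        .toPath()
        .resolve(UUID.nameUUIDFromBytes("sessionId".getBytes()).toString())
        .toFile();
    assertTrue("Session temporary location should exist", sessionDir.exists()); // was fs.exists(new Path(...))

    // repoint the dfs plugin's tmp workspace at a fresh directory created under the watcher root
    File tempDir = DirTestWatcher.createTempDir(dirTestWatcher.getDir());
    StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, tempDir);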

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestCustomUserAuthenticator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestCustomUserAuthenticator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestCustomUserAuthenticator.java
index b4c0eb2..97360d3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestCustomUserAuthenticator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestCustomUserAuthenticator.java
@@ -18,7 +18,7 @@
 package org.apache.drill.exec.rpc.user.security;
 
 import com.typesafe.config.ConfigValueFactory;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SecurityTest;
 import org.apache.drill.common.config.DrillProperties;
 import org.apache.drill.common.config.DrillConfig;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberos.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberos.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberos.java
index b1a6e98..d6495e9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberos.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberos.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.rpc.user.security;
 
 import com.google.common.collect.Lists;
 import com.typesafe.config.ConfigValueFactory;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SecurityTest;
 import org.apache.drill.common.config.DrillProperties;
 import org.apache.drill.common.config.DrillConfig;
@@ -57,7 +57,7 @@ public class TestUserBitKerberos extends BaseTestQuery {
   public static void setupTest() throws Exception {
 
     krbHelper = new KerberosHelper(TestUserBitKerberos.class.getSimpleName());
-    krbHelper.setupKdc();
+    krbHelper.setupKdc(dirTestWatcher.getTmpDir());
 
     // Create a new DrillConfig which has user authentication enabled and authenticator set to
     // UserAuthenticatorTestImpl.

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberosEncryption.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberosEncryption.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberosEncryption.java
index 2ddbcef..4f411ae 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberosEncryption.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberosEncryption.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.rpc.user.security;
 
 import com.google.common.collect.Lists;
 import com.typesafe.config.ConfigValueFactory;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SecurityTest;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.DrillProperties;
@@ -61,7 +61,7 @@ public class TestUserBitKerberosEncryption extends BaseTestQuery {
   @BeforeClass
   public static void setupTest() throws Exception {
     krbHelper = new KerberosHelper(TestUserBitKerberosEncryption.class.getSimpleName());
-    krbHelper.setupKdc();
+    krbHelper.setupKdc(dirTestWatcher.getTmpDir());
 
     // Create a new DrillConfig which has user authentication enabled and authenticator set to
     // UserAuthenticatorTestImpl.

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSSL.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSSL.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSSL.java
index 2228e30..2fb08e3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSSL.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSSL.java
@@ -20,10 +20,10 @@ package org.apache.drill.exec.rpc.user.security;
 import com.typesafe.config.ConfigValueFactory;
 import io.netty.handler.ssl.util.SelfSignedCertificate;
 import junit.framework.TestCase;
-import org.apache.drill.BaseTestQuery;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.DrillProperties;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
@@ -39,9 +39,6 @@ import static junit.framework.TestCase.fail;
 import static org.junit.Assert.assertEquals;
 
 public class TestUserBitSSL extends BaseTestQuery {
-  private static final org.slf4j.Logger logger =
-      org.slf4j.LoggerFactory.getLogger(TestUserBitSSL.class);
-
   private static DrillConfig newConfig;
   private static Properties initProps; // initial client properties
   private static ClassLoader classLoader;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSSLServer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSSLServer.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSSLServer.java
index 890a924..a2cb675 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSSLServer.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSSLServer.java
@@ -18,41 +18,31 @@
 package org.apache.drill.exec.rpc.user.security;
 
 import com.typesafe.config.ConfigValueFactory;
-import junit.framework.TestCase;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.DrillProperties;
 import org.apache.drill.exec.ExecConstants;
-import org.apache.hadoop.conf.Configuration;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 import java.io.File;
-import java.text.MessageFormat;
 import java.util.Properties;
 
-import static org.apache.drill.exec.ssl.SSLConfig.HADOOP_SSL_CONF_TPL_KEY;
 import static org.junit.Assert.assertEquals;
 
 public class TestUserBitSSLServer extends BaseTestQuery {
-  private static final org.slf4j.Logger logger =
-      org.slf4j.LoggerFactory.getLogger(TestUserBitSSLServer.class);
-
   private static DrillConfig sslConfig;
   private static Properties initProps; // initial client properties
   private static ClassLoader classLoader;
   private static String ksPath;
   private static String tsPath;
-  private static String emptyTSPath;
 
   @BeforeClass
   public static void setupTest() throws Exception {
-
     classLoader = TestUserBitSSLServer.class.getClassLoader();
     ksPath = new File(classLoader.getResource("ssl/keystore.ks").getFile()).getAbsolutePath();
     tsPath = new File(classLoader.getResource("ssl/truststore.ks").getFile()).getAbsolutePath();
-    emptyTSPath = new File(classLoader.getResource("ssl/emptytruststore.ks").getFile()).getAbsolutePath();
     sslConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
         .withValue(ExecConstants.USER_SSL_ENABLED, ConfigValueFactory.fromAnyRef(true))
         .withValue(ExecConstants.SSL_KEYSTORE_TYPE, ConfigValueFactory.fromAnyRef("JKS"))
@@ -67,13 +57,6 @@ public class TestUserBitSSLServer extends BaseTestQuery {
     initProps.setProperty(DrillProperties.DISABLE_HOST_VERIFICATION, "true");
   }
 
-  @AfterClass
-  public static void cleanTest() throws Exception {
-    DrillConfig restoreConfig =
-        new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties()), false);
-    updateTestCluster(1, restoreConfig);
-  }
-
   @Test
   public void testInvalidKeystorePath() throws Exception {
     DrillConfig testConfig = new DrillConfig(DrillConfig.create(sslConfig)
@@ -138,5 +121,4 @@ public class TestUserBitSSLServer extends BaseTestQuery {
     }
     assertEquals(failureCaught, false);
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSaslCompatibility.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSaslCompatibility.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSaslCompatibility.java
index bbb957d..1ca38f5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSaslCompatibility.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitSaslCompatibility.java
@@ -17,15 +17,14 @@
  */
 package org.apache.drill.exec.rpc.user.security;
 
-
 import com.google.common.collect.Lists;
 import com.typesafe.config.ConfigValueFactory;
-import org.apache.drill.BaseTestQuery;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.DrillProperties;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.rpc.NonTransientRpcException;
 import org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.BeforeClass;
 import org.junit.Test;
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestDrillbitResilience.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestDrillbitResilience.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestDrillbitResilience.java
index 1548d7d..92d91f7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestDrillbitResilience.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestDrillbitResilience.java
@@ -31,8 +31,8 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.math3.util.Pair;
-import org.apache.drill.BaseTestQuery;
-import org.apache.drill.QueryTestUtil;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.QueryTestUtil;
 import org.apache.drill.SingleRowListener;
 import org.apache.drill.common.DrillAutoCloseables;
 import org.apache.drill.common.concurrent.ExtendedLatch;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestOptions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestOptions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestOptions.java
index 00e356e..0a7f731 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestOptions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestOptions.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.server;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OptionsTest;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.test.UserExceptionMatcher;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestOptionsAuthEnabled.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestOptionsAuthEnabled.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestOptionsAuthEnabled.java
index 82a524a..61eb85d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestOptionsAuthEnabled.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestOptionsAuthEnabled.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.server;
 
 import com.google.common.base.Joiner;
 import com.typesafe.config.ConfigValueFactory;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.common.config.DrillProperties;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.util.DrillStringUtils;
@@ -42,10 +42,6 @@ import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorT
 import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_1_PASSWORD;
 import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_2;
 import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_2_PASSWORD;
-
-
-
-
 import java.util.Properties;
 
 /**
@@ -129,7 +125,7 @@ public class TestOptionsAuthEnabled extends BaseTestQuery {
   @Test
   public void testAdminUserOptions() throws Exception {
 
-    try (ClusterFixture cluster = ClusterFixture.standardCluster();
+    try (ClusterFixture cluster = ClusterFixture.standardCluster(dirTestWatcher);
          ClientFixture client = cluster.clientFixture()) {
       OptionManager optionManager = cluster.drillbit().getContext().getOptionManager();
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestTpcdsSf1Leaks.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestTpcdsSf1Leaks.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestTpcdsSf1Leaks.java
index ba19e0d..926024a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestTpcdsSf1Leaks.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestTpcdsSf1Leaks.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.server;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 
 import static org.apache.drill.exec.ExecConstants.SLICE_TARGET;
 import static org.apache.drill.exec.ExecConstants.SLICE_TARGET_DEFAULT;
@@ -34,7 +34,7 @@ import org.junit.rules.Timeout;
  * To run this unit class you need to download the following data file:
  * http://apache-drill.s3.amazonaws.com/files/tpcds-sf1-parquet.tgz
 * and untar it in some folder (e.g. /tpcds-sf1-parquet) then add the following workspace to
- * exec/java-exec/src/test/resources/bootstrap-storage-plugins.json
+ * the dfs storage plugin
  *
  * ,"tpcds" : {
  *   location: "/tpcds-sf1-parquet",

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/server/options/PersistedOptionValueTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/options/PersistedOptionValueTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/options/PersistedOptionValueTest.java
index 182442b..165dad6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/options/PersistedOptionValueTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/options/PersistedOptionValueTest.java
@@ -21,9 +21,8 @@ package org.apache.drill.exec.server.options;
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.serialization.JacksonSerializer;
-import org.apache.drill.exec.store.sys.PersistentStoreConfig;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -47,10 +46,10 @@ public class PersistedOptionValueTest {
   private void testHelper(String booleanOptionFile, String doubleOptionFile,
                           String longOptionFile, String stringOptionFile) throws IOException {
     JacksonSerializer serializer = new JacksonSerializer<>(new ObjectMapper(), PersistedOptionValue.class);
-    String booleanOptionJson = FileUtils.getResourceAsString(booleanOptionFile);
-    String doubleOptionJson = FileUtils.getResourceAsString(doubleOptionFile);
-    String longOptionJson = FileUtils.getResourceAsString(longOptionFile);
-    String stringOptionJson = FileUtils.getResourceAsString(stringOptionFile);
+    String booleanOptionJson = DrillFileUtils.getResourceAsString(booleanOptionFile);
+    String doubleOptionJson = DrillFileUtils.getResourceAsString(doubleOptionFile);
+    String longOptionJson = DrillFileUtils.getResourceAsString(longOptionFile);
+    String stringOptionJson = DrillFileUtils.getResourceAsString(stringOptionFile);
 
     PersistedOptionValue booleanValue = (PersistedOptionValue) serializer.deserialize(booleanOptionJson.getBytes());
     PersistedOptionValue doubleValue = (PersistedOptionValue) serializer.deserialize(doubleOptionJson.getBytes());

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/server/options/TestConfigLinkage.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/options/TestConfigLinkage.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/options/TestConfigLinkage.java
new file mode 100644
index 0000000..c263d1d
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/options/TestConfigLinkage.java
@@ -0,0 +1,332 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.server.options;
+
+import org.apache.drill.categories.OptionsTest;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.store.sys.SystemTable;
+import org.apache.drill.test.BaseDirTestWatcher;
+import org.apache.drill.test.ClientFixture;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterFixtureBuilder;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import static org.junit.Assert.assertEquals;
+/*
+ * Tests the linkage between the two config option systems, i.e., between the
+ * boot-config system and the system/session options.
+ * Asserts that config options are read in the order: session, system, boot-config.
+ * Max width per node differs slightly from other options since it defaults to zero
+ * in the config, and the option value is computed dynamically every time the
+ * stored value is zero, i.e., when it is not set at the system or session level.
+ */
+
+@Category(OptionsTest.class)
+public class TestConfigLinkage {
+  public static final String MOCK_PROPERTY = "mock.prop";
+
+  @Rule
+  public BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();
+
+  public static OptionDefinition createMockPropOptionDefinition() {
+    return new OptionDefinition(new TypeValidators.StringValidator(MOCK_PROPERTY), new OptionMetaData(OptionValue.AccessibleScopes.ALL, false, true));
+  }
+
+  @Test
+  public void testDefaultInternalValue() throws Exception {
+    OptionDefinition optionDefinition = createMockPropOptionDefinition();
+
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+      .configProperty(ExecConstants.bootDefaultFor(MOCK_PROPERTY), "a")
+      .putDefinition(optionDefinition);
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String mockProp = client.queryBuilder().
+        sql("SELECT string_val FROM sys.%s where name='%s'", SystemTable.INTERNAL_OPTIONS.getTableName(), MOCK_PROPERTY).singletonString();
+      String mockProp2 = client.queryBuilder().
+        sql("SELECT val FROM sys.%s where name='%s'", SystemTable.INTERNAL_OPTIONS_VAL.getTableName(), MOCK_PROPERTY).singletonString();
+
+      assertEquals("a", mockProp);
+      assertEquals("a", mockProp2);
+    }
+  }
+
+  @Test
+  public void testDefaultValidatorInternalValue() throws Exception {
+    OptionDefinition optionDefinition = createMockPropOptionDefinition();
+
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher).
+      putDefinition(optionDefinition).
+      configProperty(ExecConstants.bootDefaultFor(MOCK_PROPERTY), "a");
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String mockProp = client.queryBuilder().
+        sql("SELECT string_val FROM sys.%s where name='%s'", SystemTable.INTERNAL_OPTIONS.getTableName(), MOCK_PROPERTY).singletonString();
+      String mockProp2 = client.queryBuilder().
+        sql("SELECT val FROM sys.%s where name='%s'", SystemTable.INTERNAL_OPTIONS_VAL.getTableName(), MOCK_PROPERTY).singletonString();
+
+      assertEquals("a", mockProp);
+      assertEquals("a", mockProp2);
+    }
+  }
+
+  /* Test if session option takes precedence */
+  @Test
+  public void testSessionOption() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+      .sessionOption(ExecConstants.SLICE_TARGET, 10);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String slice_target = client.queryBuilder().sql("SELECT val FROM sys.%s where name='planner.slice_target' and optionScope = 'SESSION'", SystemTable.OPTION_VAL
+        .getTableName())
+        .singletonString();
+      assertEquals(slice_target, "10");
+    }
+  }
+
+  /* Test if system option takes precedence over the boot option */
+  @Test
+  public void testSystemOption() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+      .systemOption(ExecConstants.SLICE_TARGET, 10000);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String slice_target = client.queryBuilder().sql("SELECT val FROM sys.%s where name='planner.slice_target' and optionScope = 'SYSTEM'", SystemTable.OPTION_VAL.getTableName())
+        .singletonString();
+      assertEquals(slice_target, "10000");
+    }
+  }
+
+  @Test
+  public void testInternalSystemOption() throws Exception {
+    OptionDefinition optionDefinition = createMockPropOptionDefinition();
+
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher).
+      putDefinition(optionDefinition).
+      configProperty(ExecConstants.bootDefaultFor(MOCK_PROPERTY), "a").
+      systemOption(MOCK_PROPERTY, "blah");
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String mockProp = client.queryBuilder().
+        sql("SELECT string_val FROM sys.%s where name='%s'", SystemTable.INTERNAL_OPTIONS.getTableName(), MOCK_PROPERTY)
+        .singletonString();
+      String mockProp2 = client.queryBuilder().
+        sql("SELECT val FROM sys.%s where name='%s'", SystemTable.INTERNAL_OPTIONS_VAL.getTableName(), MOCK_PROPERTY)
+        .singletonString();
+
+      assertEquals("blah", mockProp);
+      assertEquals("blah", mockProp2);
+    }
+  }
+
+  /* Test that the boot-config option value takes effect when the option is not set at system or session level */
+  @Test
+  public void testConfigOption() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+            .setOptionDefault(ExecConstants.SLICE_TARGET, 1000);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String slice_target = client.queryBuilder().sql("SELECT val FROM sys.%s where name='planner.slice_target' and optionScope = 'BOOT'", SystemTable.OPTION_VAL.getTableName())
+        .singletonString();
+      assertEquals(slice_target, "1000");
+    }
+  }
+
+  /* Test if altering system option takes precedence over config option */
+  @Test
+  public void testAlterSystem() throws Exception {
+    try (ClusterFixture cluster = ClusterFixture.standardCluster(dirTestWatcher);
+         ClientFixture client = cluster.clientFixture()) {
+      client.queryBuilder().sql("ALTER SYSTEM SET `planner.slice_target` = 10000").run();
+      String slice_target = client.queryBuilder().sql("SELECT val FROM sys.%s where name='planner.slice_target' and optionScope = 'SYSTEM'", SystemTable.OPTION_VAL.getTableName())
+        .singletonString();
+      assertEquals(slice_target, "10000");
+    }
+  }
+
+  /* Test if altering session option takes precedence over system option */
+  @Test
+  public void testSessionPrecedence() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+      .systemOption(ExecConstants.SLICE_TARGET, 100000);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      client.queryBuilder().sql("ALTER SESSION SET `planner.slice_target` = 10000").run();
+      String slice_target = client.queryBuilder().sql("SELECT val FROM sys.%s where name='planner.slice_target' and optionScope = 'SESSION'", SystemTable.OPTION_VAL.getTableName())
+        .singletonString();
+      assertEquals(slice_target, "10000");
+    }
+  }
+
+  /* Test if setting maxwidth option through config takes effect */
+  @Test
+  public void testMaxWidthPerNodeConfig() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher)
+      .setOptionDefault(ExecConstants.MAX_WIDTH_PER_NODE_KEY, 2);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String maxWidth = client.queryBuilder().sql("SELECT val FROM sys.%s where name='planner.width.max_per_node' and optionScope = 'BOOT'", SystemTable.OPTION_VAL.getTableName())
+        .singletonString();
+      assertEquals("2", maxWidth);
+    }
+  }
+
+  /* Test if setting maxwidth at system level takes precedence */
+  @Test
+  public void testMaxWidthPerNodeSystem() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher)
+      .systemOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, 3);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String maxWidth = client.queryBuilder().sql("SELECT val FROM sys.%s where name='planner.width.max_per_node' and optionScope = 'SYSTEM'", SystemTable.OPTION_VAL.getTableName())
+        .singletonString();
+      assertEquals("3", maxWidth);
+    }
+  }
+
+  /* Test if setting maxwidth at session level takes precedence */
+  @Test
+  public void testMaxWidthPerNodeSession() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher)
+      .sessionOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, 2);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String maxWidth = client.queryBuilder().sql("SELECT val FROM sys.%s where name='planner.width.max_per_node' and optionScope = 'SESSION'", SystemTable.OPTION_VAL.getTableName())
+        .singletonString();
+      assertEquals("2", maxWidth);
+    }
+  }
+
+  /* Test if max width is computed correctly using the cpu load average
+     when the option is not set at either system or session level
+  */
+  @Test
+  public void testMaxWidthPerNodeDefault() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher)
+      .setOptionDefault(ExecConstants.CPU_LOAD_AVERAGE_KEY, 0.70);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      long maxWidth = ExecConstants.MAX_WIDTH_PER_NODE.computeMaxWidth(0.70, 0);
+      int availProc = Runtime.getRuntime().availableProcessors();
+      long maxWidthPerNode = Math.max(1, Math.min(availProc, Math.round(availProc * 0.70)));
+      assertEquals(maxWidthPerNode, maxWidth);
+    }
+  }
+
+  /* Test if the scope is set during BOOT time and scope is actually BOOT */
+  @Test
+  public void testScope() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher)
+      .setOptionDefault(ExecConstants.SLICE_TARGET, 100000);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String scope = client.queryBuilder()
+                          .sql("SELECT optionScope from sys.%s where name='planner.slice_target'", SystemTable.OPTION_VAL.getTableName())
+                          .singletonString();
+      Assert.assertEquals("BOOT",scope);
+    }
+  }
+
+  /* Test if the option is set at SYSTEM scope and the scope is actually SYSTEM */
+  @Test
+  public void testScopeSystem() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher)
+      .systemOption(ExecConstants.SLICE_TARGET, 10000);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String scope = client.queryBuilder()
+              .sql("SELECT optionScope from sys.%s where name='planner.slice_target'", SystemTable.OPTION_VAL.getTableName())
+              .singletonString();
+      Assert.assertEquals("SYSTEM",scope);
+    }
+  }
+
+  /* Test if the option is set at SESSION scope and the scope is actually SESSION */
+  @Test
+  public void testScopeSession() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher)
+      .sessionOption(ExecConstants.SLICE_TARGET, 100000);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String scope = client.queryBuilder()
+              .sql("SELECT optionScope from sys.%s where name='planner.slice_target'", SystemTable.OPTION_VAL.getTableName())
+              .singletonString();
+      Assert.assertEquals("SESSION",scope);
+    }
+  }
+
+  /* Test if the option is altered at SYSTEM scope and the scope is actually SYSTEM */
+  @Test
+  public void testScopeAlterSystem() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      client.queryBuilder().sql("ALTER SYSTEM set `planner.slice_target`= 10000").run();
+      String scope = client.queryBuilder()
+              .sql("SELECT optionScope from sys.%s where name='planner.slice_target'", SystemTable.OPTION_VAL.getTableName())
+              .singletonString();
+      Assert.assertEquals("SYSTEM",scope);
+    }
+  }
+
+  /* Test if the option is altered at SESSION scope and the scope is actually SESSION */
+  @Test
+  public void testScopeAlterSession() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher);
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      client.queryBuilder().sql("ALTER SESSION set `planner.slice_target`= 10000").run();
+      String scope = client.queryBuilder()
+              .sql("SELECT optionScope from sys.%s where name='planner.slice_target'", SystemTable.OPTION_VAL.getTableName())
+              .singletonString();
+      Assert.assertEquals("SESSION",scope);
+    }
+  }
+
+  @Test
+  public void testAlterInternalSystemOption() throws Exception {
+    OptionDefinition optionDefinition = createMockPropOptionDefinition();
+
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher).
+      configProperty(ExecConstants.bootDefaultFor(MOCK_PROPERTY), "a").
+      putDefinition(optionDefinition);
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      client.queryBuilder().sql("ALTER SYSTEM SET `%s` = 'bleh'", MOCK_PROPERTY).run();
+
+      client.queryBuilder().sql("SELECT * FROM sys.%s", SystemTable.INTERNAL_OPTIONS.getTableName()).printCsv();
+      client.queryBuilder().sql("SELECT * FROM sys.%s", SystemTable.INTERNAL_OPTIONS_VAL.getTableName()).printCsv();
+
+      String mockProp = client.queryBuilder().
+        sql("SELECT string_val FROM sys.%s where name='%s'", SystemTable.INTERNAL_OPTIONS, MOCK_PROPERTY).singletonString();
+      String mockProp2 = client.queryBuilder().
+        sql("SELECT val FROM sys.%s where name='%s'", SystemTable.INTERNAL_OPTIONS_VAL, MOCK_PROPERTY).singletonString();
+
+      assertEquals("bleh", mockProp);
+      assertEquals("bleh", mockProp2);
+    }
+  }
+}
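
Note: this new test does not extend BaseTestQuery, so it declares its own BaseDirTestWatcher rule and hands it to the cluster fixture; throughout this commit, ClusterFixture.builder, bareBuilder and standardCluster now take the watcher so that test temporary directories are created under a per-test location. A bare-bones sketch of that pattern, with a hypothetical class name and the test body elided:

    public class ExampleFixtureTest {
      @Rule
      public final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();

      @Test
      public void exampleQuery() throws Exception {
        ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
        try (ClusterFixture cluster = builder.build();
             ClientFixture client = cluster.clientFixture()) {
          // temp and dfs.tmp directories are created under the watcher's per-test root
        }
      }
    }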

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/StatusResourcesTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/StatusResourcesTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/StatusResourcesTest.java
index 4f4390f..fc04f88 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/StatusResourcesTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/StatusResourcesTest.java
@@ -20,23 +20,27 @@ package org.apache.drill.exec.server.rest;
 
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.server.options.OptionDefinition;
-import org.apache.drill.exec.server.options.OptionValidator;
+import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.ClientFixture;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterFixtureBuilder;
 import org.apache.drill.test.RestClientFixture;
 import org.junit.Assert;
+import org.junit.Rule;
 import org.junit.Test;
 
-import static org.apache.drill.test.TestConfigLinkage.MOCK_PROPERTY;
-import static org.apache.drill.test.TestConfigLinkage.createMockPropOptionDefinition;
+import static org.apache.drill.exec.server.options.TestConfigLinkage.MOCK_PROPERTY;
+import static org.apache.drill.exec.server.options.TestConfigLinkage.createMockPropOptionDefinition;
 
 public class StatusResourcesTest {
+  @Rule
+  public final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();
+
   @Test
   public void testRetrieveInternalOption() throws Exception {
     OptionDefinition optionDefinition = createMockPropOptionDefinition();
 
-    ClusterFixtureBuilder builder = ClusterFixture.builder().
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher).
       configProperty(ExecConstants.HTTP_ENABLE, true).
       configProperty(ExecConstants.bootDefaultFor(MOCK_PROPERTY), "a").
       configProperty(ExecConstants.HTTP_PORT_HUNT, true).
@@ -60,7 +64,7 @@ public class StatusResourcesTest {
 
   @Test
   public void testRetrievePublicOption() throws Exception {
-    ClusterFixtureBuilder builder = ClusterFixture.builder().
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher).
       configProperty(ExecConstants.HTTP_ENABLE, true).
       configProperty(ExecConstants.HTTP_PORT_HUNT, true).
       configProperty(ExecConstants.SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE, false).

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestBaseViewSupport.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestBaseViewSupport.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestBaseViewSupport.java
index 5bf55af..c85cbb6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestBaseViewSupport.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestBaseViewSupport.java
@@ -18,8 +18,8 @@
 package org.apache.drill.exec.sql;
 
 import com.google.common.base.Strings;
-import org.apache.drill.BaseTestQuery;
-import org.apache.drill.TestBuilder;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.TestBuilder;
 
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -34,14 +34,14 @@ public class TestBaseViewSupport extends BaseTestQuery {
   /**
    * Create view with given parameters.
    *
-   * Current default schema "dfs_test"
+   * Current default schema "dfs"
    *
    * CREATE VIEW tmp.viewName(f1, f2) AS SELECT * FROM cp.`region.json`
    *
    * For the above CREATE VIEW query, function parameters are:
    *   viewSchema = "tmp"
    *   viewName = "viewName"
-   *   finalSchema = "dfs_test.tmp"
+   *   finalSchema = "dfs.tmp"
    *   viewFields = "(f1, f2)"
    *   viewDef = "SELECT * FROM cp.`region.json`"
    *
@@ -76,13 +76,13 @@ public class TestBaseViewSupport extends BaseTestQuery {
   /**
    * Drop view with given parameters.
    *
-   * Current schema "dfs_test"
+   * Current schema "dfs"
    * DROP VIEW tmp.viewName
    *
    * For the above DROP VIEW query, function parameters values are:
    *  viewSchema = "tmp"
    *  "viewName" = "viewName"
-   *  "finalSchema" = "dfs_test.tmp"
+   *  "finalSchema" = "dfs.tmp"
    *
    * @param viewSchema
    * @param viewName
@@ -107,13 +107,13 @@ public class TestBaseViewSupport extends BaseTestQuery {
   /**
    * Drop view if exists with given parameters.
    *
-   * Current schema "dfs_test"
+   * Current schema "dfs"
    * DROP VIEW IF EXISTS tmp.viewName
    *
    * For the above DROP VIEW IF EXISTS query, function parameters values are:
    *  viewSchema = "tmp"
    *  "viewName" = "viewName"
-   *  "finalSchema" = "dfs_test.tmp"
+   *  "finalSchema" = "dfs.tmp"
    *  "ifViewExists" = null
    *
    * @param viewSchema

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTAS.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTAS.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTAS.java
index 2e3b796..764cef4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTAS.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTAS.java
@@ -18,8 +18,7 @@
 package org.apache.drill.exec.sql;
 
 import com.google.common.collect.Maps;
-import org.apache.commons.io.FileUtils;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.exec.ExecConstants;
@@ -31,7 +30,6 @@ import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.io.File;
 import java.util.Map;
 
 import static org.junit.Assert.assertEquals;
@@ -41,7 +39,7 @@ public class TestCTAS extends BaseTestQuery {
   @Test // DRILL-2589
   @Category(UnlikelyTest.class)
   public void withDuplicateColumnsInDef1() throws Exception {
-    ctasErrorTestHelper("CREATE TABLE %s.%s AS SELECT region_id, region_id FROM cp.`region.json`",
+    ctasErrorTestHelper("CREATE TABLE dfs.tmp.%s AS SELECT region_id, region_id FROM cp.`region.json`",
         String.format("Duplicate column name [%s]", "region_id")
     );
   }
@@ -49,7 +47,7 @@ public class TestCTAS extends BaseTestQuery {
   @Test // DRILL-2589
   @Category(UnlikelyTest.class)
   public void withDuplicateColumnsInDef2() throws Exception {
-    ctasErrorTestHelper("CREATE TABLE %s.%s AS SELECT region_id, sales_city, sales_city FROM cp.`region.json`",
+    ctasErrorTestHelper("CREATE TABLE dfs.tmp.%s AS SELECT region_id, sales_city, sales_city FROM cp.`region.json`",
         String.format("Duplicate column name [%s]", "sales_city")
     );
   }
@@ -58,7 +56,7 @@ public class TestCTAS extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   public void withDuplicateColumnsInDef3() throws Exception {
     ctasErrorTestHelper(
-        "CREATE TABLE %s.%s(regionid, regionid) " +
+        "CREATE TABLE dfs.tmp.%s(regionid, regionid) " +
             "AS SELECT region_id, sales_city FROM cp.`region.json`",
         String.format("Duplicate column name [%s]", "regionid")
     );
@@ -68,7 +66,7 @@ public class TestCTAS extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   public void withDuplicateColumnsInDef4() throws Exception {
     ctasErrorTestHelper(
-        "CREATE TABLE %s.%s(regionid, salescity, salescity) " +
+        "CREATE TABLE dfs.tmp.%s(regionid, salescity, salescity) " +
             "AS SELECT region_id, sales_city, sales_city FROM cp.`region.json`",
         String.format("Duplicate column name [%s]", "salescity")
     );
@@ -78,7 +76,7 @@ public class TestCTAS extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   public void withDuplicateColumnsInDef5() throws Exception {
     ctasErrorTestHelper(
-        "CREATE TABLE %s.%s(regionid, salescity, SalesCity) " +
+        "CREATE TABLE dfs.tmp.%s(regionid, salescity, SalesCity) " +
             "AS SELECT region_id, sales_city, sales_city FROM cp.`region.json`",
         String.format("Duplicate column name [%s]", "SalesCity")
     );
@@ -87,7 +85,7 @@ public class TestCTAS extends BaseTestQuery {
   @Test // DRILL-2589
   public void whenInEqualColumnCountInTableDefVsInTableQuery() throws Exception {
     ctasErrorTestHelper(
-        "CREATE TABLE %s.%s(regionid, salescity) " +
+        "CREATE TABLE dfs.tmp.%s(regionid, salescity) " +
             "AS SELECT region_id, sales_city, sales_region FROM cp.`region.json`",
         "table's field list and the table's query field list have different counts."
     );
@@ -96,7 +94,7 @@ public class TestCTAS extends BaseTestQuery {
   @Test // DRILL-2589
   public void whenTableQueryColumnHasStarAndTableFiledListIsSpecified() throws Exception {
     ctasErrorTestHelper(
-        "CREATE TABLE %s.%s(regionid, salescity) " +
+        "CREATE TABLE dfs.tmp.%s(regionid, salescity) " +
             "AS SELECT region_id, * FROM cp.`region.json`",
         "table's query field list has a '*', which is invalid when table's field list is specified."
     );
@@ -106,18 +104,10 @@ public class TestCTAS extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   public void createTableWhenATableWithSameNameAlreadyExists() throws Exception{
     final String newTblName = "createTableWhenTableAlreadyExists";
+    final String ctasQuery = String.format("CREATE TABLE dfs.tmp.%s AS SELECT * from cp.`region.json`", newTblName);
 
-    try {
-      final String ctasQuery =
-          String.format("CREATE TABLE %s.%s AS SELECT * from cp.`region.json`", TEMP_SCHEMA, newTblName);
-
-      test(ctasQuery);
-
-      errorMsgTestHelper(ctasQuery,
-          String.format("A table or view with given name [%s] already exists in schema [%s]", newTblName, TEMP_SCHEMA));
-    } finally {
-      FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName));
-    }
+    test(ctasQuery);
+    errorMsgTestHelper(ctasQuery, String.format("A table or view with given name [%s] already exists in schema [dfs.tmp]", newTblName));
   }
 
   @Test // DRILL-2422
@@ -126,143 +116,97 @@ public class TestCTAS extends BaseTestQuery {
     final String newTblName = "createTableWhenAViewWithSameNameAlreadyExists";
 
     try {
-      test(String.format("CREATE VIEW %s.%s AS SELECT * from cp.`region.json`", TEMP_SCHEMA, newTblName));
+      test("CREATE VIEW dfs.tmp.%s AS SELECT * from cp.`region.json`", newTblName);
 
-      final String ctasQuery =
-          String.format("CREATE TABLE %s.%s AS SELECT * FROM cp.`employee.json`", TEMP_SCHEMA, newTblName);
+      final String ctasQuery = String.format("CREATE TABLE dfs.tmp.%s AS SELECT * FROM cp.`employee.json`", newTblName);
 
       errorMsgTestHelper(ctasQuery,
           String.format("A table or view with given name [%s] already exists in schema [%s]",
-              newTblName, "dfs_test.tmp"));
+              newTblName, "dfs.tmp"));
     } finally {
-      test(String.format("DROP VIEW %s.%s", TEMP_SCHEMA, newTblName));
+      test("DROP VIEW dfs.tmp.%s", newTblName);
     }
   }
 
   @Test
   public void ctasPartitionWithEmptyList() throws Exception {
     final String newTblName = "ctasPartitionWithEmptyList";
+    final String ctasQuery = String.format("CREATE TABLE dfs.tmp.%s PARTITION BY AS SELECT * from cp.`region.json`", newTblName);
 
-    try {
-      final String ctasQuery = String.format("CREATE TABLE %s.%s PARTITION BY AS SELECT * from cp.`region.json`", TEMP_SCHEMA, newTblName);
-
-      errorMsgTestHelper(ctasQuery,"PARSE ERROR: Encountered \"AS\"");
-    } finally {
-      FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName));
-    }
+    errorMsgTestHelper(ctasQuery,"PARSE ERROR: Encountered \"AS\"");
   }
 
   @Test // DRILL-3377
   public void partitionByCtasColList() throws Exception {
     final String newTblName = "partitionByCtasColList";
 
-    try {
-      final String ctasQuery = String.format("CREATE TABLE %s.%s (cnt, rkey) PARTITION BY (cnt) " +
-          "AS SELECT count(*), n_regionkey from cp.`tpch/nation.parquet` group by n_regionkey",
-          TEMP_SCHEMA, newTblName);
-
-      test(ctasQuery);
+    test("CREATE TABLE dfs.tmp.%s (cnt, rkey) PARTITION BY (cnt) " +
+      "AS SELECT count(*), n_regionkey from cp.`tpch/nation.parquet` group by n_regionkey", newTblName);
 
-      final String selectFromCreatedTable = String.format(" select cnt, rkey from %s.%s", TEMP_SCHEMA, newTblName);
-      final String baselineQuery = "select count(*) as cnt, n_regionkey as rkey from cp.`tpch/nation.parquet` group by n_regionkey";
-      testBuilder()
-          .sqlQuery(selectFromCreatedTable)
-          .unOrdered()
-          .sqlBaselineQuery(baselineQuery)
-          .build()
-          .run();
-    } finally {
-      FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName));
-    }
+    testBuilder()
+        .sqlQuery("select cnt, rkey from dfs.tmp.%s", newTblName)
+        .unOrdered()
+        .sqlBaselineQuery("select count(*) as cnt, n_regionkey as rkey from cp.`tpch/nation.parquet` group by n_regionkey")
+        .build()
+        .run();
   }
 
   @Test // DRILL-3374
   public void partitionByCtasFromView() throws Exception {
-    final String newTblName = "partitionByCtasColList";
+    final String newTblName = "partitionByCtasFromView";
     final String newView = "partitionByCtasColListView";
-    try {
-      final String viewCreate = String.format("create or replace view %s.%s (col_int, col_varchar)  " +
-          "AS select cast(n_nationkey as int), cast(n_name as varchar(30)) from cp.`tpch/nation.parquet`",
-          TEMP_SCHEMA, newView);
-
-      final String ctasQuery = String.format("CREATE TABLE %s.%s PARTITION BY (col_int) AS SELECT * from %s.%s",
-          TEMP_SCHEMA, newTblName, TEMP_SCHEMA, newView);
 
-      test(viewCreate);
-      test(ctasQuery);
+    test("create or replace view dfs.tmp.%s (col_int, col_varchar)  " +
+      "AS select cast(n_nationkey as int), cast(n_name as varchar(30)) from cp.`tpch/nation.parquet`", newView);
+    test("CREATE TABLE dfs.tmp.%s PARTITION BY (col_int) AS SELECT * from dfs.tmp.%s",
+      newTblName, newView);
 
-      final String baselineQuery = "select cast(n_nationkey as int) as col_int, cast(n_name as varchar(30)) as col_varchar " +
-        "from cp.`tpch/nation.parquet`";
-      final String selectFromCreatedTable = String.format("select col_int, col_varchar from %s.%s", TEMP_SCHEMA, newTblName);
-      testBuilder()
-          .sqlQuery(selectFromCreatedTable)
-          .unOrdered()
-          .sqlBaselineQuery(baselineQuery)
-          .build()
-          .run();
+    testBuilder()
+        .sqlQuery("select col_int, col_varchar from dfs.tmp.%s", newTblName)
+        .unOrdered()
+        .sqlBaselineQuery("select cast(n_nationkey as int) as col_int, cast(n_name as varchar(30)) as col_varchar " +
+          "from cp.`tpch/nation.parquet`")
+        .build()
+        .run();
 
-      final String viewDrop = String.format("DROP VIEW %s.%s", TEMP_SCHEMA, newView);
-      test(viewDrop);
-    } finally {
-      FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName));
-    }
+    test("DROP VIEW dfs.tmp.%s", newView);
   }
 
   @Test // DRILL-3382
   public void ctasWithQueryOrderby() throws Exception {
     final String newTblName = "ctasWithQueryOrderby";
 
-    try {
-      final String ctasQuery = String.format("CREATE TABLE %s.%s   " +
-          "AS SELECT n_nationkey, n_name, n_comment from cp.`tpch/nation.parquet` order by n_nationkey",
-          TEMP_SCHEMA, newTblName);
-
-      test(ctasQuery);
-
-      final String selectFromCreatedTable = String.format(" select n_nationkey, n_name, n_comment from %s.%s", TEMP_SCHEMA, newTblName);
-      final String baselineQuery = "select n_nationkey, n_name, n_comment from cp.`tpch/nation.parquet` order by n_nationkey";
+    test("CREATE TABLE dfs.tmp.%s AS SELECT n_nationkey, n_name, n_comment from " +
+      "cp.`tpch/nation.parquet` order by n_nationkey", newTblName);
 
-      testBuilder()
-          .sqlQuery(selectFromCreatedTable)
-          .ordered()
-          .sqlBaselineQuery(baselineQuery)
-          .build()
-          .run();
-    } finally {
-      FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName));
-    }
+    testBuilder()
+        .sqlQuery("select n_nationkey, n_name, n_comment from dfs.tmp.%s", newTblName)
+        .ordered()
+        .sqlBaselineQuery("select n_nationkey, n_name, n_comment from cp.`tpch/nation.parquet` order by n_nationkey")
+        .build()
+        .run();
   }
 
   @Test // DRILL-4392
   public void ctasWithPartition() throws Exception {
     final String newTblName = "nation_ctas";
 
-    try {
-      final String ctasQuery = String.format("CREATE TABLE %s.%s   " +
-          "partition by (n_regionkey) AS SELECT n_nationkey, n_regionkey from cp.`tpch/nation.parquet` order by n_nationkey limit 1",
-          TEMP_SCHEMA, newTblName);
-
-      test(ctasQuery);
-
-      final String selectFromCreatedTable = String.format(" select * from %s.%s", TEMP_SCHEMA, newTblName);
-      final String baselineQuery = "select n_nationkey, n_regionkey from cp.`tpch/nation.parquet` order by n_nationkey limit 1";
+    test("CREATE TABLE dfs.tmp.%s partition by (n_regionkey) AS " +
+      "SELECT n_nationkey, n_regionkey from cp.`tpch/nation.parquet` order by n_nationkey limit 1", newTblName);
 
-      testBuilder()
-          .sqlQuery(selectFromCreatedTable)
-          .ordered()
-          .sqlBaselineQuery(baselineQuery)
-          .build()
-          .run();
-    } finally {
-      FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName));
-    }
+    testBuilder()
+        .sqlQuery("select * from dfs.tmp.%s", newTblName)
+        .ordered()
+        .sqlBaselineQuery("select n_nationkey, n_regionkey from cp.`tpch/nation.parquet` order by n_nationkey limit 1")
+        .build()
+        .run();
   }
 
   @Test
   public void testPartitionByForAllTypes() throws Exception {
     final String location = "partitioned_tables_with_nulls";
     final String ctasQuery = "create table %s partition by (%s) as %s";
-    final String tablePath = "%s.`%s/%s_%s`";
+    final String tablePath = "dfs.tmp.`%s/%s_%s`";
 
     // key - new table suffix, value - data query
     final Map<String, String> variations = Maps.newHashMap();
@@ -270,39 +214,36 @@ public class TestCTAS extends BaseTestQuery {
     variations.put("optional", "select * from cp.`parquet/alltypes_optional.parquet`");
     variations.put("nulls_only", "select * from cp.`parquet/alltypes_optional.parquet` where %s is null");
 
-    try {
-      final QueryDataBatch result = testSqlWithResults("select * from cp.`parquet/alltypes_required.parquet` limit 0").get(0);
-      for (UserBitShared.SerializedField field : result.getHeader().getDef().getFieldList()) {
-        final String fieldName = field.getNamePart().getName();
-
-        for (Map.Entry<String, String> variation : variations.entrySet()) {
-          final String table = String.format(tablePath, TEMP_SCHEMA, location, fieldName, variation.getKey());
-          final String dataQuery = String.format(variation.getValue(), fieldName);
-          test(ctasQuery, table, fieldName, dataQuery, fieldName);
-          testBuilder()
-              .sqlQuery("select * from %s", table)
-              .unOrdered()
-              .sqlBaselineQuery(dataQuery)
-              .build()
-              .run();
-        }
+    final QueryDataBatch result = testSqlWithResults("select * from cp.`parquet/alltypes_required.parquet` limit 0").get(0);
+    for (UserBitShared.SerializedField field : result.getHeader().getDef().getFieldList()) {
+      final String fieldName = field.getNamePart().getName();
+
+      for (Map.Entry<String, String> variation : variations.entrySet()) {
+        final String table = String.format(tablePath, location, fieldName, variation.getKey());
+        final String dataQuery = String.format(variation.getValue(), fieldName);
+        test(ctasQuery, table, fieldName, dataQuery, fieldName);
+        testBuilder()
+          .sqlQuery("select * from %s", table)
+          .unOrdered()
+          .sqlBaselineQuery(dataQuery)
+          .build()
+          .run();
       }
-      result.release();
-    } finally {
-      FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), location));
     }
+
+    result.release();
   }
 
   @Test
   public void createTableWithCustomUmask() throws Exception {
-    test("use %s", TEMP_SCHEMA);
+    test("use dfs.tmp");
     String tableName = "with_custom_permission";
     StorageStrategy storageStrategy = new StorageStrategy("000", false);
     FileSystem fs = getLocalFileSystem();
     try {
       test("alter session set `%s` = '%s'", ExecConstants.PERSISTENT_TABLE_UMASK, storageStrategy.getUmask());
       test("create table %s as select 'A' from (values(1))", tableName);
-      Path tableLocation = new Path(getDfsTestTmpSchemaLocation(), tableName);
+      Path tableLocation = new Path(dirTestWatcher.getDfsTestTmpDir().getAbsolutePath(), tableName);
       assertEquals("Directory permission should match",
           storageStrategy.getFolderPermission(), fs.getFileStatus(tableLocation).getPermission());
       assertEquals("File permission should match",
@@ -314,7 +255,7 @@ public class TestCTAS extends BaseTestQuery {
   }
 
   private static void ctasErrorTestHelper(final String ctasSql, final String expErrorMsg) throws Exception {
-    final String createTableSql = String.format(ctasSql, TEMP_SCHEMA, "testTableName");
+    final String createTableSql = String.format(ctasSql, "testTableName");
     errorMsgTestHelper(createTableSql, expErrorMsg);
   }
 }
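
The TestCTAS hunks above all follow one pattern: queries target the fixed dfs.tmp workspace, which the test framework's dirTestWatcher now backs with a per-run temporary directory, so the try/finally blocks that deleted tables by hand via FileUtils.deleteQuietly are dropped. Below is a minimal sketch of the resulting test shape, assuming a class that extends org.apache.drill.test.BaseTestQuery; the table name and query are illustrative and not part of this commit.

import org.apache.drill.test.BaseTestQuery;
import org.junit.Test;

public class ExampleCtasTempDirTest extends BaseTestQuery {

  @Test // hypothetical test illustrating the refactored pattern
  public void ctasIntoManagedTempWorkspace() throws Exception {
    // Assumed table name, chosen for the example only.
    final String tableName = "example_ctas_table";

    // dfs.tmp resolves to a directory managed by dirTestWatcher, so no
    // manual cleanup of the created table is required afterwards.
    test("CREATE TABLE dfs.tmp.%s AS SELECT n_nationkey, n_name FROM cp.`tpch/nation.parquet`", tableName);

    // Validate the table contents against the source file.
    testBuilder()
        .sqlQuery("SELECT n_nationkey, n_name FROM dfs.tmp.%s", tableName)
        .unOrdered()
        .sqlBaselineQuery("SELECT n_nationkey, n_name FROM cp.`tpch/nation.parquet`")
        .build()
        .run();
  }
}
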

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
index 904c0fe..bc06af3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
@@ -20,17 +20,15 @@ package org.apache.drill.exec.sql;
 import com.google.common.collect.Lists;
 import mockit.Mock;
 import mockit.MockUp;
-import mockit.integration.junit4.JMockit;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.exec.store.StoragePluginRegistry;
+import org.apache.drill.exec.store.dfs.FileSystemConfig;
+import org.apache.drill.exec.store.dfs.WorkspaceConfig;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.StorageStrategy;
-import org.apache.drill.exec.store.dfs.FileSystemConfig;
-import org.apache.drill.exec.store.dfs.WorkspaceConfig;
-import org.apache.drill.exec.util.TestUtilities;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
@@ -39,25 +37,27 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
 
+import java.io.File;
 import java.io.IOException;
+import java.nio.file.Paths;
 import java.util.List;
 import java.util.Properties;
 import java.util.UUID;
 
+import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_PLUGIN_NAME;
+import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_TMP_SCHEMA;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 
-@RunWith(JMockit.class)
 @Category(SqlTest.class)
 public class TestCTTAS extends BaseTestQuery {
 
   private static final UUID session_id = UUID.nameUUIDFromBytes("sessionId".getBytes());
   private static final String temp2_wk = "tmp2";
-  private static final String temp2_schema = String.format("%s.%s", TEST_SCHEMA, temp2_wk);
+  private static final String temp2_schema = String.format("%s.%s", DFS_PLUGIN_NAME, temp2_wk);
 
   private static FileSystem fs;
   private static FsPermission expectedFolderPermission;
@@ -67,14 +67,16 @@ public class TestCTTAS extends BaseTestQuery {
   public static void init() throws Exception {
     MockUp<UUID> uuidMockUp = mockRandomUUID(session_id);
     Properties testConfigurations = cloneDefaultTestConfigProperties();
-    testConfigurations.put(ExecConstants.DEFAULT_TEMPORARY_WORKSPACE, TEMP_SCHEMA);
+    testConfigurations.put(ExecConstants.DEFAULT_TEMPORARY_WORKSPACE, DFS_TMP_SCHEMA);
     updateTestCluster(1, DrillConfig.create(testConfigurations));
     uuidMockUp.tearDown();
 
+    File tmp2 = dirTestWatcher.makeSubDir(Paths.get("tmp2"));
+
     StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
-    FileSystemConfig pluginConfig = (FileSystemConfig) pluginRegistry.getPlugin(TEST_SCHEMA).getConfig();
-    pluginConfig.workspaces.put(temp2_wk, new WorkspaceConfig(TestUtilities.createTempDir(), true, null));
-    pluginRegistry.createOrUpdate(TEST_SCHEMA, pluginConfig, true);
+    FileSystemConfig pluginConfig = (FileSystemConfig) pluginRegistry.getPlugin(DFS_PLUGIN_NAME).getConfig();
+    pluginConfig.workspaces.put(temp2_wk, new WorkspaceConfig(tmp2.getAbsolutePath(), true, null));
+    pluginRegistry.createOrUpdate(DFS_PLUGIN_NAME, pluginConfig, true);
 
     fs = getLocalFileSystem();
     expectedFolderPermission = new FsPermission(StorageStrategy.TEMPORARY.getFolderPermission());
@@ -93,7 +95,7 @@ public class TestCTTAS extends BaseTestQuery {
   @Test
   public void testSyntax() throws Exception {
     test("create TEMPORARY table temporary_keyword as select 1 from (values(1))");
-    test("create TEMPORARY table %s.temporary_keyword_with_wk as select 1 from (values(1))", TEMP_SCHEMA);
+    test("create TEMPORARY table %s.temporary_keyword_with_wk as select 1 from (values(1))", DFS_TMP_SCHEMA);
   }
 
   @Test
@@ -118,7 +120,7 @@ public class TestCTTAS extends BaseTestQuery {
         testBuilder()
             .sqlQuery("select * from %s", temporaryTableName)
             .unOrdered()
-            .sqlBaselineQuery("select * from %s.%s", TEMP_SCHEMA, temporaryTableName)
+            .sqlBaselineQuery("select * from %s.%s", DFS_TMP_SCHEMA, temporaryTableName)
             .go();
       }
     } finally {
@@ -148,7 +150,7 @@ public class TestCTTAS extends BaseTestQuery {
   @Test
   public void testResolveTemporaryTableWithPartialSchema() throws Exception {
     String temporaryTableName = "temporary_table_with_partial_schema";
-    test("use %s", TEST_SCHEMA);
+    test("use %s", DFS_PLUGIN_NAME);
     test("create temporary table tmp.%s as select 'A' as c1 from (values(1))", temporaryTableName);
 
     testBuilder()
@@ -176,8 +178,7 @@ public class TestCTTAS extends BaseTestQuery {
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString(String.format(
           "VALIDATION ERROR: Temporary tables are not allowed to be created / dropped " +
-              "outside of default temporary workspace [%s].",
-          TEMP_SCHEMA)));
+              "outside of default temporary workspace [%s].", DFS_TMP_SCHEMA)));
       throw e;
     }
   }
@@ -191,7 +192,7 @@ public class TestCTTAS extends BaseTestQuery {
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString(String.format(
          "VALIDATION ERROR: A table or view with given name [%s]" +
-             " already exists in schema [%s]", temporaryTableName, TEMP_SCHEMA)));
+             " already exists in schema [%s]", temporaryTableName, DFS_TMP_SCHEMA)));
       throw e;
     }
   }
@@ -205,7 +206,7 @@ public class TestCTTAS extends BaseTestQuery {
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString(String.format(
           "VALIDATION ERROR: A table or view with given name [%s]" +
-              " already exists in schema [%s]", temporaryTableName.toUpperCase(), TEMP_SCHEMA)));
+              " already exists in schema [%s]", temporaryTableName.toUpperCase(), DFS_TMP_SCHEMA)));
       throw e;
     }
   }
@@ -214,12 +215,12 @@ public class TestCTTAS extends BaseTestQuery {
   public void testCreateWhenTemporaryTableExistsWithSchema() throws Exception {
     String temporaryTableName = "temporary_table_exists_with_schema";
     try {
-      test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", TEMP_SCHEMA, temporaryTableName);
-      test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", TEMP_SCHEMA, temporaryTableName);
+      test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
+      test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString(String.format(
           "VALIDATION ERROR: A table or view with given name [%s]" +
-              " already exists in schema [%s]", temporaryTableName, TEMP_SCHEMA)));
+              " already exists in schema [%s]", temporaryTableName, DFS_TMP_SCHEMA)));
       throw e;
     }
   }
@@ -228,12 +229,12 @@ public class TestCTTAS extends BaseTestQuery {
   public void testCreateWhenPersistentTableExists() throws Exception {
     String persistentTableName = "persistent_table_exists";
     try {
-      test("create table %s.%s as select 'A' as c1 from (values(1))", TEMP_SCHEMA, persistentTableName);
+      test("create table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, persistentTableName);
       test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", persistentTableName);
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString(String.format(
           "VALIDATION ERROR: A table or view with given name [%s]" +
-              " already exists in schema [%s]", persistentTableName, TEMP_SCHEMA)));
+              " already exists in schema [%s]", persistentTableName, DFS_TMP_SCHEMA)));
       throw e;
     }
   }
@@ -242,12 +243,12 @@ public class TestCTTAS extends BaseTestQuery {
   public void testCreateWhenViewExists() throws Exception {
     String viewName = "view_exists";
     try {
-      test("create view %s.%s as select 'A' as c1 from (values(1))", TEMP_SCHEMA, viewName);
+      test("create view %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, viewName);
       test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", viewName);
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString(String.format(
           "VALIDATION ERROR: A table or view with given name [%s]" +
-              " already exists in schema [%s]", viewName, TEMP_SCHEMA)));
+              " already exists in schema [%s]", viewName, DFS_TMP_SCHEMA)));
       throw e;
     }
   }
@@ -257,11 +258,11 @@ public class TestCTTAS extends BaseTestQuery {
     String temporaryTableName = "temporary_table_exists_before_persistent";
     try {
       test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
-      test("create table %s.%s as select 'A' as c1 from (values(1))", TEMP_SCHEMA, temporaryTableName);
+      test("create table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString(String.format(
           "VALIDATION ERROR: A table or view with given name [%s]" +
-              " already exists in schema [%s]", temporaryTableName, TEMP_SCHEMA)));
+              " already exists in schema [%s]", temporaryTableName, DFS_TMP_SCHEMA)));
       throw e;
     }
   }
@@ -271,11 +272,11 @@ public class TestCTTAS extends BaseTestQuery {
     String temporaryTableName = "temporary_table_exists_before_view";
     try {
       test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
-      test("create view %s.%s as select 'A' as c1 from (values(1))", TEMP_SCHEMA, temporaryTableName);
+      test("create view %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString(String.format(
           "VALIDATION ERROR: A non-view table with given name [%s] already exists in schema [%s]",
-          temporaryTableName, TEMP_SCHEMA)));
+          temporaryTableName, DFS_TMP_SCHEMA)));
       throw e;
     }
   }
@@ -348,7 +349,7 @@ public class TestCTTAS extends BaseTestQuery {
     test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
 
     try {
-      test("create view %s.view_with_temp_table as select * from %s", TEMP_SCHEMA, temporaryTableName);
+      test("create view %s.view_with_temp_table as select * from %s", DFS_TMP_SCHEMA, temporaryTableName);
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString(String.format(
           "VALIDATION ERROR: Temporary tables usage is disallowed. Used temporary table name: [%s]", temporaryTableName)));
@@ -360,7 +361,7 @@ public class TestCTTAS extends BaseTestQuery {
   public void testTemporaryTablesInViewExpansionLogic() throws Exception {
     String tableName = "table_for_expansion_logic_test";
     String viewName = "view_for_expansion_logic_test";
-    test("use %s", TEMP_SCHEMA);
+    test("use %s", DFS_TMP_SCHEMA);
     test("create table %s as select 'TABLE' as c1 from (values(1))", tableName);
     test("create view %s as select * from %s", viewName, tableName);
 
@@ -398,10 +399,10 @@ public class TestCTTAS extends BaseTestQuery {
   @Test
   public void testManualDropWithSchema() throws Exception {
     String temporaryTableName = "temporary_table_to_drop_with_schema";
-    test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", TEMP_SCHEMA, temporaryTableName);
+    test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
 
     testBuilder()
-        .sqlQuery("drop table %s.%s", TEMP_SCHEMA, temporaryTableName)
+        .sqlQuery("drop table %s.%s", DFS_TMP_SCHEMA, temporaryTableName)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(true, String.format("Temporary table [%s] dropped", temporaryTableName))
@@ -414,11 +415,11 @@ public class TestCTTAS extends BaseTestQuery {
     test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
 
     testBuilder()
-        .sqlQuery("drop view if exists %s.%s", TEMP_SCHEMA, temporaryTableName)
+        .sqlQuery("drop view if exists %s.%s", DFS_TMP_SCHEMA, temporaryTableName)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(false, String.format("View [%s] not found in schema [%s].",
-            temporaryTableName, TEMP_SCHEMA))
+            temporaryTableName, DFS_TMP_SCHEMA))
         .go();
   }
 
@@ -428,10 +429,10 @@ public class TestCTTAS extends BaseTestQuery {
     test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
 
     try {
-      test("drop view %s.%s", TEMP_SCHEMA, temporaryTableName);
+      test("drop view %s.%s", DFS_TMP_SCHEMA, temporaryTableName);
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString(String.format(
-              "VALIDATION ERROR: Unknown view [%s] in schema [%s]", temporaryTableName, TEMP_SCHEMA)));
+              "VALIDATION ERROR: Unknown view [%s] in schema [%s]", temporaryTableName, DFS_TMP_SCHEMA)));
       throw e;
     }
   }
@@ -449,7 +450,7 @@ public class TestCTTAS extends BaseTestQuery {
   }
 
   private List<Path> findTemporaryTableLocation(String tableName) throws IOException {
-    Path sessionTempLocation = new Path(getDfsTestTmpSchemaLocation(), session_id.toString());
+    Path sessionTempLocation = new Path(dirTestWatcher.getDfsTestTmpDir().getAbsolutePath(), session_id.toString());
     assertTrue("Session temporary location must exist", fs.exists(sessionTempLocation));
     assertEquals("Session temporary location permission should match",
         expectedFolderPermission, fs.getFileStatus(sessionTempLocation).getPermission());
@@ -465,5 +466,4 @@ public class TestCTTAS extends BaseTestQuery {
     }
     return matchingPath;
   }
-
 }
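
The TestCTTAS setup above applies the same idea to workspace registration: the extra tmp2 workspace is rooted in a sub-directory created through dirTestWatcher.makeSubDir(...) instead of a stray directory from TestUtilities.createTempDir(), so it lives and dies with the test run. A condensed sketch of that setup step follows, trimmed from the hunk above to just the workspace registration; the class name is illustrative.

import java.io.File;
import java.nio.file.Paths;

import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.store.dfs.FileSystemConfig;
import org.apache.drill.exec.store.dfs.WorkspaceConfig;
import org.apache.drill.test.BaseTestQuery;
import org.junit.BeforeClass;

import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_PLUGIN_NAME;

public class ExampleExtraWorkspaceTest extends BaseTestQuery {

  @BeforeClass
  public static void registerExtraWorkspace() throws Exception {
    // Create the workspace directory under the watcher-managed test root.
    File tmp2 = dirTestWatcher.makeSubDir(Paths.get("tmp2"));

    // Register it as an additional writable workspace on the dfs plugin.
    StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
    FileSystemConfig pluginConfig = (FileSystemConfig) pluginRegistry.getPlugin(DFS_PLUGIN_NAME).getConfig();
    pluginConfig.workspaces.put("tmp2", new WorkspaceConfig(tmp2.getAbsolutePath(), true, null));
    pluginRegistry.createOrUpdate(DFS_PLUGIN_NAME, pluginConfig, true);
  }
}
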

