drill-commits mailing list archives

From: prog...@apache.org
Subject: [15/22] drill git commit: DRILL-5783, DRILL-5841, DRILL-5894: Rationalize test temp directories
Date: Wed, 15 Nov 2017 01:47:01 GMT
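
The diffs below all apply the same migration: absolute test paths built from TEST_RES_PATH and the dfs_test workspace are replaced by relative paths against the cp (classpath) and dfs (watcher-managed) schemas. A minimal sketch of the new shape, assuming only the APIs visible in the diffs (the class name and queries here are illustrative, not part of the commit):

import java.nio.file.Paths;

import org.apache.drill.test.BaseTestQuery;
import org.junit.BeforeClass;
import org.junit.Test;

public class ExampleTempDirTest extends BaseTestQuery {

  @BeforeClass
  public static void setupFiles() {
    // Copy src/test/resources/agg under the dirTestWatcher root, which the
    // test framework exposes as the dfs workspace.
    dirTestWatcher.copyResourceToRoot(Paths.get("agg"));
  }

  @Test
  public void exampleQuery() throws Exception {
    // Relative paths only; test() formats its arguments itself, so no
    // String.format with a machine-specific path is needed.
    test("select max(foo) from dfs.`agg/bugs/drill3069` where foo = %d", 2);
    test("select count(*) from cp.`employee.json`");
  }
}
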
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
index 6ae8ae1..ebad2f7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.fn.impl;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.apache.commons.lang3.tuple.Pair;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.PlanTestBase;
 import org.apache.drill.categories.PlannerTest;
@@ -28,9 +28,9 @@ import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos;
-import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.proto.UserBitShared;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -38,13 +38,17 @@ import org.junit.experimental.categories.Category;
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
+import java.nio.file.Paths;
 import java.util.List;
 import java.util.Map;
 
 @Category({SqlFunctionTest.class, OperatorTest.class, PlannerTest.class})
 public class TestAggregateFunctions extends BaseTestQuery {
 
-  private static final String TEST_RES_PATH =   TestTools.getWorkingPath() + "/src/test/resources";
+  @BeforeClass
+  public static void setupFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("agg"));
+  }
 
   /*
    * Test checks the count of a nullable column within a map
@@ -54,7 +58,7 @@ public class TestAggregateFunctions extends BaseTestQuery {
   @Test
   public void testCountOnNullableColumn() throws Exception {
     testBuilder()
-        .sqlQuery("select count(t.x.y)  as cnt1, count(`integer`) as cnt2 from cp.`/jsoninput/input2.json` t")
+        .sqlQuery("select count(t.x.y)  as cnt1, count(`integer`) as cnt2 from cp.`jsoninput/input2.json` t")
         .ordered()
         .baselineColumns("cnt1", "cnt2")
         .baselineValues(3l, 4l)
@@ -352,23 +356,23 @@ public class TestAggregateFunctions extends BaseTestQuery {
    */
   @Test
   public void drill3069() throws Exception {
-    final String query = "select max(foo) col1 from dfs_test.`%s/agg/bugs/drill3069` where foo = %d";
+    final String query = "select max(foo) col1 from dfs.`agg/bugs/drill3069` where foo = %d";
     testBuilder()
-        .sqlQuery(String.format(query, TEST_RES_PATH, 2))
+        .sqlQuery(query, 2)
         .unOrdered()
         .baselineColumns("col1")
         .baselineValues(2l)
         .go();
 
     testBuilder()
-        .sqlQuery(String.format(query, TEST_RES_PATH, 4))
+        .sqlQuery(query, 4)
         .unOrdered()
         .baselineColumns("col1")
         .baselineValues(4l)
         .go();
 
     testBuilder()
-        .sqlQuery(String.format(query, TEST_RES_PATH, 6))
+        .sqlQuery(query, 6)
         .unOrdered()
         .baselineColumns("col1")
         .baselineValues(6l)
@@ -466,25 +470,22 @@ public class TestAggregateFunctions extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   // GROUP BY System functions in csv, parquet, json table.
   public void testGroupBySystemFuncFileSystemTable() throws Exception {
-    final String query = String.format("select count(*) as cnt from dfs_test.`%s/nation/nation.tbl` group by CURRENT_DATE", TEST_RES_PATH);
     testBuilder()
-        .sqlQuery(query)
+        .sqlQuery("select count(*) as cnt from cp.`nation/nation.tbl` group by CURRENT_DATE")
         .unOrdered()
         .baselineColumns("cnt")
         .baselineValues(25l)
         .build().run();
 
-    final String query2 = "select count(*) as cnt from cp.`tpch/nation.parquet` group by CURRENT_DATE";
     testBuilder()
-        .sqlQuery(query2)
+        .sqlQuery("select count(*) as cnt from cp.`tpch/nation.parquet` group by CURRENT_DATE")
         .unOrdered()
         .baselineColumns("cnt")
         .baselineValues(25l)
         .build().run();
 
-    final String query3 = "select count(*) as cnt from cp.`employee.json` group by CURRENT_DATE";
     testBuilder()
-        .sqlQuery(query3)
+        .sqlQuery("select count(*) as cnt from cp.`employee.json` group by CURRENT_DATE")
         .unOrdered()
         .baselineColumns("cnt")
         .baselineValues(1155l)
@@ -493,7 +494,7 @@ public class TestAggregateFunctions extends BaseTestQuery {
 
   @Test
   public void test4443() throws Exception {
-    test("SELECT MIN(columns[1]) FROM dfs_test.`%s/agg/4443.csv` GROUP BY columns[0]", TEST_RES_PATH);
+    test("SELECT MIN(columns[1]) FROM cp.`agg/4443.csv` GROUP BY columns[0]");
   }
 
   @Test
@@ -590,30 +591,21 @@ public class TestAggregateFunctions extends BaseTestQuery {
   @Test // DRILL-4264
   @Category(UnlikelyTest.class)
   public void testCountOnFieldWithDots() throws Exception {
-    File directory = new File(BaseTestQuery.getTempDir("json/input"));
-    try {
-      directory.mkdirs();
-      String fileName = "table.json";
-      try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(directory, fileName)))) {
-        writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
-      }
-
-      String query = String.format("select count(t.m.`a.b`) as a,\n" +
-                                          "count(t.m.a.b) as b,\n" +
-                                          "count(t.m['a.b']) as c,\n" +
-                                          "count(t.rk.q) as d,\n" +
-                                          "count(t.`rk.q`) as e\n" +
-                                    "from dfs_test.`%s/%s` t",
-                                  directory.toPath().toString(), fileName);
-      testBuilder()
-        .sqlQuery(query)
-        .unOrdered()
-        .baselineColumns("a", "b", "c", "d", "e")
-        .baselineValues(1L, 1L, 1L, 0L, 1L)
-        .go();
-
-    } finally {
-      org.apache.commons.io.FileUtils.deleteQuietly(directory);
+    String fileName = "table.json";
+    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
+      writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
     }
+
+    testBuilder()
+      .sqlQuery("select count(t.m.`a.b`) as a,\n" +
+        "count(t.m.a.b) as b,\n" +
+        "count(t.m['a.b']) as c,\n" +
+        "count(t.rk.q) as d,\n" +
+        "count(t.`rk.q`) as e\n" +
+        "from dfs.`%s` t", fileName)
+      .unOrdered()
+      .baselineColumns("a", "b", "c", "d", "e")
+      .baselineValues(1L, 1L, 1L, 0L, 1L)
+      .go();
   }
 }

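The testCountOnFieldWithDots rewrite above illustrates the generated-file variant of the pattern: files written under dirTestWatcher.getRootDir() need no manual cleanup, because the watcher owns the directory's lifecycle. A sketch under those assumptions, with the file name and baseline taken from the diff and the JSON row trimmed for brevity:

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;

import org.apache.drill.test.BaseTestQuery;
import org.junit.Test;

public class ExampleGeneratedFileTest extends BaseTestQuery {

  @Test
  public void countFieldWithDots() throws Exception {
    String fileName = "table.json";
    // Write directly into the watcher-managed root; the watcher deletes it,
    // so the old try/finally plus FileUtils.deleteQuietly dance goes away.
    try (BufferedWriter writer = new BufferedWriter(
        new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
      writer.write("{\"rk.q\": \"a\"}");
    }

    // The file is immediately addressable through the dfs workspace.
    testBuilder()
        .sqlQuery("select count(t.`rk.q`) as e from dfs.`%s` t", fileName)
        .unOrdered()
        .baselineColumns("e")
        .baselineValues(1L)
        .go();
  }
}
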
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastEmptyStrings.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastEmptyStrings.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastEmptyStrings.java
index 29cff1f..576dd48 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastEmptyStrings.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastEmptyStrings.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,10 +18,9 @@
 
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
-import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -30,76 +29,68 @@ import org.junit.experimental.categories.Category;
 
 @Category({UnlikelyTest.class, SqlFunctionTest.class})
 public class TestCastEmptyStrings extends BaseTestQuery {
-    // enable decimal data type
-    @BeforeClass
-    public static void enableDecimalDataType() throws Exception {
-        test(String.format("alter session set `%s` = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
-    }
-
-    @AfterClass
-    public static void disableDecimalDataType() throws Exception {
-        test(String.format("alter session set `%s` = false", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
-    }
-
-    @Test // see DRILL-1874
-    public void testCastInputTypeNullableVarCharToNumeric() throws Exception {
-        String root = FileUtils.getResourceAsFile("/emptyStrings.csv").toURI().toString();
-
-        // Enable the new cast functions (cast empty string "" to null)
-        test("alter system set `drill.exec.functions.cast_empty_string_to_null` = true;");
-
-        // Test Optional VarChar
-        test(String.format("select cast(columns[0] as int) from dfs_test.`%s`;", root));
-        test(String.format("select cast(columns[0] as bigint) from dfs_test.`%s`;", root));
-        test(String.format("select cast(columns[0] as float) from dfs_test.`%s`;", root));
-        test(String.format("select cast(columns[0] as double) from dfs_test.`%s`;", root));
-        test("alter system set `drill.exec.functions.cast_empty_string_to_null` = false;");
-    }
-
-    @Test // see DRILL-1874
-    public void testCastInputTypeNonNullableVarCharToNumeric() throws Exception {
-        String root = FileUtils.getResourceAsFile("/emptyStrings.csv").toURI().toString();
-
-        // Enable the new cast functions (cast empty string "" to null)
-        test("alter system set `drill.exec.functions.cast_empty_string_to_null` = true;");
-        // Test Required VarChar
-        test(String.format("select cast('' as int) from dfs_test.`%s`;", root));
-        test(String.format("select cast('' as bigint) from dfs_test.`%s`;", root));
-        test(String.format("select cast('' as float) from dfs_test.`%s`;", root));
-        test(String.format("select cast('' as double) from dfs_test.`%s`;", root));
-        test("alter system set `drill.exec.functions.cast_empty_string_to_null` = false;");
-    }
-
-    @Test // see DRILL-1874
-    public void testCastInputTypeNullableVarCharToDecimal() throws Exception {
-        String root = FileUtils.getResourceAsFile("/emptyStrings.csv").toURI().toString();
-
-        // Enable the new cast functions (cast empty string "" to null)
-        test("alter system set `drill.exec.functions.cast_empty_string_to_null` = true;");
-
-        // Test Optional VarChar
-        test(String.format("select cast(columns[0] as decimal) from dfs_test.`%s` where cast(columns[0] as decimal) is null;", root));
-        test(String.format("select cast(columns[0] as decimal(9)) from dfs_test.`%s`;", root));
-        test(String.format("select cast(columns[0] as decimal(18)) from dfs_test.`%s`;", root));
-        test(String.format("select cast(columns[0] as decimal(28)) from dfs_test.`%s`;", root));
-        test(String.format("select cast(columns[0] as decimal(38)) from dfs_test.`%s`;", root));
-
-        test("alter system set `drill.exec.functions.cast_empty_string_to_null` = false;");
-    }
-
-    @Test // see DRILL-1874
-    public void testCastInputTypeNonNullableVarCharToDecimal() throws Exception {
-        String root = FileUtils.getResourceAsFile("/emptyStrings.csv").toURI().toString();
-
-        // Enable the new cast functions (cast empty string "" to null)
-        test("alter system set `drill.exec.functions.cast_empty_string_to_null` = true;");
-
-        // Test Required VarChar
-        test(String.format("select cast('' as decimal) from dfs_test.`%s` where cast('' as decimal) is null;", root));
-        test(String.format("select cast('' as decimal(18)) from dfs_test.`%s`;", root));
-        test(String.format("select cast('' as decimal(28)) from dfs_test.`%s`;", root));
-        test(String.format("select cast('' as decimal(38)) from dfs_test.`%s`;", root));
-
-        test("alter system set `drill.exec.functions.cast_empty_string_to_null` = false;");
-    }
+  // enable decimal data type
+  @BeforeClass
+  public static void enableDecimalDataType() throws Exception {
+    test("alter session set `%s` = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
+  }
+
+  @AfterClass
+  public static void disableDecimalDataType() throws Exception {
+    test("alter session set `%s` = false", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
+  }
+
+  @Test // see DRILL-1874
+  public void testCastInputTypeNullableVarCharToNumeric() throws Exception {
+    // Enable the new cast functions (cast empty string "" to null)
+    test("alter system set `drill.exec.functions.cast_empty_string_to_null` = true;");
+
+    // Test Optional VarChar
+    test("select cast(columns[0] as int) from cp.`emptyStrings.csv`");
+    test("select cast(columns[0] as bigint) from cp.`emptyStrings.csv`");
+    test("select cast(columns[0] as float) from cp.`emptyStrings.csv`");
+    test("select cast(columns[0] as double) from cp.`emptyStrings.csv`");
+    test("alter system set `drill.exec.functions.cast_empty_string_to_null` = false;");
+  }
+
+  @Test // see DRILL-1874
+  public void testCastInputTypeNonNullableVarCharToNumeric() throws Exception {
+    // Enable the new cast functions (cast empty string "" to null)
+    test("alter system set `drill.exec.functions.cast_empty_string_to_null` = true;");
+    // Test Required VarChar
+    test("select cast('' as int) from cp.`emptyStrings.csv`");
+    test("select cast('' as bigint) from cp.`emptyStrings.csv`");
+    test("select cast('' as float) from cp.`emptyStrings.csv`");
+    test("select cast('' as double) from cp.`emptyStrings.csv`");
+    test("alter system set `drill.exec.functions.cast_empty_string_to_null` = false;");
+  }
+
+  @Test // see DRILL-1874
+  public void testCastInputTypeNullableVarCharToDecimal() throws Exception {
+    // Enable the new cast functions (cast empty string "" to null)
+    test("alter system set `drill.exec.functions.cast_empty_string_to_null` = true;");
+
+    // Test Optional VarChar
+    test("select cast(columns[0] as decimal) from cp.`emptyStrings.csv` where cast(columns[0] as decimal) is null");
+    test("select cast(columns[0] as decimal(9)) from cp.`emptyStrings.csv`");
+    test("select cast(columns[0] as decimal(18)) from cp.`emptyStrings.csv`");
+    test("select cast(columns[0] as decimal(28)) from cp.`emptyStrings.csv`");
+    test("select cast(columns[0] as decimal(38)) from cp.`emptyStrings.csv`");
+
+    test("alter system set `drill.exec.functions.cast_empty_string_to_null` = false;");
+  }
+
+  @Test // see DRILL-1874
+  public void testCastInputTypeNonNullableVarCharToDecimal() throws Exception {
+    // Enable the new cast functions (cast empty string "" to null)
+    test("alter system set `drill.exec.functions.cast_empty_string_to_null` = true;");
+
+    // Test Required VarChar
+    test("select cast('' as decimal) from cp.`emptyStrings.csv` where cast('' as decimal) is null");
+    test("select cast('' as decimal(18)) from cp.`emptyStrings.csv`");
+    test("select cast('' as decimal(28)) from cp.`emptyStrings.csv`");
+    test("select cast('' as decimal(38)) from cp.`emptyStrings.csv`");
+
+    test("alter system set `drill.exec.functions.cast_empty_string_to_null` = false;");
+  }
 }

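Two smaller changes recur in TestCastEmptyStrings above: test() now takes printf-style arguments, so option toggles no longer need a String.format wrapper, and bundled resources such as emptyStrings.csv are read through the cp schema instead of being resolved to a file URI. A sketch reduced to one case, using only option keys and queries that appear in the diff:

import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.test.BaseTestQuery;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class ExampleOptionToggleTest extends BaseTestQuery {

  @BeforeClass
  public static void enableDecimalDataType() throws Exception {
    // test() formats its arguments itself; no String.format wrapper.
    test("alter session set `%s` = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
  }

  @AfterClass
  public static void disableDecimalDataType() throws Exception {
    test("alter session set `%s` = false", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
  }

  @Test
  public void castEmptyString() throws Exception {
    // Empty strings cast to null only while this option is on.
    test("alter system set `drill.exec.functions.cast_empty_string_to_null` = true;");
    // Classpath resource, addressed relative to src/test/resources.
    test("select cast(columns[0] as int) from cp.`emptyStrings.csv`");
    test("alter system set `drill.exec.functions.cast_empty_string_to_null` = false;");
  }
}
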
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
index 4aeb396..14a49b5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
@@ -20,10 +20,9 @@ package org.apache.drill.exec.fn.impl;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import mockit.integration.junit4.JMockit;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
-import org.apache.drill.common.util.FileUtils;
 import org.joda.time.DateTime;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -48,13 +47,8 @@ public class TestCastFunctions extends BaseTestQuery {
 
   @Test // DRILL-2827
   public void testImplicitCastStringToBoolean() throws Exception {
-    String boolTable= FileUtils.getResourceAsFile("/store/json/booleanData.json").toURI().toString();
-
-    String query = String.format(
-        "(select * from dfs_test.`%s` where key = 'true' or key = 'false')", boolTable);
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("(select * from cp.`store/json/booleanData.json` where key = 'true' or key = 'false')")
       .unOrdered()
       .baselineColumns("key")
       .baselineValues(true)
@@ -114,18 +108,18 @@ public class TestCastFunctions extends BaseTestQuery {
 
     for (float value : values.keySet()) {
       try {
-        test("create table dfs_test.tmp.table_with_float as\n" +
+        test("create table dfs.tmp.table_with_float as\n" +
               "(select cast(%1$s as float) c1 from (values(1)))", value);
 
         testBuilder()
-          .sqlQuery("select cast(c1 as int) col1 from dfs_test.tmp.table_with_float")
+          .sqlQuery("select cast(c1 as int) col1 from dfs.tmp.table_with_float")
           .unOrdered()
           .baselineColumns("col1")
           .baselineValues(values.get(value))
           .build()
           .run();
       } finally {
-        test("drop table if exists dfs_test.tmp.table_with_float");
+        test("drop table if exists dfs.tmp.table_with_float");
       }
     }
   }
@@ -144,20 +138,20 @@ public class TestCastFunctions extends BaseTestQuery {
 
     for (int value : values) {
       try {
-        test("create table dfs_test.tmp.table_with_int as\n" +
+        test("create table dfs.tmp.table_with_int as\n" +
               "(select cast(%1$s as int) c1 from (values(1)))", value);
 
         testBuilder()
           .sqlQuery("select cast(c1 as float) col1,\n" +
                             "cast(c1 as double) col2\n" +
-                    "from dfs_test.tmp.table_with_int")
+                    "from dfs.tmp.table_with_int")
           .unOrdered()
           .baselineColumns("col1", "col2")
           .baselineValues((float) value, (double) value)
           .build()
           .run();
       } finally {
-        test("drop table if exists dfs_test.tmp.table_with_int");
+        test("drop table if exists dfs.tmp.table_with_int");
       }
     }
   }
@@ -183,18 +177,18 @@ public class TestCastFunctions extends BaseTestQuery {
 
     for (float value : values.keySet()) {
       try {
-        test("create table dfs_test.tmp.table_with_float as\n" +
+        test("create table dfs.tmp.table_with_float as\n" +
               "(select cast(%1$s as float) c1 from (values(1)))", value);
 
         testBuilder()
-          .sqlQuery("select cast(c1 as bigInt) col1 from dfs_test.tmp.table_with_float")
+          .sqlQuery("select cast(c1 as bigInt) col1 from dfs.tmp.table_with_float")
           .unOrdered()
           .baselineColumns("col1")
           .baselineValues(values.get(value))
           .build()
           .run();
       } finally {
-        test("drop table if exists dfs_test.tmp.table_with_float");
+        test("drop table if exists dfs.tmp.table_with_float");
       }
     }
   }
@@ -215,20 +209,20 @@ public class TestCastFunctions extends BaseTestQuery {
 
     for (long value : values) {
       try {
-        test("create table dfs_test.tmp.table_with_bigint as\n" +
+        test("create table dfs.tmp.table_with_bigint as\n" +
               "(select cast(%1$s as bigInt) c1 from (values(1)))", value);
 
         testBuilder()
           .sqlQuery("select cast(c1 as float) col1,\n" +
                             "cast(c1 as double) col2\n" +
-                    "from dfs_test.tmp.table_with_bigint")
+                    "from dfs.tmp.table_with_bigint")
           .unOrdered()
           .baselineColumns("col1", "col2")
           .baselineValues((float) value, (double) value)
           .build()
           .run();
       } finally {
-        test("drop table if exists dfs_test.tmp.table_with_bigint");
+        test("drop table if exists dfs.tmp.table_with_bigint");
       }
     }
   }
@@ -254,18 +248,18 @@ public class TestCastFunctions extends BaseTestQuery {
 
     for (double value : values.keySet()) {
       try {
-        test("create table dfs_test.tmp.table_with_double as\n" +
+        test("create table dfs.tmp.table_with_double as\n" +
               "(select cast(%1$s as double) c1 from (values(1)))", value);
 
         testBuilder()
-          .sqlQuery("select cast(c1 as int) col1 from dfs_test.tmp.table_with_double")
+          .sqlQuery("select cast(c1 as int) col1 from dfs.tmp.table_with_double")
           .unOrdered()
           .baselineColumns("col1")
           .baselineValues(values.get(value))
           .build()
           .run();
       } finally {
-        test("drop table if exists dfs_test.tmp.table_with_double");
+        test("drop table if exists dfs.tmp.table_with_double");
       }
     }
   }
@@ -291,18 +285,18 @@ public class TestCastFunctions extends BaseTestQuery {
     values.put(Double.MIN_VALUE, 0L);
     for (double value : values.keySet()) {
       try {
-        test("create table dfs_test.tmp.table_with_double as\n" +
+        test("create table dfs.tmp.table_with_double as\n" +
               "(select cast(%1$s as double) c1 from (values(1)))", value);
 
         testBuilder()
-          .sqlQuery("select cast(c1 as bigInt) col1 from dfs_test.tmp.table_with_double")
+          .sqlQuery("select cast(c1 as bigInt) col1 from dfs.tmp.table_with_double")
           .unOrdered()
           .baselineColumns("col1")
           .baselineValues(values.get(value))
           .build()
           .run();
       } finally {
-        test("drop table if exists dfs_test.tmp.table_with_double");
+        test("drop table if exists dfs.tmp.table_with_double");
       }
     }
   }
@@ -320,20 +314,20 @@ public class TestCastFunctions extends BaseTestQuery {
 
     for (int value : values) {
       try {
-        test("create table dfs_test.tmp.table_with_int as\n" +
+        test("create table dfs.tmp.table_with_int as\n" +
               "(select cast(%1$s as int) c1, cast(%1$s as bigInt) c2 from (values(1)))", value);
 
         testBuilder()
           .sqlQuery("select cast(c1 as bigint) col1,\n" +
                             "cast(c1 as int) col2\n" +
-                    "from dfs_test.tmp.table_with_int")
+                    "from dfs.tmp.table_with_int")
           .unOrdered()
           .baselineColumns("col1", "col2")
           .baselineValues((long) value, value)
           .build()
           .run();
       } finally {
-        test("drop table if exists dfs_test.tmp.table_with_int");
+        test("drop table if exists dfs.tmp.table_with_int");
       }
     }
   }
@@ -356,7 +350,7 @@ public class TestCastFunctions extends BaseTestQuery {
 
     for (double value : values) {
       try {
-        test("create table dfs_test.tmp.table_with_float as\n" +
+        test("create table dfs.tmp.table_with_float as\n" +
               "(select cast(%1$s as float) c1,\n" +
                       "cast(%1$s as double) c2\n" +
               "from (values(1)))", value);
@@ -364,14 +358,14 @@ public class TestCastFunctions extends BaseTestQuery {
         testBuilder()
           .sqlQuery("select cast(c1 as double) col1,\n" +
                             "cast(c2 as float) col2\n" +
-                    "from dfs_test.tmp.table_with_float")
+                    "from dfs.tmp.table_with_float")
           .unOrdered()
           .baselineColumns("col1", "col2")
           .baselineValues((double) ((float) (value)), (float) value)
           .build()
           .run();
       } finally {
-        test("drop table if exists dfs_test.tmp.table_with_float");
+        test("drop table if exists dfs.tmp.table_with_float");
       }
     }
   }
@@ -406,7 +400,7 @@ public class TestCastFunctions extends BaseTestQuery {
           .build()
           .run();
       } finally {
-        test("drop table if exists dfs_test.tmp.table_with_int");
+        test("drop table if exists dfs.tmp.table_with_int");
         test("alter session reset planner.enable_decimal_data_type");
       }
   }
@@ -429,7 +423,7 @@ public class TestCastFunctions extends BaseTestQuery {
         .build()
         .run();
     } finally {
-      test("drop table if exists dfs_test.tmp.table_with_int");
+      test("drop table if exists dfs.tmp.table_with_int");
       test("alter session reset planner.enable_decimal_data_type");
     }
   }
@@ -452,7 +446,7 @@ public class TestCastFunctions extends BaseTestQuery {
         .build()
         .run();
     } finally {
-      test("drop table if exists dfs_test.tmp.table_with_int");
+      test("drop table if exists dfs.tmp.table_with_int");
       test("alter session reset planner.enable_decimal_data_type");
     }
   }
@@ -460,7 +454,7 @@ public class TestCastFunctions extends BaseTestQuery {
   @Test // DRILL-4970
   public void testCastNegativeFloatToInt() throws Exception {
     try {
-      test("create table dfs_test.tmp.table_with_float as\n" +
+      test("create table dfs.tmp.table_with_float as\n" +
               "(select cast(-255.0 as double) as double_col,\n" +
                       "cast(-255.0 as float) as float_col\n" +
               "from (values(1)))");
@@ -473,7 +467,7 @@ public class TestCastFunctions extends BaseTestQuery {
       castTypes.add("int");
       castTypes.add("bigInt");
 
-      final String query = "select count(*) as c from dfs_test.tmp.table_with_float\n" +
+      final String query = "select count(*) as c from dfs.tmp.table_with_float\n" +
                             "where (cast(%1$s as %2$s) >= -255 and (%1$s <= -5)) or (%1$s <= -256)";
 
       for (String columnName : columnNames) {
@@ -488,7 +482,7 @@ public class TestCastFunctions extends BaseTestQuery {
         }
       }
     } finally {
-      test("drop table if exists dfs_test.tmp.table_with_float");
+      test("drop table if exists dfs.tmp.table_with_float");
     }
   }
 
@@ -497,7 +491,7 @@ public class TestCastFunctions extends BaseTestQuery {
     try {
       test("alter session set planner.enable_decimal_data_type = true");
 
-      test("create table dfs_test.tmp.table_with_decimal as" +
+      test("create table dfs.tmp.table_with_decimal as" +
               "(select cast(cast(manager_id as double) * (-1) as decimal(9, 0)) as decimal9_col,\n" +
                       "cast(cast(manager_id as double) * (-1) as decimal(18, 0)) as decimal18_col\n" +
               "from cp.`parquet/fixedlenDecimal.parquet` limit 1)");
@@ -506,7 +500,7 @@ public class TestCastFunctions extends BaseTestQuery {
       columnNames.add("decimal9_col");
       columnNames.add("decimal18_col");
 
-      final String query = "select count(*) as c from dfs_test.tmp.table_with_decimal\n" +
+      final String query = "select count(*) as c from dfs.tmp.table_with_decimal\n" +
                             "where (cast(%1$s as varchar) = '-124' and (%1$s <= -5)) or (%1$s <= -256)";
 
       for (String colName : columnNames) {
@@ -519,7 +513,7 @@ public class TestCastFunctions extends BaseTestQuery {
           .run();
       }
     } finally {
-      test("drop table if exists dfs_test.tmp.table_with_decimal");
+      test("drop table if exists dfs.tmp.table_with_decimal");
       test("alter session reset planner.enable_decimal_data_type");
     }
   }

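In TestCastFunctions the dfs_test.tmp to dfs.tmp rename keeps the usual create/drop discipline for scratch tables; only the workspace name changes. The shape of that pattern, reduced to a single value (1.0f is used here so the baseline holds under any rounding mode):

import org.apache.drill.test.BaseTestQuery;
import org.junit.Test;

public class ExampleTmpTableTest extends BaseTestQuery {

  @Test
  public void castFloatToInt() throws Exception {
    try {
      // Scratch tables live in the writable dfs.tmp workspace.
      test("create table dfs.tmp.table_with_float as\n" +
           "(select cast(%1$s as float) c1 from (values(1)))", 1.0f);

      testBuilder()
          .sqlQuery("select cast(c1 as int) col1 from dfs.tmp.table_with_float")
          .unOrdered()
          .baselineColumns("col1")
          .baselineValues(1)
          .build()
          .run();
    } finally {
      // Always drop, even on failure, so reruns start clean.
      test("drop table if exists dfs.tmp.table_with_float");
    }
  }
}
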
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestContextFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestContextFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestContextFunctions.java
index 6f4bd29..2e39966 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestContextFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestContextFunctions.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -75,22 +75,22 @@ public class TestContextFunctions extends BaseTestQuery {
   @Test
   public void currentSchemaUDFWithSingleLevelDefaultSchema() throws Exception {
     testBuilder()
-        .optionSettingQueriesForTestQuery("USE dfs_test")
+        .optionSettingQueriesForTestQuery("USE dfs")
         .sqlQuery("select current_schema from cp.`employee.json` limit 1")
         .unOrdered()
         .baselineColumns("current_schema")
-        .baselineValues("dfs_test")
+        .baselineValues("dfs")
         .go();
   }
 
   @Test
   public void currentSchemaUDFWithMultiLevelDefaultSchema() throws Exception {
     testBuilder()
-        .optionSettingQueriesForTestQuery("USE dfs_test.tmp")
+        .optionSettingQueriesForTestQuery("USE dfs.tmp")
         .sqlQuery("select current_schema from cp.`employee.json` limit 1")
         .unOrdered()
         .baselineColumns("current_schema")
-        .baselineValues("dfs_test.tmp")
+        .baselineValues("dfs.tmp")
         .go();
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCryptoFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCryptoFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCryptoFunctions.java
index 5cfdf84..77b4fb7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCryptoFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCryptoFunctions.java
@@ -17,7 +17,7 @@
 
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateAddFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateAddFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateAddFunctions.java
index 8d3c31a..0c8f6d2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateAddFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateAddFunctions.java
@@ -16,7 +16,7 @@
 */
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.joda.time.DateTime;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
index 622d8f4..b1adcc9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
@@ -22,7 +22,7 @@ import com.google.common.io.Files;
 import mockit.integration.junit4.JMockit;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.pop.PopUnitTestBase;
 import org.apache.drill.exec.record.RecordBatchLoader;
@@ -45,19 +45,16 @@ import static org.junit.Assert.assertTrue;
 @RunWith(JMockit.class)
 @Category({UnlikelyTest.class, SqlFunctionTest.class})
 public class TestDateFunctions extends PopUnitTestBase {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestDateFunctions.class);
-
 
   public void testCommon(String[] expectedResults, String physicalPlan, String resourceFile) throws Exception {
     try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
          Drillbit bit = new Drillbit(CONFIG, serviceSet);
          DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
-
       // run query.
       bit.run();
       client.connect();
       List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-        Files.toString(FileUtils.getResourceAsFile(physicalPlan), Charsets.UTF_8)
+        Files.toString(DrillFileUtils.getResourceAsFile(physicalPlan), Charsets.UTF_8)
           .replace("#{TEST_FILE}", resourceFile));
 
       RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
index c62dc65..9af4e52 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.joda.time.DateTime;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMathFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMathFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMathFunctions.java
index 172f35c..5a85319 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMathFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMathFunctions.java
@@ -25,7 +25,7 @@ import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -59,7 +59,7 @@ public class TestMathFunctions extends ExecTest {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/functions/simple_math_functions.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/functions/simple_math_functions.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, BitControl.PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));

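For the plan-driven tests (TestDateFunctions, TestMathFunctions, and the files that follow), the only change is the utility rename from org.apache.drill.common.util.FileUtils to DrillFileUtils, presumably to avoid the name clash with commons-io's FileUtils. A sketch of the call site, using the resource path from the diff above:

import com.google.common.base.Charsets;
import com.google.common.io.Files;

import org.apache.drill.common.util.DrillFileUtils;

public class PlanLoadingSketch {
  static String loadPlan() throws Exception {
    // Resolve a classpath resource to a File and read it as UTF-8, exactly
    // as the rewritten test bodies above do.
    return Files.toString(
        DrillFileUtils.getResourceAsFile("/functions/simple_math_functions.json"),
        Charsets.UTF_8);
  }
}
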
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMultiInputAdd.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMultiInputAdd.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMultiInputAdd.java
index 02aeb0a..237b01a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMultiInputAdd.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMultiInputAdd.java
@@ -26,7 +26,7 @@ import mockit.Injectable;
 
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.pop.PopUnitTestBase;
 import org.apache.drill.exec.record.RecordBatchLoader;
@@ -62,7 +62,7 @@ public class TestMultiInputAdd extends PopUnitTestBase {
             bit.run();
             client.connect();
             List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-                    Files.toString(FileUtils.getResourceAsFile("/functions/multi_input_add_test.json"), Charsets.UTF_8));
+                    Files.toString(DrillFileUtils.getResourceAsFile("/functions/multi_input_add_test.json"), Charsets.UTF_8));
 
             RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNetworkFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNetworkFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNetworkFunctions.java
index 5beb4d5..737dace 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNetworkFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNetworkFunctions.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Test;
 
 public class TestNetworkFunctions extends BaseTestQuery {

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewAggregateFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewAggregateFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewAggregateFunctions.java
index 00b0b36..cf5df1f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewAggregateFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewAggregateFunctions.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.List;
 
 import org.apache.drill.categories.OperatorTest;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.pop.PopUnitTestBase;
 import org.apache.drill.exec.proto.UserBitShared.QueryType;
@@ -55,7 +55,7 @@ public class TestNewAggregateFunctions extends PopUnitTestBase {
       client.connect();
       List<QueryDataBatch> results = client.runQuery(
           QueryType.PHYSICAL,
-          Files.toString(FileUtils.getResourceAsFile(physicalPlan),
+          Files.toString(DrillFileUtils.getResourceAsFile(physicalPlan),
               Charsets.UTF_8).replace("#{TEST_FILE}",
               inputDataFile));
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
index e339117..238b048 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.joda.time.DateTime;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewSimpleRepeatedFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewSimpleRepeatedFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewSimpleRepeatedFunctions.java
index 9f37312..6b0a685 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewSimpleRepeatedFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewSimpleRepeatedFunctions.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestRepeatedFunction.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestRepeatedFunction.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestRepeatedFunction.java
index c4aabe9..fedd30a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestRepeatedFunction.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestRepeatedFunction.java
@@ -24,7 +24,7 @@ import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -58,7 +58,7 @@ public class TestRepeatedFunction extends ExecTest{
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/physical_repeated_1.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/physical_repeated_1.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestTrigFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestTrigFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestTrigFunctions.java
index 60ca587..f9bbd29 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestTrigFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestTrigFunctions.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/TestDateConversions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/TestDateConversions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/TestDateConversions.java
index 4da6db3..cc13039 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/TestDateConversions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/TestDateConversions.java
@@ -17,11 +17,10 @@
 package org.apache.drill.exec.fn.impl.testing;
 
 import mockit.integration.junit4.JMockit;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
-import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -36,26 +35,19 @@ import static org.junit.Assert.assertThat;
 @RunWith(JMockit.class)
 @Category({UnlikelyTest.class, SqlFunctionTest.class})
 public class TestDateConversions extends BaseTestQuery {
-
-  private static String TEMP_DIR;
-
   @BeforeClass
   public static void generateTestFiles() throws IOException {
-    File path = new File(BaseTestQuery.getTempDir("json/input"));
-    path.mkdirs();
-    TEMP_DIR = path.toPath().toString();
-
-    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(path, "joda_postgres_date.json")))) {
+    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), "joda_postgres_date.json")))) {
       writer.write("{\"date1\" : \"1970-01-02\",\n \"date2\" : \"01021970\",\n \"date3\" : \"32/1970\"\n}\n"
         + "{\"date1\" : \"2010-05-03\",\n \"date2\" : \"01021970\",\n \"date3\" : \"64/2010\"\n}");
     }
 
-    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(path, "joda_postgres_time.json")))) {
+    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), "joda_postgres_time.json")))) {
       writer.write("{\"time1\" : \"23:11:59\",\n \"time2\" : \"11:11:59pm\",\n \"time3\" : \"591111pm\"\n}\n"
         + "{\"time1\" : \"17:33:41\",\n \"time2\" : \"5:33:41am\",\n \"time3\" : \"413305pm\"\n}");
     }
 
-    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(path, "joda_postgres_date_time.json")))) {
+    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), "joda_postgres_date_time.json")))) {
       writer.write("{ \"time1\" : \"1970-01-0223:11:59\",\n \"time2\" : \"0102197011:11:59pm\",\n"
         + "  \"time3\" : \"32/1970591111pm\"\n}\n"
         + "{\"time1\" : \"2010-05-0317:33:41\",\n \"time2\" : \"0102197005:33:41am\",\n"
@@ -63,22 +55,13 @@ public class TestDateConversions extends BaseTestQuery {
     }
   }
 
-  @AfterClass
-  public static void deleteTestFiles() throws IOException {
-    java.nio.file.Files.delete(new File(TEMP_DIR, "joda_postgres_date.json").toPath());
-    java.nio.file.Files.delete(new File(TEMP_DIR, "joda_postgres_time.json").toPath());
-    java.nio.file.Files.delete(new File(TEMP_DIR, "joda_postgres_date_time.json").toPath());
-  }
-
   @Test
   public void testJodaDate() throws Exception {
-    String query = String.format("SELECT to_date(date1, 'yyyy-dd-MM') = "
-      + "to_date(date2, 'ddMMyyyy') as col1, " + "to_date(date1, 'yyyy-dd-MM') = "
-      + "to_date(date3, 'D/yyyy') as col2 "
-      + "from dfs_test.`%s/joda_postgres_date.json`", TEMP_DIR);
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("SELECT to_date(date1, 'yyyy-dd-MM') = "
+        + "to_date(date2, 'ddMMyyyy') as col1, " + "to_date(date1, 'yyyy-dd-MM') = "
+        + "to_date(date3, 'D/yyyy') as col2 "
+        + "from dfs.`joda_postgres_date.json`")
       .unOrdered()
       .baselineColumns("col1", "col2")
       .baselineValues(true, true)
@@ -88,14 +71,12 @@ public class TestDateConversions extends BaseTestQuery {
 
   @Test
   public void testPostgresDate() throws Exception {
-    String query = String.format("SELECT sql_to_date(date1, 'yyyy-DD-MM') = "
-      + "sql_to_date(date2, 'DDMMyyyy') as col1, "
-      + "sql_to_date(date1, 'yyyy-DD-MM') = "
-      + "sql_to_date(date3, 'DDD/yyyy') as col2 "
-      + "from dfs_test.`%s/joda_postgres_date.json`", TEMP_DIR);
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("SELECT sql_to_date(date1, 'yyyy-DD-MM') = "
+        + "sql_to_date(date2, 'DDMMyyyy') as col1, "
+        + "sql_to_date(date1, 'yyyy-DD-MM') = "
+        + "sql_to_date(date3, 'DDD/yyyy') as col2 "
+        + "from dfs.`joda_postgres_date.json`")
       .unOrdered()
       .baselineColumns("col1", "col2")
       .baselineValues(true, true)
@@ -107,14 +88,12 @@ public class TestDateConversions extends BaseTestQuery {
   public void testJodaTime() throws Exception {
     mockUsDateFormatSymbols();
 
-    String query = String.format("SELECT to_time(time1, 'H:m:ss') = "
-      + "to_time(time2, 'h:m:ssa') as col1, "
-      + "to_time(time1, 'H:m:ss') = "
-      + "to_time(time3, 'ssmha') as col2 "
-      + "from dfs_test.`%s/joda_postgres_time.json`", TEMP_DIR);
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("SELECT to_time(time1, 'H:m:ss') = "
+        + "to_time(time2, 'h:m:ssa') as col1, "
+        + "to_time(time1, 'H:m:ss') = "
+        + "to_time(time3, 'ssmha') as col2 "
+        + "from dfs.`joda_postgres_time.json`")
       .unOrdered()
       .baselineColumns("col1", "col2")
       .baselineValues(true, true)
@@ -126,14 +105,12 @@ public class TestDateConversions extends BaseTestQuery {
   public void testPostgresTime() throws Exception {
     mockUsDateFormatSymbols();
 
-    String query = String.format("SELECT sql_to_time(time1, 'HH24:MI:SS') = "
-      + "sql_to_time(time2, 'HH12:MI:SSam') as col1, "
-      + "sql_to_time(time1, 'HH24:MI:SS') = "
-      + "sql_to_time(time3, 'SSMIHH12am') as col2 "
-      + "from dfs_test.`%s/joda_postgres_time.json`", TEMP_DIR);
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("SELECT sql_to_time(time1, 'HH24:MI:SS') = "
+        + "sql_to_time(time2, 'HH12:MI:SSam') as col1, "
+        + "sql_to_time(time1, 'HH24:MI:SS') = "
+        + "sql_to_time(time3, 'SSMIHH12am') as col2 "
+        + "from dfs.`joda_postgres_time.json`")
       .unOrdered()
       .baselineColumns("col1", "col2")
       .baselineValues(true, true)
@@ -145,14 +122,12 @@ public class TestDateConversions extends BaseTestQuery {
   public void testPostgresDateTime() throws Exception {
     mockUsDateFormatSymbols();
 
-    String query = String.format("SELECT sql_to_timestamp(time1, 'yyyy-DD-MMHH24:MI:SS') = "
-      + "sql_to_timestamp(time2, 'DDMMyyyyHH12:MI:SSam') as col1, "
-      + "sql_to_timestamp(time1, 'yyyy-DD-MMHH24:MI:SS') = "
-      + "sql_to_timestamp(time3, 'DDD/yyyySSMIHH12am') as col2 "
-      + "from dfs_test.`%s/joda_postgres_date_time.json`", TEMP_DIR);
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("SELECT sql_to_timestamp(time1, 'yyyy-DD-MMHH24:MI:SS') = "
+        + "sql_to_timestamp(time2, 'DDMMyyyyHH12:MI:SSam') as col1, "
+        + "sql_to_timestamp(time1, 'yyyy-DD-MMHH24:MI:SS') = "
+        + "sql_to_timestamp(time3, 'DDD/yyyySSMIHH12am') as col2 "
+        + "from dfs.`joda_postgres_date_time.json`")
       .unOrdered()
       .baselineColumns("col1", "col2")
       .baselineValues(true, true)
@@ -164,14 +139,12 @@ public class TestDateConversions extends BaseTestQuery {
   public void testJodaDateTime() throws Exception {
     mockUsDateFormatSymbols();
 
-    String query = String.format("SELECT to_timestamp(time1, 'yyyy-dd-MMH:m:ss') = "
-      + "to_timestamp(time2, 'ddMMyyyyh:m:ssa') as col1, "
-      + "to_timestamp(time1, 'yyyy-dd-MMH:m:ss') = "
-      + "to_timestamp(time3, 'DDD/yyyyssmha') as col2 "
-      + "from dfs_test.`%s/joda_postgres_date_time.json`", TEMP_DIR);
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("SELECT to_timestamp(time1, 'yyyy-dd-MMH:m:ss') = "
+        + "to_timestamp(time2, 'ddMMyyyyh:m:ssa') as col1, "
+        + "to_timestamp(time1, 'yyyy-dd-MMH:m:ss') = "
+        + "to_timestamp(time3, 'DDD/yyyyssmha') as col2 "
+        + "from dfs.`joda_postgres_date_time.json`")
       .unOrdered()
       .baselineColumns("col1", "col2")
       .baselineValues(true, true)
@@ -183,14 +156,12 @@ public class TestDateConversions extends BaseTestQuery {
   public void testJodaDateTimeNested() throws Exception {
     mockUsDateFormatSymbols();
 
-    String query = String.format("SELECT date_add(to_date(time1, concat('yyyy-dd-MM','H:m:ss')), 22)= "
-      + "date_add(to_date(time2, concat('ddMMyyyy', 'h:m:ssa')), 22) as col1, "
-      + "date_add(to_date(time1, concat('yyyy-dd-MM', 'H:m:ss')), 22) = "
-      + "date_add(to_date(time3, concat('DDD/yyyy', 'ssmha')), 22) as col2 "
-      + "from dfs_test.`%s/joda_postgres_date_time.json`", TEMP_DIR);
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("SELECT date_add(to_date(time1, concat('yyyy-dd-MM','H:m:ss')), 22)= "
+        + "date_add(to_date(time2, concat('ddMMyyyy', 'h:m:ssa')), 22) as col1, "
+        + "date_add(to_date(time1, concat('yyyy-dd-MM', 'H:m:ss')), 22) = "
+        + "date_add(to_date(time3, concat('DDD/yyyy', 'ssmha')), 22) as col2 "
+        + "from dfs.`joda_postgres_date_time.json`")
       .unOrdered()
       .baselineColumns("col1", "col2")
       .baselineValues(true, true)
@@ -202,14 +173,12 @@ public class TestDateConversions extends BaseTestQuery {
   public void testPostgresDateTimeNested() throws Exception {
     mockUsDateFormatSymbols();
 
-    String query = String.format("SELECT date_add(sql_to_date(time1, concat('yyyy-DD-MM', 'HH24:MI:SS')), 22) = "
-      + "date_add(sql_to_date(time2, concat('DDMMyyyy', 'HH12:MI:SSam')), 22) as col1, "
-      + "date_add(sql_to_date(time1, concat('yyyy-DD-MM', 'HH24:MI:SS')), 10) = "
-      + "date_add(sql_to_date(time3, concat('DDD/yyyySSMI', 'HH12am')), 10) as col2 "
-      + "from dfs_test.`%s/joda_postgres_date_time.json`", TEMP_DIR);
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("SELECT date_add(sql_to_date(time1, concat('yyyy-DD-MM', 'HH24:MI:SS')), 22) = "
+        + "date_add(sql_to_date(time2, concat('DDMMyyyy', 'HH12:MI:SSam')), 22) as col1, "
+        + "date_add(sql_to_date(time1, concat('yyyy-DD-MM', 'HH24:MI:SS')), 10) = "
+        + "date_add(sql_to_date(time3, concat('DDD/yyyySSMI', 'HH12am')), 10) as col2 "
+        + "from dfs.`joda_postgres_date_time.json`")
       .unOrdered()
       .baselineColumns("col1", "col2")
       .baselineValues(true, true)
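
[Note on the pattern above: every hunk in this file drops the String.format/TEMP_DIR indirection and references the input file relative to the root of the `dfs` workspace, which after this commit points at the per-test temp directory. A minimal sketch of the resulting test shape, assuming joda_postgres_date_time.json has already been placed under the test root (the `ok` alias is illustrative only, not from the commit):

    testBuilder()
        .sqlQuery("SELECT to_timestamp(time1, 'yyyy-dd-MMH:m:ss') IS NOT NULL AS ok "
            + "FROM dfs.`joda_postgres_date_time.json`")
        .unOrdered()
        .baselineColumns("ok")
        .baselineValues(true)
        .go();
]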

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/TestConstantFolding.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/TestConstantFolding.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/TestConstantFolding.java
index 206bf97..0cf62bd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/TestConstantFolding.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/TestConstantFolding.java
@@ -37,22 +37,14 @@ import java.util.List;
 @Category(SqlTest.class)
 public class TestConstantFolding extends PlanTestBase {
 
-  @Rule
-  public TemporaryFolder folder = new TemporaryFolder();
-
-  // This should run as a @BeforeClass, but these methods must be defined static.
-  // Unfortunately, the temporary folder with an @Rule annotation cannot be static, this issue
-  // has been fixed in a newer version of JUnit
-  // http://stackoverflow.com/questions/2722358/junit-rule-temporaryfolder
-
   public static class SmallFileCreator {
 
-    private final TemporaryFolder folder;
+    private final File folder;
     private static final List<String> values = Lists.newArrayList("1","2","3");
     private static final String jsonRecord =  "{\"col1\" : 1,\"col2\" : 2, \"col3\" : 3}";
     private String record;
 
-    public SmallFileCreator(TemporaryFolder folder) {
+    public SmallFileCreator(File folder) {
       this.folder = folder;
       this.record = null;
     }
@@ -76,7 +68,8 @@ public class TestConstantFolding extends PlanTestBase {
       }
       PrintWriter out;
       for (String fileAndFolderName : new String[]{"bigfile", "BIGFILE_2"}) {
-        File bigFolder = folder.newFolder(fileAndFolderName);
+        File bigFolder = new File(folder, fileAndFolderName);
+        bigFolder.mkdirs();
         File bigFile = new File (bigFolder, fileAndFolderName + "." + extension);
         out = new PrintWriter(bigFile);
         for (int i = 0; i < bigFileLines; i++ ) {
@@ -86,7 +79,8 @@ public class TestConstantFolding extends PlanTestBase {
       }
 
       for (String fileAndFolderName : new String[]{"smallfile", "SMALLFILE_2"}) {
-        File smallFolder = folder.newFolder(fileAndFolderName);
+        File smallFolder = new File(folder, fileAndFolderName);
+        smallFolder.mkdirs();
         File smallFile = new File (smallFolder, fileAndFolderName + "." + extension);
         out = new PrintWriter(smallFile);
         for (int i = 0; i < smallFileLines; i++ ) {
@@ -125,7 +119,7 @@ public class TestConstantFolding extends PlanTestBase {
       test(String.format("alter session set `%s` = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
 
       String query2 = "SELECT *  " +
-          "FROM   cp.`/parquet/alltypes.json`  " +
+          "FROM   cp.`parquet/alltypes.json`  " +
           "WHERE  12 = extract(day from (to_timestamp('2014-02-12 03:18:31:07 AM', 'YYYY-MM-dd HH:mm:ss:SS a'))) " +
           "AND    cast( `int_col` AS             int) = castint('1')  " +
           "AND    cast( `bigint_col` AS          bigint) = castbigint('100000000000')  " +
@@ -144,10 +138,7 @@ public class TestConstantFolding extends PlanTestBase {
           "AND    cast( `intervalday_col` AS interval day) = castintervalday('P1D')" +
           "AND    cast( `bit_col` AS       boolean) = castbit('false')  " +
           "AND    `varchar_col` = concat('qwe','rty')  " +
-
-          "AND    cast( `time_col` AS            time) = casttime('01:00:00')  " +
-
-          "";
+          "AND    cast( `time_col` AS            time) = casttime('01:00:00')";
 
 
       testBuilder()
@@ -164,8 +155,7 @@ public class TestConstantFolding extends PlanTestBase {
           .baselineValues(
               "1", "1", "1", "1", "01:00:00", "1.0", "100000000000", "1", "1", "1995-01-01", "1995-01-01 01:00:10.000",
               "123456789.000000000", "P1Y", "P1D", "P1Y1M1DT1H1M", "123456789.000000000",
-              "123456789.000000000", "qwerty", "qwerty","qwerty", "false"
-          )
+              "123456789.000000000", "qwerty", "qwerty","qwerty", "false")
           .go();
     } finally {
       test("alter session set `store.json.all_text_mode` = false;");
@@ -176,20 +166,18 @@ public class TestConstantFolding extends PlanTestBase {
   @Ignore("DRILL-2553")
   @Test
   public void testConstExprFolding_withPartitionPrune_verySmallFiles() throws Exception {
-    new SmallFileCreator(folder).createFiles(1, 8);
-    String path = folder.getRoot().toPath().toString();
+    new SmallFileCreator(dirTestWatcher.getRootDir()).createFiles(1, 8);
     testPlanOneExpectedPatternOneExcluded(
-        "select * from dfs.`" + path + "/*/*.csv` where dir0 = concat('small','file')",
+        "select * from dfs.`*/*.csv` where dir0 = concat('small','file')",
         "smallfile",
         "bigfile");
   }
 
   @Test
   public void testConstExprFolding_withPartitionPrune() throws Exception {
-    new SmallFileCreator(folder).createFiles(1, 1000);
-    String path = folder.getRoot().toPath().toString();
+    new SmallFileCreator(dirTestWatcher.getRootDir()).createFiles(1, 1000);
     testPlanOneExpectedPatternOneExcluded(
-        "select * from dfs.`" + path + "/*/*.csv` where dir0 = concat('small','file')",
+        "select * from dfs.`*/*.csv` where dir0 = concat('small','file')",
         "smallfile",
         "bigfile");
   }
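
[Note: with the @Rule TemporaryFolder removed, SmallFileCreator now works against a plain java.io.File, so it can be driven from static setup through the shared dirTestWatcher (the static/@Rule conflict the deleted comment describes no longer applies). A sketch under the assumptions visible in this diff:

    // dirTestWatcher.getRootDir() returns the java.io.File test root (per this diff).
    File root = dirTestWatcher.getRootDir();
    new SmallFileCreator(root).createFiles(1, 1000);

    // Internally, subfolders are now created with plain File operations
    // instead of TemporaryFolder.newFolder():
    File smallFolder = new File(root, "smallfile");
    smallFolder.mkdirs();
]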

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
index 7fc5cea..b2671be 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import java.io.File;
+import java.nio.file.Paths;
 import java.util.Map;
 import java.util.Properties;
 
@@ -49,7 +50,7 @@ public class BaseTestImpersonation extends PlanTestBase {
   protected static MiniDFSCluster dfsCluster;
   protected static Configuration dfsConf;
   protected static FileSystem fs;
-  protected static String miniDfsStoragePath;
+  protected static File miniDfsStoragePath;
 
   // Test users and groups
   protected static final String[] org1Users = { "user0_1", "user1_1", "user2_1", "user3_1", "user4_1", "user5_1" };
@@ -93,8 +94,8 @@ public class BaseTestImpersonation extends PlanTestBase {
 
     // Set the MiniDfs base dir to be the temp directory of the test, so that all files created within the MiniDfs
     // are properly cleanup when test exits.
-    miniDfsStoragePath = System.getProperty("java.io.tmpdir") + Path.SEPARATOR + testClass;
-    dfsConf.set("hdfs.minidfs.basedir", miniDfsStoragePath);
+    miniDfsStoragePath = dirTestWatcher.makeRootSubDir(Paths.get("miniDfs"));
+    dfsConf.set("hdfs.minidfs.basedir", miniDfsStoragePath.getCanonicalPath());
 
     if (isImpersonationEnabled) {
       // Set the proxyuser settings so that the user who is running the Drillbits/MiniDfs can impersonate other users.
@@ -126,7 +127,7 @@ public class BaseTestImpersonation extends PlanTestBase {
     // Create a HDFS based storage plugin based on local storage plugin and add it to plugin registry (connection string
     // for mini dfs is varies for each run).
     final StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
-    final FileSystemConfig lfsPluginConfig = (FileSystemConfig) pluginRegistry.getPlugin("dfs_test").getConfig();
+    final FileSystemConfig lfsPluginConfig = (FileSystemConfig) pluginRegistry.getPlugin("dfs").getConfig();
 
     final FileSystemConfig miniDfsPluginConfig = new FileSystemConfig();
     miniDfsPluginConfig.connection = dfsConf.get(FileSystem.FS_DEFAULT_NAME_KEY);
@@ -156,7 +157,7 @@ public class BaseTestImpersonation extends PlanTestBase {
     }
 
     if (miniDfsStoragePath != null) {
-      FileUtils.deleteQuietly(new File(miniDfsStoragePath));
+      FileUtils.deleteQuietly(miniDfsStoragePath);
     }
   }
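
[Note: changing miniDfsStoragePath from String to File puts the MiniDFS base dir under the watcher-managed test root, so it is cleaned up with the test, and teardown can pass the File straight to commons-io. The essentials, taken directly from the hunks above:

    miniDfsStoragePath = dirTestWatcher.makeRootSubDir(Paths.get("miniDfs"));
    dfsConf.set("hdfs.minidfs.basedir", miniDfsStoragePath.getCanonicalPath());

    // ... later, in teardown:
    FileUtils.deleteQuietly(miniDfsStoragePath);
]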
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationDisabledWithMiniDFS.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationDisabledWithMiniDFS.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationDisabledWithMiniDFS.java
index 767c9ed..702d14c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationDisabledWithMiniDFS.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationDisabledWithMiniDFS.java
@@ -52,14 +52,14 @@ public class TestImpersonationDisabledWithMiniDFS extends BaseTestImpersonation
     // page of data all at once, see notes above testReadLargeParquetFileFromDFS()
     test(String.format(
         "CREATE TABLE %s.tmp.large_employee AS " +
-            "(SELECT employee_id, full_name FROM cp.`/employee.json`) " +
-            "UNION ALL (SELECT employee_id, full_name FROM cp.`/employee.json`)" +
-            "UNION ALL (SELECT employee_id, full_name FROM cp.`/employee.json`)" +
-            "UNION ALL (SELECT employee_id, full_name FROM cp.`/employee.json`)" +
-            "UNION ALL (SELECT employee_id, full_name FROM cp.`/employee.json`)" +
-            "UNION ALL (SELECT employee_id, full_name FROM cp.`/employee.json`)" +
-            "UNION ALL (SELECT employee_id, full_name FROM cp.`/employee.json`)" +
-        "UNION ALL (SELECT employee_id, full_name FROM cp.`/employee.json`)",
+            "(SELECT employee_id, full_name FROM cp.`employee.json`) " +
+            "UNION ALL (SELECT employee_id, full_name FROM cp.`employee.json`)" +
+            "UNION ALL (SELECT employee_id, full_name FROM cp.`employee.json`)" +
+            "UNION ALL (SELECT employee_id, full_name FROM cp.`employee.json`)" +
+            "UNION ALL (SELECT employee_id, full_name FROM cp.`employee.json`)" +
+            "UNION ALL (SELECT employee_id, full_name FROM cp.`employee.json`)" +
+            "UNION ALL (SELECT employee_id, full_name FROM cp.`employee.json`)" +
+        "UNION ALL (SELECT employee_id, full_name FROM cp.`employee.json`)",
         MINIDFS_STORAGE_PLUGIN_NAME));
   }
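
[Note: the only change in this file is dropping the leading slash from classpath (cp.) table names; both spellings resolve the same resource against the classpath root, and the commit standardizes on the slash-free form. An illustrative query in the new style:

    test("SELECT employee_id, full_name FROM cp.`employee.json` LIMIT 1");
]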
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
index 60baed9..d023610 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
@@ -105,17 +105,17 @@ public class TestImpersonationMetadata extends BaseTestImpersonation {
 
     // create tables as user2
     updateClient(user2);
-    test(String.format("use `%s.user2_workspace1`", MINIDFS_STORAGE_PLUGIN_NAME));
+    test("use `%s.user2_workspace1`", MINIDFS_STORAGE_PLUGIN_NAME);
     // create a table that can be dropped by another user in a different group
     test("create table parquet_table_775 as select * from cp.`employee.json`");
 
     // create a table that cannot be dropped by another user
-    test(String.format("use `%s.user2_workspace2`", MINIDFS_STORAGE_PLUGIN_NAME));
+    test("use `%s.user2_workspace2`", MINIDFS_STORAGE_PLUGIN_NAME);
     test("create table parquet_table_700 as select * from cp.`employee.json`");
 
     // Drop tables as user1
     updateClient(user1);
-    test(String.format("use `%s.user2_workspace1`", MINIDFS_STORAGE_PLUGIN_NAME));
+    test("use `%s.user2_workspace1`", MINIDFS_STORAGE_PLUGIN_NAME);
     testBuilder()
         .sqlQuery("drop table parquet_table_775")
         .unOrdered()
@@ -123,7 +123,7 @@ public class TestImpersonationMetadata extends BaseTestImpersonation {
         .baselineValues(true, String.format("Table [%s] dropped", "parquet_table_775"))
         .go();
 
-    test(String.format("use `%s.user2_workspace2`", MINIDFS_STORAGE_PLUGIN_NAME));
+    test("use `%s.user2_workspace2`", MINIDFS_STORAGE_PLUGIN_NAME);
     boolean dropFailed = false;
     try {
       test("drop table parquet_table_700");
@@ -154,11 +154,11 @@ public class TestImpersonationMetadata extends BaseTestImpersonation {
     updateClient(user1);
 
     // Try show tables in schema "drillTestGrp1_700" which is owned by "user1"
-    test(String.format("SHOW FILES IN %s.drillTestGrp1_700", MINIDFS_STORAGE_PLUGIN_NAME));
+    test("SHOW FILES IN %s.drillTestGrp1_700", MINIDFS_STORAGE_PLUGIN_NAME);
 
     // Try show tables in schema "drillTestGrp0_750" which is owned by "processUser" and has group permissions for
     // "user1"
-    test(String.format("SHOW FILES IN %s.drillTestGrp0_750", MINIDFS_STORAGE_PLUGIN_NAME));
+    test("SHOW FILES IN %s.drillTestGrp0_750", MINIDFS_STORAGE_PLUGIN_NAME);
   }
 
   @Test
@@ -166,7 +166,7 @@ public class TestImpersonationMetadata extends BaseTestImpersonation {
     updateClient(user2);
     // Try show tables in schema "drillTestGrp0_755" which is owned by "processUser" and group0. "user2" is not part
     // of the "group0"
-    test(String.format("SHOW FILES IN %s.drillTestGrp0_755", MINIDFS_STORAGE_PLUGIN_NAME));
+    test("SHOW FILES IN %s.drillTestGrp0_755", MINIDFS_STORAGE_PLUGIN_NAME);
   }
 
   @Test
@@ -176,7 +176,7 @@ public class TestImpersonationMetadata extends BaseTestImpersonation {
     updateClient(user2);
     try {
       // Try show tables in schema "drillTestGrp1_700" which is owned by "user1"
-      test(String.format("SHOW FILES IN %s.drillTestGrp1_700", MINIDFS_STORAGE_PLUGIN_NAME));
+      test("SHOW FILES IN %s.drillTestGrp1_700", MINIDFS_STORAGE_PLUGIN_NAME);
     } catch(UserRemoteException e) {
       ex = e;
     }
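
[Note: the rewrites in this file are mechanical: test(String.format(fmt, args)) becomes test(fmt, args), which suggests the test helper formats its arguments internally via a varargs overload — a reading inferred from the diff rather than a documented API. Before and after:

    test(String.format("SHOW FILES IN %s.drillTestGrp1_700", MINIDFS_STORAGE_PLUGIN_NAME)); // before
    test("SHOW FILES IN %s.drillTestGrp1_700", MINIDFS_STORAGE_PLUGIN_NAME);                // after
]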

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationQueries.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationQueries.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationQueries.java
index 88b2ad4..b3977e9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationQueries.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.impersonation;
 import com.google.common.collect.Maps;
 import org.apache.drill.categories.SecurityTest;
 import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.store.avro.AvroTestUtil;
 import org.apache.drill.exec.store.dfs.WorkspaceConfig;
 import org.apache.drill.categories.SlowTest;
@@ -97,7 +97,7 @@ public class TestImpersonationQueries extends BaseTestImpersonation {
   private static void createTestTable(String user, String group, String tableName) throws Exception {
     updateClient(user);
     test("USE " + getWSSchema(user));
-    test(String.format("CREATE TABLE %s as SELECT * FROM cp.`tpch/%s.parquet`;", tableName, tableName));
+    test("CREATE TABLE %s as SELECT * FROM cp.`tpch/%s.parquet`", tableName, tableName);
 
     // Change the ownership and permissions manually. Currently there is no option to specify the default permissions
     // and ownership for new tables.
@@ -162,7 +162,7 @@ public class TestImpersonationQueries extends BaseTestImpersonation {
   private static void createRecordReadersData(String user, String group) throws Exception {
     // copy sequence file
     updateClient(user);
-    Path localFile = new Path(FileUtils.getResourceAsFile("/sequencefiles/simple.seq").toURI().toString());
+    Path localFile = new Path(DrillFileUtils.getResourceAsFile("/sequencefiles/simple.seq").toURI().toString());
     Path dfsFile = new Path(getUserHome(user), "simple.seq");
     fs.copyFromLocalFile(localFile, dfsFile);
     fs.setOwner(dfsFile, user, group);
@@ -180,7 +180,7 @@ public class TestImpersonationQueries extends BaseTestImpersonation {
     // Table lineitem is owned by "user0_1:group0_1" with permissions 750. Try to read the table as "user0_1". We
     // shouldn't expect any errors.
     updateClient(org1Users[0]);
-    test(String.format("SELECT * FROM %s.lineitem ORDER BY l_orderkey LIMIT 1", getWSSchema(org1Users[0])));
+    test("SELECT * FROM %s.lineitem ORDER BY l_orderkey LIMIT 1", getWSSchema(org1Users[0]));
   }
 
   @Test
@@ -188,7 +188,7 @@ public class TestImpersonationQueries extends BaseTestImpersonation {
     // Table lineitem is owned by "user0_1:group0_1" with permissions 750. Try to read the table as "user1_1". We
     // shouldn't expect any errors as "user1_1" is part of the "group0_1"
     updateClient(org1Users[1]);
-    test(String.format("SELECT * FROM %s.lineitem ORDER BY l_orderkey LIMIT 1", getWSSchema(org1Users[0])));
+    test("SELECT * FROM %s.lineitem ORDER BY l_orderkey LIMIT 1", getWSSchema(org1Users[0]));
   }
 
   @Test
@@ -198,7 +198,7 @@ public class TestImpersonationQueries extends BaseTestImpersonation {
       // Table lineitem is owned by "user0_1:group0_1" with permissions 750. Now try to read the table as "user2_1". We
       // should expect a permission denied error as "user2_1" is not part of the "group0_1"
       updateClient(org1Users[2]);
-      test(String.format("SELECT * FROM %s.lineitem ORDER BY l_orderkey LIMIT 1", getWSSchema(org1Users[0])));
+      test("SELECT * FROM %s.lineitem ORDER BY l_orderkey LIMIT 1", getWSSchema(org1Users[0]));
     } catch(UserRemoteException e) {
       ex = e;
     }
@@ -212,7 +212,7 @@ public class TestImpersonationQueries extends BaseTestImpersonation {
   @Test
   public void testMultiLevelImpersonationEqualToMaxUserHops() throws Exception {
     updateClient(org1Users[4]);
-    test(String.format("SELECT * from %s.u4_lineitem LIMIT 1;", getWSSchema(org1Users[4])));
+    test("SELECT * from %s.u4_lineitem LIMIT 1;", getWSSchema(org1Users[4]));
   }
 
   @Test
@@ -221,7 +221,7 @@ public class TestImpersonationQueries extends BaseTestImpersonation {
 
     try {
       updateClient(org1Users[5]);
-      test(String.format("SELECT * from %s.u4_lineitem LIMIT 1;", getWSSchema(org1Users[4])));
+      test("SELECT * from %s.u4_lineitem LIMIT 1;", getWSSchema(org1Users[4]));
     } catch (UserRemoteException e) {
       ex = e;
     }
@@ -235,8 +235,8 @@ public class TestImpersonationQueries extends BaseTestImpersonation {
   @Test
   public void testMultiLevelImpersonationJoinEachSideReachesMaxUserHops() throws Exception {
     updateClient(org1Users[4]);
-    test(String.format("SELECT * from %s.u4_lineitem l JOIN %s.u3_orders o ON l.l_orderkey = o.o_orderkey LIMIT 1;",
-      getWSSchema(org1Users[4]), getWSSchema(org2Users[3])));
+    test("SELECT * from %s.u4_lineitem l JOIN %s.u3_orders o ON l.l_orderkey = o.o_orderkey LIMIT 1",
+      getWSSchema(org1Users[4]), getWSSchema(org2Users[3]));
   }
 
   @Test
@@ -245,8 +245,8 @@ public class TestImpersonationQueries extends BaseTestImpersonation {
 
     try {
       updateClient(org1Users[4]);
-      test(String.format("SELECT * from %s.u4_lineitem l JOIN %s.u4_orders o ON l.l_orderkey = o.o_orderkey LIMIT 1;",
-          getWSSchema(org1Users[4]), getWSSchema(org2Users[4])));
+      test("SELECT * from %s.u4_lineitem l JOIN %s.u4_orders o ON l.l_orderkey = o.o_orderkey LIMIT 1",
+          getWSSchema(org1Users[4]), getWSSchema(org2Users[4]));
     } catch(UserRemoteException e) {
       ex = e;
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonation.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonation.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonation.java
index 7537431..ee57ad9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonation.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonation.java
@@ -90,7 +90,7 @@ public class TestInboundImpersonation extends BaseTestImpersonation {
     final String tableName = "lineitem";
     updateClient(OWNER, OWNER_PASSWORD);
     test("USE " + getWSSchema(OWNER));
-    test(String.format("CREATE TABLE %s as SELECT * FROM cp.`tpch/%s.parquet`;", tableName, tableName));
+    test("CREATE TABLE %s as SELECT * FROM cp.`tpch/%s.parquet`", tableName, tableName);
 
     // Change the ownership and permissions manually.
     // Currently there is no option to specify the default permissions and ownership for new tables.
@@ -101,9 +101,9 @@ public class TestInboundImpersonation extends BaseTestImpersonation {
     // Create a view on top of lineitem table; allow IMPERSONATION_TARGET to read the view
     // /user/user0_1    u0_lineitem    750    user0_1:group0_1
     final String viewName = "u0_lineitem";
-    test(String.format("ALTER SESSION SET `%s`='%o';", ExecConstants.NEW_VIEW_DEFAULT_PERMS_KEY, (short) 0750));
-    test(String.format("CREATE VIEW %s.%s AS SELECT l_orderkey, l_partkey FROM %s.%s;",
-        getWSSchema(OWNER), viewName, getWSSchema(OWNER), "lineitem"));
+    test("ALTER SESSION SET `%s`='%o';", ExecConstants.NEW_VIEW_DEFAULT_PERMS_KEY, (short) 0750);
+    test("CREATE VIEW %s.%s AS SELECT l_orderkey, l_partkey FROM %s.%s",
+        getWSSchema(OWNER), viewName, getWSSchema(OWNER), "lineitem");
     // Verify the view file created has the expected permissions and ownership
     final Path viewFilePath = new Path(getUserHome(OWNER), viewName + DotDrillType.VIEW.getEnding());
     final FileStatus status = fs.getFileStatus(viewFilePath);

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonationPrivileges.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonationPrivileges.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonationPrivileges.java
index e1d2dfc..199b1e1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonationPrivileges.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonationPrivileges.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.impersonation;
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 import org.apache.drill.categories.SecurityTest;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.rpc.user.InboundImpersonationManager;
 import org.apache.drill.exec.server.options.OptionDefinition;
@@ -43,7 +43,7 @@ public class TestInboundImpersonationPrivileges extends BaseTestImpersonation {
 
   static {
     try {
-      IMPERSONATION_POLICIES = Files.toString(FileUtils.getResourceAsFile("/inbound_impersonation_policies.json"),
+      IMPERSONATION_POLICIES = Files.toString(DrillFileUtils.getResourceAsFile("/inbound_impersonation_policies.json"),
           Charsets.UTF_8);
     } catch (final IOException e) {
       throw new RuntimeException("Cannot load impersonation policies.", e);
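
[Note: this file and several below swap org.apache.drill.common.util.FileUtils for DrillFileUtils. The new name avoids ambiguity with org.apache.commons.io.FileUtils, which some of these tests also import (motivation inferred, not stated in the commit message). Usage is unchanged apart from the class name; both methods shown appear in this commit:

    import org.apache.commons.io.FileUtils;             // still used for deleteQuietly etc.
    import org.apache.drill.common.util.DrillFileUtils; // renamed Drill resource helper

    File policies = DrillFileUtils.getResourceAsFile("/inbound_impersonation_policies.json");
    String plan = DrillFileUtils.getResourceAsString("/scan_screen_logical.json");
]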

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java b/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
index 6fca84d..e903166 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
@@ -30,7 +30,7 @@ import java.util.Properties;
 import org.apache.drill.categories.MemoryTest;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.types.TypeProtos;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -204,7 +204,7 @@ public class TestAllocators extends DrillTest {
       // Get a few physical operators. Easiest way is to read a physical plan.
       PhysicalPlanReader planReader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(bitContext,
           storageRegistry);
-      PhysicalPlan plan = planReader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile(planFile),
+      PhysicalPlan plan = planReader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile(planFile),
           Charsets.UTF_8));
       List<PhysicalOperator> physicalOperators = plan.getSortedOperators();
       Iterator<PhysicalOperator> physicalOperatorIterator = physicalOperators.iterator();

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/nested/TestFastComplexSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/nested/TestFastComplexSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/nested/TestFastComplexSchema.java
index 8f6ab6e..6173bb3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/nested/TestFastComplexSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/nested/TestFastComplexSchema.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.nested;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.UnlikelyTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/nested/TestNestedComplexSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/nested/TestNestedComplexSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/nested/TestNestedComplexSchema.java
index d0d0016..1754fab 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/nested/TestNestedComplexSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/nested/TestNestedComplexSchema.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.nested;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.UnlikelyTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/opt/BasicOptimizerTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/opt/BasicOptimizerTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/opt/BasicOptimizerTest.java
index a5f7bdc..fca9cb4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/opt/BasicOptimizerTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/opt/BasicOptimizerTest.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.opt;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.logical.LogicalPlan;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
 import org.junit.Test;
@@ -31,7 +31,7 @@ public class BasicOptimizerTest extends ExecTest {
     public void parseSimplePlan() throws Exception{
         DrillConfig c = DrillConfig.create();
         LogicalPlanPersistence lpp = PhysicalPlanReaderTestFactory.defaultLogicalPlanPersistence(c);
-        LogicalPlan plan = LogicalPlan.parse(lpp, FileUtils.getResourceAsString("/scan_screen_logical.json"));
+        LogicalPlan plan = LogicalPlan.parse(lpp, DrillFileUtils.getResourceAsString("/scan_screen_logical.json"));
         String unparse = plan.unparse(lpp);
 //        System.out.println(unparse);
         //System.out.println( new BasicOptimizer(DrillConfig.create()).convert(plan).unparse(c.getMapper().writer()));

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/config/TestParsePhysicalPlan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/config/TestParsePhysicalPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/config/TestParsePhysicalPlan.java
index 2fadb6e..f4ae026 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/config/TestParsePhysicalPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/config/TestParsePhysicalPlan.java
@@ -22,7 +22,7 @@ import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.scanner.ClassPathScanner;
 import org.apache.drill.common.scanner.persistence.ScanResult;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.physical.PhysicalPlan;
 import org.apache.drill.exec.planner.PhysicalPlanReader;
@@ -49,7 +49,7 @@ public class TestParsePhysicalPlan extends ExecTest {
     PhysicalPlanReader reader = new PhysicalPlanReader(c, scanResult, lpp, CoordinationProtos.DrillbitEndpoint.getDefaultInstance(), null);
     ObjectReader r = lpp.getMapper().reader(PhysicalPlan.class);
     ObjectWriter writer = lpp.getMapper().writer();
-    PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/physical_test1.json"), Charsets.UTF_8));
+    PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/physical_test1.json"), Charsets.UTF_8));
     String unparse = plan.unparse(writer);
 //    System.out.println(unparse);
   }

