drill-commits mailing list archives

From jacq...@apache.org
Subject [09/13] drill git commit: DRILL-3742: Classpath scanning and build improvement
Date Mon, 26 Oct 2015 17:42:56 GMT
http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
index 4cd61c2..b3bab2a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
@@ -26,7 +26,7 @@ import java.util.Set;
 
 import com.google.common.collect.Sets;
 import org.apache.commons.collections.IteratorUtils;
-import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.map.CaseInsensitiveMap;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.ExecConstants;
@@ -149,9 +149,9 @@ public class SystemOptionManager extends BaseOptionManager {
    */
   private PStore<OptionValue> options;
 
-  public SystemOptionManager(final DrillConfig config, final PStoreProvider provider) {
+  public SystemOptionManager(LogicalPlanPersistence lpPersistence, final PStoreProvider provider) {
     this.provider = provider;
-    this.config =  PStoreConfig.newJacksonBuilder(config.getMapper(), OptionValue.class)
+    this.config =  PStoreConfig.newJacksonBuilder(lpPersistence.getMapper(), OptionValue.class)
         .name("sys.options")
         .build();
   }

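SystemOptionManager no longer pulls the Jackson ObjectMapper off DrillConfig; callers now hand it a LogicalPlanPersistence, which carries the mapper built from the classpath scan. A minimal wiring sketch, following the same pattern the test changes later in this patch use (PlanningBase, CodeCompilerTestFactory); the class name SystemOptionManagerWiring is just for illustration:

  import org.apache.drill.common.config.DrillConfig;
  import org.apache.drill.common.config.LogicalPlanPersistence;
  import org.apache.drill.common.scanner.ClassPathScanner;
  import org.apache.drill.common.scanner.persistence.ScanResult;
  import org.apache.drill.exec.server.options.SystemOptionManager;
  import org.apache.drill.exec.store.sys.local.LocalPStoreProvider;

  public class SystemOptionManagerWiring {
    public static SystemOptionManager wire() throws Exception {
      DrillConfig config = DrillConfig.create();
      // registry of scanned classes, prepared at build time and loaded here
      ScanResult scan = ClassPathScanner.fromPrescan(config);
      LogicalPlanPersistence lpPersistence = new LogicalPlanPersistence(config, scan);
      // the local store provider is what the tests use; any PStoreProvider works
      SystemOptionManager options = new SystemOptionManager(lpPersistence, new LocalPStoreProvider(config));
      options.init();
      return options;
    }
  }
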
http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/DrillRestServer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/DrillRestServer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/DrillRestServer.java
index 8c14587..7d2dfe8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/DrillRestServer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/DrillRestServer.java
@@ -59,14 +59,14 @@ public class DrillRestServer extends ResourceConfig {
     register(GenericExceptionMapper.class);
 
     JacksonJaxbJsonProvider provider = new JacksonJaxbJsonProvider();
-    provider.setMapper(workManager.getContext().getConfig().getMapper());
+    provider.setMapper(workManager.getContext().getLpPersistence().getMapper());
     register(provider);
 
     register(new AbstractBinder() {
       @Override
       protected void configure() {
         bind(workManager).to(WorkManager.class);
-        bind(workManager.getContext().getConfig().getMapper()).to(ObjectMapper.class);
+        bind(workManager.getContext().getLpPersistence().getMapper()).to(ObjectMapper.class);
         bind(workManager.getContext().getPersistentStoreProvider()).to(PStoreProvider.class);
         bind(workManager.getContext().getStorage()).to(StoragePluginRegistry.class);
       }

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/service/ServiceEngine.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/service/ServiceEngine.java b/exec/java-exec/src/main/java/org/apache/drill/exec/service/ServiceEngine.java
index 25ea307..8c347e8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/service/ServiceEngine.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/service/ServiceEngine.java
@@ -17,12 +17,18 @@
  */
 package org.apache.drill.exec.service;
 
+import static java.util.concurrent.TimeUnit.MILLISECONDS;
+
 import java.io.Closeable;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
 
 import io.netty.channel.EventLoopGroup;
+
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.DrillbitStartupException;
@@ -38,6 +44,7 @@ import org.apache.drill.exec.server.BootStrapContext;
 import org.apache.drill.exec.work.batch.ControlMessageHandler;
 import org.apache.drill.exec.work.user.UserWorker;
 
+import com.google.common.base.Stopwatch;
 import com.google.common.io.Closeables;
 
 public class ServiceEngine implements Closeable{
@@ -54,7 +61,7 @@ public class ServiceEngine implements Closeable{
       WorkEventBus workBus, DataResponseHandler dataHandler, boolean allowPortHunting) throws DrillbitStartupException {
     final EventLoopGroup eventLoopGroup = TransportCheck.createEventLoopGroup(
         context.getConfig().getInt(ExecConstants.USER_SERVER_RPC_THREADS), "UserServer-");
-    this.userServer = new UserServer(context.getConfig(), context.getAllocator(), eventLoopGroup, userWorker);
+    this.userServer = new UserServer(context.getConfig(), context.getClasspathScan(), context.getAllocator(), eventLoopGroup, userWorker);
     this.controller = new ControllerImpl(context, controlMessageHandler, allowPortHunting);
     this.dataPool = new DataConnectionCreator(context, workBus, dataHandler, allowPortHunting);
     this.config = context.getConfig();
@@ -82,10 +89,33 @@ public class ServiceEngine implements Closeable{
     return controller;
   }
 
+  private void submit(ExecutorService p, final String name, final Closeable c) {
+    p.submit(new Runnable() {
+      @Override
+      public void run() {
+        Stopwatch watch = new Stopwatch().start();
+        Closeables.closeQuietly(c);
+        long elapsed = watch.elapsed(MILLISECONDS);
+        if (elapsed > 500) {
+          logger.info("closed " + name + " in " + elapsed + " ms");
+        }
+      }
+    });
+  }
+
   @Override
   public void close() throws IOException {
-    Closeables.closeQuietly(userServer);
-    Closeables.closeQuietly(dataPool);
-    Closeables.closeQuietly(controller);
+    // this takes time so close them in parallel
+    // Ideally though we fix this netty bug: https://github.com/netty/netty/issues/2545
+    ExecutorService p = Executors.newFixedThreadPool(2);
+    submit(p, "userServer", userServer);
+    submit(p, "dataPool", dataPool);
+    submit(p, "controller", controller);
+    p.shutdown();
+    try {
+      p.awaitTermination(3, TimeUnit.SECONDS);
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+    }
   }
 }

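The rewritten close() above replaces three sequential closes with a small pool and a bounded wait (the linked Netty issue is what makes the individual closes slow). A standalone sketch of the same pattern, using plain JDK calls instead of Guava's Closeables and Stopwatch; ParallelCloser is a hypothetical helper, not part of this patch:

  import java.io.Closeable;
  import java.util.concurrent.ExecutorService;
  import java.util.concurrent.Executors;
  import java.util.concurrent.TimeUnit;

  public class ParallelCloser {
    public static void closeAll(long timeoutSeconds, Closeable... closeables) {
      ExecutorService pool = Executors.newFixedThreadPool(Math.max(1, closeables.length));
      for (final Closeable c : closeables) {
        pool.submit(new Runnable() {
          @Override
          public void run() {
            long start = System.currentTimeMillis();
            try {
              c.close();
            } catch (Exception e) {
              // swallow, mirroring Closeables.closeQuietly above
            }
            long elapsed = System.currentTimeMillis() - start;
            if (elapsed > 500) {
              System.out.println("closed " + c + " in " + elapsed + " ms");
            }
          }
        });
      }
      pool.shutdown();
      try {
        pool.awaitTermination(timeoutSeconds, TimeUnit.SECONDS);
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve interrupt status
      }
    }
  }
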
http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
index 6eb65e6..4673cb5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.store;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
@@ -34,12 +36,14 @@ import java.util.concurrent.TimeUnit;
 import org.apache.calcite.plan.RelOptRule;
 import org.apache.calcite.schema.SchemaPlus;
 import org.apache.calcite.tools.RuleSet;
-import org.apache.drill.common.config.DrillConfig;
+
+import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
-import org.apache.drill.common.util.PathScanner;
+import org.apache.drill.common.scanner.ClassPathScanner;
+import org.apache.drill.common.scanner.persistence.ScanResult;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.DrillbitStartupException;
 import org.apache.drill.exec.ops.OptimizerRulesContext;
@@ -57,6 +61,7 @@ import org.apache.drill.exec.store.sys.SystemTablePluginConfig;
 
 import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
+import com.google.common.base.Preconditions;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.ImmutableSet.Builder;
@@ -78,22 +83,23 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
   private DrillbitContext context;
   private final DrillSchemaFactory schemaFactory = new DrillSchemaFactory();
   private final PStore<StoragePluginConfig> pluginSystemTable;
-  private final Object updateLock = new Object();
-  private volatile long lastUpdate = 0;
-  private static final long UPDATE_FREQUENCY = 2 * 60 * 1000;
+  private final LogicalPlanPersistence lpPersistence;
+  private final ScanResult classpathScan;
 
   public StoragePluginRegistry(DrillbitContext context) {
+    this.context = checkNotNull(context);
+    this.lpPersistence = checkNotNull(context.getLpPersistence());
+    this.classpathScan = checkNotNull(context.getClasspathScan());
     try {
-      this.context = context;
       this.pluginSystemTable = context //
           .getPersistentStoreProvider() //
           .getStore(PStoreConfig //
-              .newJacksonBuilder(context.getConfig().getMapper(), StoragePluginConfig.class) //
+              .newJacksonBuilder(lpPersistence.getMapper(), StoragePluginConfig.class) //
               .name("sys.storage_plugins") //
               .build());
     } catch (IOException | RuntimeException e) {
       logger.error("Failure while loading storage plugin registry.", e);
-      throw new RuntimeException("Faiure while reading and loading storage plugin configuration.", e);
+      throw new RuntimeException("Failure while reading and loading storage plugin configuration.", e);
     }
   }
 
@@ -103,11 +109,8 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
 
   @SuppressWarnings("unchecked")
   public void init() throws DrillbitStartupException {
-    final DrillConfig config = context.getConfig();
     final Collection<Class<? extends StoragePlugin>> pluginClasses =
-        PathScanner.scanForImplementations(
-            StoragePlugin.class,
-            config.getStringList(ExecConstants.STORAGE_ENGINE_SCAN_PACKAGES));
+        classpathScan.getImplementations(StoragePlugin.class);
     final String lineBrokenList =
         pluginClasses.size() == 0
         ? "" : "\n\t- " + Joiner.on("\n\t- ").join(pluginClasses);
@@ -146,13 +149,13 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
       if (!pluginSystemTable.iterator().hasNext()) {
         // bootstrap load the config since no plugins are stored.
         logger.info("No storage plugin instances configured in persistent store, loading bootstrap configuration.");
-        Collection<URL> urls = PathScanner.forResource(ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE, false, Resources.class.getClassLoader());
+        Collection<URL> urls = ClassPathScanner.forResource(ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE, false);
         if (urls != null && ! urls.isEmpty()) {
           logger.info("Loading the storage plugin configs from URLs {}.", urls);
           Map<String, URL> pluginURLMap = Maps.newHashMap();
           for (URL url :urls) {
             String pluginsData = Resources.toString(url, Charsets.UTF_8);
-            StoragePlugins plugins = context.getConfig().getMapper().readValue(pluginsData, StoragePlugins.class);
+            StoragePlugins plugins = lpPersistence.getMapper().readValue(pluginsData, StoragePlugins.class);
             for (Map.Entry<String, StoragePluginConfig> config : plugins) {
               if (!pluginSystemTable.putIfAbsent(config.getKey(), config.getValue())) {
                 logger.warn("Duplicate plugin instance '{}' defined in [{}, {}], ignoring the later one.",

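Plugin discovery above now asks the prescanned classpath registry for implementations instead of running PathScanner over a package list taken from DrillConfig. A small lookup sketch using only classes that appear in this patch (PluginDiscoverySketch is an illustrative name):

  import java.util.Collection;

  import org.apache.drill.common.config.DrillConfig;
  import org.apache.drill.common.scanner.ClassPathScanner;
  import org.apache.drill.common.scanner.persistence.ScanResult;
  import org.apache.drill.exec.store.StoragePlugin;

  public class PluginDiscoverySketch {
    public static void main(String[] args) {
      // fromPrescan loads the scan registry produced at build time
      ScanResult scan = ClassPathScanner.fromPrescan(DrillConfig.create());
      Collection<Class<? extends StoragePlugin>> pluginClasses = scan.getImplementations(StoragePlugin.class);
      for (Class<? extends StoragePlugin> pluginClass : pluginClasses) {
        System.out.println("found storage plugin implementation: " + pluginClass.getName());
      }
    }
  }
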
http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
index 6f3ce27..fb3e58a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
@@ -17,20 +17,21 @@
  */
 package org.apache.drill.exec.store.dfs;
 
+import static org.apache.drill.exec.store.dfs.FileSystemSchemaFactory.DEFAULT_WS_NAME;
+
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
 import org.apache.calcite.schema.SchemaPlus;
-
 import org.apache.drill.common.JSONOptions;
+import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.exec.ops.OptimizerRulesContext;
-import org.apache.drill.exec.ops.QueryContext;
 import org.apache.drill.exec.physical.base.AbstractGroupScan;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.AbstractStoragePlugin;
@@ -46,8 +47,6 @@ import com.google.common.collect.ImmutableSet.Builder;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
-import static org.apache.drill.exec.store.dfs.FileSystemSchemaFactory.DEFAULT_WS_NAME;
-
 /**
  * A Storage engine associated with a Hadoop FileSystem Implementation. Examples include HDFS, MapRFS, QuantacastFileSystem,
  * LocalFileSystem, as well Apache Drill specific CachedFileSystem, ClassPathFileSystem and LocalSyncableFileSystem.
@@ -61,20 +60,20 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
   private final Map<String, FormatPlugin> formatPluginsByName;
   private final Map<FormatPluginConfig, FormatPlugin> formatPluginsByConfig;
   private final FileSystemConfig config;
-  private final DrillbitContext context;
   private final Configuration fsConf;
+  private final LogicalPlanPersistence lpPersistance;
 
   public FileSystemPlugin(FileSystemConfig config, DrillbitContext context, String name) throws ExecutionSetupException{
+    this.config = config;
+    this.lpPersistance = context.getLpPersistence();
     try {
-      this.config = config;
-      this.context = context;
 
       fsConf = new Configuration();
       fsConf.set(FileSystem.FS_DEFAULT_NAME_KEY, config.connection);
       fsConf.set("fs.classpath.impl", ClassPathFileSystem.class.getName());
       fsConf.set("fs.drill-local.impl", LocalSyncableFileSystem.class.getName());
 
-      formatPluginsByName = FormatCreator.getFormatPlugins(context, fsConf, config);
+      formatPluginsByName = FormatCreator.getFormatPlugins(context, fsConf, config, context.getClasspathScan());
       List<FormatMatcher> matchers = Lists.newArrayList();
       formatPluginsByConfig = Maps.newHashMap();
       for (FormatPlugin p : formatPluginsByName.values()) {
@@ -86,13 +85,13 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
       List<WorkspaceSchemaFactory> factories = Lists.newArrayList();
       if (!noWorkspace) {
         for (Map.Entry<String, WorkspaceConfig> space : config.workspaces.entrySet()) {
-          factories.add(new WorkspaceSchemaFactory(context.getConfig(), this, space.getKey(), name, space.getValue(), matchers));
+          factories.add(new WorkspaceSchemaFactory(this, space.getKey(), name, space.getValue(), matchers, context.getLpPersistence()));
         }
       }
 
       // if the "default" workspace is not given add one.
       if (noWorkspace || !config.workspaces.containsKey(DEFAULT_WS_NAME)) {
-        factories.add(new WorkspaceSchemaFactory(context.getConfig(), this, DEFAULT_WS_NAME, name, WorkspaceConfig.DEFAULT, matchers));
+        factories.add(new WorkspaceSchemaFactory(this, DEFAULT_WS_NAME, name, WorkspaceConfig.DEFAULT, matchers, context.getLpPersistence()));
       }
 
       this.schemaFactory = new FileSystemSchemaFactory(name, factories);
@@ -114,7 +113,7 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
   @Override
   public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns)
       throws IOException {
-    FormatSelection formatSelection = selection.getWith(context.getConfig(), FormatSelection.class);
+    FormatSelection formatSelection = selection.getWith(lpPersistance, FormatSelection.class);
     FormatPlugin plugin;
     if (formatSelection.getFormat() instanceof NamedFormatPluginConfig) {
       plugin = formatPluginsByName.get( ((NamedFormatPluginConfig) formatSelection.getFormat()).name);

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
index 729dc55..3f932cd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
@@ -22,16 +22,14 @@ import java.lang.reflect.InvocationTargetException;
 import java.util.Collection;
 import java.util.Map;
 
-import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.common.scanner.persistence.ScanResult;
 import org.apache.drill.common.util.ConstructorChecker;
-import org.apache.drill.common.util.PathScanner;
-import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.hadoop.conf.Configuration;
 
 import com.google.common.collect.Maps;
-import org.apache.hadoop.conf.Configuration;
 
 public class FormatCreator {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FormatCreator.class);
@@ -41,13 +39,14 @@ public class FormatCreator {
   private static final ConstructorChecker DEFAULT_BASED = new ConstructorChecker(String.class, DrillbitContext.class,
       Configuration.class, StoragePluginConfig.class);
 
-  static Map<String, FormatPlugin> getFormatPlugins(DrillbitContext context, Configuration fsConf,
-      FileSystemConfig storageConfig) {
-    final DrillConfig config = context.getConfig();
+  static Map<String, FormatPlugin> getFormatPlugins(
+      DrillbitContext context,
+      Configuration fsConf,
+      FileSystemConfig storageConfig,
+      ScanResult classpathScan) {
     Map<String, FormatPlugin> plugins = Maps.newHashMap();
 
-    Collection<Class<? extends FormatPlugin>> pluginClasses =
-        PathScanner.scanForImplementations(FormatPlugin.class, config.getStringList(ExecConstants.STORAGE_ENGINE_SCAN_PACKAGES));
+    Collection<Class<? extends FormatPlugin>> pluginClasses = classpathScan.getImplementations(FormatPlugin.class);
 
     if (storageConfig.formats == null || storageConfig.formats.isEmpty()) {
 
@@ -73,7 +72,7 @@ public class FormatCreator {
             if (!FORMAT_BASED.check(c)) {
               continue;
             }
-            Class<? extends FormatPluginConfig> configClass = (Class<? extends FormatPluginConfig>) c.getParameterTypes()[4];
+            Class<?> configClass = c.getParameterTypes()[4];
             constructors.put(configClass, c);
           } catch (Exception e) {
             logger.warn(String.format("Failure while trying instantiate FormatPlugin %s.", pluginClass.getName()), e);

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
index fb48a80..bb42009 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
@@ -28,14 +28,10 @@ import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.regex.Pattern;
 
-import com.google.common.base.Strings;
-import com.google.common.collect.ImmutableList;
 import org.apache.calcite.schema.Table;
-
-import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.dotdrill.DotDrillFile;
 import org.apache.drill.exec.dotdrill.DotDrillType;
@@ -55,13 +51,15 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.AccessControlException;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Joiner;
+import com.google.common.base.Strings;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.security.AccessControlException;
 
 public class WorkspaceSchemaFactory {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WorkspaceSchemaFactory.class);
@@ -72,21 +70,26 @@ public class WorkspaceSchemaFactory {
 
   private final WorkspaceConfig config;
   private final Configuration fsConf;
-  private final DrillConfig drillConfig;
   private final String storageEngineName;
   private final String schemaName;
   private final FileSystemPlugin plugin;
   private final ObjectMapper mapper;
+  private final LogicalPlanPersistence logicalPlanPersistence;
   private final Path wsPath;
 
-  public WorkspaceSchemaFactory(DrillConfig drillConfig, FileSystemPlugin plugin, String schemaName,
-      String storageEngineName, WorkspaceConfig config, List<FormatMatcher> formatMatchers)
+  public WorkspaceSchemaFactory(
+      FileSystemPlugin plugin,
+      String schemaName,
+      String storageEngineName,
+      WorkspaceConfig config,
+      List<FormatMatcher> formatMatchers,
+      LogicalPlanPersistence logicalPlanPersistence)
     throws ExecutionSetupException, IOException {
+    this.logicalPlanPersistence = logicalPlanPersistence;
     this.fsConf = plugin.getFsConf();
     this.plugin = plugin;
-    this.drillConfig = drillConfig;
     this.config = config;
-    this.mapper = drillConfig.getMapper();
+    this.mapper = logicalPlanPersistence.getMapper();
     this.fileMatchers = Lists.newArrayList();
     this.dirMatchers = Lists.newArrayList();
     this.storageEngineName = storageEngineName;
@@ -226,7 +229,7 @@ public class WorkspaceSchemaFactory {
 
     private View getView(DotDrillFile f) throws IOException{
       assert f.getType() == DotDrillType.VIEW;
-      return f.getView(drillConfig);
+      return f.getView(logicalPlanPersistence);
     }
 
     @Override
@@ -237,7 +240,7 @@ public class WorkspaceSchemaFactory {
       }
 
       // then look for files that start with this name and end in .drill.
-      List<DotDrillFile> files = Collections.EMPTY_LIST;
+      List<DotDrillFile> files = Collections.emptyList();
       try {
         try {
           files = DotDrillUtil.getDotDrills(fs, new Path(config.getLocation()), name, DotDrillType.VIEW);

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaStoragePlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaStoragePlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaStoragePlugin.java
index f939ba8..385c99f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaStoragePlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaStoragePlugin.java
@@ -62,7 +62,7 @@ public class InfoSchemaStoragePlugin extends AbstractStoragePlugin {
   @Override
   public InfoSchemaGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns)
       throws IOException {
-    SelectedTable table = selection.getWith(context.getConfig(),  SelectedTable.class);
+    SelectedTable table = selection.getWith(context.getLpPersistence(),  SelectedTable.class);
     return new InfoSchemaGroupScan(table);
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePlugin.java
index bac02ca..33f030b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePlugin.java
@@ -74,7 +74,7 @@ public class SystemTablePlugin extends AbstractStoragePlugin {
   @Override
   public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns)
       throws IOException {
-    SystemTable table = selection.getWith(context.getConfig(), SystemTable.class);
+    SystemTable table = selection.getWith(context.getLpPersistence(), SystemTable.class);
     return new SystemTableScan(table, this);
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/work/WorkManager.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/WorkManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/WorkManager.java
index 8209277..0bf1673 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/WorkManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/WorkManager.java
@@ -28,6 +28,7 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.drill.common.SelfCleaningRunnable;
 import org.apache.drill.common.concurrent.ExtendedLatch;
+import org.apache.drill.common.scanner.persistence.ScanResult;
 import org.apache.drill.exec.coord.ClusterCoordinator;
 import org.apache.drill.exec.proto.BitControl.FragmentStatus;
 import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
@@ -122,8 +123,12 @@ public class WorkManager implements AutoCloseable {
     dataHandler = new DataResponseHandlerImpl(bee); // TODO only uses startFragmentPendingRemote()
   }
 
-  public void start(final DrillbitEndpoint endpoint, final Controller controller,
-      final DataConnectionCreator data, final ClusterCoordinator coord, final PStoreProvider provider) {
+  public void start(
+      final DrillbitEndpoint endpoint,
+      final Controller controller,
+      final DataConnectionCreator data,
+      final ClusterCoordinator coord,
+      final PStoreProvider provider) {
     dContext = new DrillbitContext(endpoint, bContext, coord, controller, data, workBus, provider, executor);
     statusThread.start();
 

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
index 2b17933..e17eba5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
@@ -351,14 +351,14 @@ public class Foreman implements Runnable {
 
   private void log(final LogicalPlan plan) {
     if (logger.isDebugEnabled()) {
-      logger.debug("Logical {}", plan.unparse(queryContext.getConfig()));
+      logger.debug("Logical {}", plan.unparse(queryContext.getLpPersistence()));
     }
   }
 
   private void log(final PhysicalPlan plan) {
     if (logger.isDebugEnabled()) {
       try {
-        final String planText = queryContext.getConfig().getMapper().writeValueAsString(plan);
+        final String planText = queryContext.getLpPersistence().getMapper().writeValueAsString(plan);
         logger.debug("Physical {}", planText);
       } catch (final IOException e) {
         logger.warn("Error while attempting to log physical plan.", e);
@@ -367,7 +367,7 @@ public class Foreman implements Runnable {
   }
 
   private void returnPhysical(final PhysicalPlan plan) throws ExecutionSetupException {
-    final String jsonPlan = plan.unparse(queryContext.getConfig().getMapper().writer());
+    final String jsonPlan = plan.unparse(queryContext.getLpPersistence().getMapper().writer());
     runPhysicalPlan(DirectPlan.createDirectPlan(queryContext, new PhysicalFromLogicalExplain(jsonPlan)));
   }
 
@@ -909,7 +909,7 @@ public class Foreman implements Runnable {
 
   private PhysicalPlan convert(final LogicalPlan plan) throws OptimizerException {
     if (logger.isDebugEnabled()) {
-      logger.debug("Converting logical plan {}.", plan.toJsonStringSafe(queryContext.getConfig()));
+      logger.debug("Converting logical plan {}.", plan.toJsonStringSafe(queryContext.getLpPersistence()));
     }
     return new BasicOptimizer(queryContext, initiatingClient).optimize(
         new BasicOptimizer.BasicOptimizationContext(queryContext), plan);

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/main/resources/drill-module.conf
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/resources/drill-module.conf b/exec/java-exec/src/main/resources/drill-module.conf
index cf0d86c..9bde6e0 100644
--- a/exec/java-exec/src/main/resources/drill-module.conf
+++ b/exec/java-exec/src/main/resources/drill-module.conf
@@ -17,7 +17,29 @@
 //  This file can also include any supplementary configuration information.
 //  This file is in HOCON format, see https://github.com/typesafehub/config/blob/master/HOCON.md for more information.
 
-drill.logical.function.packages += "org.apache.drill.exec.expr.fn.impl"
+drill {
+  classpath.scanning {
+    base.classes : ${?drill.classpath.scanning.base.classes} [
+      org.apache.drill.exec.expr.DrillFunc,
+      org.apache.drill.exec.expr.fn.PluggableFunctionRegistry,
+      org.apache.drill.exec.physical.base.PhysicalOperator,
+      org.apache.drill.exec.physical.impl.BatchCreator,
+      org.apache.drill.exec.physical.impl.RootCreator,
+      org.apache.drill.exec.rpc.user.security.UserAuthenticator,
+      org.apache.drill.exec.store.dfs.FormatPlugin,
+      org.apache.drill.exec.store.StoragePlugin
+    ],
+
+    annotations += org.apache.drill.exec.expr.annotations.FunctionTemplate
+
+    packages : ${?drill.classpath.scanning.packages} [
+      org.apache.drill.exec.expr,
+      org.apache.drill.exec.physical,
+      org.apache.drill.exec.store,
+      org.apache.drill.exec.rpc.user.security
+    ]
+  }
+}
 
 drill.client: {
   supports-complex-types: true
@@ -49,15 +71,10 @@ drill.exec: {
     },
     use.ip : false
   },
-  operator: {
-    packages += "org.apache.drill.exec.physical.config"
-  },
   optimizer: {
     implementation: "org.apache.drill.exec.opt.IdentityOptimizer"
   },
-  functions: ["org.apache.drill.expr.fn.impl"],
   storage: {
-    packages += "org.apache.drill.exec.store",
     file: {
       text: {
         buffer.size: 262144,
@@ -90,7 +107,6 @@ drill.exec: {
     ssl_enabled: false,
     port: 8047
   },
-  functions: ["org.apache.drill.expr.fn.impl"],
   network: {
     start: 35000
   },
@@ -110,7 +126,6 @@ drill.exec: {
   },
   security.user.auth {
     enabled: false,
-    packages += "org.apache.drill.exec.rpc.user.security",
     impl: "pam",
     pam_profiles: [ "sudo", "login" ]
   },

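All scan targets now live under drill.classpath.scanning, replacing the scattered packages/functions keys removed above; because the lists reference themselves (${?drill.classpath.scanning.packages}), each module's drill-module.conf appends to the merged result instead of overwriting it. A quick sketch that just prints the merged lists through DrillConfig (ScanConfigSketch is an illustrative name):

  import java.util.List;

  import org.apache.drill.common.config.DrillConfig;

  public class ScanConfigSketch {
    public static void main(String[] args) {
      DrillConfig config = DrillConfig.create();
      // keys as declared in drill-module.conf above
      List<String> packages = config.getStringList("drill.classpath.scanning.packages");
      List<String> baseClasses = config.getStringList("drill.classpath.scanning.base.classes");
      System.out.println("scanned packages:   " + packages);
      System.out.println("scanned base types: " + baseClasses);
    }
  }
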
http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java b/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
index 9387662..6413571 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
@@ -30,6 +30,8 @@ import com.google.common.io.Files;
 
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.scanner.ClassPathScanner;
+import org.apache.drill.common.scanner.persistence.ScanResult;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.client.DrillClient;
@@ -113,9 +115,12 @@ public class BaseTestQuery extends ExecTest {
 
   private int[] columnWidths = new int[] { 8 };
 
+  private static ScanResult classpathScan;
+
   @BeforeClass
   public static void setupDefaultTestCluster() throws Exception {
     config = DrillConfig.create(TEST_CONFIGURATIONS);
+    classpathScan = ClassPathScanner.fromPrescan(config);
     openClient();
   }
 
@@ -179,7 +184,7 @@ public class BaseTestQuery extends ExecTest {
 
     bits = new Drillbit[drillbitCount];
     for(int i = 0; i < drillbitCount; i++) {
-      bits[i] = new Drillbit(config, serviceSet);
+      bits[i] = new Drillbit(config, serviceSet, classpathScan);
       bits[i].run();
 
       final StoragePluginRegistry pluginRegistry = bits[i].getContext().getStorage();

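The Drillbit constructor now takes the ScanResult explicitly, so BaseTestQuery scans once in setupDefaultTestCluster() and shares the result across every drillbit it starts. An embedded start then looks roughly like the RunRootExec change further down; EmbeddedDrillbitSketch is an illustrative name:

  import org.apache.drill.common.config.DrillConfig;
  import org.apache.drill.common.scanner.ClassPathScanner;
  import org.apache.drill.common.scanner.persistence.ScanResult;
  import org.apache.drill.exec.server.Drillbit;
  import org.apache.drill.exec.server.RemoteServiceSet;

  public class EmbeddedDrillbitSketch {
    public static void main(String[] args) throws Exception {
      DrillConfig config = DrillConfig.create();
      ScanResult scan = ClassPathScanner.fromPrescan(config); // scan once, reuse for every bit
      Drillbit bit = new Drillbit(config, RemoteServiceSet.getLocalServiceSet(), scan);
      bit.run();
      // ... run queries against bit.getContext(), then shut down
      bit.close();
    }
  }
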
http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
index 05b7238..79245b2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
@@ -23,9 +23,12 @@ import java.net.URL;
 import mockit.Mocked;
 import mockit.NonStrictExpectations;
 
-import org.apache.calcite.schema.SchemaPlus;
 import org.apache.calcite.jdbc.SimpleCalciteSchema;
+import org.apache.calcite.schema.SchemaPlus;
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.config.LogicalPlanPersistence;
+import org.apache.drill.common.scanner.ClassPathScanner;
+import org.apache.drill.common.scanner.persistence.ScanResult;
 import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
@@ -74,8 +77,9 @@ public class PlanningBase extends ExecTest{
     final String[] sqlStrings = sqlCommands.split(";");
     final LocalPStoreProvider provider = new LocalPStoreProvider(config);
     provider.start();
-
-    final SystemOptionManager systemOptions = new SystemOptionManager(config, provider);
+    final ScanResult scanResult = ClassPathScanner.fromPrescan(config);
+    final LogicalPlanPersistence logicalPlanPersistence = new LogicalPlanPersistence(config, scanResult);
+    final SystemOptionManager systemOptions = new SystemOptionManager(logicalPlanPersistence , provider);
     systemOptions.init();
     final UserSession userSession = UserSession.Builder.newBuilder().withOptionManager(systemOptions).build();
     final SessionOptionManager sessionOptions = (SessionOptionManager) userSession.getOptions();
@@ -94,6 +98,10 @@ public class PlanningBase extends ExecTest{
         result = systemOptions;
         dbContext.getPersistentStoreProvider();
         result = provider;
+        dbContext.getClasspathScan();
+        result = scanResult;
+        dbContext.getLpPersistence();
+        result = logicalPlanPersistence;
       }
     };
 
@@ -108,6 +116,8 @@ public class PlanningBase extends ExecTest{
       {
         context.getNewDefaultSchema();
         result = root;
+        context.getLpPersistence();
+        result = new LogicalPlanPersistence(config, ClassPathScanner.fromPrescan(config));
         context.getStorage();
         result = registry;
         context.getFunctionRegistry();
@@ -130,6 +140,8 @@ public class PlanningBase extends ExecTest{
         result = allocator;
         context.getExecutionControls();
         result = executionControls;
+        dbContext.getLpPersistence();
+        result = logicalPlanPersistence;
       }
     };
 

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/common/scanner/TestClassPathScanner.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/common/scanner/TestClassPathScanner.java b/exec/java-exec/src/test/java/org/apache/drill/common/scanner/TestClassPathScanner.java
new file mode 100644
index 0000000..824d6e8
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/common/scanner/TestClassPathScanner.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.common.scanner;
+
+import static java.util.Arrays.asList;
+import static java.util.Collections.sort;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.scanner.persistence.AnnotationDescriptor;
+import org.apache.drill.common.scanner.persistence.FieldDescriptor;
+import org.apache.drill.common.scanner.persistence.AnnotatedClassDescriptor;
+import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.apache.drill.exec.expr.DrillFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.fn.impl.testing.GeneratorFunctions.IncreasingBigInt;
+import org.apache.drill.exec.fn.impl.testing.GeneratorFunctions.RandomBigIntGauss;
+import org.apache.drill.exec.physical.base.PhysicalOperator;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestClassPathScanner {
+
+  @SafeVarargs
+  final private <T extends Comparable<? super T>> void assertListEqualsUnordered(Collection<T> list, T... expected) {
+    List<T> expectedList = asList(expected);
+    sort(expectedList);
+    List<T> gotList = new ArrayList<>(list);
+    sort(gotList);
+    assertEquals(expectedList.toString(), gotList.toString());
+  }
+
+  @Test
+  public void test() throws Exception {
+    ScanResult result = ClassPathScanner.fromPrescan(DrillConfig.create());
+    // if the build has run properly. BuildTimeScan.REGISTRY_FILE was created with a prescan
+//    assertListEqualsUnordered(result.getPrescannedPackages(),
+//      "org.apache.drill.common.logical",
+//      "org.apache.drill.exec.expr",
+//      "org.apache.drill.exec.physical.base",
+//      "org.apache.drill.exec.expr.fn.impl",
+//      "org.apache.drill.exec.physical.impl",
+//      "org.apache.drill.exec.rpc.user.security",
+//      "org.apache.drill.exec.store",
+//      "org.apache.drill.exec.store.mock",
+//      "org.apache.drill.exec.physical.config",
+//      "org.apache.drill.storage"
+//    );
+//    // this is added in the unit test folder that was not scanned so far
+//    assertListEqualsUnordered(result.getScannedPackages(),
+//      "org.apache.drill.exec.testing",
+//      "org.apache.drill.exec.fn.impl.testing",
+//      "org.apache.drill.exec.rpc.user.security.testing"
+//    );
+    List<AnnotatedClassDescriptor> functions = result.getAnnotatedClasses();
+    Set<String> scanned = new HashSet<>();
+    AnnotatedClassDescriptor functionRandomBigIntGauss = null;
+    for (AnnotatedClassDescriptor function : functions) {
+      assertTrue(function.getClassName() + " scanned twice", scanned.add(function.getClassName()));
+      if (function.getClassName().equals(RandomBigIntGauss.class.getName())) {
+        functionRandomBigIntGauss = function;
+      }
+    }
+    if (functionRandomBigIntGauss == null) {
+      Assert.fail("functionRandomBigIntGauss not found");
+    }
+    // TODO: use Andrew's randomized test framework to verify a subset of the functions
+    for (AnnotatedClassDescriptor function : functions) {
+      Class<?> c = Class.forName(function.getClassName(), false, this.getClass().getClassLoader());
+
+      Field[] fields = c.getDeclaredFields();
+      assertEquals("fields count for " + function, fields.length, function.getFields().size());
+      for (int i = 0; i < fields.length; i++) {
+        FieldDescriptor fieldDescriptor = function.getFields().get(i);
+        Field field = fields[i];
+        assertEquals(
+            "Class fields:\n" + Arrays.toString(fields) + "\n != \nDescriptor fields:\n" + function.getFields(),
+            field.getName(), fieldDescriptor.getName());
+        verifyAnnotations(field.getDeclaredAnnotations(), fieldDescriptor.getAnnotations());
+        assertEquals(field.getType(), fieldDescriptor.getFieldClass());
+      }
+
+      Annotation[] annotations = c.getDeclaredAnnotations();
+      List<AnnotationDescriptor> scannedAnnotations = function.getAnnotations();
+      verifyAnnotations(annotations, scannedAnnotations);
+      FunctionTemplate bytecodeAnnotation = function.getAnnotationProxy(FunctionTemplate.class);
+      FunctionTemplate reflectionAnnotation = c.getAnnotation(FunctionTemplate.class);
+      assertEquals(reflectionAnnotation.name(), bytecodeAnnotation.name());
+      assertArrayEquals(reflectionAnnotation.names(), bytecodeAnnotation.names());
+      assertEquals(reflectionAnnotation.scope(), bytecodeAnnotation.scope());
+      assertEquals(reflectionAnnotation.nulls(), bytecodeAnnotation.nulls());
+      assertEquals(reflectionAnnotation.isBinaryCommutative(), bytecodeAnnotation.isBinaryCommutative());
+      assertEquals(reflectionAnnotation.desc(), bytecodeAnnotation.desc());
+      assertEquals(reflectionAnnotation.costCategory(), bytecodeAnnotation.costCategory());
+    }
+    for (String baseType : result.getScannedClasses()) {
+      validateType(result, baseType);
+    }
+    assertTrue(result.getImplementations(PhysicalOperator.class).size() > 0);
+    assertTrue(result.getImplementations(DrillFunc.class).size() > 0);
+  }
+
+  private <T> void validateType(ScanResult result, String baseType) throws ClassNotFoundException {
+    if (baseType.startsWith("org.apache.hadoop.hive")) {
+      return;
+    }
+    @SuppressWarnings("unchecked")
+    Class<T> baseClass = (Class<T>)Class.forName(baseType);
+    Set<Class<? extends T>> impls = result.getImplementations(baseClass);
+    if (impls != null) {
+      for (Class<? extends T> impl : impls) {
+        assertTrue(impl + " extends " + baseType, baseClass.isAssignableFrom(impl));
+      }
+    }
+  }
+
+  private void verifyAnnotations(Annotation[] annotations, List<AnnotationDescriptor> scannedAnnotations) throws Exception {
+    assertEquals(Arrays.toString(annotations) + " expected but got " + scannedAnnotations, annotations.length, scannedAnnotations.size());
+    for (int i = 0; i < annotations.length; i++) {
+      Annotation annotation = annotations[i];
+      AnnotationDescriptor scannedAnnotation = scannedAnnotations.get(i);
+      Class<? extends Annotation> annotationType = annotation.annotationType();
+      assertEquals(annotationType.getName(), scannedAnnotation.getAnnotationType());
+      if (annotation instanceof FunctionTemplate) {
+        FunctionTemplate ft = (FunctionTemplate)annotation;
+        if (ft.name() != null && !ft.name().equals("")) {
+          assertEquals(ft.name(), scannedAnnotation.getSingleValue("name"));
+        }
+      }
+      // generally verify all properties
+      Annotation proxy = scannedAnnotation.getProxy(annotationType);
+      Method[] declaredMethods = annotationType.getDeclaredMethods();
+      for (Method method : declaredMethods) {
+        if (method.getParameterTypes().length == 0) {
+          Object reflectValue = method.invoke(annotation);
+          Object byteCodeValue = method.invoke(proxy);
+          String message = annotationType.getSimpleName() + "." + method.getName();
+          if (method.getReturnType().isArray()) {
+            assertArrayEquals(message, (Object[])reflectValue, (Object[])byteCodeValue);
+          } else {
+            assertEquals(message, reflectValue, byteCodeValue);
+          }
+        }
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/RunRootExec.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/RunRootExec.java b/exec/java-exec/src/test/java/org/apache/drill/exec/RunRootExec.java
index 13f9563..5928dce 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/RunRootExec.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/RunRootExec.java
@@ -22,6 +22,7 @@ import java.io.File;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.scanner.ClassPathScanner;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.PhysicalPlan;
@@ -45,7 +46,7 @@ public class RunRootExec {
   public static void main(String args[]) throws Exception {
     String path = args[0];
     int iterations = Integer.parseInt(args[1]);
-    Drillbit bit = new Drillbit(c, RemoteServiceSet.getLocalServiceSet());
+    Drillbit bit = new Drillbit(c, RemoteServiceSet.getLocalServiceSet(), ClassPathScanner.fromPrescan(c));
     bit.run();
     DrillbitContext bitContext = bit.getContext();
     PhysicalPlanReader reader = bitContext.getPlanReader();

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/TestOpSerialization.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/TestOpSerialization.java b/exec/java-exec/src/test/java/org/apache/drill/exec/TestOpSerialization.java
index 05105fc..f4fe2da 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/TestOpSerialization.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/TestOpSerialization.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertEquals;
 import java.util.List;
 
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.ValueExpressions;
 import org.apache.drill.common.logical.PlanProperties;
@@ -32,6 +33,7 @@ import org.apache.drill.exec.physical.config.Filter;
 import org.apache.drill.exec.physical.config.Screen;
 import org.apache.drill.exec.physical.config.UnionExchange;
 import org.apache.drill.exec.planner.PhysicalPlanReader;
+import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
 import org.apache.drill.exec.proto.CoordinationProtos;
 import org.apache.drill.exec.store.mock.MockSubScanPOP;
 import org.junit.Test;
@@ -44,7 +46,7 @@ public class TestOpSerialization {
   @Test
   public void testSerializedDeserialize() throws Throwable {
     DrillConfig c = DrillConfig.create();
-    PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+    PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
     MockSubScanPOP s = new MockSubScanPOP("abc", null);
     s.setOperatorId(3);
     Filter f = new Filter(s, new ValueExpressions.BooleanExpression("true", ExpressionPosition.UNKNOWN), 0.1f);
@@ -67,12 +69,10 @@ public class TestOpSerialization {
         pops = Lists.reverse(pops);
       }
       PhysicalPlan plan1 = new PhysicalPlan(PlanProperties.builder().build(), pops);
-      String json = plan1.unparse(c.getMapper().writer());
-      System.out.println(json);
+      LogicalPlanPersistence logicalPlanPersistence = PhysicalPlanReaderTestFactory.defaultLogicalPlanPersistence(c);
+      String json = plan1.unparse(logicalPlanPersistence.getMapper().writer());
 
       PhysicalPlan plan2 = reader.readPhysicalPlan(json);
-      System.out.println("++++++++");
-      System.out.println(plan2.unparse(c.getMapper().writer()));
 
       PhysicalOperator root = plan2.getSortedOperators(false).iterator().next();
       assertEquals(0, root.getOperatorId());

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestWriteToDisk.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestWriteToDisk.java b/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestWriteToDisk.java
index 0f437e9..0dd20b6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestWriteToDisk.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestWriteToDisk.java
@@ -24,6 +24,7 @@ import com.google.common.io.Files;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.scanner.ClassPathScanner;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.common.util.TestTools;

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/client/DumpCatTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/client/DumpCatTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/client/DumpCatTest.java
index 6b6bd64..d6c5688 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/client/DumpCatTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/client/DumpCatTest.java
@@ -17,18 +17,17 @@
  */
 package org.apache.drill.exec.client;
 
+import static org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader;
 import static org.junit.Assert.assertTrue;
 
 import java.io.FileInputStream;
 
-import mockit.Injectable;
-import mockit.NonStrictExpectations;
-
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.scanner.ClassPathScanner;
 import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ExecTest;
-import org.apache.drill.exec.compile.CodeCompiler;
+import org.apache.drill.exec.compile.CodeCompilerTestFactory;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.memory.RootAllocatorFactory;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -39,7 +38,6 @@ import org.apache.drill.exec.physical.impl.OperatorCreatorRegistry;
 import org.apache.drill.exec.physical.impl.SimpleRootExec;
 import org.apache.drill.exec.planner.PhysicalPlanReader;
 import org.apache.drill.exec.proto.BitControl.PlanFragment;
-import org.apache.drill.exec.proto.CoordinationProtos;
 import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
 import org.apache.drill.exec.proto.helper.QueryIdHelper;
 import org.apache.drill.exec.rpc.user.UserServer.UserClientConnection;
@@ -53,6 +51,9 @@ import com.codahale.metrics.MetricRegistry;
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 
+import mockit.Injectable;
+import mockit.NonStrictExpectations;
+
 /**
  * The unit test case will read a physical plan in json format. The physical plan contains a "trace" operator,
  * which will produce a dump file.  The dump file will be input into DumpCat to test query mode and batch mode.
@@ -69,12 +70,12 @@ public class DumpCatTest  extends ExecTest {
       new NonStrictExpectations(){{
           bitContext.getMetrics(); result = new MetricRegistry();
           bitContext.getAllocator(); result = RootAllocatorFactory.newRoot(c);
-          bitContext.getConfig(); result = c;
-          bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
-          bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
+          bitContext.getConfig(); result = c;  minTimes = 1;
+          bitContext.getCompiler(); result = CodeCompilerTestFactory.getTestCompiler(c);
+          bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(ClassPathScanner.fromPrescan(c));
       }};
 
-      final PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+      final PhysicalPlanReader reader = defaultPhysicalPlanReader(c);
       final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/trace/simple_trace.json"), Charsets.UTF_8));
       final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
       final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/compile/CodeCompilerTestFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/compile/CodeCompilerTestFactory.java b/exec/java-exec/src/test/java/org/apache/drill/exec/compile/CodeCompilerTestFactory.java
new file mode 100644
index 0000000..9032946
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/compile/CodeCompilerTestFactory.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.compile;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.io.IOException;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.config.LogicalPlanPersistence;
+import org.apache.drill.common.scanner.ClassPathScanner;
+import org.apache.drill.exec.server.options.SystemOptionManager;
+import org.apache.drill.exec.store.sys.local.LocalPStoreProvider;
+
+public class CodeCompilerTestFactory {
+  public static CodeCompiler getTestCompiler(DrillConfig c) throws IOException {
+    DrillConfig config = checkNotNull(c);
+    LogicalPlanPersistence persistence = new LogicalPlanPersistence(config, ClassPathScanner.fromPrescan(config));
+    LocalPStoreProvider provider = new LocalPStoreProvider(config);
+    SystemOptionManager systemOptionManager = new SystemOptionManager(persistence, provider);
+    return new CodeCompiler(config, systemOptionManager.init());
+  }
+}
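
The new factory replaces the CodeCompiler.getTestCompiler helper the tests previously called; it chains DrillConfig -> classpath prescan -> LogicalPlanPersistence -> SystemOptionManager -> CodeCompiler so callers no longer touch that wiring. A minimal usage sketch (the wrapper class name is hypothetical):

import java.io.IOException;

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.compile.CodeCompiler;
import org.apache.drill.exec.compile.CodeCompilerTestFactory;

public class CodeCompilerTestFactoryUsage {
  public static CodeCompiler testCompiler() throws IOException {
    // One call hides the SystemOptionManager setup shown in the factory above.
    return CodeCompilerTestFactory.getTestCompiler(DrillConfig.create());
  }
}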

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/compile/bytecode/ReplaceMethodInvoke.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/compile/bytecode/ReplaceMethodInvoke.java b/exec/java-exec/src/test/java/org/apache/drill/exec/compile/bytecode/ReplaceMethodInvoke.java
index 345ac3c..8486801 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/compile/bytecode/ReplaceMethodInvoke.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/compile/bytecode/ReplaceMethodInvoke.java
@@ -25,6 +25,8 @@ import java.net.URL;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.exec.compile.DrillCheckClassAdapter;
 import org.apache.drill.exec.compile.QueryClassLoader;
+import org.apache.drill.exec.planner.PhysicalPlanReader;
+import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
 import org.apache.drill.exec.server.options.SystemOptionManager;
 import org.apache.drill.exec.store.sys.local.LocalPStoreProvider;
 import org.objectweb.asm.ClassReader;
@@ -55,7 +57,7 @@ public class ReplaceMethodInvoke {
     check(output);
 
     final DrillConfig c = DrillConfig.forClient();
-    final SystemOptionManager m = new SystemOptionManager(c, new LocalPStoreProvider(c));
+    final SystemOptionManager m = new SystemOptionManager(PhysicalPlanReaderTestFactory.defaultLogicalPlanPersistence(c), new LocalPStoreProvider(c));
     m.init();
     try (QueryClassLoader ql = new QueryClassLoader(DrillConfig.create(), m)) {
       ql.injectByteCode("org.apache.drill.Pickle$OutgoingBatch", output);
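
SystemOptionManager's constructor now takes a LogicalPlanPersistence instead of a DrillConfig, so standalone call sites such as ReplaceMethodInvoke need to build one first. A sketch of the two constructions this patch uses, assuming only classes that appear elsewhere in the diff (the wrapper class and method names are illustrative):

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.config.LogicalPlanPersistence;
import org.apache.drill.common.scanner.ClassPathScanner;
import org.apache.drill.exec.server.options.SystemOptionManager;
import org.apache.drill.exec.store.sys.local.LocalPStoreProvider;

public class SystemOptionManagerWiring {
  public static SystemOptionManager build(DrillConfig c) throws Exception {
    // Explicit wiring, as in CodeCompilerTestFactory:
    LogicalPlanPersistence lpp = new LogicalPlanPersistence(c, ClassPathScanner.fromPrescan(c));
    // Equivalent shortcut used in ReplaceMethodInvoke above:
    //   PhysicalPlanReaderTestFactory.defaultLogicalPlanPersistence(c)
    SystemOptionManager m = new SystemOptionManager(lpp, new LocalPStoreProvider(c));
    m.init();
    return m;
  }
}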

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/GeneratorFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/GeneratorFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/GeneratorFunctions.java
deleted file mode 100644
index 087595b..0000000
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/GeneratorFunctions.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.fn.impl;
-
-import java.util.Random;
-
-import org.apache.drill.exec.ExecTest;
-import org.apache.drill.exec.expr.DrillSimpleFunc;
-import org.apache.drill.exec.expr.annotations.FunctionTemplate;
-import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope;
-import org.apache.drill.exec.expr.annotations.Output;
-import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.annotations.Workspace;
-import org.apache.drill.exec.expr.holders.BigIntHolder;
-import org.apache.drill.exec.expr.holders.Float8Holder;
-
-public class GeneratorFunctions extends ExecTest {
-
-  public static final Random random = new Random(1234L);
-
-  @FunctionTemplate(name = "increasingBigInt", isRandom = true,
-    scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-  public static class IncreasingBigInt implements DrillSimpleFunc {
-
-    @Param BigIntHolder start;
-    @Workspace long current;
-    @Output BigIntHolder out;
-
-    public void setup() {
-      current = 0;
-    }
-
-    public void eval() {
-      out.value = start.value + current++;
-    }
-  }
-
-  @FunctionTemplate(name = "randomBigInt", isRandom = true,
-    scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-  public static class RandomBigIntGauss implements DrillSimpleFunc {
-
-    @Param BigIntHolder range;
-    @Output BigIntHolder out;
-
-    public void setup() {
-    }
-
-    public void eval() {
-      out.value = (long)(org.apache.drill.exec.fn.impl.GeneratorFunctions.random.nextGaussian() * range.value);
-    }
-  }
-
-  @FunctionTemplate(name = "randomBigInt", isRandom = true,
-    scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-  public static class RandomBigInt implements DrillSimpleFunc {
-
-    @Param BigIntHolder min;
-    @Param BigIntHolder max;
-    @Output BigIntHolder out;
-
-    public void setup() {
-    }
-
-    public void eval() {
-      out.value = (long)(org.apache.drill.exec.fn.impl.GeneratorFunctions.random.nextFloat() * (max.value - min.value) + min.value);
-    }
-  }
-
-  @FunctionTemplate(name = "randomFloat8", isRandom = true,
-    scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-  public static class RandomFloat8Gauss implements DrillSimpleFunc {
-
-    @Param BigIntHolder range;
-    @Output
-    Float8Holder out;
-
-    public void setup() {
-    }
-
-    public void eval() {
-      out.value = org.apache.drill.exec.fn.impl.GeneratorFunctions.random.nextGaussian() * range.value;
-    }
-  }
-
-  @FunctionTemplate(name = "randomFloat8", isRandom = true,
-    scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-  public static class RandomFloat8 implements DrillSimpleFunc {
-
-    @Param BigIntHolder min;
-    @Param BigIntHolder max;
-    @Output Float8Holder out;
-
-    public void setup() {
-    }
-
-    public void eval() {
-      out.value = org.apache.drill.exec.fn.impl.GeneratorFunctions.random.nextFloat() * (max.value - min.value) + min.value;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMathFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMathFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMathFunctions.java
index 99ee904..4865683 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMathFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestMathFunctions.java
@@ -20,15 +20,14 @@ package org.apache.drill.exec.fn.impl;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import mockit.Injectable;
-import mockit.NonStrictExpectations;
 
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.scanner.ClassPathScanner;
 import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.exec.ExecTest;
-import org.apache.drill.exec.compile.CodeCompiler;
+import org.apache.drill.exec.compile.CodeCompilerTestFactory;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.memory.RootAllocatorFactory;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -38,8 +37,8 @@ import org.apache.drill.exec.physical.impl.ImplCreator;
 import org.apache.drill.exec.physical.impl.OperatorCreatorRegistry;
 import org.apache.drill.exec.physical.impl.SimpleRootExec;
 import org.apache.drill.exec.planner.PhysicalPlanReader;
+import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
 import org.apache.drill.exec.proto.BitControl;
-import org.apache.drill.exec.proto.CoordinationProtos;
 import org.apache.drill.exec.rpc.user.UserServer.UserClientConnection;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.vector.Float8Vector;
@@ -50,6 +49,9 @@ import com.codahale.metrics.MetricRegistry;
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 
+import mockit.Injectable;
+import mockit.NonStrictExpectations;
+
 
 public class TestMathFunctions extends ExecTest {
   //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestMathFunctions.class);
@@ -62,11 +64,11 @@ public class TestMathFunctions extends ExecTest {
       bitContext.getMetrics(); result = new MetricRegistry();
       bitContext.getAllocator(); result = RootAllocatorFactory.newRoot(c);
       bitContext.getConfig(); result = c;
-      bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
-      bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
+      bitContext.getCompiler(); result = CodeCompilerTestFactory.getTestCompiler(c);
+      bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(ClassPathScanner.fromPrescan(c));
     }};
 
-    final PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+    final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
     final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/functions/simple_math_functions.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, BitControl.PlanFragment.getDefaultInstance(), connection, registry);

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewMathFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewMathFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewMathFunctions.java
index 3447dae..b8e7c37 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewMathFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewMathFunctions.java
@@ -23,11 +23,9 @@ import static org.junit.Assert.assertTrue;
 
 import java.math.BigDecimal;
 
-import mockit.Injectable;
-import mockit.NonStrictExpectations;
-
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.exec.compile.CodeCompiler;
+import org.apache.drill.common.scanner.ClassPathScanner;
+import org.apache.drill.exec.compile.CodeCompilerTestFactory;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.memory.RootAllocatorFactory;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -37,8 +35,8 @@ import org.apache.drill.exec.physical.impl.ImplCreator;
 import org.apache.drill.exec.physical.impl.OperatorCreatorRegistry;
 import org.apache.drill.exec.physical.impl.SimpleRootExec;
 import org.apache.drill.exec.planner.PhysicalPlanReader;
+import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
 import org.apache.drill.exec.proto.BitControl.PlanFragment;
-import org.apache.drill.exec.proto.CoordinationProtos;
 import org.apache.drill.exec.rpc.user.UserServer;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.vector.ValueVector;
@@ -49,6 +47,9 @@ import com.codahale.metrics.MetricRegistry;
 import com.google.common.base.Charsets;
 import com.google.common.io.Resources;
 
+import mockit.Injectable;
+import mockit.NonStrictExpectations;
+
 public class TestNewMathFunctions {
   //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestNewMathFunctions.class);
   private final DrillConfig c = DrillConfig.create();
@@ -79,14 +80,14 @@ public class TestNewMathFunctions {
     new NonStrictExpectations() {{
       bitContext.getMetrics(); result = new MetricRegistry();
       bitContext.getAllocator(); result = RootAllocatorFactory.newRoot(c);
-      bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
+      bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(ClassPathScanner.fromPrescan(c));
       bitContext.getConfig(); result = c;
-      bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
+      bitContext.getCompiler(); result = CodeCompilerTestFactory.getTestCompiler(c);
     }};
 
     final String planString = Resources.toString(Resources.getResource(planPath), Charsets.UTF_8);
     if (reader == null) {
-      reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+      reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
     }
     if (registry == null) {
       registry = new FunctionImplementationRegistry(c);

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestRepeatedFunction.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestRepeatedFunction.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestRepeatedFunction.java
index 68c3b9c..81d1157 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestRepeatedFunction.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestRepeatedFunction.java
@@ -19,15 +19,14 @@ package org.apache.drill.exec.fn.impl;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import mockit.Injectable;
-import mockit.NonStrictExpectations;
 
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.scanner.ClassPathScanner;
 import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.exec.ExecTest;
-import org.apache.drill.exec.compile.CodeCompiler;
+import org.apache.drill.exec.compile.CodeCompilerTestFactory;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.memory.RootAllocatorFactory;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -37,8 +36,8 @@ import org.apache.drill.exec.physical.impl.ImplCreator;
 import org.apache.drill.exec.physical.impl.OperatorCreatorRegistry;
 import org.apache.drill.exec.physical.impl.SimpleRootExec;
 import org.apache.drill.exec.planner.PhysicalPlanReader;
+import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
 import org.apache.drill.exec.proto.BitControl.PlanFragment;
-import org.apache.drill.exec.proto.CoordinationProtos;
 import org.apache.drill.exec.rpc.user.UserServer.UserClientConnection;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.vector.BitVector;
@@ -49,6 +48,9 @@ import com.codahale.metrics.MetricRegistry;
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 
+import mockit.Injectable;
+import mockit.NonStrictExpectations;
+
 public class TestRepeatedFunction extends ExecTest{
   //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestRepeatedFunction.class);
   private final DrillConfig c = DrillConfig.create();
@@ -59,12 +61,12 @@ public class TestRepeatedFunction extends ExecTest{
     new NonStrictExpectations() {{
       bitContext.getMetrics(); result = new MetricRegistry();
       bitContext.getAllocator(); result = RootAllocatorFactory.newRoot(c);
-      bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(c);
+      bitContext.getOperatorCreatorRegistry(); result = new OperatorCreatorRegistry(ClassPathScanner.fromPrescan(c));
       bitContext.getConfig(); result = c;
-      bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
+      bitContext.getCompiler(); result = CodeCompilerTestFactory.getTestCompiler(c);
     }};
 
-    final PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+    final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
     final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/physical_repeated_1.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/GeneratorFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/GeneratorFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/GeneratorFunctions.java
new file mode 100644
index 0000000..93c67cc
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/GeneratorFunctions.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.fn.impl.testing;
+
+import java.util.Random;
+
+import org.apache.drill.exec.ExecTest;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import org.apache.drill.exec.expr.holders.BigIntHolder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
+
+public class GeneratorFunctions extends ExecTest {
+
+  public static final Random random = new Random(1234L);
+
+  @FunctionTemplate(name = "increasingBigInt", isRandom = true,
+    scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
+  public static class IncreasingBigInt implements DrillSimpleFunc {
+
+    @Param BigIntHolder start;
+    @Workspace long current;
+    @Output BigIntHolder out;
+
+    public void setup() {
+      current = 0;
+    }
+
+    public void eval() {
+      out.value = start.value + current++;
+    }
+  }
+
+  @FunctionTemplate(name = "randomBigInt", isRandom = true,
+    scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
+  public static class RandomBigIntGauss implements DrillSimpleFunc {
+
+    @Param BigIntHolder range;
+    @Output BigIntHolder out;
+
+    public void setup() {
+    }
+
+    public void eval() {
+      out.value = (long)(org.apache.drill.exec.fn.impl.testing.GeneratorFunctions.random.nextGaussian() * range.value);
+    }
+  }
+
+  @FunctionTemplate(name = "randomBigInt", isRandom = true,
+    scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
+  public static class RandomBigInt implements DrillSimpleFunc {
+
+    @Param BigIntHolder min;
+    @Param BigIntHolder max;
+    @Output BigIntHolder out;
+
+    public void setup() {
+    }
+
+    public void eval() {
+      out.value = (long)(org.apache.drill.exec.fn.impl.testing.GeneratorFunctions.random.nextFloat() * (max.value - min.value) + min.value);
+    }
+  }
+
+  @FunctionTemplate(name = "randomFloat8", isRandom = true,
+    scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
+  public static class RandomFloat8Gauss implements DrillSimpleFunc {
+
+    @Param BigIntHolder range;
+    @Output
+    Float8Holder out;
+
+    public void setup() {
+    }
+
+    public void eval() {
+      out.value = org.apache.drill.exec.fn.impl.testing.GeneratorFunctions.random.nextGaussian() * range.value;
+    }
+  }
+
+  @FunctionTemplate(name = "randomFloat8", isRandom = true,
+    scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
+  public static class RandomFloat8 implements DrillSimpleFunc {
+
+    @Param BigIntHolder min;
+    @Param BigIntHolder max;
+    @Output Float8Holder out;
+
+    public void setup() {
+    }
+
+    public void eval() {
+      out.value = org.apache.drill.exec.fn.impl.testing.GeneratorFunctions.random.nextFloat() * (max.value - min.value) + min.value;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java b/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
index 18aae2c..d522663 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
@@ -21,8 +21,6 @@ package org.apache.drill.exec.memory;
 
 import static org.junit.Assert.fail;
 
-import io.netty.buffer.DrillBuf;
-
 import java.util.Iterator;
 import java.util.List;
 import java.util.Properties;
@@ -38,8 +36,8 @@ import org.apache.drill.exec.ops.OperatorStats;
 import org.apache.drill.exec.physical.PhysicalPlan;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
 import org.apache.drill.exec.planner.PhysicalPlanReader;
+import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
 import org.apache.drill.exec.proto.BitControl;
-import org.apache.drill.exec.proto.CoordinationProtos;
 import org.apache.drill.exec.proto.UserBitShared;
 import org.apache.drill.exec.server.Drillbit;
 import org.apache.drill.exec.server.DrillbitContext;
@@ -50,6 +48,8 @@ import org.junit.Test;
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 
+import io.netty.buffer.DrillBuf;
+
 public class TestAllocators {
 
   private static final Properties TEST_CONFIGURATIONS = new Properties() {
@@ -109,7 +109,7 @@ public class TestAllocators {
     FragmentContext fragmentContext2 = new FragmentContext(bitContext, pf2, null, functionRegistry);
 
     // Get a few physical operators. Easiest way is to read a physical plan.
-    PhysicalPlanReader planReader = new PhysicalPlanReader(config, config.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance(), storageRegistry);
+    PhysicalPlanReader planReader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(bitContext, storageRegistry);
     PhysicalPlan plan = planReader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile(planFile), Charsets.UTF_8));
     List<PhysicalOperator> physicalOperators = plan.getSortedOperators();
     Iterator<PhysicalOperator> physicalOperatorIterator = physicalOperators.iterator();
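
Unlike the other tests, TestAllocators obtains its reader from the factory overload that takes the mocked DrillbitContext plus a StoragePluginRegistry, presumably because its plan references storage-plugin operators. A trivial wrapper sketch (the wrapper class name is hypothetical, and the StoragePluginRegistry import is assumed to be org.apache.drill.exec.store.StoragePluginRegistry):

import org.apache.drill.exec.planner.PhysicalPlanReader;
import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.store.StoragePluginRegistry;

public class ReaderWithStorage {
  // Used when the physical plan contains scans or writers that resolve through the registry.
  public static PhysicalPlanReader reader(DrillbitContext bitContext, StoragePluginRegistry storageRegistry)
      throws Exception {
    return PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(bitContext, storageRegistry);
  }
}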

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/opt/BasicOptimizerTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/opt/BasicOptimizerTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/opt/BasicOptimizerTest.java
index 71e5d48..a5f7bdc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/opt/BasicOptimizerTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/opt/BasicOptimizerTest.java
@@ -18,9 +18,11 @@
 package org.apache.drill.exec.opt;
 
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.logical.LogicalPlan;
 import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.exec.ExecTest;
+import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
 import org.junit.Test;
 
 public class BasicOptimizerTest extends ExecTest {
@@ -28,8 +30,10 @@ public class BasicOptimizerTest extends ExecTest {
     @Test
     public void parseSimplePlan() throws Exception{
         DrillConfig c = DrillConfig.create();
-        LogicalPlan plan = LogicalPlan.parse(c, FileUtils.getResourceAsString("/scan_screen_logical.json"));
-        System.out.println(plan.unparse(c));
+        LogicalPlanPersistence lpp = PhysicalPlanReaderTestFactory.defaultLogicalPlanPersistence(c);
+        LogicalPlan plan = LogicalPlan.parse(lpp, FileUtils.getResourceAsString("/scan_screen_logical.json"));
+        String unparse = plan.unparse(lpp);
+//        System.out.println(unparse);
         //System.out.println( new BasicOptimizer(DrillConfig.create()).convert(plan).unparse(c.getMapper().writer()));
     }
 }
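
The same LogicalPlanPersistence also replaces the DrillConfig in LogicalPlan.parse and unparse, as the BasicOptimizerTest hunk shows. A minimal round-trip sketch based on that hunk (the wrapper class name is hypothetical):

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.config.LogicalPlanPersistence;
import org.apache.drill.common.logical.LogicalPlan;
import org.apache.drill.common.util.FileUtils;
import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;

public class LogicalPlanRoundTrip {
  public static String roundTrip() throws Exception {
    DrillConfig c = DrillConfig.create();
    LogicalPlanPersistence lpp = PhysicalPlanReaderTestFactory.defaultLogicalPlanPersistence(c);
    // Parse the sample logical plan and serialize it back through the same persistence.
    LogicalPlan plan = LogicalPlan.parse(lpp, FileUtils.getResourceAsString("/scan_screen_logical.json"));
    return plan.unparse(lpp);
  }
}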

http://git-wip-us.apache.org/repos/asf/drill/blob/dbcab0fe/exec/java-exec/src/test/java/org/apache/drill/exec/physical/config/TestParsePhysicalPlan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/config/TestParsePhysicalPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/config/TestParsePhysicalPlan.java
index 4ad181f..3bfa347 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/config/TestParsePhysicalPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/config/TestParsePhysicalPlan.java
@@ -18,10 +18,14 @@
 package org.apache.drill.exec.physical.config;
 
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.config.LogicalPlanPersistence;
+import org.apache.drill.common.scanner.ClassPathScanner;
+import org.apache.drill.common.scanner.persistence.ScanResult;
 import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.physical.PhysicalPlan;
 import org.apache.drill.exec.planner.PhysicalPlanReader;
+import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
 import org.apache.drill.exec.proto.CoordinationProtos;
 import org.junit.Test;
 
@@ -37,10 +41,14 @@ public class TestParsePhysicalPlan extends ExecTest {
   @Test
   public void parseSimplePlan() throws Exception{
     DrillConfig c = DrillConfig.create();
-    PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
-    ObjectReader r = c.getMapper().reader(PhysicalPlan.class);
-    ObjectWriter writer = c.getMapper().writer();
+    ScanResult scanResult = ClassPathScanner.fromPrescan(c);
+    LogicalPlanPersistence lpp = new LogicalPlanPersistence(c, scanResult);
+
+    PhysicalPlanReader reader = new PhysicalPlanReader(c, scanResult, lpp, CoordinationProtos.DrillbitEndpoint.getDefaultInstance(), null);
+    ObjectReader r = lpp.getMapper().reader(PhysicalPlan.class);
+    ObjectWriter writer = lpp.getMapper().writer();
     PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/physical_test1.json"), Charsets.UTF_8));
-    System.out.println(plan.unparse(writer));
+    String unparse = plan.unparse(writer);
+//    System.out.println(unparse);
   }
 }
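
TestParsePhysicalPlan spells out the full wiring the test factories wrap: the five-argument PhysicalPlanReader takes the scan result and the LogicalPlanPersistence, and the Jackson mapper now comes from the persistence object rather than the DrillConfig. A round-trip sketch based on that hunk (the wrapper class name is hypothetical; as in the test, the trailing null presumably stands in for the storage-plugin registry):

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.config.LogicalPlanPersistence;
import org.apache.drill.common.scanner.ClassPathScanner;
import org.apache.drill.common.scanner.persistence.ScanResult;
import org.apache.drill.common.util.FileUtils;
import org.apache.drill.exec.physical.PhysicalPlan;
import org.apache.drill.exec.planner.PhysicalPlanReader;
import org.apache.drill.exec.proto.CoordinationProtos;

import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.base.Charsets;
import com.google.common.io.Files;

public class PhysicalPlanRoundTrip {
  public static String roundTrip() throws Exception {
    DrillConfig c = DrillConfig.create();
    ScanResult scanResult = ClassPathScanner.fromPrescan(c);
    LogicalPlanPersistence lpp = new LogicalPlanPersistence(c, scanResult);
    PhysicalPlanReader reader = new PhysicalPlanReader(
        c, scanResult, lpp, CoordinationProtos.DrillbitEndpoint.getDefaultInstance(), null);
    PhysicalPlan plan = reader.readPhysicalPlan(
        Files.toString(FileUtils.getResourceAsFile("/physical_test1.json"), Charsets.UTF_8));
    ObjectWriter writer = lpp.getMapper().writer();
    return plan.unparse(writer);
  }
}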

