drill-commits mailing list archives

From j...@apache.org
Subject [1/4] drill git commit: DRILL-4047: Support querying a table with options
Date Thu, 26 Nov 2015 04:03:57 GMT
Repository: drill
Updated Branches:
  refs/heads/master 367d74a65 -> 5cc3448bc


DRILL-4047: Support querying a table with options

- simplify DrillOperatorTable; separate TranslatableTable from DrillTable
- fix table name in error message; improve test debuggability
- FormatCreator refactor

Update calcite fork version to r9

This closes #246
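
For context, the feature lets a query supply format-plugin options inline as named table-function arguments; the new WorkspaceSchemaFactory and FormatPluginOptionExtractor code below turns those arguments into a FormatPluginConfig. A minimal sketch of the kind of query this enables, written as a BaseTestQuery-style test (the file path and option values are illustrative, not taken from this commit):

import org.apache.drill.BaseTestQuery;
import org.junit.Test;

public class SelectWithOptionSketch extends BaseTestQuery {
  @Test
  public void readCsvWithInlineOptions() throws Exception {
    // "type" picks the FormatPluginConfig class by its @JsonTypeName (here "text");
    // the remaining named arguments are mapped onto that class's fields by reflection.
    test("select * from table(dfs.`/tmp/sample.csv`(type => 'text', fieldDelimiter => ','))");
  }
}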


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/f7786cc7
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/f7786cc7
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/f7786cc7

Branch: refs/heads/master
Commit: f7786cc7c5eb3c7febde90ac588b52b473a2b1ea
Parents: 367d74a
Author: Julien Le Dem <julien@ledem.net>
Authored: Fri Nov 13 11:26:05 2015 -0800
Committer: Jason Altekruse <altekrusejason@gmail.com>
Committed: Wed Nov 25 14:42:33 2015 -0800

----------------------------------------------------------------------
 .../drill/exec/planner/sql/HiveUDFOperator.java |   5 +
 exec/java-exec/pom.xml                          |   5 +
 .../exec/planner/common/DrillScanRelBase.java   |   7 +-
 .../drill/exec/planner/logical/DrillTable.java  |   1 -
 .../planner/logical/DrillTranslatableTable.java |  81 +++++
 .../apache/drill/exec/planner/sql/Checker.java  |   5 +
 .../exec/planner/sql/DrillOperatorTable.java    |  15 +-
 .../drill/exec/store/dfs/FileSystemPlugin.java  |  20 +-
 .../drill/exec/store/dfs/FormatCreator.java     | 112 +++++--
 .../store/dfs/FormatPluginOptionExtractor.java  | 115 +++++++
 .../dfs/FormatPluginOptionsDescriptor.java      | 195 ++++++++++++
 .../exec/store/dfs/WorkspaceSchemaFactory.java  | 310 +++++++++++++++++--
 .../exec/store/easy/json/JSONFormatPlugin.java  |   2 +-
 .../sequencefile/SequenceFileFormatConfig.java  |   4 +-
 .../exec/store/easy/text/TextFormatPlugin.java  |   3 +-
 .../java/org/apache/drill/BaseTestQuery.java    |  15 +-
 .../java/org/apache/drill/DrillTestWrapper.java |   4 +-
 .../org/apache/drill/TestFrameworkTest.java     |  17 +-
 .../org/apache/drill/TestSelectWithOption.java  | 225 ++++++++++++++
 .../dfs/TestFormatPluginOptionExtractor.java    |  67 ++++
 pom.xml                                         |   2 +-
 21 files changed, 1116 insertions(+), 94 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/HiveUDFOperator.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/HiveUDFOperator.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/HiveUDFOperator.java
index 377ea75..a9647bd 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/HiveUDFOperator.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/HiveUDFOperator.java
@@ -89,5 +89,10 @@ public class HiveUDFOperator extends SqlFunction {
     public String getAllowedSignatures(SqlOperator op, String opName) {
       return opName + "(HiveUDF - Opaque)";
     }
+
+    @Override
+    public boolean isOptional(int arg) {
+      return false;
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/pom.xml
----------------------------------------------------------------------
diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index a5d2e11..ada66eb 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -583,6 +583,11 @@
               <lookAhead>2</lookAhead>
               <isStatic>false</isStatic>
               <outputDirectory>${project.build.directory}/generated-sources/</outputDirectory>
+<!--
+              <debugParser>true</debugParser>
+              <debugLookAhead>true</debugLookAhead>
+              <debugTokenManager>true</debugTokenManager>
+ -->
             </configuration>
           </execution>
         </executions>

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillScanRelBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillScanRelBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillScanRelBase.java
index 9da7d36..04caed8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillScanRelBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillScanRelBase.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.planner.common;
 
 import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.DrillTranslatableTable;
 import org.apache.calcite.rel.core.TableScan;
 import org.apache.calcite.plan.Convention;
 import org.apache.calcite.plan.RelOptCluster;
@@ -34,7 +35,11 @@ public abstract class DrillScanRelBase extends TableScan implements DrillRelNode
 
   public DrillScanRelBase(Convention convention, RelOptCluster cluster, RelTraitSet traits, RelOptTable table) {
     super(cluster, traits, table);
-    this.drillTable = table.unwrap(DrillTable.class);
+    DrillTable unwrap = table.unwrap(DrillTable.class);
+    if (unwrap == null) {
+      unwrap = table.unwrap(DrillTranslatableTable.class).getDrillTable();
+    }
+    this.drillTable = unwrap;
     assert drillTable != null;
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
index 4ddfcfd..61f242f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
@@ -23,7 +23,6 @@ import org.apache.calcite.schema.Schema.TableType;
 import org.apache.calcite.schema.Statistic;
 import org.apache.calcite.schema.Statistics;
 import org.apache.calcite.schema.Table;
-
 import org.apache.drill.common.JSONOptions;
 import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.exec.physical.base.GroupScan;

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTranslatableTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTranslatableTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTranslatableTable.java
new file mode 100644
index 0000000..8ec805f
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTranslatableTable.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.logical;
+
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.plan.RelOptTable.ToRelContext;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.schema.Statistic;
+import org.apache.calcite.schema.TranslatableTable;
+import org.apache.calcite.schema.Schema.TableType;
+import org.apache.drill.exec.planner.logical.DrillTable;
+
+/**
+ * TableMacros must return a TranslatableTable.
+ * This class adapts the existing DrillTable to a TranslatableTable.
+ */
+public class DrillTranslatableTable implements TranslatableTable {
+
+  /** all calls will be delegated to this field */
+  private final DrillTable drillTable;
+
+  public DrillTranslatableTable(DrillTable drillTable) {
+    this.drillTable = drillTable;
+  }
+
+  public DrillTable getDrillTable() {
+    return drillTable;
+  }
+
+  @Override
+  public RelDataType getRowType(RelDataTypeFactory typeFactory) {
+    return drillTable.getRowType(typeFactory);
+  }
+
+  @Override
+  public Statistic getStatistic() {
+    return drillTable.getStatistic();
+  }
+
+  @Override
+  public RelNode toRel(ToRelContext context, RelOptTable table) {
+    return drillTable.toRel(context, table);
+  }
+
+  @Override
+  public TableType getJdbcTableType() {
+    return drillTable.getJdbcTableType();
+  }
+
+  @Override
+  public int hashCode() {
+    return drillTable.hashCode();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return drillTable.equals(obj);
+  }
+
+  @Override
+  public String toString() {
+    return drillTable.toString();
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/Checker.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/Checker.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/Checker.java
index 338977c..c274d2d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/Checker.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/Checker.java
@@ -49,4 +49,9 @@ class Checker implements SqlOperandTypeChecker {
     return Consistency.NONE;
   }
 
+  @Override
+  public boolean isOptional(int i) {
+    return false;
+  }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillOperatorTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillOperatorTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillOperatorTable.java
index b319863..4dd7963 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillOperatorTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillOperatorTable.java
@@ -50,22 +50,13 @@ public class DrillOperatorTable extends SqlStdOperatorTable {
 
   @Override
   public void lookupOperatorOverloads(SqlIdentifier opName, SqlFunctionCategory category, SqlSyntax syntax, List<SqlOperator> operatorList) {
-    if (syntax == SqlSyntax.FUNCTION) {
-
-      // add optiq.
-      inner.lookupOperatorOverloads(opName, category, syntax, operatorList);
-
-      if(!operatorList.isEmpty()){
-        return;
-      }
+    inner.lookupOperatorOverloads(opName, category, syntax, operatorList);
 
+    if (operatorList.isEmpty() && syntax == SqlSyntax.FUNCTION && opName.isSimple()) {
       List<SqlOperator> drillOps = opMap.get(opName.getSimple().toLowerCase());
-      if(drillOps != null){
+      if (drillOps != null) {
         operatorList.addAll(drillOps);
       }
-
-    } else {
-      inner.lookupOperatorOverloads(opName, category, syntax, operatorList);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
index fb3e58a..5e46d4a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
@@ -57,7 +57,7 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FileSystemPlugin.class);
 
   private final FileSystemSchemaFactory schemaFactory;
-  private final Map<String, FormatPlugin> formatPluginsByName;
+  private final FormatCreator formatCreator;
   private final Map<FormatPluginConfig, FormatPlugin> formatPluginsByConfig;
   private final FileSystemConfig config;
   private final Configuration fsConf;
@@ -73,10 +73,10 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
       fsConf.set("fs.classpath.impl", ClassPathFileSystem.class.getName());
       fsConf.set("fs.drill-local.impl", LocalSyncableFileSystem.class.getName());
 
-      formatPluginsByName = FormatCreator.getFormatPlugins(context, fsConf, config, context.getClasspathScan());
+      formatCreator = new FormatCreator(context, fsConf, config, context.getClasspathScan());
       List<FormatMatcher> matchers = Lists.newArrayList();
       formatPluginsByConfig = Maps.newHashMap();
-      for (FormatPlugin p : formatPluginsByName.values()) {
+      for (FormatPlugin p : formatCreator.getConfiguredFormatPlugins()) {
         matchers.add(p.getMatcher());
         formatPluginsByConfig.put(p.getConfig(), p);
       }
@@ -85,13 +85,13 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
       List<WorkspaceSchemaFactory> factories = Lists.newArrayList();
       if (!noWorkspace) {
         for (Map.Entry<String, WorkspaceConfig> space : config.workspaces.entrySet()) {
-          factories.add(new WorkspaceSchemaFactory(this, space.getKey(), name, space.getValue(), matchers, context.getLpPersistence()));
+          factories.add(new WorkspaceSchemaFactory(this, space.getKey(), name, space.getValue(), matchers, context.getLpPersistence(), context.getClasspathScan()));
         }
       }
 
       // if the "default" workspace is not given add one.
       if (noWorkspace || !config.workspaces.containsKey(DEFAULT_WS_NAME)) {
-        factories.add(new WorkspaceSchemaFactory(this, DEFAULT_WS_NAME, name, WorkspaceConfig.DEFAULT, matchers, context.getLpPersistence()));
+        factories.add(new WorkspaceSchemaFactory(this, DEFAULT_WS_NAME, name, WorkspaceConfig.DEFAULT, matchers, context.getLpPersistence(), context.getClasspathScan()));
       }
 
       this.schemaFactory = new FileSystemSchemaFactory(name, factories);
@@ -116,12 +116,12 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
     FormatSelection formatSelection = selection.getWith(lpPersistance, FormatSelection.class);
     FormatPlugin plugin;
     if (formatSelection.getFormat() instanceof NamedFormatPluginConfig) {
-      plugin = formatPluginsByName.get( ((NamedFormatPluginConfig) formatSelection.getFormat()).name);
+      plugin = formatCreator.getFormatPluginByName( ((NamedFormatPluginConfig) formatSelection.getFormat()).name);
     } else {
       plugin = formatPluginsByConfig.get(formatSelection.getFormat());
     }
     if (plugin == null) {
-      throw new IOException(String.format("Failure getting requested format plugin named '%s'.  It was not one of the format plugins registered.", formatSelection.getFormat()));
+      plugin = formatCreator.newFormatPlugin(formatSelection.getFormat());
     }
     return plugin.getGroupScan(userName, formatSelection.getSelection(), columns);
   }
@@ -132,12 +132,12 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
   }
 
   public FormatPlugin getFormatPlugin(String name) {
-    return formatPluginsByName.get(name);
+    return formatCreator.getFormatPluginByName(name);
   }
 
   public FormatPlugin getFormatPlugin(FormatPluginConfig config) {
     if (config instanceof NamedFormatPluginConfig) {
-      return formatPluginsByName.get(((NamedFormatPluginConfig) config).name);
+      return formatCreator.getFormatPluginByName(((NamedFormatPluginConfig) config).name);
     } else {
       return formatPluginsByConfig.get(config);
     }
@@ -146,7 +146,7 @@ public class FileSystemPlugin extends AbstractStoragePlugin{
   @Override
   public Set<StoragePluginOptimizerRule> getOptimizerRules(OptimizerRulesContext optimizerRulesContext) {
     Builder<StoragePluginOptimizerRule> setBuilder = ImmutableSet.builder();
-    for(FormatPlugin plugin : this.formatPluginsByName.values()){
+    for(FormatPlugin plugin : formatCreator.getConfiguredFormatPlugins()){
       Set<StoragePluginOptimizerRule> rules = plugin.getOptimizerRules();
       if(rules != null && rules.size() > 0){
         setBuilder.addAll(rules);

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
index 3f932cd..ece3418 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
@@ -20,8 +20,10 @@ package org.apache.drill.exec.store.dfs;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Map;
 
+import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.common.scanner.persistence.ScanResult;
@@ -31,6 +33,9 @@ import org.apache.hadoop.conf.Configuration;
 
 import com.google.common.collect.Maps;
 
+/**
+ * Responsible for instantiating format plugins
+ */
 public class FormatCreator {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FormatCreator.class);
 
@@ -39,17 +44,55 @@ public class FormatCreator {
   private static final ConstructorChecker DEFAULT_BASED = new ConstructorChecker(String.class, DrillbitContext.class,
       Configuration.class, StoragePluginConfig.class);
 
-  static Map<String, FormatPlugin> getFormatPlugins(
+  /**
+   * Returns a Map from the FormatPlugin Config class to the constructor of the format plugin that accepts it.
+   * This is used to create a format plugin instance from its configuration.
+   * @param pluginClasses the FormatPlugin classes to index on their config class
+   * @return a map from config type to the constructor that takes that config
+   */
+  private static Map<Class<?>, Constructor<?>> initConfigConstructors(Collection<Class<? extends FormatPlugin>> pluginClasses) {
+    Map<Class<?>, Constructor<?>> constructors = Maps.newHashMap();
+    for (Class<? extends FormatPlugin> pluginClass: pluginClasses) {
+      for (Constructor<?> c : pluginClass.getConstructors()) {
+        try {
+          if (!FORMAT_BASED.check(c)) {
+            continue;
+          }
+          Class<?> configClass = c.getParameterTypes()[4];
+          constructors.put(configClass, c);
+        } catch (Exception e) {
+          logger.warn(String.format("Failure while trying instantiate FormatPlugin %s.", pluginClass.getName()), e);
+        }
+      }
+    }
+    return constructors;
+  }
+
+
+  private final DrillbitContext context;
+  private final Configuration fsConf;
+  private final FileSystemConfig storageConfig;
+
+  /** format plugins initialized from the drill config, indexed by name */
+  private final Map<String, FormatPlugin> configuredPlugins;
+  /** The format plugin classes retrieved from classpath scanning */
+  private final Collection<Class<? extends FormatPlugin>> pluginClasses;
+  /** a Map from the FormatPlugin Config class to the constructor of the format plugin that accepts it.*/
+  private final Map<Class<?>, Constructor<?>> configConstructors;
+
+  FormatCreator(
       DrillbitContext context,
       Configuration fsConf,
       FileSystemConfig storageConfig,
       ScanResult classpathScan) {
-    Map<String, FormatPlugin> plugins = Maps.newHashMap();
-
-    Collection<Class<? extends FormatPlugin>> pluginClasses = classpathScan.getImplementations(FormatPlugin.class);
+    this.context = context;
+    this.fsConf = fsConf;
+    this.storageConfig = storageConfig;
+    this.pluginClasses = classpathScan.getImplementations(FormatPlugin.class);
+    this.configConstructors = initConfigConstructors(pluginClasses);
 
+    Map<String, FormatPlugin> plugins = Maps.newHashMap();
     if (storageConfig.formats == null || storageConfig.formats.isEmpty()) {
-
       for (Class<? extends FormatPlugin> pluginClass: pluginClasses) {
         for (Constructor<?> c : pluginClass.getConstructors()) {
           try {
@@ -63,25 +106,9 @@ public class FormatCreator {
           }
         }
       }
-
     } else {
-      Map<Class<?>, Constructor<?>> constructors = Maps.newHashMap();
-      for (Class<? extends FormatPlugin> pluginClass: pluginClasses) {
-        for (Constructor<?> c : pluginClass.getConstructors()) {
-          try {
-            if (!FORMAT_BASED.check(c)) {
-              continue;
-            }
-            Class<?> configClass = c.getParameterTypes()[4];
-            constructors.put(configClass, c);
-          } catch (Exception e) {
-            logger.warn(String.format("Failure while trying instantiate FormatPlugin %s.", pluginClass.getName()), e);
-          }
-        }
-      }
-
       for (Map.Entry<String, FormatPluginConfig> e : storageConfig.formats.entrySet()) {
-        Constructor<?> c = constructors.get(e.getValue().getClass());
+        Constructor<?> c = configConstructors.get(e.getValue().getClass());
         if (c == null) {
           logger.warn("Unable to find constructor for storage config named '{}' of type '{}'.", e.getKey(), e.getValue().getClass().getName());
           continue;
@@ -92,10 +119,47 @@ public class FormatCreator {
           logger.warn("Failure initializing storage config named '{}' of type '{}'.", e.getKey(), e.getValue().getClass().getName(), e1);
         }
       }
-
     }
+    this.configuredPlugins = Collections.unmodifiableMap(plugins);
+  }
+
+  /**
+   * @param name the name of the format plugin instance in the Drill config
+   * @return The configured FormatPlugin for this name
+   */
+  FormatPlugin getFormatPluginByName(String name) {
+    return configuredPlugins.get(name);
+  }
 
-    return plugins;
+  /**
+   * @return all the format plugins from the Drill config
+   */
+  Collection<FormatPlugin> getConfiguredFormatPlugins() {
+    return configuredPlugins.values();
   }
 
+  /**
+   * Instantiate a new format plugin instance from the provided config object
+   * @param fpconfig the conf for the plugin
+   * @return the newly created instance of a FormatPlugin based on provided config
+   */
+  FormatPlugin newFormatPlugin(FormatPluginConfig fpconfig) {
+    Constructor<?> c = configConstructors.get(fpconfig.getClass());
+    if (c == null) {
+      throw UserException.dataReadError()
+        .message(
+            "Unable to find constructor for storage config of type %s",
+            fpconfig.getClass().getName())
+        .build(logger);
+    }
+    try {
+      return (FormatPlugin) c.newInstance(null, context, fsConf, storageConfig, fpconfig);
+    } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
+      throw UserException.dataReadError(e)
+        .message(
+            "Failure initializing storage config of type %s",
+            fpconfig.getClass().getName())
+        .build(logger);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionExtractor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionExtractor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionExtractor.java
new file mode 100644
index 0000000..8b566a1
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionExtractor.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.dfs;
+
+import static java.util.Collections.unmodifiableList;
+import static java.util.Collections.unmodifiableMap;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.common.logical.FormatPluginConfigBase;
+import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory.TableInstance;
+import org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory.TableSignature;
+import org.slf4j.Logger;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Manages format plugin options used to define table macros
+ */
+final class FormatPluginOptionExtractor {
+  private static final Logger logger = org.slf4j.LoggerFactory.getLogger(FormatPluginOptionExtractor.class);
+
+  private final Map<String, FormatPluginOptionsDescriptor> optionsByTypeName;
+
+  /**
+   * extracts the format plugin options based on the scanned implementations of {@link FormatPluginConfig}
+   * @param scanResult the classpath scan used to discover FormatPluginConfig implementations
+   */
+  FormatPluginOptionExtractor(ScanResult scanResult) {
+    Map<String, FormatPluginOptionsDescriptor> result = new HashMap<>();
+    Set<Class<? extends FormatPluginConfig>> pluginConfigClasses = FormatPluginConfigBase.getSubTypes(scanResult);
+    for (Class<? extends FormatPluginConfig> pluginConfigClass : pluginConfigClasses) {
+      FormatPluginOptionsDescriptor optionsDescriptor = new FormatPluginOptionsDescriptor(pluginConfigClass);
+      result.put(optionsDescriptor.typeName.toLowerCase(), optionsDescriptor);
+    }
+    this.optionsByTypeName = unmodifiableMap(result);
+  }
+
+  /**
+   * @return the extracted options
+   */
+  @VisibleForTesting
+  Collection<FormatPluginOptionsDescriptor> getOptions() {
+    return optionsByTypeName.values();
+  }
+
+  /**
+   * given a table name, returns the function signatures available to configure the FormatPlugin
+   * @param tableName the name of the table (or table function in this context)
+   * @return the available signatures
+   */
+  List<TableSignature> getTableSignatures(String tableName) {
+    List<TableSignature> result = new ArrayList<>();
+    for (FormatPluginOptionsDescriptor optionsDescriptor : optionsByTypeName.values()) {
+      TableSignature sig = optionsDescriptor.getTableSignature(tableName);
+      result.add(sig);
+    }
+    return unmodifiableList(result);
+  }
+
+  /**
+   * given a table function signature and the corresponding parameters
+   * returns the corresponding FormatPlugin configuration
+   * @param t the signature and parameters (it should be one of the signatures returned by {@link FormatPluginOptionExtractor#getTableSignatures(String)})
+   * @return the config
+   */
+  FormatPluginConfig createConfigForTable(TableInstance t) {
+    if (!t.sig.params.get(0).name.equals("type")) {
+      throw UserException.parseError()
+        .message("unknown first param for %s", t.sig)
+        .addContext("table", t.sig.name)
+        .build(logger);
+    }
+    String type = (String)t.params.get(0);
+    if (type == null) {
+      throw UserException.parseError()
+          .message("type param must be present but was missing")
+          .addContext("table", t.sig.name)
+          .build(logger);
+    }
+    FormatPluginOptionsDescriptor optionsDescriptor = optionsByTypeName.get(type.toLowerCase());
+    if (optionsDescriptor == null) {
+      throw UserException.parseError()
+          .message(
+              "unknown type %s, expected one of %s",
+              type, optionsByTypeName.keySet())
+          .addContext("table", t.sig.name)
+          .build(logger);
+    }
+    return optionsDescriptor.createConfigForTable(t);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionsDescriptor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionsDescriptor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionsDescriptor.java
new file mode 100644
index 0000000..34a20e8
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionsDescriptor.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.dfs;
+
+import static java.util.Collections.unmodifiableMap;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory.TableInstance;
+import org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory.TableParamDef;
+import org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory.TableSignature;
+import org.slf4j.Logger;
+
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+/**
+ * Describes the options for a format plugin
+ * extracted from the FormatPluginConfig subclass
+ */
+final class FormatPluginOptionsDescriptor {
+  private static final Logger logger = org.slf4j.LoggerFactory.getLogger(FormatPluginOptionsDescriptor.class);
+
+  final Class<? extends FormatPluginConfig> pluginConfigClass;
+  final String typeName;
+  private final Map<String, TableParamDef> functionParamsByName;
+
+  /**
+   * Uses reflection to extract options based on the fields of the provided config class
+   * (the "extensions" List field is ignored pending removal; char fields are turned into String)
+   * The class must be annotated with {@code @JsonTypeName("type name")}
+   * @param pluginConfigClass the config class we want to extract options from through reflection
+   */
+  FormatPluginOptionsDescriptor(Class<? extends FormatPluginConfig> pluginConfigClass) {
+    this.pluginConfigClass = pluginConfigClass;
+    Map<String, TableParamDef> paramsByName = new LinkedHashMap<>();
+    Field[] fields = pluginConfigClass.getDeclaredFields();
+    // @JsonTypeName("text")
+    JsonTypeName annotation = pluginConfigClass.getAnnotation(JsonTypeName.class);
+    this.typeName = annotation != null ? annotation.value() : null;
+    if (this.typeName != null) {
+      paramsByName.put("type", new TableParamDef("type", String.class));
+    }
+    for (Field field : fields) {
+      if (Modifier.isStatic(field.getModifiers())
+          // we want to deprecate this field
+          || (field.getName().equals("extensions") && field.getType() == List.class)) {
+        continue;
+      }
+      Class<?> fieldType = field.getType();
+      if (fieldType == char.class) {
+        // calcite does not like char type. Just use String and enforce later that length == 1
+        fieldType = String.class;
+      }
+      paramsByName.put(field.getName(), new TableParamDef(field.getName(), fieldType).optional());
+    }
+    this.functionParamsByName = unmodifiableMap(paramsByName);
+  }
+
+  /**
+   * returns the table function signature for this format plugin config class
+   * @param tableName the table for which we want a table function signature
+   * @return the signature
+   */
+  TableSignature getTableSignature(String tableName) {
+    return new TableSignature(tableName, params());
+  }
+
+  /**
+   * @return the parameters extracted from the provided format plugin config class
+   */
+  private List<TableParamDef> params() {
+    return new ArrayList<>(functionParamsByName.values());
+  }
+
+  /**
+   * @return a readable String of the parameters and their names
+   */
+  String presentParams() {
+    StringBuilder sb = new StringBuilder("(");
+    List<TableParamDef> params = params();
+    for (int i = 0; i < params.size(); i++) {
+      TableParamDef paramDef = params.get(i);
+      if (i != 0) {
+        sb.append(", ");
+      }
+      sb.append(paramDef.name).append(": ").append(paramDef.type.getSimpleName());
+    }
+    sb.append(")");
+    return sb.toString();
+  }
+
+  /**
+   * creates an instance of the FormatPluginConfig based on the passed parameters
+   * @param t the signature and the parameters passed to the table function
+   * @return the corresponding config
+   */
+  FormatPluginConfig createConfigForTable(TableInstance t) {
+    // Per the constructor, the first param is always "type"
+    TableParamDef typeParamDef = t.sig.params.get(0);
+    Object typeParam = t.params.get(0);
+    if (!typeParamDef.name.equals("type")
+        || typeParamDef.type != String.class
+        || !(typeParam instanceof String)
+        || !typeName.equalsIgnoreCase((String)typeParam)) {
+      // if we reach here, there's a bug as all signatures generated start with a type parameter
+      throw UserException.parseError()
+          .message(
+              "This function signature is not supported: %s\n"
+              + "expecting %s",
+              t.presentParams(), this.presentParams())
+          .addContext("table", t.sig.name)
+          .build(logger);
+    }
+    FormatPluginConfig config;
+    try {
+      config = pluginConfigClass.newInstance();
+    } catch (InstantiationException | IllegalAccessException e) {
+      throw UserException.parseError(e)
+          .message(
+              "configuration for format of type %s can not be created (class: %s)",
+              this.typeName, pluginConfigClass.getName())
+          .addContext("table", t.sig.name)
+          .build(logger);
+    }
+    for (int i = 1; i < t.params.size(); i++) {
+      Object param = t.params.get(i);
+      if (param == null) {
+        // when null is passed, we leave the default defined in the config class
+        continue;
+      }
+      TableParamDef paramDef = t.sig.params.get(i);
+      TableParamDef expectedParamDef = this.functionParamsByName.get(paramDef.name);
+      if (expectedParamDef == null || expectedParamDef.type != paramDef.type) {
+        throw UserException.parseError()
+        .message(
+            "The parameters provided are not applicable to the type specified:\n"
+                + "provided: %s\nexpected: %s",
+            t.presentParams(), this.presentParams())
+        .addContext("table", t.sig.name)
+        .build(logger);
+      }
+      try {
+        Field field = pluginConfigClass.getField(paramDef.name);
+        field.setAccessible(true);
+        if (field.getType() == char.class && param instanceof String) {
+          String stringParam = (String) param;
+          if (stringParam.length() != 1) {
+            throw UserException.parseError()
+              .message("Expected single character but was String: %s", stringParam)
+              .addContext("table", t.sig.name)
+              .addContext("parameter", paramDef.name)
+              .build(logger);
+          }
+          param = stringParam.charAt(0);
+        }
+        field.set(config, param);
+      } catch (IllegalAccessException | NoSuchFieldException | SecurityException e) {
+        throw UserException.parseError(e)
+            .message("can not set value %s to parameter %s: %s", param, paramDef.name, paramDef.type)
+            .addContext("table", t.sig.name)
+            .addContext("parameter", paramDef.name)
+            .build(logger);
+      }
+    }
+    return config;
+  }
+
+  @Override
+  public String toString() {
+    return "OptionsDescriptor [pluginConfigClass=" + pluginConfigClass + ", typeName=" + typeName
+        + ", functionParamsByName=" + functionParamsByName + "]";
+  }
+}
\ No newline at end of file
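
To make the reflection above concrete: for the TextFormatConfig shown later in this diff (@JsonTypeName("text")), presentParams() would render roughly the line below, with "extensions" skipped as noted in the constructor, char fields surfaced as String, and every parameter except "type" optional (only the fields visible in this diff are listed; the rest are omitted here):

  (type: String, lineDelimiter: String, fieldDelimiter: String, quote: String, ...)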

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
index 37da606..65e387e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
@@ -17,9 +17,15 @@
  */
 package org.apache.drill.exec.store.dfs;
 
+import static com.google.common.collect.Collections2.transform;
+import static com.google.common.collect.Sets.newHashSet;
+import static java.util.Collections.unmodifiableList;
+
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
@@ -28,10 +34,18 @@ import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.regex.Pattern;
 
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.schema.Function;
+import org.apache.calcite.schema.FunctionParameter;
 import org.apache.calcite.schema.Table;
+import org.apache.calcite.schema.TableMacro;
+import org.apache.calcite.schema.TranslatableTable;
 import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.common.scanner.persistence.ScanResult;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.dotdrill.DotDrillFile;
 import org.apache.drill.exec.dotdrill.DotDrillType;
@@ -39,8 +53,11 @@ import org.apache.drill.exec.dotdrill.DotDrillUtil;
 import org.apache.drill.exec.dotdrill.View;
 import org.apache.drill.exec.planner.logical.CreateTableEntry;
 import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.DrillTranslatableTable;
 import org.apache.drill.exec.planner.logical.DrillViewTable;
+import org.apache.drill.exec.planner.logical.DynamicDrillTable;
 import org.apache.drill.exec.planner.logical.FileSystemCreateTableEntry;
+import org.apache.drill.exec.planner.sql.DrillOperatorTable;
 import org.apache.drill.exec.planner.sql.ExpandingConcurrentMap;
 import org.apache.drill.exec.store.AbstractSchema;
 import org.apache.drill.exec.store.PartitionNotFoundException;
@@ -61,7 +78,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
 public class WorkspaceSchemaFactory {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WorkspaceSchemaFactory.class);
+  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WorkspaceSchemaFactory.class);
 
   private final List<FormatMatcher> fileMatchers;
   private final List<FormatMatcher> dropFileMatchers;
@@ -76,14 +93,16 @@ public class WorkspaceSchemaFactory {
   private final LogicalPlanPersistence logicalPlanPersistence;
   private final Path wsPath;
 
+  private final FormatPluginOptionExtractor optionExtractor;
+
   public WorkspaceSchemaFactory(
       FileSystemPlugin plugin,
       String schemaName,
       String storageEngineName,
       WorkspaceConfig config,
       List<FormatMatcher> formatMatchers,
-      LogicalPlanPersistence logicalPlanPersistence)
-    throws ExecutionSetupException, IOException {
+      LogicalPlanPersistence logicalPlanPersistence,
+      ScanResult scanResult) throws ExecutionSetupException, IOException {
     this.logicalPlanPersistence = logicalPlanPersistence;
     this.fsConf = plugin.getFsConf();
     this.plugin = plugin;
@@ -94,6 +113,7 @@ public class WorkspaceSchemaFactory {
     this.storageEngineName = storageEngineName;
     this.schemaName = schemaName;
     this.wsPath = new Path(config.getLocation());
+    this.optionExtractor = new FormatPluginOptionExtractor(scanResult);
 
     for (FormatMatcher m : formatMatchers) {
       if (m.supportDirectoryReads()) {
@@ -148,12 +168,207 @@ public class WorkspaceSchemaFactory {
     return DotDrillType.VIEW.getPath(config.getLocation(), name);
   }
 
-  public WorkspaceSchema createSchema(List<String> parentSchemaPath, SchemaConfig schemaConfig) throws  IOException {
+  public WorkspaceSchema createSchema(List<String> parentSchemaPath, SchemaConfig schemaConfig) throws IOException {
     return new WorkspaceSchema(parentSchemaPath, schemaName, schemaConfig);
   }
 
-  public class WorkspaceSchema extends AbstractSchema implements ExpandingConcurrentMap.MapValueFactory<String, DrillTable> {
-    private final ExpandingConcurrentMap<String, DrillTable> tables = new ExpandingConcurrentMap<>(this);
+  /**
+   * Implementation of a table macro that generates a table based on parameters
+   */
+  static final class WithOptionsTableMacro implements TableMacro {
+
+    private final TableSignature sig;
+    private final WorkspaceSchema schema;
+
+    WithOptionsTableMacro(TableSignature sig, WorkspaceSchema schema) {
+      super();
+      this.sig = sig;
+      this.schema = schema;
+    }
+
+    @Override
+    public List<FunctionParameter> getParameters() {
+      List<FunctionParameter> result = new ArrayList<>();
+      for (int i = 0; i < sig.params.size(); i++) {
+        final TableParamDef p = sig.params.get(i);
+        final int ordinal = i;
+        result.add(new FunctionParameter() {
+          @Override
+          public int getOrdinal() {
+            return ordinal;
+          }
+
+          @Override
+          public String getName() {
+            return p.name;
+          }
+
+          @Override
+          public RelDataType getType(RelDataTypeFactory typeFactory) {
+            return typeFactory.createJavaType(p.type);
+          }
+
+          @Override
+          public boolean isOptional() {
+            return p.optional;
+          }
+        });
+      }
+      return result;
+    }
+
+    @Override
+    public TranslatableTable apply(List<Object> arguments) {
+      return new DrillTranslatableTable(schema.getDrillTable(new TableInstance(sig, arguments)));
+    }
+
+  }
+
+  private static Object[] array(Object... objects) {
+    return objects;
+  }
+
+  static final class TableInstance {
+    final TableSignature sig;
+    final List<Object> params;
+
+    TableInstance(TableSignature sig, List<Object> params) {
+      super();
+      if (params.size() != sig.params.size()) {
+        throw UserException.parseError()
+            .message(
+                "should have as many params (%d) as signature (%d)",
+                params.size(), sig.params.size())
+            .addContext("table", sig.name)
+            .build(logger);
+      }
+      this.sig = sig;
+      this.params = unmodifiableList(params);
+    }
+
+    String presentParams() {
+      StringBuilder sb = new StringBuilder("(");
+      boolean first = true;
+      for (int i = 0; i < params.size(); i++) {
+        Object param = params.get(i);
+        if (param != null) {
+          if (first) {
+            first = false;
+          } else {
+            sb.append(", ");
+          }
+          TableParamDef paramDef = sig.params.get(i);
+          sb.append(paramDef.name).append(": ").append(paramDef.type.getSimpleName()).append(" => ").append(param);
+        }
+      }
+      sb.append(")");
+      return sb.toString();
+    }
+
+    private Object[] toArray() {
+      return array(sig, params);
+    }
+
+    @Override
+    public int hashCode() {
+      return Arrays.hashCode(toArray());
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (obj instanceof TableInstance) {
+        return Arrays.equals(this.toArray(), ((TableInstance)obj).toArray());
+      }
+      return false;
+    }
+
+    @Override
+    public String toString() {
+      return sig.name + (params.size() == 0 ? "" : presentParams());
+    }
+  }
+
+  static final class TableParamDef {
+    final String name;
+    final Class<?> type;
+    final boolean optional;
+
+    TableParamDef(String name, Class<?> type) {
+      this(name, type, false);
+    }
+
+    TableParamDef(String name, Class<?> type, boolean optional) {
+      this.name = name;
+      this.type = type;
+      this.optional = optional;
+    }
+
+    TableParamDef optional() {
+      return new TableParamDef(name, type, true);
+    }
+
+    private Object[] toArray() {
+      return array(name, type, optional);
+    }
+
+    @Override
+    public int hashCode() {
+      return Arrays.hashCode(toArray());
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (obj instanceof TableParamDef) {
+        return Arrays.equals(this.toArray(), ((TableParamDef)obj).toArray());
+      }
+      return false;
+    }
+
+    @Override
+    public String toString() {
+      String p = name + ": " + type;
+      return optional ? "[" + p + "]" : p;
+    }
+  }
+
+  static final class TableSignature {
+    final String name;
+    final List<TableParamDef> params;
+
+    TableSignature(String name, TableParamDef... params) {
+      this(name, Arrays.asList(params));
+    }
+
+    TableSignature(String name, List<TableParamDef> params) {
+      this.name = name;
+      this.params = unmodifiableList(params);
+    }
+
+    private Object[] toArray() {
+      return array(name, params);
+    }
+
+    @Override
+    public int hashCode() {
+      return Arrays.hashCode(toArray());
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (obj instanceof TableSignature) {
+        return Arrays.equals(this.toArray(), ((TableSignature)obj).toArray());
+      }
+      return false;
+    }
+
+    @Override
+    public String toString() {
+      return name + params;
+    }
+  }
+
+  public class WorkspaceSchema extends AbstractSchema implements ExpandingConcurrentMap.MapValueFactory<TableInstance, DrillTable> {
+    private final ExpandingConcurrentMap<TableInstance, DrillTable> tables = new ExpandingConcurrentMap<>(this);
     private final SchemaConfig schemaConfig;
     private final DrillFileSystem fs;
 
@@ -163,6 +378,10 @@ public class WorkspaceSchemaFactory {
       this.fs = ImpersonationUtil.createFileSystem(schemaConfig.getUserName(), fsConf);
     }
 
+    DrillTable getDrillTable(TableInstance key) {
+      return tables.get(key);
+    }
+
     @Override
     public boolean createView(View view) throws IOException {
       Path viewPath = getViewPath(view.getName());
@@ -201,7 +420,7 @@ public class WorkspaceSchemaFactory {
       List<DotDrillFile> files;
       try {
         files = DotDrillUtil.getDotDrills(fs, new Path(config.getLocation()), DotDrillType.VIEW);
-        for(DotDrillFile f : files) {
+        for (DotDrillFile f : files) {
           viewSet.add(f.getBaseName());
         }
       } catch (UnsupportedOperationException e) {
@@ -221,41 +440,68 @@ public class WorkspaceSchemaFactory {
       return viewSet;
     }
 
+    private Set<String> rawTableNames() {
+      return newHashSet(
+          transform(tables.keySet(), new com.google.common.base.Function<TableInstance, String>() {
+        @Override
+        public String apply(TableInstance input) {
+          return input.sig.name;
+        }
+      }));
+    }
+
     @Override
     public Set<String> getTableNames() {
-      return Sets.union(tables.keySet(), getViews());
+      return Sets.union(rawTableNames(), getViews());
     }
 
-    private View getView(DotDrillFile f) throws IOException{
+    @Override
+    public Set<String> getFunctionNames() {
+      return rawTableNames();
+    }
+
+    @Override
+    public List<Function> getFunctions(String name) {
+      List<TableSignature> sigs = optionExtractor.getTableSignatures(name);
+      return Lists.transform(sigs, new com.google.common.base.Function<TableSignature, Function>() {
+        @Override
+        public Function apply(TableSignature input) {
+          return new WithOptionsTableMacro(input, WorkspaceSchema.this);
+        }
+      });
+    }
+
+    private View getView(DotDrillFile f) throws IOException {
       assert f.getType() == DotDrillType.VIEW;
       return f.getView(logicalPlanPersistence);
     }
 
     @Override
-    public Table getTable(String name) {
+    public Table getTable(String tableName) {
+      TableInstance tableKey = new TableInstance(new TableSignature(tableName), ImmutableList.of());
       // first check existing tables.
-      if(tables.alreadyContainsKey(name)) {
-        return tables.get(name);
+      if (tables.alreadyContainsKey(tableKey)) {
+        return tables.get(tableKey);
       }
 
       // then look for files that start with this name and end in .drill.
       List<DotDrillFile> files = Collections.emptyList();
       try {
         try {
-          files = DotDrillUtil.getDotDrills(fs, new Path(config.getLocation()), name, DotDrillType.VIEW);
-        } catch(AccessControlException e) {
+          files = DotDrillUtil.getDotDrills(fs, new Path(config.getLocation()), tableName, DotDrillType.VIEW);
+        } catch (AccessControlException e) {
           if (!schemaConfig.getIgnoreAuthErrors()) {
             logger.debug(e.getMessage());
             throw UserException.permissionError(e)
               .message("Not authorized to list or query tables in schema [%s]", getFullSchemaName())
               .build(logger);
           }
-        } catch(IOException e) {
-          logger.warn("Failure while trying to list view tables in workspace [{}]", name, getFullSchemaName(), e);
+        } catch (IOException e) {
+          logger.warn("Failure while trying to list view tables in workspace [{}]", tableName, getFullSchemaName(), e);
         }
 
-        for(DotDrillFile f : files) {
-          switch(f.getType()) {
+        for (DotDrillFile f : files) {
+          switch (f.getType()) {
           case VIEW:
             try {
               return new DrillViewTable(getView(f), f.getOwner(), schemaConfig.getViewExpansionContext());
@@ -263,11 +509,11 @@ public class WorkspaceSchemaFactory {
               if (!schemaConfig.getIgnoreAuthErrors()) {
                 logger.debug(e.getMessage());
                 throw UserException.permissionError(e)
-                  .message("Not authorized to read view [%s] in schema [%s]", name, getFullSchemaName())
+                  .message("Not authorized to read view [%s] in schema [%s]", tableName, getFullSchemaName())
                   .build(logger);
               }
             } catch (IOException e) {
-              logger.warn("Failure while trying to load {}.view.drill file in workspace [{}]", name, getFullSchemaName(), e);
+              logger.warn("Failure while trying to load {}.view.drill file in workspace [{}]", tableName, getFullSchemaName(), e);
             }
           }
         }
@@ -275,7 +521,7 @@ public class WorkspaceSchemaFactory {
         logger.debug("The filesystem for this workspace does not support this operation.", e);
       }
 
-      return tables.get(name);
+      return tables.get(tableKey);
     }
 
     @Override
@@ -313,19 +559,29 @@ public class WorkspaceSchemaFactory {
       return FileSystemConfig.NAME;
     }
 
+    private DrillTable isReadable(FormatMatcher m, FileSelection fileSelection) throws IOException {
+      return m.isReadable(fs, fileSelection, plugin, storageEngineName, schemaConfig.getUserName());
+    }
+
     @Override
-    public DrillTable create(String key) {
+    public DrillTable create(TableInstance key) {
       try {
-        final FileSelection selection = FileSelection.create(fs, config.getLocation(), key);
-        if (selection == null) {
+        final FileSelection fileSelection = FileSelection.create(fs, config.getLocation(), key.sig.name);
+        if (fileSelection == null) {
           return null;
         }
 
-        final boolean hasDirectories = selection.containsDirectories(fs);
+        final boolean hasDirectories = fileSelection.containsDirectories(fs);
+        if (key.sig.params.size() > 0) {
+          FormatPluginConfig fconfig = optionExtractor.createConfigForTable(key);
+          return new DynamicDrillTable(
+              plugin, storageEngineName, schemaConfig.getUserName(),
+              new FormatSelection(fconfig, fileSelection));
+        }
         if (hasDirectories) {
           for (final FormatMatcher matcher : dirMatchers) {
             try {
-              DrillTable table = matcher.isReadable(fs, selection, plugin, storageEngineName, schemaConfig.getUserName());
+              DrillTable table = matcher.isReadable(fs, fileSelection, plugin, storageEngineName, schemaConfig.getUserName());
               if (table != null) {
                 return table;
               }
@@ -335,7 +591,7 @@ public class WorkspaceSchemaFactory {
           }
         }
 
-        final FileSelection newSelection = hasDirectories ? selection.minusDirectories(fs) : selection;
+        final FileSelection newSelection = hasDirectories ? fileSelection.minusDirectories(fs) : fileSelection;
         if (newSelection == null) {
           return null;
         }

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
index 75d1486..56c6c7d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
@@ -91,7 +91,7 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
   @JsonTypeName("json")
   public static class JSONFormatConfig implements FormatPluginConfig {
 
-    public List<String> extensions;
+    public List<String> extensions = ImmutableList.of("json");
     private static final List<String> DEFAULT_EXTS = ImmutableList.of("json");
 
     @JsonInclude(JsonInclude.Include.NON_DEFAULT)
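
The JSONFormatConfig change replaces a null default for extensions with the documented default list. One plausible reading (an assumption, the commit message does not say) is that this keeps a config constructed programmatically for table options consistent with one deserialized from the storage configuration, and that it fits the @JsonInclude(JsonInclude.Include.NON_DEFAULT) setting visible in this hunk. The snippet below only demonstrates the class-level form of that Jackson setting on a made-up bean; it is not the Drill class.

    import java.util.Arrays;
    import java.util.List;

    import com.fasterxml.jackson.annotation.JsonInclude;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class NonDefaultInclusionDemo {

      @JsonInclude(JsonInclude.Include.NON_DEFAULT)
      public static class ExampleConfig {
        public List<String> extensions = Arrays.asList("json"); // the default value
      }

      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ExampleConfig config = new ExampleConfig();
        System.out.println(mapper.writeValueAsString(config)); // {} : still equal to the default
        config.extensions = Arrays.asList("json", "js");
        System.out.println(mapper.writeValueAsString(config)); // {"extensions":["json","js"]}
      }
    }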

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/sequencefile/SequenceFileFormatConfig.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/sequencefile/SequenceFileFormatConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/sequencefile/SequenceFileFormatConfig.java
index 75e30c5..bdd6040 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/sequencefile/SequenceFileFormatConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/sequencefile/SequenceFileFormatConfig.java
@@ -19,6 +19,8 @@ package org.apache.drill.exec.store.easy.sequencefile;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonTypeName;
+import com.google.common.collect.ImmutableList;
+
 import org.apache.drill.common.logical.FormatPluginConfig;
 
 import java.util.List;
@@ -26,7 +28,7 @@ import java.util.List;
 @JsonTypeName("sequencefile") @JsonInclude(JsonInclude.Include.NON_DEFAULT)
 public class SequenceFileFormatConfig implements FormatPluginConfig {
 
-  public List<String> extensions;
+  public List<String> extensions = ImmutableList.of();
 
   @Override
   public int hashCode() {

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
index f7551ae..01543a1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
@@ -59,6 +59,7 @@ import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonInclude.Include;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Maps;
 
 public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextFormatConfig> {
@@ -133,7 +134,7 @@ public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextForm
   @JsonTypeName("text") @JsonInclude(Include.NON_DEFAULT)
   public static class TextFormatConfig implements FormatPluginConfig {
 
-    public List<String> extensions;
+    public List<String> extensions = ImmutableList.of();
     public String lineDelimiter = "\n";
     public char fieldDelimiter = '\n';
     public char quote = '"';

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java b/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
index 8737c29..42345c0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
 
+import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintWriter;
@@ -365,15 +366,17 @@ public class BaseTestQuery extends ExecTest {
    * @param expectedErrorMsg Expected error message.
    */
   protected static void errorMsgTestHelper(final String testSqlQuery, final String expectedErrorMsg) throws Exception {
-    UserException expException = null;
     try {
       test(testSqlQuery);
-    } catch (final UserException ex) {
-      expException = ex;
+      fail("Expected a UserException when running " + testSqlQuery);
+    } catch (final UserException actualException) {
+      try {
+        assertThat("message of UserException when running " + testSqlQuery, actualException.getMessage(), containsString(expectedErrorMsg));
+      } catch (AssertionError e) {
+        e.addSuppressed(actualException);
+        throw e;
+      }
     }
-
-    assertNotNull("Expected a UserException", expException);
-    assertThat(expException.getMessage(), containsString(expectedErrorMsg));
   }
 
   /**
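
The reworked errorMsgTestHelper fails immediately when no exception is thrown and, when the message check itself fails, attaches the caught UserException as a suppressed throwable so both stack traces show up in the test report. A generic, self-contained sketch of that pattern (the class and helper names here are made up; this is not Drill code):

    import static org.junit.Assert.assertTrue;
    import static org.junit.Assert.fail;

    public class ThrowsMessageAssert {
      public static void assertThrowsWithMessage(Runnable action, String expectedFragment) {
        try {
          action.run();
          fail("Expected an exception to be thrown"); // throws AssertionError, so it is not caught below
        } catch (RuntimeException actual) {
          try {
            assertTrue("message should contain: " + expectedFragment,
                actual.getMessage() != null && actual.getMessage().contains(expectedFragment));
          } catch (AssertionError e) {
            e.addSuppressed(actual); // keep the original failure visible alongside the assertion failure
            throw e;
          }
        }
      }
    }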

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/test/java/org/apache/drill/DrillTestWrapper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/DrillTestWrapper.java b/exec/java-exec/src/test/java/org/apache/drill/DrillTestWrapper.java
index 6a07597..6f78f8c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/DrillTestWrapper.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/DrillTestWrapper.java
@@ -395,6 +395,8 @@ public class DrillTestWrapper {
     }
 
     compareMergedVectors(expectedSuperVectors, actualSuperVectors);
+    } catch (Exception e) {
+      throw new Exception(e.getMessage() + "\nFor query: " + query , e);
     } finally {
       cleanupBatches(expected, actual);
     }
@@ -624,7 +626,7 @@ public class DrillTestWrapper {
         missingCols += colName + ", ";
       }
     }
-    return "Expected column(s) " + missingCols + " not found in result set.";
+    return "Expected column(s) " + missingCols + " not found in result set: " + actual + ".";
   }
 
   private String printRecord(Map<String, Object> record) {

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java b/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java
index 9d9c1a7..f6c8859 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java
@@ -19,6 +19,14 @@ package org.apache.drill;
 
 import static org.apache.drill.TestBuilder.listOf;
 import static org.apache.drill.TestBuilder.mapOf;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+
+import java.math.BigDecimal;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.Types;
@@ -26,13 +34,6 @@ import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.hamcrest.CoreMatchers;
 import org.junit.Test;
 
-import java.math.BigDecimal;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-
 // TODO - update framework to remove any dependency on the Drill engine for reading baseline result sets
 // currently using it with the assumption that the csv and json readers are well tested, and handling diverse
 // types in the test framework would require doing some redundant work to enable casting outside of Drill or
@@ -221,7 +222,7 @@ public class TestFrameworkTest extends BaseTestQuery{
           .baselineColumns("employee_id", "first_name", "last_name", "address")
           .build().run();
     } catch (Exception ex) {
-      assertEquals("Expected column(s) `address`,  not found in result set.", ex.getMessage());
+      assertTrue(ex.getMessage(), ex.getMessage().startsWith("Expected column(s) `address`,  not found in result set"));
       // this indicates successful completion of the test
       return;
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/test/java/org/apache/drill/TestSelectWithOption.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestSelectWithOption.java b/exec/java-exec/src/test/java/org/apache/drill/TestSelectWithOption.java
new file mode 100644
index 0000000..c74480b
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestSelectWithOption.java
@@ -0,0 +1,225 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill;
+
+import static java.lang.String.format;
+import static org.apache.drill.TestBuilder.listOf;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+
+import org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory;
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class TestSelectWithOption extends BaseTestQuery {
+  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WorkspaceSchemaFactory.class);
+
+  private File genCSVFile(String name, String... rows) throws IOException {
+    File file = new File(format("target/%s_%s.csv", this.getClass().getName(), name));
+    try (FileWriter fw = new FileWriter(file)) {
+      for (int i = 0; i < rows.length; i++) {
+        fw.append(rows[i] + "\n");
+      }
+    }
+    return file;
+  }
+
+  private String genCSVTable(String name, String... rows) throws IOException {
+    File f = genCSVFile(name, rows);
+    return format("dfs.`${WORKING_PATH}/%s`", f.getPath());
+  }
+
+  private void testWithResult(String query, Object... expectedResult) throws Exception {
+    TestBuilder builder = testBuilder()
+        .sqlQuery(query)
+        .ordered()
+        .baselineColumns("columns");
+    for (Object o : expectedResult) {
+      builder = builder.baselineValues(o);
+    }
+    builder.build().run();
+  }
+
+  @Test
+  public void testTextFieldDelimiter() throws Exception {
+    String tableName = genCSVTable("testTextFieldDelimiter",
+        "\"b\"|\"0\"",
+        "\"b\"|\"1\"",
+        "\"b\"|\"2\"");
+
+    String queryTemplate =
+        "select columns from table(%s (type => 'TeXT', fieldDelimiter => '%s'))";
+    testWithResult(format(queryTemplate, tableName, ","),
+        listOf("b\"|\"0"),
+        listOf("b\"|\"1"),
+        listOf("b\"|\"2")
+      );
+    testWithResult(format(queryTemplate, tableName, "|"),
+        listOf("b", "0"),
+        listOf("b", "1"),
+        listOf("b", "2")
+      );
+  }
+
+  @Test @Ignore // It does not look like lineDelimiter is working
+  public void testTextLineDelimiter() throws Exception {
+    String tableName = genCSVTable("testTextLineDelimiter",
+        "\"b\"|\"0\"",
+        "\"b\"|\"1\"",
+        "\"b\"|\"2\"");
+
+    testWithResult(format("select columns from table(%s(type => 'TeXT', lineDelimiter => '|'))", tableName),
+        listOf("\"b\""),
+        listOf("\"0\"", "\"b\""),
+        listOf("\"1\"", "\"b\""),
+        listOf("\"2\"")
+      );
+  }
+
+  @Test
+  public void testTextQuote() throws Exception {
+    String tableName = genCSVTable("testTextQuote",
+        "\"b\"|\"0\"",
+        "\"b\"|\"1\"",
+        "\"b\"|\"2\"");
+
+    testWithResult(format("select columns from table(%s(type => 'TeXT', fieldDelimiter => '|', quote => '@'))", tableName),
+        listOf("\"b\"", "\"0\""),
+        listOf("\"b\"", "\"1\""),
+        listOf("\"b\"", "\"2\"")
+        );
+
+    String quoteTableName = genCSVTable("testTextQuote2",
+        "@b@|@0@",
+        "@b$@c@|@1@");
+    // It seems that a parameter cannot be called "escape" unless it is backquoted
+    testWithResult(format("select columns from table(%s(`escape` => '$', type => 'TeXT', fieldDelimiter => '|', quote => '@'))", quoteTableName),
+        listOf("b", "0"),
+        listOf("b$@c", "1") // shouldn't $ be removed here?
+        );
+  }
+
+  @Test
+  public void testTextComment() throws Exception {
+      String commentTableName = genCSVTable("testTextComment",
+          "b|0",
+          "@ this is a comment",
+          "b|1");
+      testWithResult(format("select columns from table(%s(type => 'TeXT', fieldDelimiter => '|', comment => '@'))", commentTableName),
+          listOf("b", "0"),
+          listOf("b", "1")
+          );
+  }
+
+  @Test
+  public void testTextHeader() throws Exception {
+    String headerTableName = genCSVTable("testTextHeader",
+        "b|a",
+        "b|0",
+        "b|1");
+    testWithResult(format("select columns from table(%s(type => 'TeXT', fieldDelimiter => '|', skipFirstLine => true))", headerTableName),
+        listOf("b", "0"),
+        listOf("b", "1")
+        );
+
+    testBuilder()
+        .sqlQuery(format("select a, b from table(%s(type => 'TeXT', fieldDelimiter => '|', extractHeader => true))", headerTableName))
+        .ordered()
+        .baselineColumns("b", "a")
+        .baselineValues("b", "0")
+        .baselineValues("b", "1")
+        .build().run();
+  }
+
+  @Test
+  public void testVariationsCSV() throws Exception {
+    String csvTableName = genCSVTable("testVariationsCSV",
+        "a,b",
+        "c|d");
+    // Using the defaults in TextFormatConfig (the field delimiter is neither "," nor "|")
+    String[] csvQueries = {
+//        format("select columns from %s ('TeXT')", csvTableName),
+//        format("select columns from %s('TeXT')", csvTableName),
+        format("select columns from table(%s ('TeXT'))", csvTableName),
+        format("select columns from table(%s (type => 'TeXT'))", csvTableName),
+//        format("select columns from %s (type => 'TeXT')", csvTableName)
+    };
+    for (String csvQuery : csvQueries) {
+      testWithResult(csvQuery,
+          listOf("a,b"),
+          listOf("c|d"));
+    }
+    // the Drill config file binds the .csv extension to ","-delimited text
+    testWithResult(format("select columns from %s", csvTableName),
+          listOf("a", "b"),
+          listOf("c|d"));
+    // setting the delimiter
+    testWithResult(format("select columns from table(%s (type => 'TeXT', fieldDelimiter => ','))", csvTableName),
+        listOf("a", "b"),
+        listOf("c|d"));
+    testWithResult(format("select columns from table(%s (type => 'TeXT', fieldDelimiter => '|'))", csvTableName),
+        listOf("a,b"),
+        listOf("c", "d"));
+  }
+
+  @Test
+  public void testVariationsJSON() throws Exception {
+    String jsonTableName = genCSVTable("testVariationsJSON",
+        "{\"columns\": [\"f\",\"g\"]}");
+    // the extension is actually csv
+    testWithResult(format("select columns from %s", jsonTableName),
+        listOf("{\"columns\": [\"f\"", "g\"]}\n")
+        );
+    String[] jsonQueries = {
+        format("select columns from table(%s ('JSON'))", jsonTableName),
+        format("select columns from table(%s(type => 'JSON'))", jsonTableName),
+//        format("select columns from %s ('JSON')", jsonTableName),
+//        format("select columns from %s (type => 'JSON')", jsonTableName),
+//        format("select columns from %s(type => 'JSON')", jsonTableName),
+        // we can use named format plugin configurations too!
+        format("select columns from table(%s(type => 'Named', name => 'json'))", jsonTableName),
+    };
+    for (String jsonQuery : jsonQueries) {
+      testWithResult(jsonQuery, listOf("f","g"));
+    }
+  }
+
+  @Test
+  public void testUse() throws Exception {
+    File f = genCSVFile("testUse",
+        "{\"columns\": [\"f\",\"g\"]}");
+    String jsonTableName = format("`${WORKING_PATH}/%s`", f.getPath());
+    // the extension is actually csv
+    test("use dfs");
+    try {
+      String[] jsonQueries = {
+          format("select columns from table(%s ('JSON'))", jsonTableName),
+          format("select columns from table(%s(type => 'JSON'))", jsonTableName),
+      };
+      for (String jsonQuery : jsonQueries) {
+        testWithResult(jsonQuery, listOf("f","g"));
+      }
+
+      testWithResult(format("select length(columns[0]) as columns from table(%s ('JSON'))", jsonTableName), 1L);
+    } finally {
+      test("use sys");
+    }
+  }
+}
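
TestSelectWithOption above exercises the user-facing syntax added by this commit: a table function whose named arguments override fields of the format plugin config. The query shapes it covers can be reproduced with a throwaway snippet such as the following (the path is a placeholder, not one used by the tests):

    public class SelectWithOptionExamples {
      public static void main(String[] args) {
        String table = "dfs.`/tmp/example.csv`"; // placeholder path
        // override individual text-format options
        System.out.println(String.format(
            "select columns from table(%s(type => 'TeXT', fieldDelimiter => '|', quote => '@'))", table));
        // force a different reader than the file extension suggests
        System.out.println(String.format(
            "select columns from table(%s(type => 'JSON'))", table));
        // reference a named format plugin configuration
        System.out.println(String.format(
            "select columns from table(%s(type => 'Named', name => 'json'))", table));
      }
    }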

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
new file mode 100644
index 0000000..cdeafae
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.dfs;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.util.Collection;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.scanner.RunTimeScan;
+import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.apache.drill.exec.store.easy.text.TextFormatPlugin.TextFormatConfig;
+import org.junit.Test;
+
+import com.fasterxml.jackson.annotation.JsonTypeName;
+
+
+public class TestFormatPluginOptionExtractor {
+
+  @Test
+  public void test() {
+    DrillConfig config = DrillConfig.create();
+    ScanResult scanResult = RunTimeScan.fromPrescan(config);
+    FormatPluginOptionExtractor e = new FormatPluginOptionExtractor(scanResult);
+    Collection<FormatPluginOptionsDescriptor> options = e.getOptions();
+    for (FormatPluginOptionsDescriptor d : options) {
+      assertEquals(d.pluginConfigClass.getAnnotation(JsonTypeName.class).value(), d.typeName);
+      switch (d.typeName) {
+        case "text":
+          assertEquals(TextFormatConfig.class, d.pluginConfigClass);
+          assertEquals(
+              "(type: String, lineDelimiter: String, fieldDelimiter: String, quote: String, escape: String, comment: String, skipFirstLine: boolean, extractHeader: boolean)",
+              d.presentParams()
+          );
+          break;
+        case "named":
+          assertEquals(NamedFormatPluginConfig.class, d.pluginConfigClass);
+          assertEquals("(type: String, name: String)", d.presentParams());
+          break;
+        case "json":
+        case "sequencefile":
+        case "parquet":
+        case "avro":
+          assertEquals(d.typeName, "(type: String)", d.presentParams());
+          break;
+        default:
+          fail("add validation for format plugin type " + d.typeName);
+      }
+    }
+  }
+}
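
The presentParams() strings asserted above read like signatures assembled from the option names and types of each FormatPluginConfig. A rough, purely illustrative sketch of how such a string could be derived from a config class's public fields follows; note that the real descriptor also skips fields that are not per-query options (extensions does not appear in the text signature) and presents char options such as fieldDelimiter as String:

    import java.lang.reflect.Field;

    public class ParamPresenter {
      public static String presentParams(Class<?> configClass) {
        StringBuilder params = new StringBuilder("(type: String"); // the plugin type name always comes first
        for (Field field : configClass.getFields()) {
          params.append(", ").append(field.getName()).append(": ").append(field.getType().getSimpleName());
        }
        return params.append(")").toString();
      }
    }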

http://git-wip-us.apache.org/repos/asf/drill/blob/f7786cc7/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index e76b547..07ba4c9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1258,7 +1258,7 @@
           <dependency>
             <groupId>org.apache.calcite</groupId>
             <artifactId>calcite-core</artifactId>
-            <version>1.4.0-drill-r8</version>
+            <version>1.4.0-drill-r9</version>
             <exclusions>
               <exclusion>
                 <groupId>org.jgrapht</groupId>

