drill-commits mailing list archives

From j..@apache.org
Subject [05/27] drill git commit: DRILL-4730: Update JDBC DatabaseMetaData implementation to use new Metadata APIs
Date Thu, 02 Mar 2017 20:59:32 GMT
DRILL-4730: Update JDBC DatabaseMetaData implementation to use new Metadata APIs

Update the JDBC driver to use the new Metadata APIs instead of executing SQL queries against INFORMATION_SCHEMA.

close #613
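
For reference, a minimal usage sketch (not part of the commit) of the switch this change introduces: setting the new "server.metadata.disabled" connection property forces DrillMetaImpl back onto the legacy INFORMATION_SCHEMA queries; otherwise the driver calls the server Metadata APIs when the server advertises support for the corresponding ServerMethod (GET_TABLES, GET_COLUMNS, GET_SCHEMAS, GET_CATALOGS). The JDBC URL and host below are illustrative only; the property name comes from the diff.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.util.Properties;

public class MetadataFallbackExample {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    // Hypothetical client setting: disable the server-side Metadata APIs so the
    // driver falls back to the legacy SQL queries (property name per this commit).
    props.setProperty("server.metadata.disabled", "true");

    // Illustrative connection URL; adjust to your Drillbit/ZooKeeper setup.
    try (Connection conn =
             DriverManager.getConnection("jdbc:drill:drillbit=localhost", props)) {
      DatabaseMetaData md = conn.getMetaData();
      // Standard JDBC metadata call; routed through DrillMetaImpl.getTables().
      try (ResultSet tables = md.getTables(null, null, "%", null)) {
        while (tables.next()) {
          System.out.println(tables.getString("TABLE_SCHEM") + "."
              + tables.getString("TABLE_NAME"));
        }
      }
    }
  }
}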


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/17f888d9
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/17f888d9
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/17f888d9

Branch: refs/heads/master
Commit: 17f888d9058be2be8953cb1ea5b37297b7d2fef3
Parents: 16aa081
Author: Laurent Goujon <laurent@dremio.com>
Authored: Fri Nov 4 13:32:44 2016 -0700
Committer: Jinfeng Ni <jni@apache.org>
Committed: Wed Mar 1 23:15:31 2017 -0800

----------------------------------------------------------------------
 .../org/apache/drill/exec/ops/QueryContext.java |  10 +-
 .../drill/exec/ops/ViewExpansionContext.java    |  22 +-
 .../apache/drill/exec/store/SchemaConfig.java   |  10 +-
 .../drill/exec/store/SchemaTreeProvider.java    |  14 +-
 .../drill/exec/store/ischema/Records.java       |  12 +
 .../exec/work/metadata/MetadataProvider.java    |  39 +-
 .../drill/jdbc/DrillConnectionConfig.java       |   6 +-
 .../drill/jdbc/impl/DrillConnectionImpl.java    |   8 +
 .../jdbc/impl/DrillDatabaseMetaDataImpl.java    |  31 +-
 .../apache/drill/jdbc/impl/DrillMetaImpl.java   | 663 ++++++++++++++++++-
 .../drill/jdbc/impl/DrillResultSetImpl.java     |  24 +-
 .../apache/drill/jdbc/impl/WrappedAccessor.java | 448 +++++++++++++
 .../jdbc/DatabaseMetaDataGetColumnsTest.java    |  30 +-
 .../apache/drill/jdbc/DatabaseMetaDataTest.java |  28 +-
 .../LegacyDatabaseMetaDataGetColumnsTest.java   |  73 ++
 .../drill/jdbc/LegacyDatabaseMetaDataTest.java  |  39 ++
 .../drill/jdbc/LegacyPreparedStatementTest.java |   4 +-
 .../drill/jdbc/test/TestJdbcMetadata.java       |   7 +
 .../drill/jdbc/test/TestLegacyJdbcMetadata.java |  36 +
 19 files changed, 1424 insertions(+), 80 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
index 4ee8a9d..264af29 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
@@ -17,10 +17,6 @@
  */
 package org.apache.drill.exec.ops;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Maps;
-import io.netty.buffer.DrillBuf;
-
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -55,7 +51,11 @@ import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.testing.ExecutionControls;
 import org.apache.drill.exec.util.Utilities;
 
+import com.google.common.base.Function;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+import io.netty.buffer.DrillBuf;
 
 // TODO - consider re-name to PlanningContext, as the query execution context actually appears
 // in fragment contexts
@@ -151,6 +151,7 @@ public class QueryContext implements AutoCloseable, OptimizerRulesContext, Schem
    * @param userName User who owns the schema tree.
    * @return Root of the schema tree.
    */
+  @Override
   public SchemaPlus getRootSchema(final String userName) {
     return schemaTreeProvider.createRootSchema(userName, this);
   }
@@ -168,6 +169,7 @@ public class QueryContext implements AutoCloseable, OptimizerRulesContext, Schem
    * Get the user name of the user who issued the query that is managed by this QueryContext.
    * @return
    */
+  @Override
   public String getQueryUserName() {
     return session.getCredentials().getUserName();
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
index e5d565c..57c1a71 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
@@ -22,7 +22,10 @@ import static org.apache.drill.exec.ExecConstants.IMPERSONATION_MAX_CHAINED_USER
 import org.apache.calcite.plan.RelOptTable;
 import org.apache.calcite.plan.RelOptTable.ToRelContext;
 import org.apache.calcite.schema.SchemaPlus;
+import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.store.SchemaConfig.SchemaConfigInfoProvider;
 
 import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.google.common.base.Preconditions;
@@ -70,20 +73,25 @@ import com.google.common.base.Preconditions;
 public class ViewExpansionContext {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ViewExpansionContext.class);
 
-  private final QueryContext queryContext;
+  private final SchemaConfigInfoProvider schemaConfigInfoProvider;
   private final int maxChainedUserHops;
   private final String queryUser;
   private final ObjectIntHashMap<String> userTokens = new ObjectIntHashMap<>();
+  private final boolean impersonationEnabled;
 
   public ViewExpansionContext(QueryContext queryContext) {
-    this.queryContext = queryContext;
-    this.maxChainedUserHops =
-        queryContext.getConfig().getInt(IMPERSONATION_MAX_CHAINED_USER_HOPS);
-    this.queryUser = queryContext.getQueryUserName();
+    this(queryContext.getConfig(), queryContext);
+  }
+
+  public ViewExpansionContext(DrillConfig config, SchemaConfigInfoProvider schemaConfigInfoProvider) {
+    this.schemaConfigInfoProvider = schemaConfigInfoProvider;
+    this.maxChainedUserHops = config.getInt(IMPERSONATION_MAX_CHAINED_USER_HOPS);
+    this.queryUser = schemaConfigInfoProvider.getQueryUserName();
+    this.impersonationEnabled = config.getBoolean(ExecConstants.IMPERSONATION_ENABLED);
   }
 
   public boolean isImpersonationEnabled() {
-    return queryContext.isImpersonationEnabled();
+    return impersonationEnabled;
   }
 
   /**
@@ -160,7 +168,7 @@ public class ViewExpansionContext {
      */
     public SchemaPlus getSchemaTree() {
       Preconditions.checkState(!released, "Trying to use released token.");
-      return queryContext.getRootSchema(viewOwner);
+      return schemaConfigInfoProvider.getRootSchema(viewOwner);
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
index 3e8f1c2..fa720f3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
@@ -17,13 +17,13 @@
  */
 package org.apache.drill.exec.store;
 
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-
 import org.apache.calcite.schema.SchemaPlus;
 import org.apache.drill.exec.ops.ViewExpansionContext;
 import org.apache.drill.exec.server.options.OptionValue;
 
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+
 /**
  * Contains information needed by {@link org.apache.drill.exec.store.AbstractSchema} implementations.
  */
@@ -100,6 +100,10 @@ public class SchemaConfig {
   public interface SchemaConfigInfoProvider {
     ViewExpansionContext getViewExpansionContext();
 
+    SchemaPlus getRootSchema(String userName);
+
+    String getQueryUserName();
+
     OptionValue getOption(String optionKey);
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java
index 4f426bb..5a8bfb2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java
@@ -17,6 +17,9 @@
  */
 package org.apache.drill.exec.store;
 
+import java.io.IOException;
+import java.util.List;
+
 import org.apache.calcite.jdbc.SimpleCalciteSchema;
 import org.apache.calcite.schema.SchemaPlus;
 import org.apache.drill.common.AutoCloseables;
@@ -31,9 +34,6 @@ import org.apache.drill.exec.util.ImpersonationUtil;
 
 import com.google.common.collect.Lists;
 
-import java.io.IOException;
-import java.util.List;
-
 /**
  * Class which creates new schema trees. It keeps track of newly created schema trees and closes them safely as
  * part of {@link #close()}.
@@ -69,6 +69,14 @@ public class SchemaTreeProvider implements AutoCloseable {
       public OptionValue getOption(String optionKey) {
         return options.getOption(optionKey);
       }
+
+      @Override public SchemaPlus getRootSchema(String userName) {
+        return createRootSchema(userName, this);
+      }
+
+      @Override public String getQueryUserName() {
+        return ImpersonationUtil.getProcessUserName();
+      }
     };
 
     final SchemaConfig schemaConfig = SchemaConfig.newBuilder(

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/Records.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/Records.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/Records.java
index 2ff9bc6..49d1423 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/Records.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/Records.java
@@ -182,6 +182,18 @@ public class Records {
           this.INTERVAL_PRECISION = null;
           break;
 
+        case BOOLEAN:
+          this.COLUMN_SIZE = 1;
+          this.CHARACTER_MAXIMUM_LENGTH = null;
+          this.CHARACTER_OCTET_LENGTH = null;
+          this.NUMERIC_PRECISION = null;
+          this.NUMERIC_PRECISION_RADIX = null;
+          this.NUMERIC_SCALE = null;
+          this.DATETIME_PRECISION = null;
+          this.INTERVAL_TYPE = null;
+          this.INTERVAL_PRECISION = null;
+          break;
+
         case TINYINT:
         case SMALLINT:
         case INTEGER:

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/MetadataProvider.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/MetadataProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/MetadataProvider.java
index 8365418..6ababf4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/MetadataProvider.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/MetadataProvider.java
@@ -34,6 +34,7 @@ import java.util.List;
 import java.util.UUID;
 
 import org.apache.calcite.schema.SchemaPlus;
+import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.ErrorHelper;
 import org.apache.drill.exec.ops.ViewExpansionContext;
 import org.apache.drill.exec.proto.UserBitShared.DrillPBError;
@@ -148,6 +149,10 @@ public class MetadataProvider {
      * @return A {@link Response} message. Response must be returned in any case.
      */
     protected abstract Response runInternal(UserSession session, SchemaTreeProvider schemaProvider);
+
+    public DrillConfig getConfig() {
+      return dContext.getConfig();
+    }
   }
 
   /**
@@ -177,7 +182,7 @@ public class MetadataProvider {
 
       try {
         final PojoRecordReader<Catalog> records =
-            getPojoRecordReader(CATALOGS, filter, schemaProvider, session);
+            getPojoRecordReader(CATALOGS, filter, getConfig(), schemaProvider, session);
 
         List<CatalogMetadata> metadata = new ArrayList<>();
         for(Catalog c : records) {
@@ -233,7 +238,7 @@ public class MetadataProvider {
 
       try {
         final PojoRecordReader<Schema> records =
-            getPojoRecordReader(SCHEMATA, filter, schemaProvider, session);
+            getPojoRecordReader(SCHEMATA, filter, getConfig(), schemaProvider, session);
 
         List<SchemaMetadata> metadata = new ArrayList<>();
         for(Schema s : records) {
@@ -293,7 +298,7 @@ public class MetadataProvider {
 
       try {
         final PojoRecordReader<Table> records =
-            getPojoRecordReader(TABLES, filter, schemaProvider, session);
+            getPojoRecordReader(TABLES, filter, getConfig(), schemaProvider, session);
 
         List<TableMetadata> metadata = new ArrayList<>();
         for(Table t : records) {
@@ -354,7 +359,7 @@ public class MetadataProvider {
 
       try {
         final PojoRecordReader<Column> records =
-            getPojoRecordReader(COLUMNS, filter, schemaProvider, session);
+            getPojoRecordReader(COLUMNS, filter, getConfig(), schemaProvider, session);
 
         List<ColumnMetadata> metadata = new ArrayList<>();
         for(Column c : records) {
@@ -382,6 +387,10 @@ public class MetadataProvider {
             columnBuilder.setCharOctetLength(c.CHARACTER_OCTET_LENGTH);
           }
 
+          if (c.NUMERIC_SCALE != null) {
+            columnBuilder.setNumericScale(c.NUMERIC_SCALE);
+          }
+
           if (c.NUMERIC_PRECISION != null) {
             columnBuilder.setNumericPrecision(c.NUMERIC_PRECISION);
           }
@@ -531,30 +540,42 @@ public class MetadataProvider {
    * @param userSession
    * @return
    */
-  private static <S> PojoRecordReader<S> getPojoRecordReader(final InfoSchemaTableType tableType, final InfoSchemaFilter filter,
+  private static <S> PojoRecordReader<S> getPojoRecordReader(final InfoSchemaTableType tableType, final InfoSchemaFilter filter, final DrillConfig config,
       final SchemaTreeProvider provider, final UserSession userSession) {
     final SchemaPlus rootSchema =
-        provider.createRootSchema(userSession.getCredentials().getUserName(), newSchemaConfigInfoProvider(userSession));
+        provider.createRootSchema(userSession.getCredentials().getUserName(), newSchemaConfigInfoProvider(config, userSession, provider));
     return tableType.getRecordReader(rootSchema, filter, userSession.getOptions());
   }
 
   /**
    * Helper method to create a {@link SchemaConfigInfoProvider} instance for metadata purposes.
    * @param session
+   * @param schemaTreeProvider
    * @return
    */
-  private static SchemaConfigInfoProvider newSchemaConfigInfoProvider(final UserSession session) {
+  private static SchemaConfigInfoProvider newSchemaConfigInfoProvider(final DrillConfig config, final UserSession session, final SchemaTreeProvider schemaTreeProvider) {
     return new SchemaConfigInfoProvider() {
+      private final ViewExpansionContext viewExpansionContext = new ViewExpansionContext(config, this);
+
       @Override
       public ViewExpansionContext getViewExpansionContext() {
-        // Metadata APIs don't expect to expand the views.
-        throw new UnsupportedOperationException("View expansion context is not supported");
+        return viewExpansionContext;
+      }
+
+      @Override
+      public SchemaPlus getRootSchema(String userName) {
+        return schemaTreeProvider.createRootSchema(userName, this);
       }
 
       @Override
       public OptionValue getOption(String optionKey) {
         return session.getOptions().getOption(optionKey);
       }
+
+      @Override
+      public String getQueryUserName() {
+        return session.getCredentials().getUserName();
+      }
     };
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillConnectionConfig.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillConnectionConfig.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillConnectionConfig.java
index 55cb1ff..15f676c 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillConnectionConfig.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillConnectionConfig.java
@@ -67,7 +67,11 @@ public class DrillConnectionConfig extends ConnectionConfigImpl {
   }
 
   public boolean disableServerPreparedStatement() {
-    return Boolean.valueOf(props.getProperty("preparedstatement.server.disabled"));
+    return Boolean.valueOf(props.getProperty("server.preparedstatement.disabled"));
+  }
+
+  public boolean disableServerMetadata() {
+    return Boolean.valueOf(props.getProperty("server.metadata.disabled"));
   }
 
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillConnectionImpl.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillConnectionImpl.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillConnectionImpl.java
index 830f137..94d5dd8 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillConnectionImpl.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillConnectionImpl.java
@@ -25,6 +25,7 @@ import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.NClob;
 import java.sql.PreparedStatement;
+import java.sql.ResultSet;
 import java.sql.SQLClientInfoException;
 import java.sql.SQLException;
 import java.sql.SQLFeatureNotSupportedException;
@@ -43,6 +44,7 @@ import org.apache.calcite.avatica.AvaticaConnection;
 import org.apache.calcite.avatica.AvaticaFactory;
 import org.apache.calcite.avatica.AvaticaStatement;
 import org.apache.calcite.avatica.Meta.ExecuteResult;
+import org.apache.calcite.avatica.Meta.MetaResultSet;
 import org.apache.calcite.avatica.UnregisteredDriver;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
@@ -165,6 +167,12 @@ class DrillConnectionImpl extends AvaticaConnection
     }
   }
 
+
+  @Override
+  protected ResultSet createResultSet(MetaResultSet metaResultSet) throws SQLException {
+    return super.createResultSet(metaResultSet);
+  }
+
   @Override
   protected ExecuteResult prepareAndExecuteInternal(AvaticaStatement statement, String sql, long maxRowCount)
       throws SQLException {

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillDatabaseMetaDataImpl.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillDatabaseMetaDataImpl.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillDatabaseMetaDataImpl.java
index 1c350f3..3d19f82 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillDatabaseMetaDataImpl.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillDatabaseMetaDataImpl.java
@@ -27,9 +27,12 @@ import java.sql.SQLFeatureNotSupportedException;
 import org.apache.calcite.avatica.AvaticaConnection;
 import org.apache.calcite.avatica.AvaticaDatabaseMetaData;
 import org.apache.drill.common.Version;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.jdbc.AlreadyClosedSqlException;
 import org.apache.drill.jdbc.DrillDatabaseMetaData;
 
+import com.google.common.base.Throwables;
+
 
 /**
  * Drill's implementation of {@link DatabaseMetaData}.
@@ -818,20 +821,35 @@ class DrillDatabaseMetaDataImpl extends AvaticaDatabaseMetaData
                              String tableNamePattern,
                              String[] types) throws SQLException {
     throwIfClosed();
-    return super.getTables(catalog, schemaPattern,tableNamePattern, types);
+    try {
+      return super.getTables(catalog, schemaPattern,tableNamePattern, types);
+    } catch(DrillRuntimeException e) {
+      Throwables.propagateIfInstanceOf(e.getCause(), SQLException.class);
+      throw e;
+    }
   }
 
 
   @Override
   public ResultSet getSchemas() throws SQLException {
     throwIfClosed();
-    return super.getSchemas();
+    try {
+      return super.getSchemas();
+    } catch(DrillRuntimeException e) {
+      Throwables.propagateIfInstanceOf(e.getCause(), SQLException.class);
+      throw e;
+    }
   }
 
   @Override
   public ResultSet getCatalogs() throws SQLException {
     throwIfClosed();
-    return super.getCatalogs();
+    try {
+      return super.getCatalogs();
+    } catch(DrillRuntimeException e) {
+      Throwables.propagateIfInstanceOf(e.getCause(), SQLException.class);
+      throw e;
+    }
   }
 
   @Override
@@ -844,7 +862,12 @@ class DrillDatabaseMetaDataImpl extends AvaticaDatabaseMetaData
   public ResultSet getColumns(String catalog, String schema, String table,
                               String columnNamePattern) throws SQLException {
     throwIfClosed();
-    return super.getColumns(catalog, schema, table, columnNamePattern);
+    try {
+      return super.getColumns(catalog, schema, table, columnNamePattern);
+    } catch(DrillRuntimeException e) {
+      Throwables.propagateIfInstanceOf(e.getCause(), SQLException.class);
+      throw e;
+    }
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillMetaImpl.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillMetaImpl.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillMetaImpl.java
index 096b4f0..10d4225 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillMetaImpl.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillMetaImpl.java
@@ -17,17 +17,48 @@
  */
 package org.apache.drill.jdbc.impl;
 
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
 import java.sql.DatabaseMetaData;
 import java.sql.SQLException;
+import java.sql.Time;
+import java.sql.Timestamp;
 import java.sql.Types;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
+
+import javax.validation.constraints.NotNull;
 
 import org.apache.calcite.avatica.AvaticaParameter;
 import org.apache.calcite.avatica.AvaticaStatement;
+import org.apache.calcite.avatica.AvaticaUtils;
+import org.apache.calcite.avatica.ColumnMetaData;
+import org.apache.calcite.avatica.ColumnMetaData.StructType;
+import org.apache.calcite.avatica.Meta;
 import org.apache.calcite.avatica.MetaImpl;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.util.DrillStringUtils;
+import org.apache.drill.exec.client.ServerMethod;
+import org.apache.drill.exec.proto.UserBitShared.DrillPBError;
+import org.apache.drill.exec.proto.UserProtos.CatalogMetadata;
+import org.apache.drill.exec.proto.UserProtos.ColumnMetadata;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsResp;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsResp;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasResp;
+import org.apache.drill.exec.proto.UserProtos.GetTablesResp;
+import org.apache.drill.exec.proto.UserProtos.LikeFilter;
+import org.apache.drill.exec.proto.UserProtos.RequestStatus;
+import org.apache.drill.exec.proto.UserProtos.SchemaMetadata;
+import org.apache.drill.exec.proto.UserProtos.TableMetadata;
+import org.apache.drill.exec.rpc.DrillRpcFuture;
+import org.apache.drill.exec.rpc.RpcException;
+
+import com.google.common.base.Function;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+
 
 
 class DrillMetaImpl extends MetaImpl {
@@ -61,7 +92,8 @@ class DrillMetaImpl extends MetaImpl {
         sql,
         Collections.<AvaticaParameter> emptyList(),
         Collections.<String, Object>emptyMap(),
-        CursorFactory.OBJECT);
+        null // CursorFactory set to null, as SQL requests use DrillCursor
+        );
   }
 
   private MetaResultSet s(String s) {
@@ -78,19 +110,237 @@ class DrillMetaImpl extends MetaImpl {
     }
   }
 
+  /** Information about type mapping. */
+  private static class TypeInfo {
+    private static final Map<Class<?>, TypeInfo> MAPPING = ImmutableMap.<Class<?>, TypeInfo> builder()
+        .put(boolean.class, of(Types.BOOLEAN, "BOOLEAN"))
+        .put(Boolean.class, of(Types.BOOLEAN, "BOOLEAN"))
+        .put(Byte.TYPE, of(Types.TINYINT, "TINYINT"))
+        .put(Byte.class, of(Types.TINYINT, "TINYINT"))
+        .put(Short.TYPE, of(Types.SMALLINT, "SMALLINT"))
+        .put(Short.class, of(Types.SMALLINT, "SMALLINT"))
+        .put(Integer.TYPE, of(Types.INTEGER, "INTEGER"))
+        .put(Integer.class, of(Types.INTEGER, "INTEGER"))
+        .put(Long.TYPE,  of(Types.BIGINT, "BIGINT"))
+        .put(Long.class, of(Types.BIGINT, "BIGINT"))
+        .put(Float.TYPE, of(Types.FLOAT, "FLOAT"))
+        .put(Float.class,  of(Types.FLOAT, "FLOAT"))
+        .put(Double.TYPE,  of(Types.DOUBLE, "DOUBLE"))
+        .put(Double.class, of(Types.DOUBLE, "DOUBLE"))
+        .put(String.class, of(Types.VARCHAR, "CHARACTER VARYING"))
+        .put(java.sql.Date.class, of(Types.DATE, "DATE"))
+        .put(Time.class, of(Types.TIME, "TIME"))
+        .put(Timestamp.class, of(Types.TIMESTAMP, "TIMESTAMP"))
+        .build();
+
+    private final int sqlType;
+    private final String sqlTypeName;
+
+    public TypeInfo(int sqlType, String sqlTypeName) {
+      this.sqlType = sqlType;
+      this.sqlTypeName = sqlTypeName;
+    }
 
+    private static TypeInfo of(int sqlType, String sqlTypeName) {
+      return new TypeInfo(sqlType, sqlTypeName);
+    }
 
-  @Override
-  protected <E> MetaResultSet createEmptyResultSet(Class<E> clazz) {
-    return s(
-        "SELECT '' AS `Interim zero-row result set` "  // dummy row type
-        + "FROM INFORMATION_SCHEMA.CATALOGS "          // any table
-        + "LIMIT 0"                                    // zero rows
-        );
+    public static TypeInfo get(Class<?> clazz) {
+      return MAPPING.get(clazz);
+    }
   }
 
-  @Override
-  public MetaResultSet getTables(String catalog, final Pat schemaPattern, final Pat tableNamePattern,
+  /** Metadata describing a column.
+   * Copied from Avatica with several fixes.
+   */
+  public static class MetaColumn implements Named {
+    public final String tableCat;
+    public final String tableSchem;
+    public final String tableName;
+    public final String columnName;
+    public final int dataType;
+    public final String typeName;
+    public final Integer columnSize;
+    public final Integer bufferLength = null;
+    public final Integer decimalDigits;
+    public final Integer numPrecRadix;
+    public final int nullable;
+    public final String remarks = null;
+    public final String columnDef = null;
+    public final Integer sqlDataType = null;
+    public final Integer sqlDatetimeSub = null;
+    public final Integer charOctetLength;
+    public final int ordinalPosition;
+    @NotNull
+    public final String isNullable;
+    public final String scopeCatalog = null;
+    public final String scopeSchema = null;
+    public final String scopeTable = null;
+    public final Short sourceDataType = null;
+    @NotNull
+    public final String isAutoincrement = "";
+    @NotNull
+    public final String isGeneratedcolumn = "";
+
+    public MetaColumn(
+        String tableCat,
+        String tableSchem,
+        String tableName,
+        String columnName,
+        int dataType,
+        String typeName,
+        Integer columnSize,
+        Integer decimalDigits,
+        Integer numPrecRadix,
+        int nullable,
+        Integer charOctetLength,
+        int ordinalPosition,
+        String isNullable) {
+      this.tableCat = tableCat;
+      this.tableSchem = tableSchem;
+      this.tableName = tableName;
+      this.columnName = columnName;
+      this.dataType = dataType;
+      this.typeName = typeName;
+      this.columnSize = columnSize;
+      this.decimalDigits = decimalDigits;
+      this.numPrecRadix = numPrecRadix;
+      this.nullable = nullable;
+      this.charOctetLength = charOctetLength;
+      this.ordinalPosition = ordinalPosition;
+      this.isNullable = isNullable;
+    }
+
+    @Override
+    public String getName() {
+      return columnName;
+    }
+  }
+
+  private static LikeFilter newLikeFilter(final Pat pattern) {
+    if (pattern == null || pattern.s == null) {
+      return null;
+    }
+
+    return LikeFilter.newBuilder().setPattern(pattern.s).setEscape("\\").build();
+  }
+
+  /**
+   * Quote the provided string as a LIKE pattern
+   *
+   * @param v the value to quote
+   * @return a LIKE pattern matching exactly v, or {@code null} if v is {@code null}
+   */
+  private static Pat quote(String v) {
+    if (v == null) {
+      return null;
+    }
+
+    StringBuilder sb = new StringBuilder(v.length());
+    for(int index = 0; index<v.length(); index++) {
+      char c = v.charAt(index);
+      switch(c) {
+      case '%':
+      case '_':
+      case '\\':
+        sb.append('\\').append(c);
+        break;
+
+      default:
+        sb.append(c);
+      }
+    }
+
+    return Pat.of(sb.toString());
+  }
+
+  // Overriding fieldMetaData as the Calcite version creates ColumnMetaData with an invalid offset
+  protected static ColumnMetaData.StructType drillFieldMetaData(Class<?> clazz) {
+    final List<ColumnMetaData> list = new ArrayList<>();
+    for (Field field : clazz.getFields()) {
+      if (Modifier.isPublic(field.getModifiers())
+          && !Modifier.isStatic(field.getModifiers())) {
+        NotNull notNull = field.getAnnotation(NotNull.class);
+        boolean notNullable = (notNull != null || field.getType().isPrimitive());
+        list.add(
+            drillColumnMetaData(
+                AvaticaUtils.camelToUpper(field.getName()),
+                list.size(), field.getType(), notNullable));
+      }
+    }
+    return ColumnMetaData.struct(list);
+  }
+
+
+  protected static ColumnMetaData drillColumnMetaData(String name, int index,
+      Class<?> type, boolean notNullable) {
+    TypeInfo pair = TypeInfo.get(type);
+    ColumnMetaData.Rep rep =
+        ColumnMetaData.Rep.VALUE_MAP.get(type);
+    ColumnMetaData.AvaticaType scalarType =
+        ColumnMetaData.scalar(pair.sqlType, pair.sqlTypeName, rep);
+    return new ColumnMetaData(
+        index, false, true, false, false,
+        notNullable
+            ? DatabaseMetaData.columnNoNulls
+            : DatabaseMetaData.columnNullable,
+        true, -1, name, name, null,
+        0, 0, null, null, scalarType, true, false, false,
+        scalarType.columnClassName());
+  }
+
+  abstract private class MetadataAdapter<CalciteMetaType, Response, ResponseValue> {
+    private final Class<? extends CalciteMetaType> clazz;
+
+    public MetadataAdapter(Class<? extends CalciteMetaType> clazz) {
+      this.clazz = clazz;
+    }
+
+    MetaResultSet getMeta(DrillRpcFuture<Response> future) {
+      Response response;
+      try {
+        response = future.checkedGet();
+      } catch (RpcException e) {
+        throw new DrillRuntimeException(new SQLException("Failure getting metadata", e));
+      }
+
+      // Manage errors
+      if (getStatus(response) != RequestStatus.OK) {
+        DrillPBError error = getError(response);
+        throw new DrillRuntimeException(new SQLException("Failure getting metadata: " + error.getMessage()));
+      }
+
+      try {
+        List<Object> tables = Lists.transform(getResult(response), new Function<ResponseValue, Object>() {
+          @Override
+          public Object apply(ResponseValue input) {
+            return adapt(input);
+          }
+        });
+
+        Meta.Frame frame = Meta.Frame.create(0, true, tables);
+        StructType fieldMetaData = drillFieldMetaData(clazz);
+        Meta.Signature signature = Meta.Signature.create(
+            fieldMetaData.columns, "",
+            Collections.<AvaticaParameter>emptyList(), CursorFactory.record(clazz));
+
+        AvaticaStatement statement = connection.createStatement();
+        return MetaResultSet.create(connection.id, statement.getId(), true,
+            signature, frame);
+      } catch (SQLException e) {
+        // Wrap in RuntimeException because Avatica's abstract method declarations
+        // didn't allow for SQLException!
+        throw new DrillRuntimeException(new SQLException("Failure while attempting to get DatabaseMetadata.", e));
+      }
+    }
+
+    abstract protected RequestStatus getStatus(Response response);
+    abstract protected DrillPBError getError(Response response);
+    abstract protected List<ResponseValue> getResult(Response response);
+    abstract protected CalciteMetaType adapt(ResponseValue protoValue);
+  }
+
+  private MetaResultSet clientGetTables(String catalog, final Pat schemaPattern, final Pat tableNamePattern,
       final List<String> typeList) {
     StringBuilder sb = new StringBuilder();
     sb.append("select "
@@ -134,11 +384,51 @@ class DrillMetaImpl extends MetaImpl {
     return s(sb.toString());
   }
 
+  private MetaResultSet serverGetTables(String catalog, final Pat schemaPattern, final Pat tableNamePattern,
+      final List<String> typeList) {
+    // Catalog is not a pattern
+    final LikeFilter catalogNameFilter = newLikeFilter(quote(catalog));
+    final LikeFilter schemaNameFilter = newLikeFilter(schemaPattern);
+    final LikeFilter tableNameFilter = newLikeFilter(tableNamePattern);
+
+    return new MetadataAdapter<MetaImpl.MetaTable, GetTablesResp, TableMetadata>(MetaTable.class) {
+
+      @Override
+      protected RequestStatus getStatus(GetTablesResp response) {
+        return response.getStatus();
+      };
+
+      @Override
+      protected DrillPBError getError(GetTablesResp response) {
+        return response.getError();
+      };
+
+      @Override
+      protected List<TableMetadata> getResult(GetTablesResp response) {
+        return response.getTablesList();
+      }
+
+      @Override
+      protected MetaImpl.MetaTable adapt(TableMetadata protoValue) {
+        return new MetaImpl.MetaTable(protoValue.getCatalogName(), protoValue.getSchemaName(), protoValue.getTableName(), protoValue.getType());
+      };
+    }.getMeta(connection.getClient().getTables(catalogNameFilter, schemaNameFilter, tableNameFilter, typeList));
+  }
+
   /**
-   * Implements {@link DatabaseMetaData#getColumns}.
+   * Implements {@link DatabaseMetaData#getTables}.
    */
   @Override
-  public MetaResultSet getColumns(String catalog, Pat schemaPattern,
+  public MetaResultSet getTables(String catalog, final Pat schemaPattern, final Pat tableNamePattern,
+      final List<String> typeList) {
+    if (connection.getConfig().disableServerMetadata() || ! connection.getClient().getSupportedMethods().contains(ServerMethod.GET_TABLES)) {
+      return clientGetTables(catalog, schemaPattern, tableNamePattern, typeList);
+    }
+
+    return serverGetTables(catalog, schemaPattern, tableNamePattern, typeList);
+  }
+
+  private MetaResultSet clientGetColumns(String catalog, Pat schemaPattern,
                               Pat tableNamePattern, Pat columnNamePattern) {
     StringBuilder sb = new StringBuilder();
     // TODO:  Resolve the various questions noted below.
@@ -257,6 +547,9 @@ class DrillMetaImpl extends MetaImpl {
          * characters needed to display a value).
          */
         + "\n  CASE DATA_TYPE "
+        // 0. "For boolean and bit ... 1":
+        + "\n    WHEN 'BOOLEAN', 'BIT'"
+        + "\n                         THEN 1 "
 
         // 1. "For numeric data, ... the maximum precision":
         + "\n    WHEN 'TINYINT', 'SMALLINT', 'INTEGER', 'BIGINT', "
@@ -410,8 +703,304 @@ class DrillMetaImpl extends MetaImpl {
     return s(sb.toString());
   }
 
+  private MetaResultSet serverGetColumns(String catalog, Pat schemaPattern,
+                              Pat tableNamePattern, Pat columnNamePattern) {
+    final LikeFilter catalogNameFilter = newLikeFilter(quote(catalog));
+    final LikeFilter schemaNameFilter = newLikeFilter(schemaPattern);
+    final LikeFilter tableNameFilter = newLikeFilter(tableNamePattern);
+    final LikeFilter columnNameFilter = newLikeFilter(columnNamePattern);
+
+    return new MetadataAdapter<MetaColumn, GetColumnsResp, ColumnMetadata>(MetaColumn.class) {
+      @Override
+      protected RequestStatus getStatus(GetColumnsResp response) {
+        return response.getStatus();
+      }
+
+      @Override
+      protected DrillPBError getError(GetColumnsResp response) {
+        return response.getError();
+      }
+
+      @Override
+      protected List<ColumnMetadata> getResult(GetColumnsResp response) {
+        return response.getColumnsList();
+      };
+
+      private int getDataType(ColumnMetadata value) {
+        switch (value.getDataType()) {
+        case "ARRAY":
+          return Types.ARRAY;
+
+        case "BIGINT":
+          return Types.BIGINT;
+        case "BINARY":
+          return Types.BINARY;
+        case "BINARY LARGE OBJECT":
+          return Types.BLOB;
+        case "BINARY VARYING":
+          return Types.VARBINARY;
+        case "BIT":
+          return Types.BIT;
+        case "BOOLEAN":
+          return Types.BOOLEAN;
+        case "CHARACTER":
+          return Types.CHAR;
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "CHARACTER LARGE OBJECT":
+          return Types.CLOB;
+        case "CHARACTER VARYING":
+          return Types.VARCHAR;
+
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "DATALINK":
+          return Types.DATALINK;
+        case "DATE":
+          return Types.DATE;
+        case "DECIMAL":
+          return Types.DECIMAL;
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "DISTINCT":
+          return Types.DISTINCT;
+        case "DOUBLE":
+        case "DOUBLE PRECISION":
+          return Types.DOUBLE;
+
+        case "FLOAT":
+          return Types.FLOAT;
+
+        case "INTEGER":
+          return Types.INTEGER;
+        case "INTERVAL":
+          return Types.OTHER;
+
+        // Resolve: Not seen in Drill yet. Can it ever appear?:
+        case "JAVA_OBJECT":
+          return Types.JAVA_OBJECT;
+
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "LONGNVARCHAR":
+          return Types.LONGNVARCHAR;
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "LONGVARBINARY":
+          return Types.LONGVARBINARY;
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "LONGVARCHAR":
+          return Types.LONGVARCHAR;
+
+        case "MAP":
+          return Types.OTHER;
+
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "NATIONAL CHARACTER":
+          return Types.NCHAR;
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "NATIONAL CHARACTER LARGE OBJECT":
+          return Types.NCLOB;
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "NATIONAL CHARACTER VARYING":
+          return Types.NVARCHAR;
+
+        // TODO: Resolve following about NULL (and then update comment and
+        // code):
+        // It is not clear whether Types.NULL can represent a type (perhaps the
+        // type of the literal NULL when no further type information is known?)
+        // or
+        // whether 'NULL' can appear in INFORMATION_SCHEMA.COLUMNS.DATA_TYPE.
+        // For now, since it shouldn't hurt, include 'NULL'/Types.NULL in
+        // mapping.
+        case "NULL":
+          return Types.NULL;
+        // (No NUMERIC--Drill seems to map any to DECIMAL currently.)
+        case "NUMERIC":
+          return Types.NUMERIC;
+
+        // Resolve: Unexpectedly, has appeared in Drill. Should it?
+        case "OTHER":
+          return Types.OTHER;
+
+        case "REAL":
+          return Types.REAL;
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "REF":
+          return Types.REF;
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "ROWID":
+          return Types.ROWID;
+
+        case "SMALLINT":
+          return Types.SMALLINT;
+        // Resolve: Not seen in Drill yet. Can it appear?:
+        case "SQLXML":
+          return Types.SQLXML;
+        case "STRUCT":
+          return Types.STRUCT;
+
+        case "TIME":
+          return Types.TIME;
+        case "TIMESTAMP":
+          return Types.TIMESTAMP;
+        case "TINYINT":
+          return Types.TINYINT;
+
+        default:
+          return Types.OTHER;
+        }
+      }
+
+      Integer getDecimalDigits(ColumnMetadata value) {
+        switch(value.getDataType()) {
+        case "TINYINT":
+        case "SMALLINT":
+        case "INTEGER":
+        case "BIGINT":
+        case "DECIMAL":
+        case "NUMERIC":
+          return value.hasNumericScale() ? value.getNumericScale() : null;
+
+        case "REAL":
+          return DECIMAL_DIGITS_REAL;
+
+        case "FLOAT":
+          return DECIMAL_DIGITS_FLOAT;
+
+        case "DOUBLE":
+          return DECIMAL_DIGITS_DOUBLE;
+
+        case "DATE":
+        case "TIME":
+        case "TIMESTAMP":
+        case "INTERVAL":
+          return value.getDateTimePrecision();
+
+        default:
+          return null;
+        }
+      }
+
+      private Integer getNumPrecRadix(ColumnMetadata value) {
+        switch(value.getDataType()) {
+        case "TINYINT":
+        case "SMALLINT":
+        case "INTEGER":
+        case "BIGINT":
+        case "DECIMAL":
+        case "NUMERIC":
+        case "REAL":
+        case "FLOAT":
+        case "DOUBLE":
+          return value.getNumericPrecisionRadix();
+
+        case "INTERVAL":
+          return RADIX_INTERVAL;
+
+        case "DATE":
+        case "TIME":
+        case "TIMESTAMP":
+          return RADIX_DATETIME;
+
+        default:
+          return null;
+        }
+      }
+
+      private int getNullable(ColumnMetadata value) {
+        if (!value.hasIsNullable()) {
+          return DatabaseMetaData.columnNullableUnknown;
+        }
+        return  value.getIsNullable() ? DatabaseMetaData.columnNullable : DatabaseMetaData.columnNoNulls;
+      }
+
+      private String getIsNullable(ColumnMetadata value) {
+        if (!value.hasIsNullable()) {
+          return "";
+        }
+        return  value.getIsNullable() ? "YES" : "NO";
+      }
+
+      private Integer getCharOctetLength(ColumnMetadata value) {
+        if (!value.hasCharMaxLength()) {
+          return null;
+        }
+
+        switch(value.getDataType()) {
+        case "CHARACTER":
+        case "CHARACTER LARGE OBJECT":
+        case "CHARACTER VARYING":
+        case "LONGVARCHAR":
+        case "LONGNVARCHAR":
+        case "NATIONAL CHARACTER":
+        case "NATIONAL CHARACTER LARGE OBJECT":
+        case "NATIONAL CHARACTER VARYING":
+          return value.getCharOctetLength();
+
+        default:
+          return null;
+        }
+      }
+
+      @Override
+      protected MetaColumn adapt(ColumnMetadata value) {
+        return new MetaColumn(
+            value.getCatalogName(),
+            value.getSchemaName(),
+            value.getTableName(),
+            value.getColumnName(),
+            getDataType(value), // It might require the full SQL type
+            value.getDataType(),
+            value.getColumnSize(),
+            getDecimalDigits(value),
+            getNumPrecRadix(value),
+            getNullable(value),
+            getCharOctetLength(value),
+            value.getOrdinalPosition(),
+            getIsNullable(value));
+      }
+    }.getMeta(connection.getClient().getColumns(catalogNameFilter, schemaNameFilter, tableNameFilter, columnNameFilter));
+  }
+
+  /**
+   * Implements {@link DatabaseMetaData#getColumns}.
+   */
   @Override
-  public MetaResultSet getSchemas(String catalog, Pat schemaPattern) {
+  public MetaResultSet getColumns(String catalog, Pat schemaPattern,
+                              Pat tableNamePattern, Pat columnNamePattern) {
+    if (connection.getConfig().disableServerMetadata() || ! connection.getClient().getSupportedMethods().contains(ServerMethod.GET_COLUMNS)) {
+      return clientGetColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern);
+    }
+
+    return serverGetColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern);
+  }
+
+
+  private MetaResultSet serverGetSchemas(String catalog, Pat schemaPattern) {
+    final LikeFilter catalogNameFilter = newLikeFilter(quote(catalog));
+    final LikeFilter schemaNameFilter = newLikeFilter(schemaPattern);
+
+    return new MetadataAdapter<MetaImpl.MetaSchema, GetSchemasResp, SchemaMetadata>(MetaImpl.MetaSchema.class) {
+      @Override
+      protected RequestStatus getStatus(GetSchemasResp response) {
+        return response.getStatus();
+      }
+
+      @Override
+      protected List<SchemaMetadata> getResult(GetSchemasResp response) {
+        return response.getSchemasList();
+      }
+
+      @Override
+      protected DrillPBError getError(GetSchemasResp response) {
+        return response.getError();
+      }
+
+      @Override
+      protected MetaSchema adapt(SchemaMetadata value) {
+        return new MetaImpl.MetaSchema(value.getCatalogName(), value.getSchemaName());
+      }
+    }.getMeta(connection.getClient().getSchemas(catalogNameFilter, schemaNameFilter));
+  }
+
+
+  private MetaResultSet clientGetSchemas(String catalog, Pat schemaPattern) {
     StringBuilder sb = new StringBuilder();
     sb.append("select "
         + "SCHEMA_NAME as TABLE_SCHEM, "
@@ -429,8 +1018,43 @@ class DrillMetaImpl extends MetaImpl {
     return s(sb.toString());
   }
 
+  /**
+   * Implements {@link DatabaseMetaData#getSchemas}.
+   */
   @Override
-  public MetaResultSet getCatalogs() {
+  public MetaResultSet getSchemas(String catalog, Pat schemaPattern) {
+    if (connection.getConfig().disableServerMetadata() || ! connection.getClient().getSupportedMethods().contains(ServerMethod.GET_SCHEMAS)) {
+      return clientGetSchemas(catalog, schemaPattern);
+    }
+
+    return serverGetSchemas(catalog, schemaPattern);
+  }
+
+  private MetaResultSet serverGetCatalogs() {
+    return new MetadataAdapter<MetaImpl.MetaCatalog, GetCatalogsResp, CatalogMetadata>(MetaImpl.MetaCatalog.class) {
+      @Override
+      protected RequestStatus getStatus(GetCatalogsResp response) {
+        return response.getStatus();
+      }
+
+      @Override
+      protected List<CatalogMetadata> getResult(GetCatalogsResp response) {
+        return response.getCatalogsList();
+      }
+
+      @Override
+      protected DrillPBError getError(GetCatalogsResp response) {
+        return response.getError();
+      }
+
+      @Override
+      protected MetaImpl.MetaCatalog adapt(CatalogMetadata protoValue) {
+        return new MetaImpl.MetaCatalog(protoValue.getCatalogName());
+      }
+    }.getMeta(connection.getClient().getCatalogs(null));
+  }
+
+  private MetaResultSet clientGetCatalogs() {
     StringBuilder sb = new StringBuilder();
     sb.append("select "
         + "CATALOG_NAME as TABLE_CAT "
@@ -441,6 +1065,17 @@ class DrillMetaImpl extends MetaImpl {
     return s(sb.toString());
   }
 
+  /**
+   * Implements {@link DatabaseMetaData#getCatalogs}.
+   */
+  @Override
+  public MetaResultSet getCatalogs() {
+    if (connection.getConfig().disableServerMetadata() || ! connection.getClient().getSupportedMethods().contains(ServerMethod.GET_CATALOGS)) {
+      return clientGetCatalogs();
+    }
+
+    return serverGetCatalogs();
+  }
 
   interface Named {
     String getName();

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillResultSetImpl.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillResultSetImpl.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillResultSetImpl.java
index e406348..c8b4e3d 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillResultSetImpl.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillResultSetImpl.java
@@ -37,7 +37,9 @@ import java.sql.SQLXML;
 import java.sql.Time;
 import java.sql.Timestamp;
 import java.sql.Types;
+import java.util.ArrayList;
 import java.util.Calendar;
+import java.util.List;
 import java.util.Map;
 import java.util.TimeZone;
 
@@ -47,6 +49,7 @@ import org.apache.calcite.avatica.AvaticaStatement;
 import org.apache.calcite.avatica.ColumnMetaData;
 import org.apache.calcite.avatica.Meta;
 import org.apache.calcite.avatica.util.Cursor;
+import org.apache.calcite.avatica.util.Cursor.Accessor;
 import org.apache.drill.jdbc.AlreadyClosedSqlException;
 import org.apache.drill.jdbc.DrillResultSet;
 import org.apache.drill.jdbc.ExecutionCanceledSqlException;
@@ -1874,12 +1877,23 @@ class DrillResultSetImpl extends AvaticaResultSet implements DrillResultSet {
   protected DrillResultSetImpl execute() throws SQLException{
     connection.getDriver().handler.onStatementExecute(statement, null);
 
-    DrillCursor drillCursor = new DrillCursor(connection, statement, signature);
-    super.execute2(drillCursor, this.signature.columns);
+    if (signature.cursorFactory != null) {
+      // Avatica accessors have to be wrapped to match Drill's behaviour regarding thrown exceptions
+      super.execute();
+      List<Accessor> wrappedAccessorList = new ArrayList<>(accessorList.size());
+      for(Accessor accessor: accessorList) {
+        wrappedAccessorList.add(new WrappedAccessor(accessor));
+      }
+      this.accessorList = wrappedAccessorList;
+    }
+    else {
+      DrillCursor drillCursor = new DrillCursor(connection, statement, signature);
+      super.execute2(drillCursor, this.signature.columns);
 
-    // Read first (schema-only) batch to initialize result-set metadata from
-    // (initial) schema before Statement.execute...(...) returns result set:
-    drillCursor.loadInitialSchema();
+      // Read first (schema-only) batch to initialize result-set metadata from
+      // (initial) schema before Statement.execute...(...) returns result set:
+      drillCursor.loadInitialSchema();
+    }
 
     return this;
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/WrappedAccessor.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/WrappedAccessor.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/WrappedAccessor.java
new file mode 100644
index 0000000..4cdc2ae
--- /dev/null
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/WrappedAccessor.java
@@ -0,0 +1,448 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+
+package org.apache.drill.jdbc.impl;
+
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
+import java.net.URL;
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.Clob;
+import java.sql.Date;
+import java.sql.NClob;
+import java.sql.Ref;
+import java.sql.SQLException;
+import java.sql.SQLXML;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Calendar;
+import java.util.Map;
+
+import org.apache.calcite.avatica.util.Cursor.Accessor;
+
+/**
+ * Wraps Avatica {@code Accessor} instances to catch conversion exceptions,
+ * which are thrown as {@code RuntimeException}, and throw {@code SQLException}
+ * instead.
+ *
+ */
+class WrappedAccessor implements Accessor {
+  private final Accessor delegate;
+
+  public WrappedAccessor(Accessor delegate) {
+    this.delegate = delegate;
+  }
+
+  @Override
+  public boolean wasNull() throws SQLException {
+    return delegate.wasNull();
+  }
+
+  @Override
+  public String getString() throws SQLException {
+    try {
+      return delegate.getString();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public boolean getBoolean() throws SQLException {
+    try {
+      return delegate.getBoolean();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public byte getByte() throws SQLException {
+    try {
+      return delegate.getByte();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public short getShort() throws SQLException {
+    try {
+      return delegate.getShort();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public int getInt() throws SQLException {
+    try {
+      return delegate.getInt();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public long getLong() throws SQLException {
+    try {
+      return delegate.getLong();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public float getFloat() throws SQLException {
+    try {
+      return delegate.getFloat();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public double getDouble() throws SQLException {
+    try {
+      return delegate.getDouble();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public BigDecimal getBigDecimal() throws SQLException {
+    try {
+      return delegate.getBigDecimal();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public BigDecimal getBigDecimal(int scale) throws SQLException {
+    try {
+      return delegate.getBigDecimal(scale);
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public byte[] getBytes() throws SQLException {
+    try {
+      return delegate.getBytes();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public InputStream getAsciiStream() throws SQLException {
+    try {
+      return delegate.getAsciiStream();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public InputStream getUnicodeStream() throws SQLException {
+    try {
+      return delegate.getUnicodeStream();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public InputStream getBinaryStream() throws SQLException {
+    try {
+      return delegate.getBinaryStream();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Object getObject() throws SQLException {
+    try {
+      return delegate.getObject();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Reader getCharacterStream() throws SQLException {
+    try {
+      return delegate.getCharacterStream();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Object getObject(Map<String, Class<?>> map) throws SQLException {
+    try {
+      return delegate.getObject(map);
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Ref getRef() throws SQLException {
+    try {
+      return delegate.getRef();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Blob getBlob() throws SQLException {
+    try {
+      return delegate.getBlob();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Clob getClob() throws SQLException {
+    try {
+      return delegate.getClob();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Array getArray() throws SQLException {
+    try {
+      return delegate.getArray();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Date getDate(Calendar calendar) throws SQLException {
+    try {
+      return delegate.getDate(calendar);
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Time getTime(Calendar calendar) throws SQLException {
+    try {
+      return delegate.getTime(calendar);
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Timestamp getTimestamp(Calendar calendar) throws SQLException {
+    try {
+      return delegate.getTimestamp(calendar);
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public URL getURL() throws SQLException {
+    try {
+      return delegate.getURL();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public NClob getNClob() throws SQLException {
+    try {
+      return delegate.getNClob();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public SQLXML getSQLXML() throws SQLException {
+    try {
+      return delegate.getSQLXML();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public String getNString() throws SQLException {
+    try {
+      return delegate.getNString();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public Reader getNCharacterStream() throws SQLException {
+    try {
+      return delegate.getNCharacterStream();
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+  @Override
+  public <T> T getObject(Class<T> type) throws SQLException {
+    try {
+      return delegate.getObject(type);
+    } catch(RuntimeException e) {
+      String message = e.getMessage();
+      if (message != null && message.startsWith("cannot convert to")) {
+        throw new SQLException(e.getMessage(), e);
+      }
+      throw e;
+    }
+  }
+
+}

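[Editor's note] The wrapping above is plain delegation plus a message check on the "cannot convert to" prefix. A minimal sketch of how a result-set implementation might apply it is below; the helper class and method are hypothetical, only WrappedAccessor and Avatica's Cursor.Accessor come from this commit. WrappedAccessor is package-private, so such a caller would live in the same package.

// Illustrative sketch only; not part of this commit.
package org.apache.drill.jdbc.impl;

import java.util.ArrayList;
import java.util.List;

import org.apache.calcite.avatica.util.Cursor.Accessor;

class AccessorWrappingSketch {
  // Wrap every delegate accessor so that "cannot convert to ..." failures,
  // raised by Avatica as RuntimeException, reach JDBC callers as SQLException.
  static List<Accessor> wrapAll(List<Accessor> rawAccessors) {
    final List<Accessor> wrapped = new ArrayList<>(rawAccessors.size());
    for (Accessor raw : rawAccessors) {
      wrapped.add(new WrappedAccessor(raw));
    }
    return wrapped;
  }
}
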
http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
index bca6325..8e65869 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
@@ -88,14 +88,14 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
       DatabaseMetaDataGetColumnsTest.class.getSimpleName() + "_View";
 
   /** The one shared JDBC connection to Drill. */
-  private static Connection connection;
+  protected static Connection connection;
 
   /** Overall (connection-level) metadata. */
-  private static DatabaseMetaData dbMetadata;
+  protected static DatabaseMetaData dbMetadata;
 
   /** getColumns result metadata.  For checking columns themselves (not cell
    *  values or row order). */
-  private static ResultSetMetaData rowsMetadata;
+  protected static ResultSetMetaData rowsMetadata;
 
 
   ////////////////////
@@ -181,8 +181,7 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
   }
 
   @BeforeClass
-  public static void setUpConnectionAndMetadataToCheck() throws Exception {
-
+  public static void setUpConnection() throws Exception {
     // Get JDBC connection to Drill:
     // (Note: Can't use JdbcTest's connect(...) because JdbcTest closes
     // Connection--and other JDBC objects--on test method failure, but this test
@@ -190,6 +189,11 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
     connection = new Driver().connect( "jdbc:drill:zk=local",
                                        JdbcAssert.getDefaultProperties() );
     dbMetadata = connection.getMetaData();
+
+    setUpMetadataToCheck();
+  }
+
+  protected static void setUpMetadataToCheck() throws Exception {
     final Statement stmt = connection.createStatement();
 
     ResultSet util;
@@ -346,7 +350,7 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
 
   @AfterClass
   public static void tearDownConnection() throws SQLException {
-    final ResultSet util =
+    ResultSet util =
         connection.createStatement().executeQuery( "DROP VIEW " + VIEW_NAME + "" );
     assertTrue( util.next() );
     assertTrue( "Error dropping temporary test-columns view " + VIEW_NAME + ": "
@@ -960,7 +964,7 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
 
   @Test
   public void test_COLUMN_SIZE_hasRightValue_mdrOptBOOLEAN() throws SQLException {
-    assertThat( getIntOrNull( mdrOptBOOLEAN, "COLUMN_SIZE" ), nullValue() );
+    assertThat( getIntOrNull( mdrOptBOOLEAN, "COLUMN_SIZE" ), equalTo(1) );
   }
 
   @Ignore( "TODO(DRILL-2470): unignore when TINYINT is implemented" )
@@ -2702,7 +2706,7 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
 
   @Test
   public void test_SOURCE_DATA_TYPE_hasRightValue_mdrOptBOOLEAN() throws SQLException {
-    assertThat( getIntOrNull( mdrOptBOOLEAN, "SOURCE_DATA_TYPE" ), nullValue() );
+    assertThat( mdrOptBOOLEAN.getString( "SOURCE_DATA_TYPE" ), nullValue() );
   }
 
   @Test
@@ -2712,22 +2716,18 @@ public class DatabaseMetaDataGetColumnsTest extends JdbcTestBase {
 
   @Test
   public void test_SOURCE_DATA_TYPE_hasRightTypeString() throws SQLException {
-    // TODO(DRILL-2135):  Resolve workaround:
-    //assertThat( rsMetadata.getColumnTypeName( 22 ), equalTo( "SMALLINT" ) );
-    assertThat( rowsMetadata.getColumnTypeName( 22 ), equalTo( "INTEGER" ) );
+    assertThat( rowsMetadata.getColumnTypeName( 22 ), equalTo( "SMALLINT" ) );
   }
 
   @Test
   public void test_SOURCE_DATA_TYPE_hasRightTypeCode() throws SQLException {
-    // TODO(DRILL-2135):  Resolve workaround:
-    //assertThat( rsMetadata.getColumnType( 22 ), equalTo( Types.SMALLINT ) );
-    assertThat( rowsMetadata.getColumnType( 22 ), equalTo( Types.INTEGER ) );
+    assertThat( rowsMetadata.getColumnType( 22 ), equalTo( Types.SMALLINT ) );
   }
 
   @Test
   public void test_SOURCE_DATA_TYPE_hasRightClass() throws SQLException {
     assertThat( rowsMetadata.getColumnClassName( 22 ),
-                equalTo( Integer.class.getName() ) );
+                equalTo( Short.class.getName() ) );
   }
 
   @Test

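[Editor's note] The revised expectations above describe what a client now sees when it reads these fields through the standard DatabaseMetaData.getColumns(...) call (column 22 of that result set is SOURCE_DATA_TYPE, as the tests use). A minimal sketch, assuming a local Drillbit; the pattern arguments are illustrative and not part of this commit.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class GetColumnsSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:drill:zk=local")) {
      DatabaseMetaData md = conn.getMetaData();
      // Scan every column Drill reports; COLUMN_SIZE and SOURCE_DATA_TYPE are
      // the fields whose expected values changed in the tests above.
      try (ResultSet rs = md.getColumns(null, null, "%", "%")) {
        while (rs.next()) {
          System.out.printf("%s.%s  COLUMN_SIZE=%s  SOURCE_DATA_TYPE=%s%n",
              rs.getString("TABLE_NAME"),
              rs.getString("COLUMN_NAME"),
              rs.getObject("COLUMN_SIZE"),
              rs.getObject("SOURCE_DATA_TYPE"));
        }
      }
    }
  }
}
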
http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataTest.java
index 738f1a2..0ec5c8b 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataTest.java
@@ -17,25 +17,25 @@
  */
 package org.apache.drill.jdbc;
 
-import static org.junit.Assert.assertTrue;
+import static java.sql.Connection.TRANSACTION_NONE;
+import static java.sql.Connection.TRANSACTION_READ_COMMITTED;
+import static java.sql.Connection.TRANSACTION_READ_UNCOMMITTED;
+import static java.sql.Connection.TRANSACTION_REPEATABLE_READ;
+import static java.sql.Connection.TRANSACTION_SERIALIZABLE;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.notNullValue;
 import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
 import static org.junit.Assert.assertThat;
-import static org.hamcrest.CoreMatchers.*;
-
-import org.apache.drill.jdbc.Driver;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import static org.junit.Assert.assertTrue;
 
-import static java.sql.Connection.*;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
-import java.sql.SQLFeatureNotSupportedException;
-import java.sql.Savepoint;
 import java.sql.SQLException;
 
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
 /**
  * Test for Drill's implementation of DatabaseMetaData's methods (other than
  * those tested separately, e.g., {@code getColumn(...)}, tested in
@@ -43,8 +43,8 @@ import java.sql.SQLException;
  */
 public class DatabaseMetaDataTest {
 
-  private static Connection connection;
-  private static DatabaseMetaData dbmd;
+  protected static Connection connection;
+  protected static DatabaseMetaData dbmd;
 
   @BeforeClass
   public static void setUpConnection() throws SQLException {

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyDatabaseMetaDataGetColumnsTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyDatabaseMetaDataGetColumnsTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyDatabaseMetaDataGetColumnsTest.java
new file mode 100644
index 0000000..fbd9379
--- /dev/null
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyDatabaseMetaDataGetColumnsTest.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.jdbc;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.Assert.assertThat;
+
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.Properties;
+
+import org.apache.drill.jdbc.test.JdbcAssert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test compatibility with older versions of the server
+ */
+public class LegacyDatabaseMetaDataGetColumnsTest extends DatabaseMetaDataGetColumnsTest {
+
+  @BeforeClass
+  public static void setUpConnection() throws Exception {
+    // Get JDBC connection to Drill:
+    // (Note: Can't use JdbcTest's connect(...) because JdbcTest closes
+    // Connection--and other JDBC objects--on test method failure, but this test
+    // class uses some objects across methods.)
+    Properties defaultProperties = JdbcAssert.getDefaultProperties();
+    defaultProperties.setProperty("server.metadata.disabled", "true");
+
+    connection = new Driver().connect( "jdbc:drill:zk=local",
+                                       defaultProperties );
+    dbMetadata = connection.getMetaData();
+
+    DatabaseMetaDataGetColumnsTest.setUpMetadataToCheck();
+  }
+
+
+  // Override because of DRILL-1959
+
+  @Override
+  @Test
+  public void test_SOURCE_DATA_TYPE_hasRightTypeString() throws SQLException {
+    assertThat( rowsMetadata.getColumnTypeName( 22 ), equalTo( "INTEGER" ) );
+  }
+
+  @Override
+  @Test
+  public void test_SOURCE_DATA_TYPE_hasRightTypeCode() throws SQLException {
+    assertThat( rowsMetadata.getColumnType( 22 ), equalTo( Types.INTEGER ) );
+  }
+
+  @Override
+  @Test
+  public void test_SOURCE_DATA_TYPE_hasRightClass() throws SQLException {
+    assertThat( rowsMetadata.getColumnClassName( 22 ),
+                equalTo( Integer.class.getName() ) );
+  }
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyDatabaseMetaDataTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyDatabaseMetaDataTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyDatabaseMetaDataTest.java
new file mode 100644
index 0000000..ba5b700
--- /dev/null
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyDatabaseMetaDataTest.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.jdbc;
+
+import java.sql.SQLException;
+import java.util.Properties;
+
+import org.junit.BeforeClass;
+
+/**
+ * Test compatibility with older versions of the server
+ */
+public class LegacyDatabaseMetaDataTest extends DatabaseMetaDataTest {
+  @BeforeClass
+  public static void setUpConnection() throws SQLException {
+    Properties properties = new Properties();
+    properties.setProperty("server.metadata.disabled", "true");
+    // (Note: Can't use JdbcTest's connect(...) because JdbcTest closes
+    // Connection--and other JDBC objects--on test method failure, but this test
+    // class uses some objects across methods.)
+    connection = new Driver().connect( "jdbc:drill:zk=local", properties );
+    dbmd = connection.getMetaData();
+  }
+}

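[Editor's note] The two Legacy* test classes above drive the driver's fallback path by disabling the new server metadata APIs through a connection property. A minimal sketch of opening such a connection outside the test harness; the URL and property name are taken from the tests above, the rest is illustrative.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.util.Properties;

public class LegacyMetadataConnectSketch {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    // Force DatabaseMetaData calls back onto the older SQL-query-based path
    // instead of the new server Metadata APIs.
    props.setProperty("server.metadata.disabled", "true");

    try (Connection conn = DriverManager.getConnection("jdbc:drill:zk=local", props)) {
      DatabaseMetaData md = conn.getMetaData();
      System.out.println(md.getDatabaseProductName());
    }
  }
}
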
http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyPreparedStatementTest.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyPreparedStatementTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyPreparedStatementTest.java
index 46d675f..b482835 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyPreparedStatementTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/LegacyPreparedStatementTest.java
@@ -57,9 +57,11 @@ public class LegacyPreparedStatementTest extends JdbcTestBase {
   public static void setUpConnection() throws SQLException {
     Driver.load();
     Properties properties = new Properties();
-    properties.setProperty("preparedstatement.server.disabled", "true");
+    properties.setProperty("server.preparedstatement.disabled", "true");
 
     connection = DriverManager.getConnection( "jdbc:drill:zk=local", properties);
+    assertTrue(((DrillConnection) connection).getConfig().isServerPreparedStatementDisabled());
+
   }
 
   @AfterClass

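[Editor's note] The fix above corrects the property name to the server.preparedstatement.* form the driver actually reads, and the new assertion confirms the flag is visible through the connection config. A small sketch of the same check outside the test; only the property name and isServerPreparedStatementDisabled() come from this commit, the rest is illustrative.

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

import org.apache.drill.jdbc.DrillConnection;

public class PreparedStatementFlagSketch {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    // Disable server-side prepared statement support so the driver falls back
    // to its legacy client-side handling.
    props.setProperty("server.preparedstatement.disabled", "true");

    try (Connection conn = DriverManager.getConnection("jdbc:drill:zk=local", props)) {
      boolean disabled =
          ((DrillConnection) conn).getConfig().isServerPreparedStatementDisabled();
      System.out.println("server prepared statements disabled: " + disabled);
    }
  }
}
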
http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcMetadata.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcMetadata.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcMetadata.java
index 6d766bd..b859650 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcMetadata.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcMetadata.java
@@ -37,6 +37,7 @@ public class TestJdbcMetadata extends JdbcTestActionBase {
   @Test
   public void catalogs() throws Exception{
     this.testAction(new JdbcAction(){
+      @Override
       public ResultSet getResult(Connection c) throws SQLException {
         return c.getMetaData().getCatalogs();
       }
@@ -46,6 +47,7 @@ public class TestJdbcMetadata extends JdbcTestActionBase {
   @Test
   public void allSchemas() throws Exception{
     this.testAction(new JdbcAction(){
+      @Override
       public ResultSet getResult(Connection c) throws SQLException {
         return c.getMetaData().getSchemas();
       }
@@ -55,6 +57,7 @@ public class TestJdbcMetadata extends JdbcTestActionBase {
   @Test
   public void schemasWithConditions() throws Exception{
     this.testAction(new JdbcAction(){
+      @Override
       public ResultSet getResult(Connection c) throws SQLException {
         return c.getMetaData().getSchemas("DRILL", "%fs%");
       }
@@ -64,6 +67,7 @@ public class TestJdbcMetadata extends JdbcTestActionBase {
   @Test
   public void allTables() throws Exception{
     this.testAction(new JdbcAction(){
+      @Override
       public ResultSet getResult(Connection c) throws SQLException {
         return c.getMetaData().getTables(null, null, null, null);
       }
@@ -73,6 +77,7 @@ public class TestJdbcMetadata extends JdbcTestActionBase {
   @Test
   public void tablesWithConditions() throws Exception{
     this.testAction(new JdbcAction(){
+      @Override
       public ResultSet getResult(Connection c) throws SQLException {
         return c.getMetaData().getTables("DRILL", "sys", "opt%", new String[]{"SYSTEM_TABLE", "SYSTEM_VIEW"});
       }
@@ -82,6 +87,7 @@ public class TestJdbcMetadata extends JdbcTestActionBase {
   @Test
   public void allColumns() throws Exception{
     this.testAction(new JdbcAction(){
+      @Override
       public ResultSet getResult(Connection c) throws SQLException {
         return c.getMetaData().getColumns(null, null, null, null);
       }
@@ -91,6 +97,7 @@ public class TestJdbcMetadata extends JdbcTestActionBase {
   @Test
   public void columnsWithConditions() throws Exception{
     this.testAction(new JdbcAction(){
+      @Override
       public ResultSet getResult(Connection c) throws SQLException {
         return c.getMetaData().getColumns("DRILL", "sys", "opt%", "%ame");
       }

http://git-wip-us.apache.org/repos/asf/drill/blob/17f888d9/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestLegacyJdbcMetadata.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestLegacyJdbcMetadata.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestLegacyJdbcMetadata.java
new file mode 100644
index 0000000..97f7931
--- /dev/null
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestLegacyJdbcMetadata.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.jdbc.test;
+
+import java.sql.DriverManager;
+import java.util.Properties;
+
+import org.junit.BeforeClass;
+
+/**
+ * Test compatibility with older versions of the server
+ */
+public class TestLegacyJdbcMetadata extends TestJdbcMetadata {
+  @BeforeClass
+  public static void openClient() throws Exception {
+    Properties defaultProperties = JdbcAssert.getDefaultProperties();
+    defaultProperties.setProperty("server.metadata.disabled", "true");
+
+    connection = DriverManager.getConnection("jdbc:drill:zk=local", defaultProperties);
+  }
+}

