impala-commits mailing list archives

From k...@apache.org
Subject [2/6] incubator-impala git commit: IMPALA-3809: Show Kudu-specific column metadata in DESCRIBE.
Date Wed, 23 Nov 2016 08:25:23 GMT
IMPALA-3809: Show Kudu-specific column metadata in DESCRIBE.
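
For illustration only (a sketch; the new kudu_describe.test below is the
authoritative expected output), minimal DESCRIBE on a Kudu table now returns
six additional columns:

  describe functional_kudu.alltypes;
  -- name, type, comment, primary_key, nullable, default_value, encoding, compression, block_size
  -- 'id','int','','true','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
  -- 'int_col','int','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'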

TODO:
- Corresponding changes to DESCRIBE EXTENDED/FORMATTED.

Testing:
A private core/hdfs run passed.

Change-Id: I83c91b540bc6d27cb4f21535fe12f3f8658c233e
Reviewed-on: http://gerrit.cloudera.org:8080/5125
Reviewed-by: Alex Behm <alex.behm@cloudera.com>
Tested-by: Internal Jenkins


Project: http://git-wip-us.apache.org/repos/asf/incubator-impala/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-impala/commit/8f2bb2f7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-impala/tree/8f2bb2f7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-impala/diff/8f2bb2f7

Branch: refs/heads/master
Commit: 8f2bb2f72fc94307034881f500e23f335ec08eba
Parents: 4821ea6
Author: Alex Behm <alex.behm@cloudera.com>
Authored: Wed Nov 16 16:22:16 2016 -0800
Committer: Internal Jenkins <cloudera-hudson@gerrit.cloudera.org>
Committed: Tue Nov 22 23:06:05 2016 +0000

----------------------------------------------------------------------
 common/thrift/Frontend.thrift                   | 12 ++--
 .../impala/analysis/DescribeTableStmt.java      | 50 ++++++++------
 .../impala/service/DescribeResultFactory.java   | 71 ++++++++++++++++----
 .../org/apache/impala/service/Frontend.java     | 46 ++++++-------
 .../org/apache/impala/service/JniFrontend.java  | 18 +++--
 .../apache/impala/analysis/AnalyzeDDLTest.java  | 28 ++++----
 .../queries/QueryTest/kudu_describe.test        | 47 +++++++++++++
 .../queries/QueryTest/kudu_stats.test           | 10 +--
 tests/query_test/test_kudu.py                   | 23 +++++--
 9 files changed, 213 insertions(+), 92 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/8f2bb2f7/common/thrift/Frontend.thrift
----------------------------------------------------------------------
diff --git a/common/thrift/Frontend.thrift b/common/thrift/Frontend.thrift
index 6fdf972..135fa7b 100644
--- a/common/thrift/Frontend.thrift
+++ b/common/thrift/Frontend.thrift
@@ -147,14 +147,14 @@ struct TDescribeDbParams {
 // given TDescribeOutputStyle.
 // NOTE: This struct should only be used for intra-process communication.
 struct TDescribeTableParams {
-  1: required string db
-  2: required string table_name
-
   // Controls the output style for this describe command.
-  3: required TDescribeOutputStyle output_style
+  1: required TDescribeOutputStyle output_style
+
+  // Set when describing a table.
+  2: optional CatalogObjects.TTableName table_name
 
-  // Struct type with fields to display for the MINIMAL output style.
-  4: optional Types.TColumnType result_struct
+  // Set when describing a path to a nested collection.
+  3: optional Types.TColumnType result_struct
 }
 
 // Results of a call to describeDb() and describeTable()

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/8f2bb2f7/fe/src/main/java/org/apache/impala/analysis/DescribeTableStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/DescribeTableStmt.java b/fe/src/main/java/org/apache/impala/analysis/DescribeTableStmt.java
index b947e8e..6977f3b 100644
--- a/fe/src/main/java/org/apache/impala/analysis/DescribeTableStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/DescribeTableStmt.java
@@ -20,29 +20,30 @@ package org.apache.impala.analysis;
 import java.util.ArrayList;
 
 import org.apache.commons.lang3.StringUtils;
-
-import parquet.Strings;
-
 import org.apache.impala.analysis.Path.PathType;
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.authorization.PrivilegeRequestBuilder;
 import org.apache.impala.catalog.StructType;
+import org.apache.impala.catalog.Table;
 import org.apache.impala.catalog.TableLoadingException;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TDescribeOutputStyle;
 import org.apache.impala.thrift.TDescribeTableParams;
+
+import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 
 /**
- * Representation of a DESCRIBE table statement which returns metadata on
- * a specified table:
+ * Representation of a DESCRIBE statement which returns metadata on a specified
+ * table or path:
  * Syntax: DESCRIBE <path>
  *         DESCRIBE FORMATTED|EXTENDED <table>
  *
  * If FORMATTED|EXTENDED is not specified and the path refers to a table, the statement
- * only returns info on the given table's column definition (column name, data type, and
- * comment). If the path refers to a complex typed field within a column, the statement
- * returns the field names, types, and comments.
+ * only returns info on the given table's column definitions (column name, data type,
+ * comment, and table-type-specific info like nullability, etc.). If the path refers to
+ * a complex typed field within a column, the statement returns the field names, types,
+ * and comments.
  * If FORMATTED|EXTENDED is specified, extended metadata on the table is returned
  * (in addition to the column definitions). This metadata includes info about the table
  * properties, SerDe properties, StorageDescriptor properties, and more.
@@ -51,15 +52,16 @@ public class DescribeTableStmt extends StatementBase {
   private final TDescribeOutputStyle outputStyle_;
 
   /// "."-separated path from the describe statement.
-  private ArrayList<String> rawPath_;
+  private final ArrayList<String> rawPath_;
 
   /// The resolved path to describe, set after analysis.
   private Path path_;
 
   /// The fully qualified name of the root table, set after analysis.
-  private TableName tableName_;
+  private Table table_;
 
   /// Struct type with the fields to display for the described path.
+  /// Only set when describing a path to a nested collection.
   private StructType resultStruct_;
 
   public DescribeTableStmt(ArrayList<String> rawPath, TDescribeOutputStyle outputStyle) {
@@ -68,7 +70,6 @@ public class DescribeTableStmt extends StatementBase {
     rawPath_ = rawPath;
     outputStyle_ = outputStyle;
     path_ = null;
-    tableName_ = null;
     resultStruct_ = null;
   }
 
@@ -81,10 +82,9 @@ public class DescribeTableStmt extends StatementBase {
     return sb.toString() + StringUtils.join(rawPath_, ".");
   }
 
-  public TableName getTableName() { return tableName_; }
+  public Table getTable() { return table_; }
   public TDescribeOutputStyle getOutputStyle() { return outputStyle_; }
 
-
   /**
    * Get the privilege requirement, which depends on the output style.
    */
@@ -122,31 +122,37 @@ public class DescribeTableStmt extends StatementBase {
       throw new AnalysisException(tle.getMessage(), tle);
     }
 
-    tableName_ = analyzer.getFqTableName(path_.getRootTable().getTableName());
-    analyzer.getTable(tableName_, getPrivilegeRequirement());
+    table_ = path_.getRootTable();
+    // Register authorization and audit events.
+    analyzer.getTable(table_.getTableName(), getPrivilegeRequirement());
 
-    if (path_.destTable() != null) {
-      resultStruct_ = path_.getRootTable().getHiveColumnsAsStruct();
-    } else if (path_.destType().isComplexType()) {
+    // Describing a table.
+    if (path_.destTable() != null) return;
+
+    if (path_.destType().isComplexType()) {
       if (outputStyle_ == TDescribeOutputStyle.FORMATTED ||
           outputStyle_ == TDescribeOutputStyle.EXTENDED) {
         throw new AnalysisException("DESCRIBE FORMATTED|EXTENDED must refer to a table");
       }
+      // Describing a nested collection.
       Preconditions.checkState(outputStyle_ == TDescribeOutputStyle.MINIMAL);
       resultStruct_ = Path.getTypeAsStruct(path_.destType());
     } else {
       throw new AnalysisException("Cannot describe path '" +
-          Strings.join(rawPath_, ".") + "' targeting scalar type: " +
+          Joiner.on('.').join(rawPath_) + "' targeting scalar type: " +
           path_.destType().toSql());
     }
   }
 
   public TDescribeTableParams toThrift() {
     TDescribeTableParams params = new TDescribeTableParams();
-    params.setTable_name(getTableName().getTbl());
-    params.setDb(getTableName().getDb());
     params.setOutput_style(outputStyle_);
-    params.setResult_struct(resultStruct_.toThrift());
+    if (resultStruct_ != null) {
+      params.setResult_struct(resultStruct_.toThrift());
+    } else {
+      Preconditions.checkNotNull(table_);
+      params.setTable_name(table_.getTableName().toThrift());
+    }
     return params;
   }
 }
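
A sketch of the two cases the rewritten analyze()/toThrift() distinguishes (the
table and column names below are hypothetical, not taken from this change):

  DESCRIBE some_db.some_table;                 -- table: toThrift() sets table_name only
  DESCRIBE some_db.some_table.some_array_col;  -- nested collection: toThrift() sets result_struct only
  DESCRIBE FORMATTED some_db.some_table.some_array_col;
  -- rejected during analysis: "DESCRIBE FORMATTED|EXTENDED must refer to a table"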

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/8f2bb2f7/fe/src/main/java/org/apache/impala/service/DescribeResultFactory.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/service/DescribeResultFactory.java b/fe/src/main/java/org/apache/impala/service/DescribeResultFactory.java
index eb917a6..05e05f9 100644
--- a/fe/src/main/java/org/apache/impala/service/DescribeResultFactory.java
+++ b/fe/src/main/java/org/apache/impala/service/DescribeResultFactory.java
@@ -25,9 +25,10 @@ import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
-
 import org.apache.impala.catalog.Column;
 import org.apache.impala.catalog.Db;
+import org.apache.impala.catalog.KuduColumn;
+import org.apache.impala.catalog.KuduTable;
 import org.apache.impala.catalog.StructField;
 import org.apache.impala.catalog.StructType;
 import org.apache.impala.catalog.Table;
@@ -35,9 +36,12 @@ import org.apache.impala.thrift.TColumnValue;
 import org.apache.impala.thrift.TDescribeOutputStyle;
 import org.apache.impala.thrift.TDescribeResult;
 import org.apache.impala.thrift.TResultRow;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 
-/*
+/**
  * Builds results for DESCRIBE DATABASE statements by constructing and
  * populating a TDescribeResult object.
  */
@@ -173,15 +177,15 @@ public class DescribeResultFactory {
     return descResult;
   }
 
-  /*
+  /**
    * Builds a TDescribeResult that contains the result of a DESCRIBE FORMATTED|EXTENDED
    * <table> command. For the formatted describe output the goal is to be exactly the
    * same as what Hive (via HiveServer2) outputs, for compatibility reasons. To do this,
    * Hive's MetadataFormatUtils class is used to build the results.
    */
   public static TDescribeResult buildDescribeFormattedResult(Table table) {
-    TDescribeResult descResult = new TDescribeResult();
-    descResult.results = Lists.newArrayList();
+    TDescribeResult result = new TDescribeResult();
+    result.results = Lists.newArrayList();
 
     org.apache.hadoop.hive.metastore.api.Table msTable =
         table.getMetaStoreTable().deepCopy();
@@ -218,29 +222,72 @@ public class DescribeResultFactory {
         }
         resultRow.addToColVals(colVal);
       }
-      descResult.results.add(resultRow);
+      result.results.add(resultRow);
     }
-    return descResult;
+    return result;
   }
 
-  /*
-   * Builds a TDescribeResult that contains the result of a DESCRIBE <path> command:
-   * the names and types of fields of the table or complex type referred to by the path.
+  /**
+   * Builds a TDescribeResult for a nested collection whose fields are represented
+   * by the given StructType.
    */
   public static TDescribeResult buildDescribeMinimalResult(StructType type) {
     TDescribeResult descResult = new TDescribeResult();
     descResult.results = Lists.newArrayList();
-
     for (StructField field: type.getFields()) {
       TColumnValue colNameCol = new TColumnValue();
       colNameCol.setString_val(field.getName());
       TColumnValue dataTypeCol = new TColumnValue();
       dataTypeCol.setString_val(field.getType().prettyPrint().toLowerCase());
       TColumnValue commentCol = new TColumnValue();
-      commentCol.setString_val(field.getComment() != null ? field.getComment() : "");
+      commentCol.setString_val(Strings.nullToEmpty(field.getComment()));
       descResult.results.add(
           new TResultRow(Lists.newArrayList(colNameCol, dataTypeCol, commentCol)));
     }
     return descResult;
   }
+
+  /**
+   * Builds a TDescribeResult for a table.
+   */
+  public static TDescribeResult buildDescribeMinimalResult(Table table) {
+    if (!(table instanceof KuduTable)) {
+      return buildDescribeMinimalResult(table.getHiveColumnsAsStruct());
+    }
+
+    TDescribeResult descResult = new TDescribeResult();
+    descResult.results = Lists.newArrayList();
+    for (Column c: table.getColumnsInHiveOrder()) {
+      Preconditions.checkState(c instanceof KuduColumn);
+      KuduColumn kuduColumn = (KuduColumn) c;
+      // General describe info.
+      TColumnValue colNameCol = new TColumnValue();
+      colNameCol.setString_val(kuduColumn.getName());
+      TColumnValue dataTypeCol = new TColumnValue();
+      dataTypeCol.setString_val(kuduColumn.getType().prettyPrint().toLowerCase());
+      TColumnValue commentCol = new TColumnValue();
+      commentCol.setString_val(Strings.nullToEmpty(kuduColumn.getComment()));
+      // Kudu-specific describe info.
+      TColumnValue pkCol = new TColumnValue();
+      pkCol.setString_val(Boolean.toString(kuduColumn.isKey()));
+      TColumnValue nullableCol = new TColumnValue();
+      nullableCol.setString_val(Boolean.toString(kuduColumn.isNullable()));
+      TColumnValue defaultValCol = new TColumnValue();
+      if (kuduColumn.hasDefaultValue()) {
+        defaultValCol.setString_val(kuduColumn.getDefaultValue().getStringValue());
+      } else {
+        defaultValCol.setString_val("");
+      }
+      TColumnValue encodingCol = new TColumnValue();
+      encodingCol.setString_val(kuduColumn.getEncoding().toString());
+      TColumnValue compressionCol = new TColumnValue();
+      compressionCol.setString_val(kuduColumn.getCompression().toString());
+      TColumnValue blockSizeCol = new TColumnValue();
+      blockSizeCol.setString_val(Integer.toString(kuduColumn.getBlockSize()));
+      descResult.results.add(new TResultRow(
+          Lists.newArrayList(colNameCol, dataTypeCol, commentCol, pkCol, nullableCol,
+              defaultValCol, encodingCol, compressionCol, blockSizeCol)));
+    }
+    return descResult;
+  }
 }
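
To make the Kudu-specific rendering in buildDescribeMinimalResult(Table) concrete,
a column declared with explicit options surfaces its default value, encoding, and
compression in its describe row. A reduced sketch of the second query in the new
kudu_describe.test:

  create table describe_test (
    pk1 int,
    c2 int default 100 encoding plain_encoding compression snappy,
    primary key (pk1))
  distribute by hash (pk1) into 3 buckets
  stored as kudu;

  describe describe_test;
  -- 'c2','int','','false','false','100','PLAIN_ENCODING','SNAPPY','0'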

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/8f2bb2f7/fe/src/main/java/org/apache/impala/service/Frontend.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/service/Frontend.java b/fe/src/main/java/org/apache/impala/service/Frontend.java
index 8657182..5e0307c 100644
--- a/fe/src/main/java/org/apache/impala/service/Frontend.java
+++ b/fe/src/main/java/org/apache/impala/service/Frontend.java
@@ -20,13 +20,11 @@ package org.apache.impala.service;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
 import java.util.Random;
 import java.util.Set;
 import java.util.UUID;
@@ -35,22 +33,18 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 
-import org.apache.impala.catalog.KuduTable;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hive.service.cli.thrift.TGetColumnsReq;
 import org.apache.hive.service.cli.thrift.TGetFunctionsReq;
 import org.apache.hive.service.cli.thrift.TGetSchemasReq;
 import org.apache.hive.service.cli.thrift.TGetTablesReq;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.impala.analysis.AnalysisContext;
-import org.apache.impala.analysis.Analyzer;
 import org.apache.impala.analysis.CreateDataSrcStmt;
 import org.apache.impala.analysis.CreateDropRoleStmt;
 import org.apache.impala.analysis.CreateUdaStmt;
 import org.apache.impala.analysis.CreateUdfStmt;
+import org.apache.impala.analysis.DescribeTableStmt;
 import org.apache.impala.analysis.DescriptorTable;
 import org.apache.impala.analysis.DropDataSrcStmt;
 import org.apache.impala.analysis.DropFunctionStmt;
@@ -66,7 +60,6 @@ import org.apache.impala.analysis.ShowGrantRoleStmt;
 import org.apache.impala.analysis.ShowRolesStmt;
 import org.apache.impala.analysis.TableName;
 import org.apache.impala.analysis.TruncateStmt;
-import org.apache.impala.analysis.TupleDescriptor;
 import org.apache.impala.authorization.AuthorizationChecker;
 import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.authorization.ImpalaInternalAdminUser;
@@ -85,7 +78,7 @@ import org.apache.impala.catalog.Function;
 import org.apache.impala.catalog.HBaseTable;
 import org.apache.impala.catalog.HdfsTable;
 import org.apache.impala.catalog.ImpaladCatalog;
-import org.apache.impala.catalog.StructType;
+import org.apache.impala.catalog.KuduTable;
 import org.apache.impala.catalog.Table;
 import org.apache.impala.catalog.Type;
 import org.apache.impala.common.AnalysisException;
@@ -93,7 +86,6 @@ import org.apache.impala.common.FileSystemUtil;
 import org.apache.impala.common.ImpalaException;
 import org.apache.impala.common.InternalException;
 import org.apache.impala.common.NotImplementedException;
-import org.apache.impala.common.RuntimeEnv;
 import org.apache.impala.planner.PlanFragment;
 import org.apache.impala.planner.Planner;
 import org.apache.impala.planner.ScanNode;
@@ -101,7 +93,6 @@ import org.apache.impala.thrift.TCatalogOpRequest;
 import org.apache.impala.thrift.TCatalogOpType;
 import org.apache.impala.thrift.TCatalogServiceRequestHeader;
 import org.apache.impala.thrift.TColumn;
-import org.apache.impala.thrift.TColumnType;
 import org.apache.impala.thrift.TColumnValue;
 import org.apache.impala.thrift.TCreateDropRoleParams;
 import org.apache.impala.thrift.TDdlExecRequest;
@@ -110,7 +101,6 @@ import org.apache.impala.thrift.TDescribeOutputStyle;
 import org.apache.impala.thrift.TDescribeResult;
 import org.apache.impala.thrift.TErrorCode;
 import org.apache.impala.thrift.TExecRequest;
-import org.apache.impala.thrift.TExplainLevel;
 import org.apache.impala.thrift.TExplainResult;
 import org.apache.impala.thrift.TFinalizeParams;
 import org.apache.impala.thrift.TFunctionCategory;
@@ -122,7 +112,6 @@ import org.apache.impala.thrift.TLoadDataResp;
 import org.apache.impala.thrift.TMetadataOpRequest;
 import org.apache.impala.thrift.TPlanExecInfo;
 import org.apache.impala.thrift.TPlanFragment;
-import org.apache.impala.thrift.TPlanFragmentTree;
 import org.apache.impala.thrift.TQueryCtx;
 import org.apache.impala.thrift.TQueryExecRequest;
 import org.apache.impala.thrift.TResetMetadataRequest;
@@ -141,11 +130,13 @@ import org.apache.impala.util.MembershipSnapshot;
 import org.apache.impala.util.PatternMatcher;
 import org.apache.impala.util.TResultRowBuilder;
 import org.apache.impala.util.TSessionStateUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicates;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
 /**
@@ -319,11 +310,22 @@ public class Frontend {
           new TColumn("comment", Type.STRING.toThrift())));
     } else if (analysis.isDescribeTableStmt()) {
       ddl.op_type = TCatalogOpType.DESCRIBE_TABLE;
-      ddl.setDescribe_table_params(analysis.getDescribeTableStmt().toThrift());
-      metadata.setColumns(Arrays.asList(
+      DescribeTableStmt descStmt = analysis.getDescribeTableStmt();
+      ddl.setDescribe_table_params(descStmt.toThrift());
+      List<TColumn> columns = Lists.newArrayList(
           new TColumn("name", Type.STRING.toThrift()),
           new TColumn("type", Type.STRING.toThrift()),
-          new TColumn("comment", Type.STRING.toThrift())));
+          new TColumn("comment", Type.STRING.toThrift()));
+      if (descStmt.getTable() instanceof KuduTable
+          && descStmt.getOutputStyle() == TDescribeOutputStyle.MINIMAL) {
+        columns.add(new TColumn("primary_key", Type.STRING.toThrift()));
+        columns.add(new TColumn("nullable", Type.STRING.toThrift()));
+        columns.add(new TColumn("default_value", Type.STRING.toThrift()));
+        columns.add(new TColumn("encoding", Type.STRING.toThrift()));
+        columns.add(new TColumn("compression", Type.STRING.toThrift()));
+        columns.add(new TColumn("block_size", Type.STRING.toThrift()));
+      }
+      metadata.setColumns(columns);
     } else if (analysis.isAlterTableStmt()) {
       ddl.op_type = TCatalogOpType.DDL;
       TDdlExecRequest req = new TDdlExecRequest();
@@ -773,16 +775,14 @@ public class Frontend {
    * Throws an exception if the table or db is not found or if there is an error loading
    * the table metadata.
    */
-  public TDescribeResult describeTable(String dbName, String tableName,
-      TDescribeOutputStyle outputStyle, TColumnType tResultStruct)
-          throws ImpalaException {
+  public TDescribeResult describeTable(TTableName tableName,
+      TDescribeOutputStyle outputStyle) throws ImpalaException {
+    Table table = impaladCatalog_.getTable(tableName.db_name, tableName.table_name);
     if (outputStyle == TDescribeOutputStyle.MINIMAL) {
-      StructType resultStruct = (StructType)Type.fromThrift(tResultStruct);
-      return DescribeResultFactory.buildDescribeMinimalResult(resultStruct);
+      return DescribeResultFactory.buildDescribeMinimalResult(table);
     } else {
       Preconditions.checkArgument(outputStyle == TDescribeOutputStyle.FORMATTED ||
           outputStyle == TDescribeOutputStyle.EXTENDED);
-      Table table = impaladCatalog_.getTable(dbName, tableName);
       return DescribeResultFactory.buildDescribeFormattedResult(table);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/8f2bb2f7/fe/src/main/java/org/apache/impala/service/JniFrontend.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/service/JniFrontend.java b/fe/src/main/java/org/apache/impala/service/JniFrontend.java
index 5e41af4..b343369 100644
--- a/fe/src/main/java/org/apache/impala/service/JniFrontend.java
+++ b/fe/src/main/java/org/apache/impala/service/JniFrontend.java
@@ -42,15 +42,18 @@ import org.apache.impala.catalog.DataSource;
 import org.apache.impala.catalog.Db;
 import org.apache.impala.catalog.Function;
 import org.apache.impala.catalog.Role;
+import org.apache.impala.catalog.StructType;
+import org.apache.impala.catalog.Type;
 import org.apache.impala.common.FileSystemUtil;
 import org.apache.impala.common.ImpalaException;
 import org.apache.impala.common.InternalException;
 import org.apache.impala.common.JniUtil;
-import org.apache.impala.service.BackendConfig;
+import org.apache.impala.thrift.TBackendGflags;
 import org.apache.impala.thrift.TBuildTestDescriptorTableParams;
 import org.apache.impala.thrift.TCatalogObject;
 import org.apache.impala.thrift.TDatabase;
 import org.apache.impala.thrift.TDescribeDbParams;
+import org.apache.impala.thrift.TDescribeOutputStyle;
 import org.apache.impala.thrift.TDescribeResult;
 import org.apache.impala.thrift.TDescribeTableParams;
 import org.apache.impala.thrift.TDescriptorTable;
@@ -82,7 +85,6 @@ import org.apache.impala.thrift.TTableName;
 import org.apache.impala.thrift.TUniqueId;
 import org.apache.impala.thrift.TUpdateCatalogCacheRequest;
 import org.apache.impala.thrift.TUpdateMembershipRequest;
-import org.apache.impala.thrift.TBackendGflags;
 import org.apache.impala.util.GlogAppender;
 import org.apache.impala.util.PatternMatcher;
 import org.apache.impala.util.TSessionStateUtil;
@@ -447,9 +449,15 @@ public class JniFrontend {
     TDescribeTableParams params = new TDescribeTableParams();
     JniUtil.deserializeThrift(protocolFactory_, params, thriftDescribeTableParams);
 
-    TDescribeResult result = frontend_.describeTable(
-        params.getDb(), params.getTable_name(), params.getOutput_style(),
-        params.getResult_struct());
+    Preconditions.checkState(params.isSetTable_name() ^ params.isSetResult_struct());
+    TDescribeResult result = null;
+    if (params.isSetTable_name()) {
+      result = frontend_.describeTable(params.getTable_name(), params.output_style);
+    } else {
+      Preconditions.checkState(params.output_style == TDescribeOutputStyle.MINIMAL);
+      StructType structType = (StructType)Type.fromThrift(params.result_struct);
+      result = DescribeResultFactory.buildDescribeMinimalResult(structType);
+    }
 
     TSerializer serializer = new TSerializer(protocolFactory_);
     try {

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/8f2bb2f7/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
index ed900bf..dbfdd60 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
@@ -39,17 +39,17 @@ import org.apache.impala.catalog.DataSourceTable;
 import org.apache.impala.catalog.KuduTable;
 import org.apache.impala.catalog.PrimitiveType;
 import org.apache.impala.catalog.ScalarType;
-import org.apache.impala.catalog.StructField;
-import org.apache.impala.catalog.StructType;
 import org.apache.impala.catalog.Type;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.common.FileSystemUtil;
 import org.apache.impala.common.FrontendTestBase;
 import org.apache.impala.common.RuntimeEnv;
 import org.apache.impala.testutil.TestUtils;
+import org.apache.impala.thrift.TDescribeTableParams;
 import org.apache.impala.util.MetaStoreUtil;
 import org.apache.kudu.ColumnSchema.CompressionAlgorithm;
 import org.apache.kudu.ColumnSchema.Encoding;
+import org.junit.Assert;
 import org.junit.Test;
 
 import com.google.common.base.Joiner;
@@ -57,8 +57,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 
-import junit.framework.Assert;
-
 public class AnalyzeDDLTest extends FrontendTestBase {
 
   @Test
@@ -3090,14 +3088,11 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     // Single element path can only be resolved as <table>.
     DescribeTableStmt describe = (DescribeTableStmt)AnalyzesOk("describe ambig",
         createAnalyzer("ambig"));
-    Assert.assertEquals("ambig", describe.toThrift().db);
-    Assert.assertEquals("ambig", describe.toThrift().table_name, "ambig");
-    StructType colStructType = new StructType(Lists.newArrayList(
-        new StructField("ambig", new ArrayType(Type.INT))));
-    StructType tableStructType = new StructType(Lists.newArrayList(
-        new StructField("ambig", colStructType)));
-    Assert.assertEquals(tableStructType.toSql(),
-        Type.fromThrift(describe.toThrift().result_struct).toSql());
+    TDescribeTableParams tdesc = (TDescribeTableParams) describe.toThrift();
+    Assert.assertTrue(tdesc.isSetTable_name());
+    Assert.assertEquals("ambig", tdesc.table_name.getDb_name());
+    Assert.assertEquals("ambig", tdesc.table_name.getTable_name(), "ambig");
+    Assert.assertFalse(tdesc.isSetResult_struct());
 
     // Path could be resolved as either <db>.<table> or <table>.<complex field>
     AnalysisError("describe ambig.ambig", createAnalyzer("ambig"),
@@ -3106,8 +3101,13 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     AnalysisError("describe ambig.ambig.ambig", createAnalyzer("ambig"),
         "Path is ambiguous: 'ambig.ambig.ambig'");
     // 4 element path can only be resolved to nested array.
-    AnalyzesOk("describe ambig.ambig.ambig.ambig", createAnalyzer("ambig"));
-
+    describe = (DescribeTableStmt) AnalyzesOk(
+        "describe ambig.ambig.ambig.ambig", createAnalyzer("ambig"));
+    tdesc = (TDescribeTableParams) describe.toThrift();
+    Type expectedType =
+        org.apache.impala.analysis.Path.getTypeAsStruct(new ArrayType(Type.INT));
+    Assert.assertTrue(tdesc.isSetResult_struct());
+    Assert.assertEquals(expectedType, Type.fromThrift(tdesc.getResult_struct()));
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/8f2bb2f7/testdata/workloads/functional-query/queries/QueryTest/kudu_describe.test
----------------------------------------------------------------------
diff --git a/testdata/workloads/functional-query/queries/QueryTest/kudu_describe.test b/testdata/workloads/functional-query/queries/QueryTest/kudu_describe.test
new file mode 100644
index 0000000..280c87d
--- /dev/null
+++ b/testdata/workloads/functional-query/queries/QueryTest/kudu_describe.test
@@ -0,0 +1,47 @@
+====
+---- QUERY
+describe functional_kudu.alltypes
+---- LABELS
+NAME,TYPE,COMMENT,PRIMARY_KEY,NULLABLE,DEFAULT_VALUE,ENCODING,COMPRESSION,BLOCK_SIZE
+---- RESULTS
+'bigint_col','bigint','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'bool_col','boolean','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'date_string_col','string','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'double_col','double','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'float_col','float','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'id','int','','true','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'int_col','int','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'month','int','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'smallint_col','smallint','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'string_col','string','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'timestamp_col','string','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'tinyint_col','tinyint','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'year','int','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+---- TYPES
+STRING,STRING,STRING,STRING,STRING,STRING,STRING,STRING,STRING
+====
+---- QUERY
+# Test composite primary key and column options.
+create table describe_test
+(pk1 int,
+ pk2 int,
+ pk3 string,
+ c1 string null default 'abc' comment 'testing',
+ c2 int default 100 encoding plain_encoding compression snappy,
+ c3 int null block_size 8388608,
+ primary key (pk1, pk2, pk3))
+distribute by hash (pk1) into 3 buckets
+stored as kudu;
+describe describe_test;
+---- LABELS
+NAME,TYPE,COMMENT,PRIMARY_KEY,NULLABLE,DEFAULT_VALUE,ENCODING,COMPRESSION,BLOCK_SIZE
+---- RESULTS
+'pk1','int','','true','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'pk2','int','','true','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'pk3','string','','true','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'c1','string','','false','true','abc','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'c2','int','','false','false','100','PLAIN_ENCODING','SNAPPY','0'
+'c3','int','','false','true','','AUTO_ENCODING','DEFAULT_COMPRESSION','8388608'
+---- TYPES
+STRING,STRING,STRING,STRING,STRING,STRING,STRING,STRING,STRING
+====

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/8f2bb2f7/testdata/workloads/functional-query/queries/QueryTest/kudu_stats.test
----------------------------------------------------------------------
diff --git a/testdata/workloads/functional-query/queries/QueryTest/kudu_stats.test b/testdata/workloads/functional-query/queries/QueryTest/kudu_stats.test
index 6914944..3ae4f69 100644
--- a/testdata/workloads/functional-query/queries/QueryTest/kudu_stats.test
+++ b/testdata/workloads/functional-query/queries/QueryTest/kudu_stats.test
@@ -22,10 +22,10 @@ INT,STRING,STRING,STRING,INT
 compute stats simple;
 describe simple;
 ---- RESULTS
-'id','int',''
-'name','string',''
-'valf','float',''
-'vali','bigint',''
+'id','int','','true','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'name','string','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'valf','float','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
+'vali','bigint','','false','false','','AUTO_ENCODING','DEFAULT_COMPRESSION','0'
 ---- TYPES
-STRING,STRING,STRING
+STRING,STRING,STRING,STRING,STRING,STRING,STRING,STRING,STRING
 ====

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/8f2bb2f7/tests/query_test/test_kudu.py
----------------------------------------------------------------------
diff --git a/tests/query_test/test_kudu.py b/tests/query_test/test_kudu.py
index 996931d..4ea8770 100644
--- a/tests/query_test/test_kudu.py
+++ b/tests/query_test/test_kudu.py
@@ -64,6 +64,9 @@ class TestKuduOperations(KuduTestSuite):
   def test_kudu_stats(self, vector, unique_database):
     self.run_test_case('QueryTest/kudu_stats', vector, use_db=unique_database)
 
+  def test_kudu_describe(self, vector, unique_database):
+    self.run_test_case('QueryTest/kudu_describe', vector, use_db=unique_database)
+
   def test_kudu_column_options(self, cursor, kudu_client, unique_database):
     encodings = ["ENCODING PLAIN_ENCODING", ""]
     compressions = ["COMPRESSION SNAPPY", ""]
@@ -120,7 +123,7 @@ class TestCreateExternalTable(KuduTestSuite):
       with self.drop_impala_table_after_context(cursor, impala_table_name):
         cursor.execute("DESCRIBE %s" % impala_table_name)
         kudu_schema = kudu_table.schema
-        for i, (col_name, col_type, _) in enumerate(cursor):
+        for i, (col_name, col_type, _, _, _, _, _, _, _) in enumerate(cursor):
           kudu_col = kudu_schema[i]
           assert col_name == kudu_col.name
           assert col_type.upper() == \
@@ -199,7 +202,9 @@ class TestCreateExternalTable(KuduTestSuite):
                 impala_table_name, preferred_kudu_table.name))
         with self.drop_impala_table_after_context(cursor, impala_table_name):
           cursor.execute("DESCRIBE %s" % impala_table_name)
-          assert cursor.fetchall() == [("a", "bigint", "")]
+          assert cursor.fetchall() == \
+              [("a", "bigint", "", "true", "false", "", "AUTO_ENCODING",
+                "DEFAULT_COMPRESSION", "0")]
 
   def test_explicit_name_doesnt_exist(self, cursor, kudu_client):
     kudu_table_name = self.random_table_name()
@@ -432,7 +437,9 @@ class TestImpalaKuduIntegration(KuduTestSuite):
       cursor.execute("CREATE EXTERNAL TABLE %s STORED AS KUDU %s" % (
           impala_table_name, props))
       cursor.execute("DESCRIBE %s" % (impala_table_name))
-      assert cursor.fetchall() == [("a", "int", "")]
+      assert cursor.fetchall() == \
+          [("a", "int", "", "true", "false", "", "AUTO_ENCODING",
+            "DEFAULT_COMPRESSION", "0")]
 
       # Drop the underlying Kudu table and replace it with another Kudu table that has
       # the same name but different schema
@@ -448,7 +455,11 @@ class TestImpalaKuduIntegration(KuduTestSuite):
         # Kudu.
         cursor.execute("REFRESH %s" % (impala_table_name))
         cursor.execute("DESCRIBE %s" % (impala_table_name))
-        assert cursor.fetchall() == [("b", "string", ""), ("c", "string", "")]
+        assert cursor.fetchall() == \
+            [("b", "string", "", "true", "false", "", "AUTO_ENCODING",
+              "DEFAULT_COMPRESSION", "0"),
+             ("c", "string", "", "false", "true", "", "AUTO_ENCODING",
+              "DEFAULT_COMPRESSION", "0")]
 
   def test_delete_external_kudu_table(self, cursor, kudu_client):
     """Check that Impala can recover from the case where the underlying Kudu table of
@@ -462,7 +473,9 @@ class TestImpalaKuduIntegration(KuduTestSuite):
       cursor.execute("CREATE EXTERNAL TABLE %s STORED AS KUDU %s" % (
           impala_table_name, props))
       cursor.execute("DESCRIBE %s" % (impala_table_name))
-      assert cursor.fetchall() == [("a", "int", "")]
+      assert cursor.fetchall() == \
+          [("a", "int", "", "true", "false", "", "AUTO_ENCODING",
+            "DEFAULT_COMPRESSION", "0")]
       # Drop the underlying Kudu table
       kudu_client.delete_table(kudu_table.name)
       assert not kudu_client.table_exists(kudu_table.name)

