drill-commits mailing list archives

From: ve...@apache.org
Subject: git commit: DRILL-1636: Shorten the output of toString() methods of HiveTable and HivePartition.
Date: Wed, 05 Nov 2014 05:29:00 GMT
Repository: incubator-drill
Updated Branches:
  refs/heads/master fde819cee -> 5adadfac5


DRILL-1636: Shorten the output of toString() methods of HiveTable and HivePartition.


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/5adadfac
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/5adadfac
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/5adadfac

Branch: refs/heads/master
Commit: 5adadfac5fc0f475b12d449eb00af8e9a5daadec
Parents: fde819c
Author: vkorukanti <venki.korukanti@gmail.com>
Authored: Tue Nov 4 16:33:54 2014 -0800
Committer: vkorukanti <venki.korukanti@gmail.com>
Committed: Tue Nov 4 20:43:24 2014 -0800

----------------------------------------------------------------------
 .../drill/exec/store/hive/HiveReadEntry.java    | 11 +++++++++
 .../apache/drill/exec/store/hive/HiveScan.java  | 21 ++++++-----------
 .../apache/drill/exec/store/hive/HiveTable.java | 24 ++++++++++++++++++++
 3 files changed, 42 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/5adadfac/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java
index 70f8a5b..e964128 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java
@@ -24,6 +24,7 @@ import java.util.Map;
 
 import net.hydromatic.optiq.Schema.TableType;
 
+import org.apache.drill.exec.store.hive.HiveTable.HivePartition;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
@@ -70,6 +71,16 @@ public class HiveReadEntry {
   }
 
   @JsonIgnore
+  public HiveTable getHiveTableWrapper() {
+    return table;
+  }
+
+  @JsonIgnore
+  public List<HivePartition> getHivePartitionWrappers() {
+    return partitions;
+  }
+
+  @JsonIgnore
   public TableType getJdbcTableType() {
     if (table.getTable().getTableType().equals(org.apache.hadoop.hive.metastore.TableType.VIRTUAL_VIEW.toString())) {
       return TableType.VIEW;
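
For context on the two new accessors: getTable() and getPartitions() hand back the unwrapped Hive metastore objects, while getHiveTableWrapper() and getHivePartitionWrappers() return Drill's Jackson wrapper objects, whose shortened toString() is added further down in HiveTable.java. The following is a minimal, self-contained sketch of that distinction; the stand-in classes and sample values are illustrative only and are not part of this commit.

// Illustrative stand-ins; not the real Drill or Hive metastore classes.
class FakeMetastoreTable {
  @Override public String toString() {
    // The real metastore Table prints every field (owner, sd, parameters, ...).
    return "Table(tableName:orders, dbName:default, owner:..., sd:StorageDescriptor(...), parameters:{...}, ...)";
  }
}

class FakeTableWrapper {
  private final FakeMetastoreTable table = new FakeMetastoreTable();

  FakeMetastoreTable getTable() { return table; }   // analogous to HiveReadEntry.getTable()

  @Override public String toString() {              // analogous to the new, shorter HiveTable.toString()
    return "Table(dbName:default, tableName:orders)";
  }
}

public class WrapperVsUnwrappedSketch {
  public static void main(String[] args) {
    FakeTableWrapper wrapper = new FakeTableWrapper();
    System.out.println(wrapper.getTable());  // long, noisy metastore-style dump
    System.out.println(wrapper);             // short wrapper form returned by the new accessors
  }
}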

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/5adadfac/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveScan.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveScan.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveScan.java
index d1cc09b..ddbc100 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveScan.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveScan.java
@@ -68,8 +68,6 @@ public class HiveScan extends AbstractGroupScan {
   @JsonProperty("hive-table")
   public HiveReadEntry hiveReadEntry;
   @JsonIgnore
-  private final Table table;
-  @JsonIgnore
   private List<InputSplit> inputSplits = Lists.newArrayList();
   @JsonIgnore
   public HiveStoragePlugin storagePlugin;
@@ -77,8 +75,6 @@ public class HiveScan extends AbstractGroupScan {
   public String storagePluginName;
 
   @JsonIgnore
-  public List<Partition> partitions;
-  @JsonIgnore
   private final Collection<DrillbitEndpoint> endpoints;
 
   @JsonProperty("columns")
@@ -96,20 +92,16 @@ public class HiveScan extends AbstractGroupScan {
                   @JsonProperty("columns") List<SchemaPath> columns,
                  @JacksonInject StoragePluginRegistry pluginRegistry) throws ExecutionSetupException {
     this.hiveReadEntry = hiveReadEntry;
-    this.table = hiveReadEntry.getTable();
     this.storagePluginName = storagePluginName;
     this.storagePlugin = (HiveStoragePlugin) pluginRegistry.getPlugin(storagePluginName);
     this.columns = columns;
-    this.partitions = hiveReadEntry.getPartitions();
     getSplits();
     endpoints = storagePlugin.getContext().getBits();
   }
 
   public HiveScan(HiveReadEntry hiveReadEntry, HiveStoragePlugin storagePlugin, List<SchemaPath> columns) throws ExecutionSetupException {
-    this.table = hiveReadEntry.getTable();
     this.hiveReadEntry = hiveReadEntry;
     this.columns = columns;
-    this.partitions = hiveReadEntry.getPartitions();
     this.storagePlugin = storagePlugin;
     getSplits();
     endpoints = storagePlugin.getContext().getBits();
@@ -123,10 +115,8 @@ public class HiveScan extends AbstractGroupScan {
     this.inputSplits = that.inputSplits;
     this.mappings = that.mappings;
     this.partitionMap = that.partitionMap;
-    this.partitions = that.partitions;
     this.storagePlugin = that.storagePlugin;
     this.storagePluginName = that.storagePluginName;
-    this.table = that.table;
   }
 
   public List<SchemaPath> getColumns() {
@@ -135,6 +125,8 @@ public class HiveScan extends AbstractGroupScan {
 
   private void getSplits() throws ExecutionSetupException {
     try {
+      List<Partition> partitions = hiveReadEntry.getPartitions();
+      Table table = hiveReadEntry.getTable();
       if (partitions == null || partitions.size() == 0) {
         Properties properties = MetaStoreUtils.getTableMetadata(table);
         JobConf job = new JobConf();
@@ -144,7 +136,8 @@ public class HiveScan extends AbstractGroupScan {
         for (Map.Entry<String, String> entry : hiveReadEntry.hiveConfigOverride.entrySet()) {
           job.set(entry.getKey(), entry.getValue());
         }
-        InputFormat<?, ?> format = (InputFormat<?, ?>) Class.forName(table.getSd().getInputFormat()).getConstructor().newInstance();
+        InputFormat<?, ?> format = (InputFormat<?, ?>)
+            Class.forName(table.getSd().getInputFormat()).getConstructor().newInstance();
         job.setInputFormat(format.getClass());
         Path path = new Path(table.getSd().getLocation());
         FileSystem fs = FileSystem.get(job);
@@ -305,10 +298,10 @@ public class HiveScan extends AbstractGroupScan {
 
   @Override
   public String toString() {
-    return "HiveScan [table=" + table
+    return "HiveScan [table=" + hiveReadEntry.getHiveTableWrapper()
         + ", inputSplits=" + inputSplits
         + ", columns=" + columns
-        + ", partitions= " + partitions +"]";
+        + ", partitions= " + hiveReadEntry.getHivePartitionWrappers() +"]";
   }
 
   @Override
@@ -325,7 +318,7 @@ public class HiveScan extends AbstractGroupScan {
 
   // Return true if the current table is partitioned, false otherwise
   public boolean supportsPartitionFilterPushdown() {
-    List<FieldSchema> partitionKeys = table.getPartitionKeys();
+    List<FieldSchema> partitionKeys = hiveReadEntry.getTable().getPartitionKeys();
     if (partitionKeys == null || partitionKeys.size() == 0) {
       return false;
     }
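
The net effect in HiveScan: the cached raw Table and Partition fields are gone, getSplits() now pulls them from hiveReadEntry on demand, and toString() prints the wrapper objects instead of the unwrapped metastore objects. Below is a hedged sketch of that delegation pattern; the class and field names are illustrative stand-ins, not the real Drill types.

import java.util.Arrays;
import java.util.List;

// Illustrative stand-in for the wrapper; not the real HiveTable class.
class ShortTable {
  private final String dbName, tableName;
  ShortTable(String dbName, String tableName) { this.dbName = dbName; this.tableName = tableName; }
  @Override public String toString() { return "Table(dbName:" + dbName + ", tableName:" + tableName + ")"; }
}

public class ScanToStringSketch {
  private final ShortTable table;        // the scan keeps only the wrapper, not the raw metastore object
  private final List<String> columns;

  ScanToStringSketch(ShortTable table, List<String> columns) {
    this.table = table;
    this.columns = columns;
  }

  // toString() delegates to the wrapper's short form, mirroring the change
  // from "table=" + table (raw) to "table=" + hiveReadEntry.getHiveTableWrapper().
  @Override public String toString() {
    return "HiveScan [table=" + table + ", columns=" + columns + "]";
  }

  public static void main(String[] args) {
    System.out.println(new ScanToStringSketch(new ShortTable("default", "orders"), Arrays.asList("o_id", "o_total")));
    // prints: HiveScan [table=Table(dbName:default, tableName:orders), columns=[o_id, o_total]]
  }
}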

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/5adadfac/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java
index c219cfd..99101cc 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java
@@ -119,6 +119,21 @@ public class HiveTable {
     return table;
   }
 
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("Table(");
+
+    sb.append("dbName:");
+    sb.append(this.dbName);
+    sb.append(", ");
+
+    sb.append("tableName:");
+    sb.append(this.tableName);
+    sb.append(")");
+
+    return sb.toString();
+  }
+
   public static class HivePartition {
 
     @JsonIgnore
@@ -174,6 +189,15 @@ public class HiveTable {
     public Partition getPartition() {
       return partition;
     }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("Partition(");
+      sb.append("values:");
+      sb.append(this.values);
+      sb.append(")");
+      return sb.toString();
+    }
   }
 
   public static class StorageDescriptorWrapper {
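
To make the new output format concrete, here is a small runnable sketch that builds the same strings the two toString() methods above produce; the dbName, tableName, and partition values are hypothetical sample data, not taken from the commit.

import java.util.Arrays;
import java.util.List;

public class ToStringFormatSketch {
  public static void main(String[] args) {
    String dbName = "default";                            // hypothetical
    String tableName = "orders";                          // hypothetical
    List<String> values = Arrays.asList("2014", "11");    // hypothetical partition key values

    // Same concatenation order as HiveTable.toString() in the diff above.
    StringBuilder table = new StringBuilder("Table(");
    table.append("dbName:").append(dbName).append(", ");
    table.append("tableName:").append(tableName).append(")");

    // Same concatenation order as HivePartition.toString() in the diff above.
    StringBuilder partition = new StringBuilder("Partition(");
    partition.append("values:").append(values).append(")");

    System.out.println(table);      // Table(dbName:default, tableName:orders)
    System.out.println(partition);  // Partition(values:[2014, 11])
  }
}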

