drill-commits mailing list archives

From ve...@apache.org
Subject [3/5] drill git commit: DRILL-1991: Code indentation and formatting cleanup for a few files
Date Fri, 16 Jan 2015 23:14:57 GMT
DRILL-1991: Code indentation and formatting cleanup for a few files

+ Reformatted using the IDE; the only non-formatting change is the removal of two unused static variables.


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/10fd9e10
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/10fd9e10
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/10fd9e10

Branch: refs/heads/master
Commit: 10fd9e101207d5d861f023e45fb46c0787bf6da0
Parents: fe6f7ef
Author: vkorukanti <venki.korukanti@gmail.com>
Authored: Fri Jan 9 00:02:47 2015 -0800
Committer: vkorukanti <venki.korukanti@gmail.com>
Committed: Fri Jan 16 14:03:32 2015 -0800

----------------------------------------------------------------------
 .../physical/impl/aggregate/HashAggBatch.java   |  77 +-
 .../impl/aggregate/HashAggTemplate.java         | 110 +--
 .../physical/impl/aggregate/HashAggregator.java |  18 +-
 .../physical/impl/common/ChainedHashTable.java  | 166 +++--
 .../exec/physical/impl/common/HashTable.java    |  27 +-
 .../physical/impl/common/HashTableTemplate.java | 160 +++--
 .../exec/physical/impl/join/HashJoinBatch.java  | 705 +++++++++----------
 7 files changed, 654 insertions(+), 609 deletions(-)
----------------------------------------------------------------------
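
The reformatting applied throughout the diff follows a consistent profile: 2-space block indent, a fixed 4-space continuation indent for wrapped lines (replacing alignment under the opening parenthesis), an apparent ~120-column limit, and removal of stray spaces before semicolons. A minimal, runnable illustration of the before/after style (the method and values are invented for the example; only the formatting convention comes from the patch):

    public class FormattingExample {
      static int combine(int a, int b, int c, int d) {
        return a ^ b ^ c ^ d;
      }

      public static void main(String[] args) {
        // Before: arguments aligned under the opening parenthesis, space before ';'
        //   int r = combine(1, 2,
        //                   3, 4) ;
        // After: fixed 4-space continuation indent, no space before ';'
        int r = combine(1, 2,
            3, 4);
        System.out.println(r);
      }
    }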


http://git-wip-us.apache.org/repos/asf/drill/blob/10fd9e10/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
index 113e883..35faf22 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
@@ -17,8 +17,8 @@
  */
 package org.apache.drill.exec.physical.impl.aggregate;
 
-import java.io.IOException;
-
+import com.sun.codemodel.JExpr;
+import com.sun.codemodel.JVar;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.ErrorCollector;
 import org.apache.drill.common.expression.ErrorCollectorImpl;
@@ -51,8 +51,7 @@ import org.apache.drill.exec.record.selection.SelectionVector2;
 import org.apache.drill.exec.record.selection.SelectionVector4;
 import org.apache.drill.exec.vector.ValueVector;
 
-import com.sun.codemodel.JExpr;
-import com.sun.codemodel.JVar;
+import java.io.IOException;
 
 public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HashAggBatch.class);
@@ -60,21 +59,25 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
   private HashAggregator aggregator;
   private final RecordBatch incoming;
   private LogicalExpression[] aggrExprs;
-  private TypedFieldId[] groupByOutFieldIds ;
-  private TypedFieldId[] aggrOutFieldIds ;      // field ids for the outgoing batch
+  private TypedFieldId[] groupByOutFieldIds;
+  private TypedFieldId[] aggrOutFieldIds;      // field ids for the outgoing batch
 
   private final GeneratorMapping UPDATE_AGGR_INSIDE =
-    GeneratorMapping.create("setupInterior" /* setup method */, "updateAggrValuesInternal" /* eval method */,
-                            "resetValues" /* reset */, "cleanup" /* cleanup */) ;
+      GeneratorMapping.create("setupInterior" /* setup method */, "updateAggrValuesInternal" /* eval method */,
+          "resetValues" /* reset */, "cleanup" /* cleanup */);
 
   private final GeneratorMapping UPDATE_AGGR_OUTSIDE =
-    GeneratorMapping.create("setupInterior" /* setup method */, "outputRecordValues" /* eval method */,
-                            "resetValues" /* reset */, "cleanup" /* cleanup */) ;
+      GeneratorMapping.create("setupInterior" /* setup method */, "outputRecordValues" /* eval method */,
+          "resetValues" /* reset */, "cleanup" /* cleanup */);
 
-  private final MappingSet UpdateAggrValuesMapping = new MappingSet("incomingRowIdx" /* read index */, "outRowIdx" /* write index */, "htRowIdx" /* workspace index */, "incoming" /* read container */, "outgoing" /* write container */, "aggrValuesContainer" /* workspace container */, UPDATE_AGGR_INSIDE, UPDATE_AGGR_OUTSIDE, UPDATE_AGGR_INSIDE);
+  private final MappingSet UpdateAggrValuesMapping =
+      new MappingSet("incomingRowIdx" /* read index */, "outRowIdx" /* write index */,
+          "htRowIdx" /* workspace index */, "incoming" /* read container */, "outgoing" /* write container */,
+          "aggrValuesContainer" /* workspace container */, UPDATE_AGGR_INSIDE, UPDATE_AGGR_OUTSIDE, UPDATE_AGGR_INSIDE);
 
 
-  public HashAggBatch(HashAggregate popConfig, RecordBatch incoming, FragmentContext context) throws ExecutionSetupException {
+  public HashAggBatch(HashAggregate popConfig, RecordBatch incoming, FragmentContext context) throws
+      ExecutionSetupException {
     super(popConfig, context);
     this.incoming = incoming;
   }
@@ -122,7 +125,7 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
       logger.debug("Next outcome of {}", outcome);
       switch (outcome) {
       case NONE:
-//        throw new UnsupportedOperationException("Received NONE on first batch");
+        //        throw new UnsupportedOperationException("Received NONE on first batch");
         return outcome;
       case NOT_YET:
       case STOP:
@@ -144,13 +147,13 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
       return IterOutcome.NONE;
     }
 
-  if (aggregator.buildComplete() && ! aggregator.allFlushed()) {
-    // aggregation is complete and not all records have been output yet
-    IterOutcome outcome = aggregator.outputCurrentBatch();
-    return outcome;
-  }
+    if (aggregator.buildComplete() && !aggregator.allFlushed()) {
+      // aggregation is complete and not all records have been output yet
+      IterOutcome outcome = aggregator.outputCurrentBatch();
+      return outcome;
+    }
 
-  logger.debug("Starting aggregator doWork; incoming record count = {} ", incoming.getRecordCount());
+    logger.debug("Starting aggregator doWork; incoming record count = {} ", incoming.getRecordCount());
 
     while (true) {
       AggOutcome out = aggregator.doWork();
@@ -198,8 +201,10 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
     }
   }
 
-  private HashAggregator createAggregatorInternal() throws SchemaChangeException, ClassTransformationException, IOException{
-    CodeGenerator<HashAggregator> top = CodeGenerator.get(HashAggregator.TEMPLATE_DEFINITION, context.getFunctionRegistry());
+  private HashAggregator createAggregatorInternal() throws SchemaChangeException, ClassTransformationException,
+      IOException {
+    CodeGenerator<HashAggregator> top =
+        CodeGenerator.get(HashAggregator.TEMPLATE_DEFINITION, context.getFunctionRegistry());
     ClassGenerator<HashAggregator> cg = top.getRoot();
     ClassGenerator<HashAggregator> cgInner = cg.getInnerGenerator("BatchHolder");
 
@@ -217,7 +222,8 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
 
     for (i = 0; i < numGroupByExprs; i++) {
       NamedExpression ne = popConfig.getGroupByExprs()[i];
-      final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry() );
+      final LogicalExpression expr =
+          ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
       if (expr == null) {
         continue;
       }
@@ -231,7 +237,8 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
 
     for (i = 0; i < numAggrExprs; i++) {
       NamedExpression ne = popConfig.getAggrExprs()[i];
-      final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry() );
+      final LogicalExpression expr =
+          ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
 
       if (collector.hasErrors()) {
         throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
@@ -255,17 +262,12 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
     container.buildSchema(SelectionVectorMode.NONE);
     HashAggregator agg = context.getImplementationClass(top);
 
-    HashTableConfig htConfig = new HashTableConfig(context.getOptions().getOption(ExecConstants.MIN_HASH_TABLE_SIZE_KEY).num_val.intValue(),
-                                                   HashTable.DEFAULT_LOAD_FACTOR,
-                                                   popConfig.getGroupByExprs(),
-                                                   null /* no probe exprs */) ;
+    HashTableConfig htConfig =
+        new HashTableConfig(context.getOptions().getOption(ExecConstants.MIN_HASH_TABLE_SIZE_KEY).num_val.intValue(),
+            HashTable.DEFAULT_LOAD_FACTOR, popConfig.getGroupByExprs(), null /* no probe exprs */);
 
-    agg.setup(popConfig, htConfig, context, this.stats,
-              oContext.getAllocator(), incoming, this,
-              aggrExprs,
-              cgInner.getWorkspaceTypes(),
-              groupByOutFieldIds,
-              this.container);
+    agg.setup(popConfig, htConfig, context, this.stats, oContext.getAllocator(), incoming, this, aggrExprs,
+        cgInner.getWorkspaceTypes(), groupByOutFieldIds, this.container);
 
     return agg;
   }
@@ -286,25 +288,23 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
     case FOUR_BYTE: {
       JVar var = cg.declareClassField("sv4_", cg.getModel()._ref(SelectionVector4.class));
       cg.getBlock("doSetup").assign(var, JExpr.direct("incoming").invoke("getSelectionVector4"));
-      cg.getBlock("getVectorIndex")._return(var.invoke("get").arg(JExpr.direct("recordIndex")));;
+      cg.getBlock("getVectorIndex")._return(var.invoke("get").arg(JExpr.direct("recordIndex")));
       return;
     }
     case NONE: {
-      cg.getBlock("getVectorIndex")._return(JExpr.direct("recordIndex"));;
+      cg.getBlock("getVectorIndex")._return(JExpr.direct("recordIndex"));
       return;
     }
     case TWO_BYTE: {
       JVar var = cg.declareClassField("sv2_", cg.getModel()._ref(SelectionVector2.class));
       cg.getBlock("doSetup").assign(var, JExpr.direct("incoming").invoke("getSelectionVector2"));
-      cg.getBlock("getVectorIndex")._return(var.invoke("getIndex").arg(JExpr.direct("recordIndex")));;
+      cg.getBlock("getVectorIndex")._return(var.invoke("getIndex").arg(JExpr.direct("recordIndex")));
       return;
     }
 
     default:
       throw new IllegalStateException();
-
     }
-
   }
 
   @Override
@@ -320,5 +320,4 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
   protected void killIncoming(boolean sendUpstream) {
     incoming.kill(sendUpstream);
   }
-
 }
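
For context on the setupGetIndex() switch above: the generated getVectorIndex() method resolves a logical record index to a physical one, and its body depends on the incoming batch's SelectionVectorMode. A hand-written approximation of the three variants the JCodeModel calls emit (the class and method names here are illustrative; the real bodies are generated at runtime against the declared fields sv2_/sv4_):

    import org.apache.drill.exec.record.selection.SelectionVector2;
    import org.apache.drill.exec.record.selection.SelectionVector4;

    class VectorIndexSketch {
      // NONE: no selection vector, records are addressed directly
      int none(int recordIndex) {
        return recordIndex;
      }
      // TWO_BYTE: indirect through a 2-byte selection vector
      int twoByte(SelectionVector2 sv2, int recordIndex) {
        return sv2.getIndex(recordIndex);
      }
      // FOUR_BYTE: indirect through a 4-byte selection vector
      int fourByte(SelectionVector4 sv4, int recordIndex) {
        return sv4.get(recordIndex);
      }
    }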

http://git-wip-us.apache.org/repos/asf/drill/blob/10fd9e10/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
index d7cf904..dc2b05c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
@@ -17,13 +17,7 @@
  */
 package org.apache.drill.exec.physical.impl.aggregate;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import javax.inject.Named;
-
+import com.google.common.collect.Lists;
 import org.apache.drill.common.expression.ErrorCollector;
 import org.apache.drill.common.expression.ErrorCollectorImpl;
 import org.apache.drill.common.expression.ExpressionPosition;
@@ -53,17 +47,19 @@ import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.allocator.VectorAllocator;
 
-import com.google.common.collect.Lists;
+import javax.inject.Named;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
 
 public abstract class HashAggTemplate implements HashAggregator {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HashAggregator.class);
 
-  private static final long ALLOCATOR_INITIAL_RESERVATION = 1*1024*1024;
-  private static final long ALLOCATOR_MAX_RESERVATION = 20L*1000*1000*1000;
-
   private static final boolean EXTRA_DEBUG_1 = false;
   private static final boolean EXTRA_DEBUG_2 = false;
-  private static final String TOO_BIG_ERROR = "Couldn't add value to an empty batch.  This likely means that a single value is too long for a varlen field.";
+  private static final String TOO_BIG_ERROR =
+      "Couldn't add value to an empty batch.  This likely means that a single value is too long for a varlen field.";
   private boolean first = true;
   private boolean newSchema = false;
   private int underlyingIndex = 0;
@@ -93,7 +89,7 @@ public abstract class HashAggTemplate implements HashAggregator {
 
   private MaterializedField[] materializedValueFields;
   private boolean allFlushed = false;
-  private boolean  buildComplete = false;
+  private boolean buildComplete = false;
 
   private OperatorStats stats = null;
   private HashTableStats htStats = new HashTableStats();
@@ -113,7 +109,6 @@ public abstract class HashAggTemplate implements HashAggregator {
     }
   }
 
-
   public class BatchHolder {
 
     private VectorContainer aggrValuesContainer; // container for aggr values (workspace variables)
@@ -127,16 +122,16 @@ public abstract class HashAggTemplate implements HashAggregator {
 
       aggrValuesContainer = new VectorContainer();
 
-      ValueVector vector ;
+      ValueVector vector;
 
-      for(int i = 0; i < materializedValueFields.length; i++) {
+      for (int i = 0; i < materializedValueFields.length; i++) {
         MaterializedField outputField = materializedValueFields[i];
         // Create a type-specific ValueVector for this value
-        vector = TypeHelper.getNewVector(outputField, allocator) ;
+        vector = TypeHelper.getNewVector(outputField, allocator);
         vector.allocateNew();
         capacity = Math.min(capacity, vector.getValueCapacity());
 
-        aggrValuesContainer.add(vector) ;
+        aggrValuesContainer.add(vector);
       }
     }
 
@@ -155,9 +150,9 @@ public abstract class HashAggTemplate implements HashAggregator {
       outNumRecordsHolder.value = 0;
       boolean status = true;
       for (int i = batchOutputCount; i <= maxOccupiedIdx; i++) {
-        if (outputRecordValues(i, batchOutputCount) ) {
+        if (outputRecordValues(i, batchOutputCount)) {
           if (EXTRA_DEBUG_2) {
-            logger.debug("Outputting values to output index: {}", batchOutputCount) ;
+            logger.debug("Outputting values to output index: {}", batchOutputCount);
           }
           batchOutputCount++;
           outNumRecordsHolder.value++;
@@ -190,26 +185,26 @@ public abstract class HashAggTemplate implements HashAggregator {
     // Code-generated methods (implemented in HashAggBatch)
 
     @RuntimeOverridden
-    public void setupInterior(@Named("incoming") RecordBatch incoming, @Named("outgoing") RecordBatch outgoing, @Named("aggrValuesContainer") VectorContainer aggrValuesContainer) {}
+    public void setupInterior(@Named("incoming") RecordBatch incoming, @Named("outgoing") RecordBatch outgoing,
+        @Named("aggrValuesContainer") VectorContainer aggrValuesContainer) {
+    }
 
     @RuntimeOverridden
-    public void updateAggrValuesInternal(@Named("incomingRowIdx") int incomingRowIdx, @Named("htRowIdx") int htRowIdx) {}
+    public void updateAggrValuesInternal(@Named("incomingRowIdx") int incomingRowIdx, @Named("htRowIdx") int htRowIdx) {
+    }
 
     @RuntimeOverridden
-    public boolean outputRecordValues(@Named("htRowIdx") int htRowIdx, @Named("outRowIdx") int outRowIdx) {return true;}
+    public boolean outputRecordValues(@Named("htRowIdx") int htRowIdx, @Named("outRowIdx") int outRowIdx) {
+      return true;
+    }
   }
 
 
   @Override
-  public void setup(HashAggregate hashAggrConfig, HashTableConfig htConfig,
-                    FragmentContext context,
-                    OperatorStats stats,
-                    BufferAllocator allocator, RecordBatch incoming, HashAggBatch outgoing,
-                    LogicalExpression[] valueExprs,
-                    List<TypedFieldId> valueFieldIds,
-                    TypedFieldId[] groupByOutFieldIds,
-                    VectorContainer outContainer)
-    throws SchemaChangeException, ClassTransformationException, IOException {
+  public void setup(HashAggregate hashAggrConfig, HashTableConfig htConfig, FragmentContext context,
+      OperatorStats stats, BufferAllocator allocator, RecordBatch incoming, HashAggBatch outgoing,
+      LogicalExpression[] valueExprs, List<TypedFieldId> valueFieldIds, TypedFieldId[] groupByOutFieldIds,
+      VectorContainer outContainer) throws SchemaChangeException, ClassTransformationException, IOException {
 
     if (valueExprs == null || valueFieldIds == null) {
       throw new IllegalArgumentException("Invalid aggr value exprs or workspace variables.");
@@ -235,7 +230,8 @@ public abstract class HashAggTemplate implements HashAggregator {
     // we need to build a hash table on the aggregation column a1.
     // TODO:  This functionality will be added later.
     if (hashAggrConfig.getGroupByExprs().length == 0) {
-      throw new IllegalArgumentException("Currently, hash aggregation is only applicable if there are group-by expressions.");
+      throw new IllegalArgumentException("Currently, hash aggregation is only applicable if there are group-by " +
+          "expressions.");
     }
 
     this.htIdxHolder = new IndexPointer();
@@ -246,15 +242,17 @@ public abstract class HashAggTemplate implements HashAggregator {
 
     if (valueFieldIds.size() > 0) {
       int i = 0;
-      FieldReference ref = new FieldReference("dummy", ExpressionPosition.UNKNOWN, valueFieldIds.get(0).getIntermediateType());
+      FieldReference ref =
+          new FieldReference("dummy", ExpressionPosition.UNKNOWN, valueFieldIds.get(0).getIntermediateType());
       for (TypedFieldId id : valueFieldIds) {
         materializedValueFields[i++] = MaterializedField.create(ref, id.getIntermediateType());
       }
     }
 
-    ChainedHashTable ht = new ChainedHashTable(htConfig, context, allocator, incoming,
-        null /* no incoming probe */, outgoing, true /* nulls are equal */) ;
-    this.htable = ht.createAndSetupHashTable(groupByOutFieldIds) ;
+    ChainedHashTable ht =
+        new ChainedHashTable(htConfig, context, allocator, incoming, null /* no incoming probe */, outgoing,
+            true /* nulls are equal */);
+    this.htable = ht.createAndSetupHashTable(groupByOutFieldIds);
 
     numGroupByOutFields = groupByOutFieldIds.length;
     batchHolders = new ArrayList<BatchHolder>();
@@ -265,19 +263,20 @@ public abstract class HashAggTemplate implements HashAggregator {
 
   @Override
   public AggOutcome doWork() {
-    try{
+    try {
       // Note: Keeping the outer and inner try blocks here to maintain some similarity with
       // StreamingAggregate which does somethings conditionally in the outer try block.
       // In the future HashAggregate may also need to perform some actions conditionally
       // in the outer try block.
 
-      outside: while(true) {
+      outside:
+      while (true) {
         // loop through existing records, aggregating the values as necessary.
         if (EXTRA_DEBUG_1) {
-          logger.debug ("Starting outer loop of doWork()...");
+          logger.debug("Starting outer loop of doWork()...");
         }
         for (; underlyingIndex < incoming.getRecordCount(); incIndex()) {
-          if(EXTRA_DEBUG_2) {
+          if (EXTRA_DEBUG_2) {
             logger.debug("Doing loop with values underlying {}, current {}", underlyingIndex, currentIndex);
           }
           boolean success = checkGroupAndAggrValues(currentIndex);
@@ -369,7 +368,7 @@ public abstract class HashAggTemplate implements HashAggregator {
     // Skip the keys and only allocate for outputting the workspace values
     // (keys will be output through splitAndTransfer)
     Iterator<VectorWrapper<?>> outgoingIter = outContainer.iterator();
-    for (int i=0; i < numGroupByOutFields; i++) {
+    for (int i = 0; i < numGroupByOutFields; i++) {
       outgoingIter.next();
     }
     while (outgoingIter.hasNext()) {
@@ -469,16 +468,18 @@ public abstract class HashAggTemplate implements HashAggregator {
     int numOutputRecords = outNumRecordsHolder.value;
 
     if (EXTRA_DEBUG_1) {
-      logger.debug("After output values: outStartIdx = {}, outNumRecords = {}", outStartIdxHolder.value, outNumRecordsHolder.value);
+      logger.debug("After output values: outStartIdx = {}, outNumRecords = {}", outStartIdxHolder.value,
+          outNumRecordsHolder.value);
     }
     if (outputValuesStatus) {
-      outputKeysStatus = this.htable.outputKeys(outBatchIndex, this.outContainer, outStartIdxHolder.value, outNumRecordsHolder.value) ;
+      outputKeysStatus =
+          this.htable.outputKeys(outBatchIndex, this.outContainer, outStartIdxHolder.value, outNumRecordsHolder.value);
     }
 
     if (outputKeysStatus && outputValuesStatus) {
 
       // set the value count for outgoing batch value vectors
-      for(VectorWrapper<?> v : outgoing) {
+      for (VectorWrapper<?> v : outgoing) {
         v.getValueVector().getMutator().setValueCount(numOutputRecords);
       }
 
@@ -506,14 +507,16 @@ public abstract class HashAggTemplate implements HashAggregator {
       if (!outputKeysStatus) {
         logger.debug("Failed to output keys for current batch index: {} ", outBatchIndex);
         for (VectorWrapper<?> v : outContainer) {
-          logger.debug("At the time of failure, size of valuevector in outContainer = {}.", v.getValueVector().getValueCapacity());
+          logger.debug("At the time of failure, size of valuevector in outContainer = {}.",
+              v.getValueVector().getValueCapacity());
         }
         context.fail(new Exception("Failed to output keys for current batch !"));
       }
       if (!outputValuesStatus) {
         logger.debug("Failed to output values for current batch index: {} ", outBatchIndex);
         for (VectorWrapper<?> v : outContainer) {
-          logger.debug("At the time of failure, size of valuevector in outContainer = {}.", v.getValueVector().getValueCapacity());
+          logger.debug("At the time of failure, size of valuevector in outContainer = {}.",
+              v.getValueVector().getValueCapacity());
         }
         context.fail(new Exception("Failed to output values for current batch !"));
       }
@@ -555,7 +558,7 @@ public abstract class HashAggTemplate implements HashAggregator {
     }
     */
 
-    HashTable.PutStatus putStatus = htable.put(incomingRowIdx, htIdxHolder, 1 /* retry count */) ;
+    HashTable.PutStatus putStatus = htable.put(incomingRowIdx, htIdxHolder, 1 /* retry count */);
 
     if (putStatus != HashTable.PutStatus.PUT_FAILED) {
       int currentIdx = htIdxHolder.value;
@@ -564,11 +567,11 @@ public abstract class HashAggTemplate implements HashAggregator {
       if (currentIdx >= batchHolders.size() * HashTable.BATCH_SIZE) {
         addBatchHolder();
       }
-      BatchHolder bh = batchHolders.get( (currentIdx >>> 16) & HashTable.BATCH_MASK);
+      BatchHolder bh = batchHolders.get((currentIdx >>> 16) & HashTable.BATCH_MASK);
       int idxWithinBatch = currentIdx & HashTable.BATCH_MASK;
 
       // Check if we have almost filled up the workspace vectors and add a batch if necessary
-      if ((idxWithinBatch ==  (bh.capacity - 1)) && (bh.allocatedNextBatch == false)) {
+      if ((idxWithinBatch == (bh.capacity - 1)) && (bh.allocatedNextBatch == false)) {
         htable.addNewKeyBatch();
         addBatchHolder();
         bh.allocatedNextBatch = true;
@@ -585,10 +588,9 @@ public abstract class HashAggTemplate implements HashAggregator {
         //  logger.debug("group-by key = {} already present at hash table index = {}", holder.value, currentIdx) ;
         //}
 
-      }
-      else if (putStatus == HashTable.PutStatus.KEY_ADDED) {
+      } else if (putStatus == HashTable.PutStatus.KEY_ADDED) {
         if (EXTRA_DEBUG_2) {
-          logger.debug("Group-by key was added to hash table, inserting new aggregate values") ;
+          logger.debug("Group-by key was added to hash table, inserting new aggregate values");
         }
 
         // debugging
@@ -617,7 +619,9 @@ public abstract class HashAggTemplate implements HashAggregator {
 
   // Code-generated methods (implemented in HashAggBatch)
   public abstract void doSetup(@Named("incoming") RecordBatch incoming);
+
   public abstract int getVectorIndex(@Named("recordIndex") int recordIndex);
+
   public abstract boolean resetValues();
 
 }
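
A note on the index arithmetic in checkGroupAndAggrValues() above: a hash table entry index is a composite int whose high 16 bits select a BatchHolder and whose low 16 bits locate the entry within it. A minimal runnable sketch (BATCH_SIZE and BATCH_MASK mirror the constants in HashTable.java; the sample value is arbitrary):

    public class BatchIndexSketch {
      static final int BATCH_SIZE = Character.MAX_VALUE + 1; // 65536 entries per batch
      static final int BATCH_MASK = 0x0000FFFF;

      public static void main(String[] args) {
        int currentIdx = (3 << 16) | 1234;            // entry 1234 of the 4th batch
        int batch = (currentIdx >>> 16) & BATCH_MASK; // which BatchHolder: 3
        int idxWithinBatch = currentIdx & BATCH_MASK; // offset in that holder: 1234
        System.out.println(batch + " / " + idxWithinBatch);
      }
    }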

http://git-wip-us.apache.org/repos/asf/drill/blob/10fd9e10/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java
index 238242b..0f7f394 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java
@@ -17,9 +17,6 @@
  */
 package org.apache.drill.exec.physical.impl.aggregate;
 
-import java.io.IOException;
-import java.util.List;
-
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.exec.compile.TemplateClassDefinition;
 import org.apache.drill.exec.exception.ClassTransformationException;
@@ -34,21 +31,22 @@ import org.apache.drill.exec.record.RecordBatch.IterOutcome;
 import org.apache.drill.exec.record.TypedFieldId;
 import org.apache.drill.exec.record.VectorContainer;
 
+import java.io.IOException;
+import java.util.List;
+
 public interface HashAggregator {
 
-  public static TemplateClassDefinition<HashAggregator> TEMPLATE_DEFINITION = new TemplateClassDefinition<HashAggregator>(HashAggregator.class, HashAggTemplate.class);
+  public static TemplateClassDefinition<HashAggregator> TEMPLATE_DEFINITION =
+      new TemplateClassDefinition<HashAggregator>(HashAggregator.class, HashAggTemplate.class);
 
   public static enum AggOutcome {
     RETURN_OUTCOME, CLEANUP_AND_RETURN, UPDATE_AGGREGATOR
   }
 
   public abstract void setup(HashAggregate hashAggrConfig, HashTableConfig htConfig, FragmentContext context,
-                             OperatorStats stats, BufferAllocator allocator, RecordBatch incoming,
-                             HashAggBatch outgoing, LogicalExpression[] valueExprs,
-                             List<TypedFieldId> valueFieldIds,
-                             TypedFieldId[] keyFieldIds,
-                             VectorContainer outContainer)
-    throws SchemaChangeException, IOException, ClassTransformationException;
+      OperatorStats stats, BufferAllocator allocator, RecordBatch incoming, HashAggBatch outgoing,
+      LogicalExpression[] valueExprs, List<TypedFieldId> valueFieldIds, TypedFieldId[] keyFieldIds,
+      VectorContainer outContainer) throws SchemaChangeException, IOException, ClassTransformationException;
 
   public abstract IterOutcome getOutcome();
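
TEMPLATE_DEFINITION pairs the HashAggregator interface with the HashAggTemplate base class: at runtime Drill compiles a generated subclass of the template whose abstract and @RuntimeOverridden hooks are filled in, and createAggregatorInternal() obtains an instance via context.getImplementationClass(). A plain-Java stand-in for the pattern (names invented; not Drill API):

    abstract class TemplateSketch {
      // hook supplied by the generated subclass
      abstract int getVectorIndex(int recordIndex);

      // fixed driver logic living in the template
      final int resolve(int recordIndex) {
        return getVectorIndex(recordIndex);
      }
    }

    class GeneratedSketch extends TemplateSketch {
      @Override
      int getVectorIndex(int recordIndex) {
        return recordIndex; // what the generator emits for SelectionVectorMode.NONE
      }
    }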
 

http://git-wip-us.apache.org/repos/asf/drill/blob/10fd9e10/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
index 0502f7e..322fd1f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
@@ -17,19 +17,13 @@
  */
 package org.apache.drill.exec.physical.impl.common;
 
-import java.io.IOException;
-import java.util.LinkedList;
-import java.util.List;
-
+import com.sun.codemodel.JConditional;
+import com.sun.codemodel.JExpr;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.expression.ErrorCollector;
 import org.apache.drill.common.expression.ErrorCollectorImpl;
 import org.apache.drill.common.expression.LogicalExpression;
-import org.apache.drill.common.expression.FunctionCall;
-import org.apache.drill.common.expression.ExpressionPosition;
-import org.apache.drill.common.exceptions.DrillRuntimeException;
-import org.apache.drill.common.expression.ValueExpressions;
 import org.apache.drill.common.logical.data.NamedExpression;
-import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.compile.sig.GeneratorMapping;
@@ -53,56 +47,68 @@ import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.resolver.TypeCastRules;
 import org.apache.drill.exec.vector.ValueVector;
 
-import com.sun.codemodel.JConditional;
-import com.sun.codemodel.JExpr;
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
 
 
 public class ChainedHashTable {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ChainedHashTable.class);
 
   private static final GeneratorMapping KEY_MATCH_BUILD =
-    GeneratorMapping.create("setupInterior" /* setup method */, "isKeyMatchInternalBuild" /* eval method */,
-                            null /* reset */, null /* cleanup */);
+      GeneratorMapping.create("setupInterior" /* setup method */, "isKeyMatchInternalBuild" /* eval method */,
+          null /* reset */, null /* cleanup */);
 
   private static final GeneratorMapping KEY_MATCH_PROBE =
-    GeneratorMapping.create("setupInterior" /* setup method */, "isKeyMatchInternalProbe" /* eval method */,
-                            null /* reset */, null /* cleanup */);
+      GeneratorMapping.create("setupInterior" /* setup method */, "isKeyMatchInternalProbe" /* eval method */,
+          null /* reset */, null /* cleanup */);
 
   private static final GeneratorMapping GET_HASH_BUILD =
-    GeneratorMapping.create("doSetup" /* setup method */, "getHashBuild" /* eval method */,
-                            null /* reset */, null /* cleanup */);
+      GeneratorMapping.create("doSetup" /* setup method */, "getHashBuild" /* eval method */, null /* reset */,
+          null /* cleanup */);
 
   private static final GeneratorMapping GET_HASH_PROBE =
-    GeneratorMapping.create("doSetup" /* setup method */, "getHashProbe" /* eval method */,
-                            null /* reset */, null /* cleanup */);
+      GeneratorMapping.create("doSetup" /* setup method */, "getHashProbe" /* eval method */, null /* reset */,
+          null /* cleanup */);
 
   private static final GeneratorMapping SET_VALUE =
-    GeneratorMapping.create("setupInterior" /* setup method */, "setValue" /* eval method */,
-                            null /* reset */, null /* cleanup */);
+      GeneratorMapping.create("setupInterior" /* setup method */, "setValue" /* eval method */, null /* reset */,
+          null /* cleanup */);
 
   private static final GeneratorMapping OUTPUT_KEYS =
-    GeneratorMapping.create("setupInterior" /* setup method */, "outputRecordKeys" /* eval method */,
-                            null /* reset */, null /* cleanup */) ;
+      GeneratorMapping.create("setupInterior" /* setup method */, "outputRecordKeys" /* eval method */,
+          null /* reset */, null /* cleanup */);
 
   // GM for putting constant expression into method "setupInterior"
   private static final GeneratorMapping SETUP_INTERIOR_CONSTANT =
       GeneratorMapping.create("setupInterior" /* setup method */, "setupInterior" /* eval method */,
-                              null /* reset */, null /* cleanup */);
+          null /* reset */, null /* cleanup */);
 
   // GM for putting constant expression into method "doSetup"
   private static final GeneratorMapping DO_SETUP_CONSTANT =
-      GeneratorMapping.create("doSetup" /* setup method */, "doSetup" /* eval method */,
-                              null /* reset */, null /* cleanup */);
-
-  private final MappingSet KeyMatchIncomingBuildMapping = new MappingSet("incomingRowIdx", null, "incomingBuild", null, SETUP_INTERIOR_CONSTANT, KEY_MATCH_BUILD);
-  private final MappingSet KeyMatchIncomingProbeMapping = new MappingSet("incomingRowIdx", null, "incomingProbe", null, SETUP_INTERIOR_CONSTANT, KEY_MATCH_PROBE);
-  private final MappingSet KeyMatchHtableMapping = new MappingSet("htRowIdx", null, "htContainer", null, SETUP_INTERIOR_CONSTANT, KEY_MATCH_BUILD);
-  private final MappingSet KeyMatchHtableProbeMapping = new MappingSet("htRowIdx", null, "htContainer", null, SETUP_INTERIOR_CONSTANT, KEY_MATCH_PROBE);
-  private final MappingSet GetHashIncomingBuildMapping = new MappingSet("incomingRowIdx", null, "incomingBuild", null, DO_SETUP_CONSTANT, GET_HASH_BUILD);
-  private final MappingSet GetHashIncomingProbeMapping = new MappingSet("incomingRowIdx", null, "incomingProbe", null, DO_SETUP_CONSTANT, GET_HASH_PROBE);
-  private final MappingSet SetValueMapping = new MappingSet("incomingRowIdx" /* read index */, "htRowIdx" /* write index */, "incomingBuild" /* read container */, "htContainer" /* write container */, SETUP_INTERIOR_CONSTANT, SET_VALUE);
-
-  private final MappingSet OutputRecordKeysMapping = new MappingSet("htRowIdx" /* read index */, "outRowIdx" /* write index */, "htContainer" /* read container */, "outgoing" /* write container */, SETUP_INTERIOR_CONSTANT, OUTPUT_KEYS);
+      GeneratorMapping.create("doSetup" /* setup method */, "doSetup" /* eval method */, null /* reset */,
+          null /* cleanup */);
+
+  private final MappingSet KeyMatchIncomingBuildMapping =
+      new MappingSet("incomingRowIdx", null, "incomingBuild", null, SETUP_INTERIOR_CONSTANT, KEY_MATCH_BUILD);
+  private final MappingSet KeyMatchIncomingProbeMapping =
+      new MappingSet("incomingRowIdx", null, "incomingProbe", null, SETUP_INTERIOR_CONSTANT, KEY_MATCH_PROBE);
+  private final MappingSet KeyMatchHtableMapping =
+      new MappingSet("htRowIdx", null, "htContainer", null, SETUP_INTERIOR_CONSTANT, KEY_MATCH_BUILD);
+  private final MappingSet KeyMatchHtableProbeMapping =
+      new MappingSet("htRowIdx", null, "htContainer", null, SETUP_INTERIOR_CONSTANT, KEY_MATCH_PROBE);
+  private final MappingSet GetHashIncomingBuildMapping =
+      new MappingSet("incomingRowIdx", null, "incomingBuild", null, DO_SETUP_CONSTANT, GET_HASH_BUILD);
+  private final MappingSet GetHashIncomingProbeMapping =
+      new MappingSet("incomingRowIdx", null, "incomingProbe", null, DO_SETUP_CONSTANT, GET_HASH_PROBE);
+  private final MappingSet SetValueMapping =
+      new MappingSet("incomingRowIdx" /* read index */, "htRowIdx" /* write index */,
+          "incomingBuild" /* read container */, "htContainer" /* write container */, SETUP_INTERIOR_CONSTANT,
+          SET_VALUE);
+
+  private final MappingSet OutputRecordKeysMapping =
+      new MappingSet("htRowIdx" /* read index */, "outRowIdx" /* write index */, "htContainer" /* read container */,
+          "outgoing" /* write container */, SETUP_INTERIOR_CONSTANT, OUTPUT_KEYS);
 
   private HashTableConfig htConfig;
   private final FragmentContext context;
@@ -112,13 +118,8 @@ public class ChainedHashTable {
   private final RecordBatch outgoing;
   private final boolean areNullsEqual;
 
-  public ChainedHashTable(HashTableConfig htConfig,
-                          FragmentContext context,
-                          BufferAllocator allocator,
-                          RecordBatch incomingBuild,
-                          RecordBatch incomingProbe,
-                          RecordBatch outgoing,
-                          boolean areNullsEqual)  {
+  public ChainedHashTable(HashTableConfig htConfig, FragmentContext context, BufferAllocator allocator,
+      RecordBatch incomingBuild, RecordBatch incomingProbe, RecordBatch outgoing, boolean areNullsEqual) {
 
     this.htConfig = htConfig;
     this.context = context;
@@ -129,14 +130,15 @@ public class ChainedHashTable {
     this.areNullsEqual = areNullsEqual;
   }
 
-  public HashTable createAndSetupHashTable (TypedFieldId[] outKeyFieldIds) throws ClassTransformationException, IOException, SchemaChangeException {
+  public HashTable createAndSetupHashTable(TypedFieldId[] outKeyFieldIds) throws ClassTransformationException,
+      IOException, SchemaChangeException {
     CodeGenerator<HashTable> top = CodeGenerator.get(HashTable.TEMPLATE_DEFINITION, context.getFunctionRegistry());
     ClassGenerator<HashTable> cg = top.getRoot();
     ClassGenerator<HashTable> cgInner = cg.getInnerGenerator("BatchHolder");
 
     LogicalExpression[] keyExprsBuild = new LogicalExpression[htConfig.getKeyExprsBuild().length];
     LogicalExpression[] keyExprsProbe = null;
-    boolean isProbe = (htConfig.getKeyExprsProbe() != null) ;
+    boolean isProbe = (htConfig.getKeyExprsProbe() != null);
     if (isProbe) {
       keyExprsProbe = new LogicalExpression[htConfig.getKeyExprsProbe().length];
     }
@@ -148,7 +150,8 @@ public class ChainedHashTable {
 
     int i = 0;
     for (NamedExpression ne : htConfig.getKeyExprsBuild()) {
-      final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingBuild, collector, context.getFunctionRegistry());
+      final LogicalExpression expr =
+          ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingBuild, collector, context.getFunctionRegistry());
       if (collector.hasErrors()) {
         throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
       }
@@ -169,7 +172,9 @@ public class ChainedHashTable {
     if (isProbe) {
       i = 0;
       for (NamedExpression ne : htConfig.getKeyExprsProbe()) {
-        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingProbe, collector, context.getFunctionRegistry());
+        final LogicalExpression expr =
+            ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingProbe, collector,
+                context.getFunctionRegistry());
         if (collector.hasErrors()) {
           throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
         }
@@ -183,7 +188,8 @@ public class ChainedHashTable {
 
     // generate code for isKeyMatch(), setValue(), getHash() and outputRecordKeys()
     setupIsKeyMatchInternal(cgInner, KeyMatchIncomingBuildMapping, KeyMatchHtableMapping, keyExprsBuild, htKeyFieldIds);
-    setupIsKeyMatchInternal(cgInner, KeyMatchIncomingProbeMapping, KeyMatchHtableProbeMapping, keyExprsProbe, htKeyFieldIds) ;
+    setupIsKeyMatchInternal(cgInner, KeyMatchIncomingProbeMapping, KeyMatchHtableProbeMapping, keyExprsProbe,
+        htKeyFieldIds);
 
     setupSetValue(cgInner, keyExprsBuild, htKeyFieldIds);
     if (outgoing != null) {
@@ -203,8 +209,8 @@ public class ChainedHashTable {
      */
     addLeastRestrictiveCasts(keyExprsBuild, keyExprsProbe);
 
-    setupGetHash(cg /* use top level code generator for getHash */,  GetHashIncomingBuildMapping, keyExprsBuild, false);
-    setupGetHash(cg /* use top level code generator for getHash */,  GetHashIncomingProbeMapping, keyExprsProbe, true);
+    setupGetHash(cg /* use top level code generator for getHash */, GetHashIncomingBuildMapping, keyExprsBuild, false);
+    setupGetHash(cg /* use top level code generator for getHash */, GetHashIncomingProbeMapping, keyExprsProbe, true);
 
     HashTable ht = context.getImplementationClass(top);
     ht.setup(htConfig, context, allocator, incomingBuild, incomingProbe, outgoing, htContainerOrig);
@@ -213,9 +219,9 @@ public class ChainedHashTable {
   }
 
 
-  private void setupIsKeyMatchInternal(ClassGenerator<HashTable> cg, MappingSet incomingMapping, MappingSet htableMapping,
-                                       LogicalExpression[] keyExprs, TypedFieldId[] htKeyFieldIds)
-    throws SchemaChangeException {
+  private void setupIsKeyMatchInternal(ClassGenerator<HashTable> cg, MappingSet incomingMapping,
+      MappingSet htableMapping, LogicalExpression[] keyExprs, TypedFieldId[] htKeyFieldIds) throws
+      SchemaChangeException {
     cg.setMappingSet(incomingMapping);
 
     if (keyExprs == null || keyExprs.length == 0) {
@@ -255,24 +261,24 @@ public class ChainedHashTable {
     cg.getEvalBlock()._return(JExpr.TRUE);
   }
 
-  private void setupSetValue(ClassGenerator<HashTable> cg, LogicalExpression[] keyExprs, TypedFieldId[] htKeyFieldIds)
-    throws SchemaChangeException {
+  private void setupSetValue(ClassGenerator<HashTable> cg, LogicalExpression[] keyExprs,
+      TypedFieldId[] htKeyFieldIds) throws SchemaChangeException {
 
     cg.setMappingSet(SetValueMapping);
 
     int i = 0;
     for (LogicalExpression expr : keyExprs) {
-      ValueVectorWriteExpression vvwExpr = new ValueVectorWriteExpression(htKeyFieldIds[i++], expr, true) ;
+      ValueVectorWriteExpression vvwExpr = new ValueVectorWriteExpression(htKeyFieldIds[i++], expr, true);
 
       HoldingContainer hc = cg.addExpr(vvwExpr, false); // this will write to the htContainer at htRowIdx
       cg.getEvalBlock()._if(hc.getValue().eq(JExpr.lit(0)))._then()._return(JExpr.FALSE);
     }
 
     cg.getEvalBlock()._return(JExpr.TRUE);
-
   }
 
-  private void setupOutputRecordKeys(ClassGenerator<HashTable> cg, TypedFieldId[] htKeyFieldIds, TypedFieldId[] outKeyFieldIds) {
+  private void setupOutputRecordKeys(ClassGenerator<HashTable> cg, TypedFieldId[] htKeyFieldIds,
+      TypedFieldId[] outKeyFieldIds) {
 
     cg.setMappingSet(OutputRecordKeysMapping);
 
@@ -314,24 +320,34 @@ public class ChainedHashTable {
         ErrorCollector errorCollector = new ErrorCollectorImpl();
 
         if (result == null) {
-          throw new DrillRuntimeException(String.format("Join conditions cannot be compared failing build expression: %s failing probe expression: %s",
-              buildExpr.getMajorType().toString(), probeExpr.getMajorType().toString()));
-        }
-        else if (result != buildType) {
+          throw new DrillRuntimeException(String.format("Join conditions cannot be compared failing build " +
+              "expression:" + " %s failing probe expression: %s", buildExpr.getMajorType().toString(),
+              probeExpr.getMajorType().toString()));
+        } else if (result != buildType) {
           // Add a cast expression on top of the build expression
-          LogicalExpression castExpr = ExpressionTreeMaterializer.addCastExpression(buildExpr, probeExpr.getMajorType(), context.getFunctionRegistry(), errorCollector);
+          LogicalExpression castExpr =
+              ExpressionTreeMaterializer.addCastExpression(buildExpr, probeExpr.getMajorType(),
+                  context.getFunctionRegistry(), errorCollector);
           // Store the newly casted expression
-          keyExprsBuild[i] = ExpressionTreeMaterializer.materialize(castExpr, incomingBuild, errorCollector, context.getFunctionRegistry());
+          keyExprsBuild[i] =
+              ExpressionTreeMaterializer.materialize(castExpr, incomingBuild, errorCollector,
+                  context.getFunctionRegistry());
         } else if (result != probeType) {
           // Add a cast expression on top of the probe expression
-          LogicalExpression castExpr = ExpressionTreeMaterializer.addCastExpression(probeExpr, buildExpr.getMajorType(), context.getFunctionRegistry(), errorCollector);
+          LogicalExpression castExpr =
+              ExpressionTreeMaterializer.addCastExpression(probeExpr, buildExpr.getMajorType(),
+                  context.getFunctionRegistry(), errorCollector);
           // store the newly casted expression
-          keyExprsProbe[i] = ExpressionTreeMaterializer.materialize(castExpr, incomingProbe, errorCollector, context.getFunctionRegistry());
+          keyExprsProbe[i] =
+              ExpressionTreeMaterializer.materialize(castExpr, incomingProbe, errorCollector,
+                  context.getFunctionRegistry());
         }
       }
     }
   }
-  private void setupGetHash(ClassGenerator<HashTable> cg, MappingSet incomingMapping, LogicalExpression[] keyExprs, boolean isProbe) throws SchemaChangeException {
+
+  private void setupGetHash(ClassGenerator<HashTable> cg, MappingSet incomingMapping, LogicalExpression[] keyExprs,
+      boolean isProbe) throws SchemaChangeException {
 
     cg.setMappingSet(incomingMapping);
 
@@ -349,24 +365,26 @@ public class ChainedHashTable {
       HoldingContainer input = cg.addExpr(expr, false);
 
       // compute the hash(expr)
-      LogicalExpression hashfunc = FunctionGenerationHelper.getFunctionExpression("hash", Types.required(MinorType.INT), context.getFunctionRegistry(), input);
+      LogicalExpression hashfunc =
+          FunctionGenerationHelper.getFunctionExpression("hash", Types.required(MinorType.INT),
+              context.getFunctionRegistry(), input);
       HoldingContainer hashValue = cg.addExpr(hashfunc, false);
 
       if (i == 0) {
         combinedHashValue = hashValue; // first expression..just use the hash value
-      }
-      else {
+      } else {
 
         // compute the combined hash value using XOR
-        LogicalExpression xorfunc = FunctionGenerationHelper.getFunctionExpression("xor", Types.required(MinorType.INT), context.getFunctionRegistry(), hashValue, combinedHashValue);
+        LogicalExpression xorfunc =
+            FunctionGenerationHelper.getFunctionExpression("xor", Types.required(MinorType.INT),
+                context.getFunctionRegistry(), hashValue, combinedHashValue);
         combinedHashValue = cg.addExpr(xorfunc, false);
       }
     }
 
     if (combinedHashValue != null) {
-      cg.getEvalBlock()._return(combinedHashValue.getValue()) ;
-    }
-    else {
+      cg.getEvalBlock()._return(combinedHashValue.getValue());
+    } else {
       cg.getEvalBlock()._return(JExpr.lit(0));
     }
   }
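
For context on setupGetHash() above: each key expression is hashed with the "hash" function, and the per-key hashes are folded together with "xor", the first key's hash seeding the result (with no key expressions, the generated method returns 0). A minimal sketch of the combination logic the generated code performs (plain ints stand in for the value-vector expressions):

    static int combinedHash(int[] keyHashes) {
      if (keyHashes.length == 0) {
        return 0;                           // no key exprs: generated code returns 0
      }
      int combined = keyHashes[0];          // first expression: just use its hash
      for (int i = 1; i < keyHashes.length; i++) {
        combined = combined ^ keyHashes[i]; // XOR in each subsequent key's hash
      }
      return combined;
    }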

http://git-wip-us.apache.org/repos/asf/drill/blob/10fd9e10/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTable.java
index e8ccd62..6966ba1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTable.java
@@ -25,26 +25,35 @@ import org.apache.drill.exec.record.VectorContainer;
 
 public interface HashTable {
 
-  public static TemplateClassDefinition<HashTable> TEMPLATE_DEFINITION = new TemplateClassDefinition<HashTable>(HashTable.class, HashTableTemplate.class);
+  public static TemplateClassDefinition<HashTable> TEMPLATE_DEFINITION =
+      new TemplateClassDefinition<HashTable>(HashTable.class, HashTableTemplate.class);
 
-  /** The initial default capacity of the hash table (in terms of number of buckets). */
+  /**
+   * The initial default capacity of the hash table (in terms of number of buckets).
+   */
   static final public int DEFAULT_INITIAL_CAPACITY = 1 << 16;
 
-  /** The maximum capacity of the hash table (in terms of number of buckets). */
+  /**
+   * The maximum capacity of the hash table (in terms of number of buckets).
+   */
   static final public int MAXIMUM_CAPACITY = 1 << 30;
 
-  /** The default load factor of a hash table. */
+  /**
+   * The default load factor of a hash table.
+   */
   static final public float DEFAULT_LOAD_FACTOR = 0.75f;
 
-  static public enum PutStatus {KEY_PRESENT, KEY_ADDED, PUT_FAILED ;}
+  static public enum PutStatus {KEY_PRESENT, KEY_ADDED, PUT_FAILED;}
 
-  /** The batch size used for internal batch holders */
-  static final public int BATCH_SIZE = Character.MAX_VALUE+1;
+  /**
+   * The batch size used for internal batch holders
+   */
+  static final public int BATCH_SIZE = Character.MAX_VALUE + 1;
   static final public int BATCH_MASK = 0x0000FFFF;
 
   public void setup(HashTableConfig htConfig, FragmentContext context, BufferAllocator allocator,
-                    RecordBatch incomingBuild, RecordBatch incomingProbe,
-                    RecordBatch outgoing, VectorContainer htContainerOrig);
+      RecordBatch incomingBuild, RecordBatch incomingProbe,
+      RecordBatch outgoing, VectorContainer htContainerOrig);
 
   public void updateBatches();
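
The constants above fix the table geometry: bucket counts are powers of two from DEFAULT_INITIAL_CAPACITY (1 << 16) up to MAXIMUM_CAPACITY (1 << 30), with DEFAULT_LOAD_FACTOR of 0.75. Assuming the usual load-factor convention (a resize is triggered once the entry count crosses capacity * loadFactor; the rehash() method in HashTableTemplate.java below does the rebucketing), the first resize point works out as:

    public class CapacitySketch {
      public static void main(String[] args) {
        int capacity = 1 << 16;        // DEFAULT_INITIAL_CAPACITY: 65536 buckets
        float loadFactor = 0.75f;      // DEFAULT_LOAD_FACTOR
        int threshold = (int) (capacity * loadFactor);
        System.out.println(threshold); // 49152 entries before the first doubling
      }
    }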
 

http://git-wip-us.apache.org/repos/asf/drill/blob/10fd9e10/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
index 5b56f8e..c80e97a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
@@ -17,11 +17,6 @@
  */
 package org.apache.drill.exec.physical.impl.common;
 
-import java.util.ArrayList;
-import java.util.Iterator;
-
-import javax.inject.Named;
-
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
@@ -38,6 +33,10 @@ import org.apache.drill.exec.vector.BigIntVector;
 import org.apache.drill.exec.vector.IntVector;
 import org.apache.drill.exec.vector.ValueVector;
 
+import javax.inject.Named;
+import java.util.ArrayList;
+import java.util.Iterator;
+
 public abstract class HashTableTemplate implements HashTable {
 
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HashTable.class);
@@ -115,7 +114,6 @@ public abstract class HashTableTemplate implements HashTable {
     private int batchIndex = 0;
 
     private BatchHolder(int idx) {
-
       this.batchIndex = idx;
 
       if (idx == 0) {  // first batch holder can use the original htContainer
@@ -134,10 +132,10 @@ public abstract class HashTableTemplate implements HashTable {
     }
 
     private void init(IntVector links, IntVector hashValues, int size) {
-      for (int i=0; i < size; i++) {
+      for (int i = 0; i < size; i++) {
         links.getMutator().setSafe(i, EMPTY_SLOT);
       }
-      for (int i=0; i < size; i++) {
+      for (int i = 0; i < size; i++) {
         hashValues.getMutator().setSafe(i, 0);
       }
       links.getMutator().setValueCount(size);
@@ -159,7 +157,8 @@ public abstract class HashTableTemplate implements HashTable {
       boolean match = false;
 
       if (currentIdxWithinBatch >= HashTable.BATCH_SIZE) {
-        logger.debug("Batch size = {}, incomingRowIdx = {}, currentIdxWithinBatch = {}.", HashTable.BATCH_SIZE, incomingRowIdx, currentIdxWithinBatch);
+        logger.debug("Batch size = {}, incomingRowIdx = {}, currentIdxWithinBatch = {}.", HashTable.BATCH_SIZE,
+            incomingRowIdx, currentIdxWithinBatch);
       }
       assert (currentIdxWithinBatch < HashTable.BATCH_SIZE);
       assert (incomingRowIdx < HashTable.BATCH_SIZE);
@@ -170,7 +169,7 @@ public abstract class HashTableTemplate implements HashTable {
         match = isKeyMatchInternalBuild(incomingRowIdx, currentIdxWithinBatch);
       }
 
-      if (! match) {
+      if (!match) {
         currentIdxHolder.value = links.getAccessor().get(currentIdxWithinBatch);
       }
       return match;
@@ -178,10 +177,11 @@ public abstract class HashTableTemplate implements HashTable {
 
     // Insert a new <key1, key2...keyN> entry coming from the incoming batch into the hash table
     // container at the specified index
-    private boolean insertEntry(int incomingRowIdx, int currentIdx, int hashValue, BatchHolder lastEntryBatch, int lastEntryIdxWithinBatch) {
+    private boolean insertEntry(int incomingRowIdx, int currentIdx, int hashValue, BatchHolder lastEntryBatch,
+        int lastEntryIdxWithinBatch) {
       int currentIdxWithinBatch = currentIdx & BATCH_MASK;
 
-      if (! setValue(incomingRowIdx, currentIdxWithinBatch)) {
+      if (!setValue(incomingRowIdx, currentIdxWithinBatch)) {
         return false;
       }
 
@@ -198,7 +198,8 @@ public abstract class HashTableTemplate implements HashTable {
       maxOccupiedIdx = Math.max(maxOccupiedIdx, currentIdxWithinBatch);
 
       if (EXTRA_DEBUG) {
-        logger.debug("BatchHolder: inserted key at incomingRowIdx = {}, currentIdx = {}, hash value = {}.", incomingRowIdx, currentIdx, hashValue);
+        logger.debug("BatchHolder: inserted key at incomingRowIdx = {}, currentIdx = {}, hash value = {}.",
+            incomingRowIdx, currentIdx, hashValue);
       }
 
       return true;
@@ -210,7 +211,8 @@ public abstract class HashTableTemplate implements HashTable {
 
     private void rehash(int numbuckets, IntVector newStartIndices, int batchStartIdx) {
 
-      logger.debug("Rehashing entries within the batch: {}; batchStartIdx = {}, total numBuckets in hash table = {}.", batchIndex, batchStartIdx, numbuckets);
+      logger.debug("Rehashing entries within the batch: {}; batchStartIdx = {}, total numBuckets in hash table = {}" +
+          ".", batchIndex, batchStartIdx, numbuckets);
 
       int size = links.getAccessor().getValueCount();
       IntVector newLinks = allocMetadataVector(size, EMPTY_SLOT);
@@ -229,7 +231,10 @@ public abstract class HashTableTemplate implements HashTable {
           newHashValues.getMutator().setSafe(entryIdxWithinBatch, hash);
 
           if (EXTRA_DEBUG) {
-            logger.debug("New bucket was empty. bucketIdx = {}, newStartIndices[ {} ] = {}, newLinks[ {} ] = {}, hash value = {}.", bucketIdx, bucketIdx, newStartIndices.getAccessor().get(bucketIdx), entryIdxWithinBatch, newLinks.getAccessor().get(entryIdxWithinBatch), newHashValues.getAccessor().get(entryIdxWithinBatch));
+            logger.debug("New bucket was empty. bucketIdx = {}, newStartIndices[ {} ] = {}, newLinks[ {} ] = {}, " +
+                "hash value = {}.", bucketIdx, bucketIdx, newStartIndices.getAccessor().get(bucketIdx),
+                entryIdxWithinBatch, newLinks.getAccessor().get(entryIdxWithinBatch),
+                newHashValues.getAccessor().get(entryIdxWithinBatch));
           }
 
         } else {
@@ -251,17 +256,26 @@ public abstract class HashTableTemplate implements HashTable {
               newHashValues.getMutator().setSafe(entryIdxWithinBatch, hash);
 
               if (EXTRA_DEBUG) {
-                logger.debug("Followed hash chain in new bucket. bucketIdx = {}, newLinks[ {} ] = {}, newLinks[ {} ] = {}, hash value = {}.", bucketIdx, idxWithinBatch, newLinks.getAccessor().get(idxWithinBatch), entryIdxWithinBatch, newLinks.getAccessor().get(entryIdxWithinBatch), newHashValues.getAccessor().get(entryIdxWithinBatch));
+                logger.debug("Followed hash chain in new bucket. bucketIdx = {}, newLinks[ {} ] = {}, " +
+                    "newLinks[ {} ] = {}, hash value = {}.", bucketIdx, idxWithinBatch,
+                    newLinks.getAccessor().get(idxWithinBatch), entryIdxWithinBatch,
+                    newLinks.getAccessor().get(entryIdxWithinBatch), newHashValues.getAccessor().get
+                        (entryIdxWithinBatch));
               }
 
               break;
             } else if (bh != this && bh.links.getAccessor().get(idxWithinBatch) == EMPTY_SLOT) {
               bh.links.getMutator().setSafe(idxWithinBatch, entryIdx); // update the link in the other batch
-              newLinks.getMutator().setSafe(entryIdxWithinBatch, EMPTY_SLOT); // update the newLink entry in this batch to mark end of the hash chain
-              newHashValues.getMutator().setSafe(entryIdxWithinBatch,  hash);
+              newLinks.getMutator().setSafe(entryIdxWithinBatch, EMPTY_SLOT); // update the newLink entry in this
+              // batch to mark end of the hash chain
+              newHashValues.getMutator().setSafe(entryIdxWithinBatch, hash);
 
               if (EXTRA_DEBUG) {
-                logger.debug("Followed hash chain in new bucket. bucketIdx = {}, newLinks[ {} ] = {}, newLinks[ {} ] = {}, hash value = {}.", bucketIdx, idxWithinBatch, newLinks.getAccessor().get(idxWithinBatch), entryIdxWithinBatch, newLinks.getAccessor().get(entryIdxWithinBatch), newHashValues.getAccessor().get(entryIdxWithinBatch));
+                logger.debug("Followed hash chain in new bucket. bucketIdx = {}, newLinks[ {} ] = {}, " +
+                    "newLinks[ {} ] = {}, hash value = {}.", bucketIdx, idxWithinBatch,
+                    newLinks.getAccessor().get(idxWithinBatch), entryIdxWithinBatch,
+                    newLinks.getAccessor().get(entryIdxWithinBatch),
+                    newHashValues.getAccessor().get(entryIdxWithinBatch));
               }
 
               break;
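The rehash being reformatted above redistributes this batch's entries over the doubled bucket array: each entry's stored hash picks a new bucket, and the entry either becomes that bucket's chain head (newStartIndices) or is appended to the existing chain, possibly patching a links slot in another BatchHolder. A simplified, single-batch sketch over plain arrays, using push-to-front instead of the append-at-tail walk the real code performs, and assuming EMPTY_SLOT is -1 and bucket selection is a power-of-two mask:

    // Array-based analogue of BatchHolder.rehash() for one batch. newLinks and
    // newStartIndices start out filled with EMPTY_SLOT (-1 here).
    static void rehashBatch(int[] hashValues, int[] newLinks, int[] newStartIndices,
        int batchStartIdx, int numBuckets, int maxOccupiedIdx) {
      for (int i = 0; i <= maxOccupiedIdx; i++) {
        int bucketIdx = hashValues[i] & (numBuckets - 1); // numBuckets is a power of two
        int entryIdx = batchStartIdx + i;                 // packed global index
        newLinks[i] = newStartIndices[bucketIdx];         // old head becomes our successor
        newStartIndices[bucketIdx] = entryIdx;            // this entry becomes the new head
      }
    }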
@@ -287,8 +301,8 @@ public abstract class HashTableTemplate implements HashTable {
     private boolean outputKeys(VectorContainer outContainer, int outStartIndex, int numRecords) {
 
       /** for debugging
-        BigIntVector vv0 = getValueVector(0);
-        BigIntHolder holder = new BigIntHolder();
+      BigIntVector vv0 = getValueVector(0);
+      BigIntHolder holder = new BigIntHolder();
       */
 
       // set the value count for htContainer's value vectors before the transfer ..
@@ -304,7 +318,8 @@ public abstract class HashTableTemplate implements HashTable {
       }
 
 /*
-      logger.debug("Attempting to output keys for batch index: {} from index {} to maxOccupiedIndex {}.", this.batchIndex, 0, maxOccupiedIdx);
+      logger.debug("Attempting to output keys for batch index: {} from index {} to maxOccupiedIndex {}.",
+      this.batchIndex, 0, maxOccupiedIdx);
       for (int i = batchOutputCount; i <= maxOccupiedIdx; i++) {
         if (outputRecordKeys(i, batchOutputCount) ) {
           if (EXTRA_DEBUG) logger.debug("Outputting keys to output index: {}", batchOutputCount) ;
@@ -312,7 +327,8 @@ public abstract class HashTableTemplate implements HashTable {
           // debugging
           // holder.value = vv0.getAccessor().get(i);
           // if (holder.value == 100018 || holder.value == 100021) {
-          //  logger.debug("Outputting key = {} at index - {} to outgoing index = {}.", holder.value, i, batchOutputCount);
+          //  logger.debug("Outputting key = {} at index - {} to outgoing index = {}.", holder.value, i,
+          //      batchOutputCount);
           // }
 
           batchOutputCount++;
@@ -337,14 +353,15 @@ public abstract class HashTableTemplate implements HashTable {
         if (idxWithinBatch == EMPTY_SLOT) {
           break;
         } else {
-          logger.debug("links[ {} ] = {}, hashValues[ {} ] = {}.", idxWithinBatch, links.getAccessor().get(idxWithinBatch), idxWithinBatch, hashValues.getAccessor().get(idxWithinBatch));
+          logger.debug("links[ {} ] = {}, hashValues[ {} ] = {}.", idxWithinBatch,
+              links.getAccessor().get(idxWithinBatch), idxWithinBatch, hashValues.getAccessor().get(idxWithinBatch));
           idx = links.getAccessor().get(idxWithinBatch);
         }
       }
     }
 
     private void clear() {
-      htContainer.clear();;
+      htContainer.clear();
       links.clear();
       hashValues.clear();
     }
@@ -363,30 +380,41 @@ public abstract class HashTableTemplate implements HashTable {
     // These methods will be code-generated
 
     @RuntimeOverridden
-    protected void setupInterior(@Named("incomingBuild") RecordBatch incomingBuild,
-                                 @Named("incomingProbe") RecordBatch incomingProbe,
-                                 @Named("outgoing") RecordBatch outgoing,
-                                 @Named("htContainer") VectorContainer htContainer) {}
+    protected void setupInterior(
+        @Named("incomingBuild") RecordBatch incomingBuild,
+        @Named("incomingProbe") RecordBatch incomingProbe,
+        @Named("outgoing") RecordBatch outgoing,
+        @Named("htContainer") VectorContainer htContainer) {
+    }
 
     @RuntimeOverridden
-    protected boolean isKeyMatchInternalBuild(@Named("incomingRowIdx") int incomingRowIdx, @Named("htRowIdx") int htRowIdx) {return false;}
+    protected boolean isKeyMatchInternalBuild(
+        @Named("incomingRowIdx") int incomingRowIdx, @Named("htRowIdx") int htRowIdx) {
+      return false;
+    }
 
     @RuntimeOverridden
-    protected boolean isKeyMatchInternalProbe(@Named("incomingRowIdx") int incomingRowIdx, @Named("htRowIdx") int htRowIdx) {return false;}
+    protected boolean isKeyMatchInternalProbe(
+        @Named("incomingRowIdx") int incomingRowIdx, @Named("htRowIdx") int htRowIdx) {
+      return false;
+    }
 
     @RuntimeOverridden
-    protected boolean setValue(@Named("incomingRowIdx") int incomingRowIdx, @Named("htRowIdx") int htRowIdx) {return false;}
+    protected boolean setValue(@Named("incomingRowIdx") int incomingRowIdx, @Named("htRowIdx") int htRowIdx) {
+      return false;
+    }
 
     @RuntimeOverridden
-    protected boolean outputRecordKeys(@Named("htRowIdx") int htRowIdx, @Named("outRowIdx") int outRowIdx) {return false;}
-
+    protected boolean outputRecordKeys(@Named("htRowIdx") int htRowIdx, @Named("outRowIdx") int outRowIdx) {
+      return false;
+    }
   } // class BatchHolder
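The @RuntimeOverridden methods above are deliberate no-op stubs: at query setup Drill's code generator emits a subclass whose bodies compare or copy the actual key columns, and the @Named annotations bind the generated code to these parameters. Purely as a hypothetical illustration of the shape such generated code takes, with plain arrays standing in for the real ValueVector accessors:

    // Hypothetical generated bodies for a single BIGINT key column; the real
    // generated code depends entirely on the query's key expressions.
    class GeneratedBatchHolderSketch {
      long[] incomingBuildKey; // key column of the incoming build-side batch
      long[] htKey;            // key column held in the hash table's container

      boolean isKeyMatchInternalBuild(int incomingRowIdx, int htRowIdx) {
        return incomingBuildKey[incomingRowIdx] == htKey[htRowIdx];
      }

      boolean setValue(int incomingRowIdx, int htRowIdx) {
        htKey[htRowIdx] = incomingBuildKey[incomingRowIdx]; // copy the key in
        return true; // the real code can return false when the vector write fails
      }
    }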
 
 
   @Override
   public void setup(HashTableConfig htConfig, FragmentContext context, BufferAllocator allocator,
-                    RecordBatch incomingBuild, RecordBatch incomingProbe,
-                    RecordBatch outgoing, VectorContainer htContainerOrig) {
+      RecordBatch incomingBuild, RecordBatch incomingProbe,
+      RecordBatch outgoing, VectorContainer htContainerOrig) {
     float loadf = htConfig.getLoadFactor();
     int initialCap = htConfig.getInitialCapacity();
 
@@ -483,16 +511,16 @@ public abstract class HashTableTemplate implements HashTable {
 
   private static int roundUpToPowerOf2(int number) {
     int rounded = number >= MAXIMUM_CAPACITY
-           ? MAXIMUM_CAPACITY
-           : (rounded = Integer.highestOneBit(number)) != 0
-               ? (Integer.bitCount(number) > 1) ? rounded << 1 : rounded
-               : 1;
+        ? MAXIMUM_CAPACITY
+        : (rounded = Integer.highestOneBit(number)) != 0
+        ? (Integer.bitCount(number) > 1) ? rounded << 1 : rounded
+        : 1;
 
-        return rounded;
+    return rounded;
   }
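A gloss on the dense conditional above, since the embedded assignment makes it easy to misread: the method clamps at MAXIMUM_CAPACITY, returns 1 for an input of 0, returns the input unchanged when it is already a power of two (bitCount == 1), and otherwise doubles the highest set bit. So 16 stays 16, while 17 becomes 32. An equivalent, more explicit rewrite, assuming the conventional MAXIMUM_CAPACITY of 1 << 30:

    // Behaviorally equivalent form of roundUpToPowerOf2; MAXIMUM_CAPACITY is
    // assumed here, not taken from the template.
    static int roundUpToPowerOf2Explicit(int number) {
      final int MAXIMUM_CAPACITY = 1 << 30;
      if (number >= MAXIMUM_CAPACITY) {
        return MAXIMUM_CAPACITY;
      }
      int highest = Integer.highestOneBit(number);
      if (highest == 0) {
        return 1; // number == 0
      }
      return (Integer.bitCount(number) > 1) ? highest << 1 : highest;
    }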
 
   public PutStatus put(int incomingRowIdx, IndexPointer htIdxHolder, int retryCount) {
-    HashTable.PutStatus putStatus = put(incomingRowIdx, htIdxHolder) ;
+    HashTable.PutStatus putStatus = put(incomingRowIdx, htIdxHolder);
     int count = retryCount;
     int numBatchHolders;
     while (putStatus == PutStatus.PUT_FAILED && count > 0) {
@@ -526,7 +554,8 @@ public abstract class HashTableTemplate implements HashTable {
       addBatchIfNeeded(currentIdx);
 
       if (EXTRA_DEBUG) {
-        logger.debug("Empty bucket index = {}. incomingRowIdx = {}; inserting new entry at currentIdx = {}.", i, incomingRowIdx, currentIdx);
+        logger.debug("Empty bucket index = {}. incomingRowIdx = {}; inserting new entry at currentIdx = {}.", i,
+            incomingRowIdx, currentIdx);
       }
 
       if (insertEntry(incomingRowIdx, currentIdx, hash, lastEntryBatch, lastEntryIdxWithinBatch)) {
@@ -542,7 +571,7 @@ public abstract class HashTableTemplate implements HashTable {
     currentIdx = startIdx;
     boolean found = false;
 
-    bh = batchHolders.get( (currentIdx >>> 16) & BATCH_MASK);
+    bh = batchHolders.get((currentIdx >>> 16) & BATCH_MASK);
     currentIdxHolder.value = currentIdx;
 
     // if startIdx is non-empty, follow the hash chain links until we find a matching
@@ -554,13 +583,12 @@ public abstract class HashTableTemplate implements HashTable {
         htIdxHolder.value = currentIdxHolder.value;
         found = true;
         break;
-      }
-      else if (currentIdxHolder.value == EMPTY_SLOT) {
+      } else if (currentIdxHolder.value == EMPTY_SLOT) {
         lastEntryBatch = bh;
         lastEntryIdxWithinBatch = currentIdxWithinBatch;
         break;
       } else {
-        bh = batchHolders.get( (currentIdxHolder.value >>> 16) & HashTable.BATCH_MASK);
+        bh = batchHolders.get((currentIdxHolder.value >>> 16) & HashTable.BATCH_MASK);
         lastEntryBatch = bh;
       }
     }
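The loop above is the core probe: startIndices holds the chain head for the key's bucket, each visited entry's links slot yields the packed index of the next entry, and EMPTY_SLOT ends the chain, at which point the caller remembers the tail (lastEntryBatch / lastEntryIdxWithinBatch) so a new entry can be linked on. A flattened, single-array sketch of the walk, with the generated key comparison replaced by a plain long compare and EMPTY_SLOT assumed to be -1:

    // Simplified chain walk; the real code decodes (idx >>> 16) & BATCH_MASK
    // on every hop to find the owning BatchHolder.
    static int findInChain(int startIdx, int[] links, long[] keys, long probeKey) {
      final int EMPTY_SLOT = -1;
      int idx = startIdx;
      while (idx != EMPTY_SLOT) {
        if (keys[idx] == probeKey) {
          return idx;       // key already present
        }
        idx = links[idx];   // follow the chain
      }
      return EMPTY_SLOT;    // not found; caller inserts at the chain's tail
    }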
@@ -571,7 +599,8 @@ public abstract class HashTableTemplate implements HashTable {
       addBatchIfNeeded(currentIdx);
 
       if (EXTRA_DEBUG) {
-        logger.debug("No match was found for incomingRowIdx = {}; inserting new entry at currentIdx = {}.", incomingRowIdx, currentIdx);
+        logger.debug("No match was found for incomingRowIdx = {}; inserting new entry at currentIdx = {}.",
+            incomingRowIdx, currentIdx);
       }
 
       if (insertEntry(incomingRowIdx, currentIdx, hash, lastEntryBatch, lastEntryIdxWithinBatch)) {
@@ -582,17 +611,18 @@ public abstract class HashTableTemplate implements HashTable {
       }
     }
 
-    return found ? PutStatus.KEY_PRESENT : PutStatus.KEY_ADDED ;
+    return found ? PutStatus.KEY_PRESENT : PutStatus.KEY_ADDED;
   }
 
-  private boolean insertEntry(int incomingRowIdx, int currentIdx, int hashValue, BatchHolder lastEntryBatch, int lastEntryIdx) {
+  private boolean insertEntry(int incomingRowIdx, int currentIdx, int hashValue, BatchHolder lastEntryBatch,
+      int lastEntryIdx) {
 
     addBatchIfNeeded(currentIdx);
 
-    BatchHolder bh = batchHolders.get( (currentIdx >>> 16) & BATCH_MASK);
+    BatchHolder bh = batchHolders.get((currentIdx >>> 16) & BATCH_MASK);
 
     if (bh.insertEntry(incomingRowIdx, currentIdx, hashValue, lastEntryBatch, lastEntryIdx)) {
-      numEntries++ ;
+      numEntries++;
 
       /* Resize hash table if needed and transfer the metadata
        * Resize only after inserting the current entry into the hash table
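The comment cut off by this hunk records an ordering choice worth spelling out: the entry is inserted first and the resize check runs afterwards, so the rehash pass (the later hunk that walks batchHolders and calls bh.rehash) relocates the just-added entry along with everything else. The check itself is presumably the usual load-factor test; a sketch with purely illustrative names:

    // Assumed shape of the post-insert growth check; names are illustrative,
    // not the template's actual fields.
    static boolean needsResize(int numEntries, int numBuckets, float loadFactor) {
      return numEntries > (int) (numBuckets * loadFactor);
    }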
@@ -617,10 +647,10 @@ public abstract class HashTableTemplate implements HashTable {
     int currentIdx = startIndices.getAccessor().get(i);
 
     if (currentIdx == EMPTY_SLOT) {
-        return -1;
+      return -1;
     }
 
-    BatchHolder bh = batchHolders.get( (currentIdx >>> 16) & BATCH_MASK);
+    BatchHolder bh = batchHolders.get((currentIdx >>> 16) & BATCH_MASK);
     currentIdxHolder.value = currentIdx;
 
     boolean found = false;
@@ -632,7 +662,7 @@ public abstract class HashTableTemplate implements HashTable {
       } else if (currentIdxHolder.value == EMPTY_SLOT) {
         break;
       } else {
-        bh = batchHolders.get( (currentIdxHolder.value >>> 16) & BATCH_MASK);
+        bh = batchHolders.get((currentIdxHolder.value >>> 16) & BATCH_MASK);
       }
     }
 
@@ -651,8 +681,7 @@ public abstract class HashTableTemplate implements HashTable {
         logger.debug("HashTable: Added new batch. Num batches = {}.", batchHolders.size());
       }
       return bh;
-    }
-    else {
+    } else {
       return batchHolders.get(batchHolders.size() - 1);
     }
   }
@@ -700,7 +729,7 @@ public abstract class HashTableTemplate implements HashTable {
     IntVector newStartIndices = allocMetadataVector(tableSize, EMPTY_SLOT);
 
     for (int i = 0; i < batchHolders.size(); i++) {
-      BatchHolder bh = batchHolders.get(i) ;
+      BatchHolder bh = batchHolders.get(i);
       int batchStartIdx = i * BATCH_SIZE;
       bh.rehash(tableSize, newStartIndices, batchStartIdx);
     }
@@ -714,7 +743,7 @@ public abstract class HashTableTemplate implements HashTable {
       for (int i = 0; i < startIndices.getAccessor().getValueCount(); i++) {
         logger.debug("Bucket: {}, startIdx[ {} ] = {}.", i, i, startIndices.getAccessor().get(i));
         int idx = startIndices.getAccessor().get(i);
-        BatchHolder bh = batchHolders.get( (idx >>> 16) & BATCH_MASK);
+        BatchHolder bh = batchHolders.get((idx >>> 16) & BATCH_MASK);
         bh.dump(idx);
       }
     }
@@ -724,7 +753,7 @@ public abstract class HashTableTemplate implements HashTable {
 
   public boolean outputKeys(int batchIdx, VectorContainer outContainer, int outStartIndex, int numRecords) {
     assert batchIdx < batchHolders.size();
-    if (! batchHolders.get(batchIdx).outputKeys(outContainer, outStartIndex, numRecords)) {
+    if (!batchHolders.get(batchIdx).outputKeys(outContainer, outStartIndex, numRecords)) {
       return false;
     }
     return true;
@@ -733,7 +762,7 @@ public abstract class HashTableTemplate implements HashTable {
   private IntVector allocMetadataVector(int size, int initialValue) {
     IntVector vector = (IntVector) TypeHelper.getNewVector(dummyIntField, allocator);
     vector.allocateNew(size);
-    for (int i=0; i < size; i++) {
+    for (int i = 0; i < size; i++) {
       vector.getMutator().setSafe(i, initialValue);
     }
     vector.getMutator().setValueCount(size);
@@ -747,8 +776,11 @@ public abstract class HashTableTemplate implements HashTable {
   }
 
   // These methods will be code-generated in the context of the outer class
-  protected abstract void doSetup(@Named("incomingBuild") RecordBatch incomingBuild, @Named("incomingProbe") RecordBatch incomingProbe);
-  protected abstract int getHashBuild(@Named("incomingRowIdx") int incomingRowIdx) ;
-  protected abstract int getHashProbe(@Named("incomingRowIdx") int incomingRowIdx) ;
+  protected abstract void doSetup(@Named("incomingBuild") RecordBatch incomingBuild,
+      @Named("incomingProbe") RecordBatch incomingProbe);
+
+  protected abstract int getHashBuild(@Named("incomingRowIdx") int incomingRowIdx);
+
+  protected abstract int getHashProbe(@Named("incomingRowIdx") int incomingRowIdx);
 
 }

