drill-commits mailing list archives

From jacq...@apache.org
Subject [19/45] drill git commit: DRILL-3987: (REFACTOR) Common and Vector modules building.
Date Fri, 13 Nov 2015 02:37:49 GMT
http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/memory/AccountorImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/AccountorImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/AccountorImpl.java
new file mode 100644
index 0000000..4a72719
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/AccountorImpl.java
@@ -0,0 +1,427 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.memory;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.DrillBuf;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.util.AssertionUtil;
+
+import com.google.common.collect.LinkedListMultimap;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Multimap;
+
+public class AccountorImpl implements Accountor {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AccountorImpl.class);
+
+  private static final boolean ENABLE_ACCOUNTING = AssertionUtil.isAssertionsEnabled();
+  private final AtomicRemainder remainder;
+  private final long total;
+  private ConcurrentMap<ByteBuf, DebugStackTrace> buffers = Maps.newConcurrentMap();
+  private AccountorImpl parent;
+
+  private final boolean errorOnLeak;
+  // Some operators are not subject to the fragment limit; they set applyFragmentLimit to false.
+
+  private final boolean enableFragmentLimit;
+  private final double  fragmentMemOvercommitFactor;
+
+  private final boolean  DEFAULT_ENABLE_FRAGMENT_LIMIT=false;
+  private final double   DEFAULT_FRAGMENT_MEM_OVERCOMMIT_FACTOR=1.5;
+
+  private final boolean applyFragmentLimit;
+
+  private final LimitConsumer limitConsumer;
+  long fragmentLimit;
+
+  private long peakMemoryAllocation = 0;
+
+  // The top level Allocator has an accountor that keeps track of all the LimitConsumers currently executing.
+  // This enables the top level accountor to calculate a new fragment limit whenever necessary.
+  private final List<LimitConsumer> limitConsumers;
+
+  public AccountorImpl(DrillConfig config, boolean errorOnLeak, LimitConsumer context, AccountorImpl parent, long max,
+      long preAllocated, boolean applyFragLimit) {
+    // TODO: fix preallocation stuff
+    this.errorOnLeak = errorOnLeak;
+    AtomicRemainder parentRemainder = parent != null ? parent.remainder : null;
+    this.parent = parent;
+
+    boolean enableFragmentLimit;
+    double  fragmentMemOvercommitFactor;
+
+    try {
+      enableFragmentLimit = config.getBoolean(ExecConstants.ENABLE_FRAGMENT_MEMORY_LIMIT);
+      fragmentMemOvercommitFactor = config.getDouble(ExecConstants.FRAGMENT_MEM_OVERCOMMIT_FACTOR);
+    }catch(Exception e){
+      enableFragmentLimit = DEFAULT_ENABLE_FRAGMENT_LIMIT;
+      fragmentMemOvercommitFactor = DEFAULT_FRAGMENT_MEM_OVERCOMMIT_FACTOR;
+    }
+    this.enableFragmentLimit = enableFragmentLimit;
+    this.fragmentMemOvercommitFactor = fragmentMemOvercommitFactor;
+
+
+    this.applyFragmentLimit=applyFragLimit;
+
+    this.remainder = new AtomicRemainder(errorOnLeak, parentRemainder, max, preAllocated, applyFragmentLimit);
+    this.total = max;
+    this.limitConsumer = context;
+    this.fragmentLimit=this.total; // Allow as much as possible to start with;
+    if (ENABLE_ACCOUNTING) {
+      buffers = Maps.newConcurrentMap();
+    } else {
+      buffers = null;
+    }
+    this.limitConsumers = new ArrayList<LimitConsumer>();
+    if(parent!=null && parent.parent==null){ // Only add the fragment context to the fragment level accountor
+      synchronized(this) {
+        addLimitConsumer(this.limitConsumer);
+      }
+    }
+  }
+
+  public boolean transferTo(Accountor target, DrillBuf buf, long size) {
+    return transfer(target, buf, size, true);
+  }
+
+  public boolean transferIn(DrillBuf buf, long size) {
+    return transfer(this, buf, size, false);
+  }
+
+  private boolean transfer(Accountor target, DrillBuf buf, long size, boolean release) {
+    boolean withinLimit = target.forceAdditionalReservation(size);
+    if(release){
+      release(buf, size);
+    }
+
+    if (ENABLE_ACCOUNTING) {
+      if (target instanceof AccountorImpl) {
+        ((AccountorImpl) target).buffers.put(buf, new DebugStackTrace(buf.capacity(), Thread.currentThread()
+            .getStackTrace()));
+      }
+    }
+    return withinLimit;
+  }
+
+  public long getAvailable() {
+    if (parent != null) {
+      return Math.min(parent.getAvailable(), getCapacity() - getAllocation());
+    }
+    return getCapacity() - getAllocation();
+  }
+
+  public long getCapacity() {
+    return fragmentLimit;
+  }
+
+  public long getAllocation() {
+    return remainder.getUsed();
+  }
+
+  public long getPeakMemoryAllocation() {
+    return peakMemoryAllocation;
+  }
+
+  public boolean reserve(long size) {
+    boolean status = remainder.get(size, this.applyFragmentLimit);
+    peakMemoryAllocation = Math.max(peakMemoryAllocation, getAllocation());
+    return status;
+  }
+
+  public boolean forceAdditionalReservation(long size) {
+    if (size > 0) {
+      boolean status = remainder.forceGet(size);
+      peakMemoryAllocation = Math.max(peakMemoryAllocation, getAllocation());
+      return status;
+    } else {
+      return true;
+    }
+  }
+
+  public void reserved(long expected, DrillBuf buf) {
+    // Account for any additional memory the allocator granted beyond the expected size due to rounding.
+
+    long additional = buf.capacity() - expected;
+    if (additional > 0) {
+      remainder.forceGet(additional);
+    }
+
+    if (ENABLE_ACCOUNTING) {
+      buffers.put(buf, new DebugStackTrace(buf.capacity(), Thread.currentThread().getStackTrace()));
+    }
+
+    peakMemoryAllocation = Math.max(peakMemoryAllocation, getAllocation());
+  }
+
+
+  public void releasePartial(DrillBuf buf, long size) {
+    remainder.returnAllocation(size);
+    if (ENABLE_ACCOUNTING) {
+      if (buf != null) {
+        DebugStackTrace dst = buffers.get(buf);
+        if (dst == null) {
+          throw new IllegalStateException("Partially releasing a buffer that has already been released. Buffer: " + buf);
+        }
+        dst.size -= size;
+        if (dst.size < 0) {
+          throw new IllegalStateException("Partially releasing a buffer that has already been released. Buffer: " + buf);
+        }
+      }
+    }
+  }
+
+  void release(long size) {
+    remainder.returnAllocation(size);
+  }
+
+  public void release(DrillBuf buf, long size) {
+    remainder.returnAllocation(size);
+    if (ENABLE_ACCOUNTING) {
+      if (buf != null && buffers.remove(buf) == null) {
+        throw new IllegalStateException("Releasing a buffer that has already been released. Buffer: " + buf);
+      }
+    }
+  }
+
+  private void addLimitConsumer(LimitConsumer c) {
+    if (parent != null){
+      parent.addLimitConsumer(c);
+    }else {
+      if(logger.isTraceEnabled()) {
+        String fragStr = c == null ? "[Null Context]" : c.getIdentifier();
+        fragStr+=" (Object Id: "+System.identityHashCode(c)+")";
+        StackTraceElement[] ste = (new Throwable()).getStackTrace();
+        StringBuffer sb = new StringBuffer();
+        for (StackTraceElement s : ste) {
+          sb.append(s.toString());
+          sb.append("\n");
+        }
+
+        logger.trace("Fragment " + fragStr + " added to root accountor.\n"+sb.toString());
+      }
+      synchronized(this) {
+        limitConsumers.add(c);
+      }
+    }
+  }
+
+  private void removeLimitConsumer(LimitConsumer c) {
+    if (parent != null){
+      if (parent.parent==null){
+        // only fragment level allocators will have the fragment context saved
+        parent.removeLimitConsumer(c);
+      }
+    }else{
+      if(logger.isTraceEnabled()) {
+        String fragStr = c == null ? "[Null Context]" : c.getIdentifier();
+        fragStr += " (Object Id: " + System.identityHashCode(c) + ")";
+        logger.trace("Fragment " + fragStr + " removed from root accountor");
+      }
+      synchronized(this) {
+        limitConsumers.remove(c);
+      }
+    }
+  }
+
+  public long resetFragmentLimits(){
+    // returns the new capacity
+    if(!this.enableFragmentLimit){
+      return getCapacity();
+    }
+
+    if(parent!=null){
+      parent.resetFragmentLimits();
+    }else {
+      // Get the memory still available and distribute it EQUALLY among all the fragments.
+      // Each fragment gets this share added to the amount it has already allocated.
+      // This favours fragments that are already running, which get a limit greater than newly started fragments.
+      // If the already running fragments end quickly, their limits are handed back to the remaining fragments
+      // quickly. If they are long running, then we want to favour them with larger limits anyway.
+      synchronized (this) {
+        int nFragments = limitConsumers.size();
+        long allocatedMemory=0;
+        for (LimitConsumer fragment : limitConsumers) {
+          allocatedMemory += fragment.getAllocated();
+        }
+        if(logger.isTraceEnabled()) {
+          logger.trace("Resetting Fragment Memory Limit: total Available memory== "+total
+            +" Total Allocated Memory :"+allocatedMemory
+            +" Number of fragments: "+nFragments
+            + " fragmentMemOvercommitFactor: "+fragmentMemOvercommitFactor
+            + " Root fragment limit: "+this.fragmentLimit + "(Root obj: "+System.identityHashCode(this)+")"
+          );
+        }
+        if(nFragments>0) {
+          long rem = (total - allocatedMemory) / nFragments;
+          for (LimitConsumer fragment : limitConsumers) {
+            fragment.setLimit((long) (rem * fragmentMemOvercommitFactor));
+          }
+        }
+        if(logger.isTraceEnabled() && false){
+          StringBuffer sb= new StringBuffer();
+          sb.append("[root](0:0)");
+          sb.append("Allocated memory: ");
+          sb.append(this.getAllocation());
+          sb.append(" Fragment Limit: ");
+          sb.append(this.getFragmentLimit());
+          logger.trace(sb.toString());
+          for (LimitConsumer fragment : limitConsumers) {
+            sb= new StringBuffer();
+            sb.append('[');
+            sb.append(fragment.getIdentifier());
+            sb.append(']');
+            sb.append("Allocated memory: ");
+            sb.append(fragment.getAllocated());
+            sb.append(" Fragment Limit: ");
+            sb.append(fragment.getLimit());
+            logger.trace(sb.toString());
+          }
+          logger.trace("Resetting Complete");
+        }
+      }
+    }
+    return getCapacity();
+  }
+
+  public void close() {
+    // remove the fragment context and reset fragment limits whenever an allocator closes
+    if(parent!=null && parent.parent==null) {
+
+      logger.debug("Fragment " + limitConsumer.getIdentifier() + "  accountor being closed");
+      removeLimitConsumer(limitConsumer);
+    }
+    resetFragmentLimits();
+
+    if (ENABLE_ACCOUNTING && !buffers.isEmpty()) {
+      StringBuffer sb = new StringBuffer();
+      sb.append("Attempted to close accountor with ");
+      sb.append(buffers.size());
+      sb.append(" buffer(s) still allocated for ");
+      sb.append(limitConsumer.getIdentifier());
+      sb.append(".\n");
+
+      Multimap<DebugStackTrace, DebugStackTrace> multi = LinkedListMultimap.create();
+      for (DebugStackTrace t : buffers.values()) {
+        multi.put(t, t);
+      }
+
+      for (DebugStackTrace entry : multi.keySet()) {
+        Collection<DebugStackTrace> allocs = multi.get(entry);
+
+        sb.append("\n\n\tTotal ");
+        sb.append(allocs.size());
+        sb.append(" allocation(s) of byte size(s): ");
+        for (DebugStackTrace alloc : allocs) {
+          sb.append(alloc.size);
+          sb.append(", ");
+        }
+
+        sb.append("at stack location:\n");
+        entry.addToString(sb);
+      }
+      if (!buffers.isEmpty()) {
+        IllegalStateException e = new IllegalStateException(sb.toString());
+        if (errorOnLeak) {
+          throw e;
+        } else {
+          logger.warn("Memory leaked.", e);
+        }
+      }
+    }
+
+    remainder.close();
+
+  }
+
+  public void setFragmentLimit(long add) {
+    // We ADD the limit to the current allocation. If none has been allocated, this
+    // sets a new limit. If memory has already been allocated, the fragment gets its
+    // limit based on the allocation, though this might still result in reducing the
+    // limit.
+
+    if (parent != null && parent.parent==null) { // This is a fragment level accountor
+      this.fragmentLimit=getAllocation()+add;
+      this.remainder.setLimit(this.fragmentLimit);
+      logger.trace("Fragment " + limitConsumer.getIdentifier() + " memory limit set to " + this.fragmentLimit);
+    }
+  }
+
+  public long getFragmentLimit(){
+    return this.fragmentLimit;
+  }
+
+  public class DebugStackTrace {
+
+    private StackTraceElement[] elements;
+    private long size;
+
+    public DebugStackTrace(long size, StackTraceElement[] elements) {
+      super();
+      this.elements = elements;
+      this.size = size;
+    }
+
+    public void addToString(StringBuffer sb) {
+      for (int i = 3; i < elements.length; i++) {
+        sb.append("\t\t");
+        sb.append(elements[i]);
+        sb.append("\n");
+      }
+    }
+
+    @Override
+    public int hashCode() {
+      final int prime = 31;
+      int result = 1;
+      result = prime * result + Arrays.hashCode(elements);
+//      result = prime * result + (int) (size ^ (size >>> 32));
+      return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      }
+      if (obj == null) {
+        return false;
+      }
+      if (getClass() != obj.getClass()) {
+        return false;
+      }
+      DebugStackTrace other = (DebugStackTrace) obj;
+      if (!Arrays.equals(elements, other.elements)) {
+        return false;
+      }
+      // equals() deliberately ignores size so that identical stack traces group together in the multimap.
+//      if (size != other.size)
+//        return false;
+      return true;
+    }
+
+  }
+
+}
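
The arithmetic that the resetFragmentLimits() comments above describe is easy to lose among the logging, so the following standalone sketch works it through with made-up numbers. The class name and every value here are assumptions for illustration only; the two formulas (rem = (total - allocatedMemory) / nFragments, and fragmentLimit = allocation + rem * fragmentMemOvercommitFactor) are the ones in the code above.

    public class FragmentLimitSketch {
      public static void main(String[] args) {
        // Assumed inputs: an 8 GiB root accountor and three fragments, two of which
        // have already allocated memory. None of these numbers come from the patch.
        final long total = 8L << 30;
        final long[] allocatedPerFragment = {512L << 20, 256L << 20, 0L};
        final double fragmentMemOvercommitFactor = 1.5;  // the patch's default overcommit factor

        long allocatedMemory = 0;
        for (long a : allocatedPerFragment) {
          allocatedMemory += a;
        }
        final int nFragments = allocatedPerFragment.length;

        // resetFragmentLimits(): split what is left equally, then scale by the overcommit factor.
        final long rem = (total - allocatedMemory) / nFragments;
        final long add = (long) (rem * fragmentMemOvercommitFactor);

        // setFragmentLimit() then ADDs that share to each fragment's current allocation,
        // so fragments that are already running end up with a higher effective ceiling.
        for (long allocation : allocatedPerFragment) {
          System.out.println("allocation=" + allocation + " -> fragmentLimit=" + (allocation + add));
        }
      }
    }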

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/memory/OutOfMemoryException.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/OutOfMemoryException.java b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/OutOfMemoryException.java
deleted file mode 100644
index 063f1c1..0000000
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/OutOfMemoryException.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.memory;
-
-import org.apache.drill.exec.exception.FragmentSetupException;
-
-public class OutOfMemoryException extends FragmentSetupException{
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OutOfMemoryException.class);
-
-  public OutOfMemoryException() {
-    super();
-
-  }
-
-  public OutOfMemoryException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
-    super(message, cause, enableSuppression, writableStackTrace);
-
-  }
-
-  public OutOfMemoryException(String message, Throwable cause) {
-    super(message, cause);
-
-  }
-
-  public OutOfMemoryException(String message) {
-    super(message);
-
-  }
-
-  public OutOfMemoryException(Throwable cause) {
-    super(cause);
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/memory/TopLevelAllocator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/TopLevelAllocator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/TopLevelAllocator.java
index 54d7d70..40f8e2b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/TopLevelAllocator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/TopLevelAllocator.java
@@ -29,8 +29,7 @@ import java.util.Map.Entry;
 
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.ops.FragmentContext;
-import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.testing.ControlsInjector;
 import org.apache.drill.exec.testing.ControlsInjectorFactory;
 import org.apache.drill.exec.util.AssertionUtil;
@@ -46,7 +45,7 @@ public class TopLevelAllocator implements BufferAllocator {
   private static final boolean ENABLE_ACCOUNTING = AssertionUtil.isAssertionsEnabled();
   private final Map<ChildAllocator, StackTraceElement[]> childrenMap;
   private final PooledByteBufAllocatorL innerAllocator = PooledByteBufAllocatorL.DEFAULT;
-  private final Accountor acct;
+  private final AccountorImpl acct;
   private final boolean errorOnLeak;
   private final DrillBuf empty;
   private final DrillConfig config;
@@ -55,7 +54,7 @@ public class TopLevelAllocator implements BufferAllocator {
     MAXIMUM_DIRECT_MEMORY = maximumAllocation;
     this.config=(config!=null) ? config : DrillConfig.create();
     this.errorOnLeak = errorOnLeak;
-    this.acct = new Accountor(config, errorOnLeak, null, null, maximumAllocation, 0, true);
+    this.acct = new AccountorImpl(config, errorOnLeak, null, null, maximumAllocation, 0, true);
     this.empty = DrillBuf.getEmpty(this, acct);
     this.childrenMap = ENABLE_ACCOUNTING ? new IdentityHashMap<ChildAllocator, StackTraceElement[]>() : null;
   }
@@ -83,7 +82,7 @@ public class TopLevelAllocator implements BufferAllocator {
       return empty;
     }
     if(!acct.reserve(min)) {
-      throw new OutOfMemoryRuntimeException(createErrorMsg(this, min));
+      throw new OutOfMemoryException(createErrorMsg(this, min));
     }
 
     try {
@@ -94,7 +93,7 @@ public class TopLevelAllocator implements BufferAllocator {
     } catch (OutOfMemoryError e) {
       if ("Direct buffer memory".equals(e.getMessage())) {
         acct.release(min);
-        throw new OutOfMemoryRuntimeException(createErrorMsg(this, min), e);
+        throw new OutOfMemoryException(createErrorMsg(this, min), e);
       } else {
         throw e;
       }
@@ -122,18 +121,20 @@ public class TopLevelAllocator implements BufferAllocator {
   }
 
   @Override
-  public BufferAllocator getChildAllocator(FragmentContext context, long initialReservation, long maximumReservation,
+  public BufferAllocator getChildAllocator(LimitConsumer limitConsumer, long initialReservation,
+      long maximumReservation,
       boolean applyFragmentLimit) {
     if(!acct.reserve(initialReservation)){
       logger.debug(String.format("You attempted to create a new child allocator with initial reservation %d but only %d bytes of memory were available.", initialReservation, acct.getCapacity() - acct.getAllocation()));
-      throw new OutOfMemoryRuntimeException(
+      throw new OutOfMemoryException(
           String
               .format(
                   "You attempted to create a new child allocator with initial reservation %d but only %d bytes of memory were available.",
                   initialReservation, acct.getCapacity() - acct.getAllocation()));
     }
     logger.debug("New child allocator with initial reservation {}", initialReservation);
-    ChildAllocator allocator = new ChildAllocator(context, acct, maximumReservation, initialReservation, childrenMap, applyFragmentLimit);
+    ChildAllocator allocator = new ChildAllocator(limitConsumer, acct, maximumReservation, initialReservation,
+        childrenMap, applyFragmentLimit);
     if(ENABLE_ACCOUNTING){
       childrenMap.put(allocator, Thread.currentThread().getStackTrace());
     }
@@ -142,17 +143,17 @@ public class TopLevelAllocator implements BufferAllocator {
   }
 
   @Override
-  public void resetFragmentLimits() {
+  public void resetLimits() {
     acct.resetFragmentLimits();
   }
 
   @Override
-  public void setFragmentLimit(long limit){
+  public void setLimit(long limit){
     acct.setFragmentLimit(limit);
   }
 
   @Override
-  public long getFragmentLimit(){
+  public long getLimit(){
     return acct.getFragmentLimit();
   }
 
@@ -186,29 +187,26 @@ public class TopLevelAllocator implements BufferAllocator {
 
   private class ChildAllocator implements BufferAllocator {
     private final DrillBuf empty;
-    private Accountor childAcct;
+    private AccountorImpl childAcct;
     private Map<ChildAllocator, StackTraceElement[]> children = new HashMap<>();
     private boolean closed = false;
-    private FragmentHandle handle;
-    private FragmentContext fragmentContext;
+    private LimitConsumer limitConsumer;
     private Map<ChildAllocator, StackTraceElement[]> thisMap;
     private boolean applyFragmentLimit;
 
-    public ChildAllocator(FragmentContext context,
-                          Accountor parentAccountor,
+    public ChildAllocator(LimitConsumer limitConsumer,
+        AccountorImpl parentAccountor,
                           long max,
                           long pre,
                           Map<ChildAllocator,
                           StackTraceElement[]> map,
         boolean applyFragmentLimit) {
       assert max >= pre;
-      this.applyFragmentLimit=applyFragmentLimit;
-      DrillConfig drillConf = context != null ? context.getConfig() : null;
-      childAcct = new Accountor(drillConf, errorOnLeak, context, parentAccountor, max, pre, applyFragmentLimit);
-      this.fragmentContext=context;
-      this.handle = context != null ? context.getHandle() : null;
+      this.applyFragmentLimit = applyFragmentLimit;
+      this.limitConsumer = limitConsumer;
+      childAcct = new AccountorImpl(config, errorOnLeak, limitConsumer, parentAccountor, max, pre, applyFragmentLimit);
       thisMap = map;
-      this.empty = DrillBuf.getEmpty(this, childAcct);
+      empty = DrillBuf.getEmpty(this, childAcct);
     }
 
     @Override
@@ -226,15 +224,11 @@ public class TopLevelAllocator implements BufferAllocator {
 
     @Override
     public DrillBuf buffer(int size, int max) {
-      if (ENABLE_ACCOUNTING) {
-        injector.injectUnchecked(fragmentContext, CHILD_BUFFER_INJECTION_SITE);
-      }
-
       if (size == 0) {
         return empty;
       }
       if(!childAcct.reserve(size)) {
-        throw new OutOfMemoryRuntimeException(createErrorMsg(this, size));
+        throw new OutOfMemoryException(createErrorMsg(this, size));
       }
 
       try {
@@ -245,7 +239,7 @@ public class TopLevelAllocator implements BufferAllocator {
       } catch (OutOfMemoryError e) {
         if ("Direct buffer memory".equals(e.getMessage())) {
           childAcct.release(size);
-          throw new OutOfMemoryRuntimeException(createErrorMsg(this, size), e);
+          throw new OutOfMemoryException(createErrorMsg(this, size), e);
         } else {
           throw e;
         }
@@ -262,17 +256,19 @@ public class TopLevelAllocator implements BufferAllocator {
     }
 
     @Override
-    public BufferAllocator getChildAllocator(FragmentContext context, long initialReservation, long maximumReservation,
+    public BufferAllocator getChildAllocator(LimitConsumer limitConsumer, long initialReservation,
+        long maximumReservation,
         boolean applyFragmentLimit) {
       if (!childAcct.reserve(initialReservation)) {
-        throw new OutOfMemoryRuntimeException(
+        throw new OutOfMemoryException(
             String
                 .format(
                     "You attempted to create a new child allocator with initial reservation %d but only %d bytes of memory were available.",
                     initialReservation, childAcct.getAvailable()));
       }
       logger.debug("New child allocator with initial reservation {}", initialReservation);
-      ChildAllocator newChildAllocator = new ChildAllocator(context, childAcct, maximumReservation, initialReservation, null, applyFragmentLimit);
+      ChildAllocator newChildAllocator = new ChildAllocator(limitConsumer, childAcct, maximumReservation,
+          initialReservation, null, applyFragmentLimit);
       this.children.put(newChildAllocator, Thread.currentThread().getStackTrace());
       return newChildAllocator;
     }
@@ -282,17 +278,17 @@ public class TopLevelAllocator implements BufferAllocator {
     }
 
     @Override
-    public void resetFragmentLimits(){
+    public void resetLimits(){
       childAcct.resetFragmentLimits();
     }
 
     @Override
-    public void setFragmentLimit(long limit){
+    public void setLimit(long limit){
       childAcct.setFragmentLimit(limit);
     }
 
     @Override
-    public long getFragmentLimit(){
+    public long getLimit(){
       return childAcct.getFragmentLimit();
     }
 
@@ -314,8 +310,8 @@ public class TopLevelAllocator implements BufferAllocator {
 
 
             IllegalStateException e = new IllegalStateException(String.format(
-                    "Failure while trying to close child allocator: Child level allocators not closed. Fragment %d:%d. Stack trace: \n %s",
-                    handle.getMajorFragmentId(), handle.getMinorFragmentId(), sb.toString()));
+                        "Failure while trying to close child allocator: Child level allocators not closed. Identifier: %s. Stack trace: \n %s",
+                        limitConsumer.getIdentifier(), sb.toString()));
             if (errorOnLeak) {
               throw e;
             } else {
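
Both buffer() paths touched above keep the same reserve-then-allocate shape: reserve the bytes with the accountor first, and if the direct allocation still fails, return the reservation before rethrowing as OutOfMemoryException. The sketch below is a minimal, self-contained illustration of that shape under stated assumptions: the Accounting class stands in for AccountorImpl, ByteBuffer.allocateDirect stands in for the pooled Netty allocator, and the message check on the OutOfMemoryError is omitted.

    import java.nio.ByteBuffer;
    import java.util.concurrent.atomic.AtomicLong;

    public class ReserveThenAllocateSketch {

      // Stand-in for AccountorImpl: tracks used bytes against a fixed capacity.
      static final class Accounting {
        private final long capacity;
        private final AtomicLong used = new AtomicLong();

        Accounting(long capacity) {
          this.capacity = capacity;
        }

        boolean reserve(long size) {             // like AccountorImpl.reserve(long)
          long now = used.addAndGet(size);
          if (now > capacity) {
            used.addAndGet(-size);               // back out the failed reservation
            return false;
          }
          return true;
        }

        void release(long size) {                // like AccountorImpl.release(long)
          used.addAndGet(-size);
        }
      }

      static ByteBuffer buffer(Accounting acct, int size) {
        if (!acct.reserve(size)) {
          // The patch throws OutOfMemoryException here; a plain exception keeps the sketch self-contained.
          throw new IllegalStateException("Unable to reserve " + size + " bytes");
        }
        try {
          return ByteBuffer.allocateDirect(size);
        } catch (OutOfMemoryError e) {
          acct.release(size);                    // return the reservation before failing
          throw e;
        }
      }

      public static void main(String[] args) {
        Accounting acct = new Accounting(1L << 20);  // 1 MiB budget, assumed
        System.out.println("allocated " + buffer(acct, 4096).capacity() + " bytes");
      }
    }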

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BufferManager.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BufferManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BufferManager.java
deleted file mode 100644
index e0f1eab..0000000
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BufferManager.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*******************************************************************************
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- ******************************************************************************/
-package org.apache.drill.exec.ops;
-
-import com.carrotsearch.hppc.LongObjectOpenHashMap;
-import io.netty.buffer.DrillBuf;
-import org.apache.drill.exec.memory.BufferAllocator;
-
-/**
- * Manages a list of {@link DrillBuf}s that can be reallocated as needed. Upon
- * re-allocation the old buffer will be freed. Managing a list of these buffers
- * prevents some parts of the system from needing to define a correct location
- * to place the final call to free them.
- *
- * The current uses of these types of buffers are within the pluggable components of Drill.
- * In UDFs, memory management should not be a concern. We provide access to re-allocatable
- * DrillBufs to give UDF writers general purpose buffers we can account for. To prevent the need
- * for UDFs to contain boilerplate to close all of the buffers they request, this list
- * is tracked at a higher level and all of the buffers are freed once we are sure that
- * the code depending on them is done executing (currently {@link FragmentContext}
- * and {@link QueryContext}.
- */
-public class BufferManager implements AutoCloseable {
-  private LongObjectOpenHashMap<DrillBuf> managedBuffers = new LongObjectOpenHashMap<>();
-  private final BufferAllocator allocator;
-
-  // fragment context associated with this buffer manager, if the buffer
-  // manager is owned by another type, this can be null
-  private final FragmentContext fragmentContext;
-
-  public BufferManager(BufferAllocator allocator, FragmentContext fragmentContext) {
-    this.allocator = allocator;
-    this.fragmentContext = fragmentContext;
-  }
-
-  @Override
-  public void close() throws Exception {
-    final Object[] mbuffers = ((LongObjectOpenHashMap<Object>) (Object) managedBuffers).values;
-    for (int i = 0; i < mbuffers.length; i++) {
-      if (managedBuffers.allocated[i]) {
-        ((DrillBuf) mbuffers[i]).release(1);
-      }
-    }
-    managedBuffers.clear();
-  }
-
-  public DrillBuf replace(DrillBuf old, int newSize) {
-    if (managedBuffers.remove(old.memoryAddress()) == null) {
-      throw new IllegalStateException("Tried to remove unmanaged buffer.");
-    }
-    old.release(1);
-    return getManagedBuffer(newSize);
-  }
-
-  public DrillBuf getManagedBuffer() {
-    return getManagedBuffer(256);
-  }
-
-  public DrillBuf getManagedBuffer(int size) {
-    DrillBuf newBuf = allocator.buffer(size);
-    managedBuffers.put(newBuf.memoryAddress(), newBuf);
-    newBuf.setFragmentContext(fragmentContext);
-    newBuf.setBufferManager(this);
-    return newBuf;
-  }
-}
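
The javadoc of the deleted class above describes the managed-buffer lifecycle that the new BufferManagerImpl (next diff) carries forward: hand out buffers on request, free the old one in replace(), and free whatever is left when the owning context closes. The toy class below mirrors that lifecycle with plain ByteBuffers and a map; it is an illustration of the pattern, not Drill code.

    import java.nio.ByteBuffer;
    import java.util.IdentityHashMap;
    import java.util.Map;

    public class ManagedBufferSketch implements AutoCloseable {
      // Toy stand-in for BufferManagerImpl: remembers every buffer it has handed out.
      private final Map<ByteBuffer, Boolean> managed = new IdentityHashMap<>();

      public ByteBuffer getManagedBuffer() {
        return getManagedBuffer(256);            // same default size as the patch
      }

      public ByteBuffer getManagedBuffer(int size) {
        ByteBuffer buf = ByteBuffer.allocate(size);
        managed.put(buf, Boolean.TRUE);
        return buf;
      }

      public ByteBuffer replace(ByteBuffer old, int newSize) {
        if (managed.remove(old) == null) {
          throw new IllegalStateException("Tried to remove unmanaged buffer.");
        }
        // In Drill the old DrillBuf is released here; a plain ByteBuffer is simply dropped.
        return getManagedBuffer(newSize);
      }

      @Override
      public void close() {
        // The owning context (FragmentContext / QueryContext in the patch) closes the
        // manager, so code that requested buffers never has to free them explicitly.
        managed.clear();
      }

      public static void main(String[] args) {
        try (ManagedBufferSketch manager = new ManagedBufferSketch()) {
          ByteBuffer small = manager.getManagedBuffer();
          ByteBuffer bigger = manager.replace(small, 1024);
          System.out.println("replaced " + small.capacity() + " with " + bigger.capacity());
        }
      }
    }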

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BufferManagerImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BufferManagerImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BufferManagerImpl.java
new file mode 100644
index 0000000..38f2736
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BufferManagerImpl.java
@@ -0,0 +1,46 @@
+package org.apache.drill.exec.ops;
+
+import io.netty.buffer.DrillBuf;
+
+import org.apache.drill.exec.memory.BufferAllocator;
+
+import com.carrotsearch.hppc.LongObjectOpenHashMap;
+
+public class BufferManagerImpl implements BufferManager {
+  private LongObjectOpenHashMap<DrillBuf> managedBuffers = new LongObjectOpenHashMap<>();
+  private final BufferAllocator allocator;
+
+  public BufferManagerImpl(BufferAllocator allocator) {
+    this.allocator = allocator;
+  }
+
+  @Override
+  public void close() {
+    final Object[] mbuffers = ((LongObjectOpenHashMap<Object>) (Object) managedBuffers).values;
+    for (int i = 0; i < mbuffers.length; i++) {
+      if (managedBuffers.allocated[i]) {
+        ((DrillBuf) mbuffers[i]).release(1);
+      }
+    }
+    managedBuffers.clear();
+  }
+
+  public DrillBuf replace(DrillBuf old, int newSize) {
+    if (managedBuffers.remove(old.memoryAddress()) == null) {
+      throw new IllegalStateException("Tried to remove unmanaged buffer.");
+    }
+    old.release(1);
+    return getManagedBuffer(newSize);
+  }
+
+  public DrillBuf getManagedBuffer() {
+    return getManagedBuffer(256);
+  }
+
+  public DrillBuf getManagedBuffer(int size) {
+    DrillBuf newBuf = allocator.buffer(size);
+    managedBuffers.put(newBuf.memoryAddress(), newBuf);
+    newBuf.setBufferManager(this);
+    return newBuf;
+  }
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
index c974652..a773c22 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
@@ -30,18 +30,19 @@ import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.memory.BufferAllocator;
-import org.apache.drill.exec.memory.OutOfMemoryException;
-import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.memory.LimitConsumer;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.proto.BitControl.PlanFragment;
 import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
 import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
 import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
+import org.apache.drill.exec.proto.helper.QueryIdHelper;
 import org.apache.drill.exec.rpc.RpcException;
 import org.apache.drill.exec.rpc.RpcOutcomeListener;
 import org.apache.drill.exec.rpc.control.ControlTunnel;
@@ -158,9 +159,10 @@ public class FragmentContext implements AutoCloseable, UdfUtilities {
     // Add the fragment context to the root allocator.
     // The QueryManager will call the root allocator to recalculate all the memory limits for all the fragments
     try {
-      allocator = context.getAllocator().getChildAllocator(this, fragment.getMemInitial(), fragment.getMemMax(), true);
+      allocator = context.getAllocator().getChildAllocator(new AsLimitConsumer(), fragment.getMemInitial(),
+          fragment.getMemMax(), true);
       Preconditions.checkNotNull(allocator, "Unable to acquire allocator");
-    } catch(final OutOfMemoryException | OutOfMemoryRuntimeException e) {
+    } catch (final OutOfMemoryException e) {
       throw UserException.memoryError(e)
         .addContext("Fragment", getHandle().getMajorFragmentId() + ":" + getHandle().getMinorFragmentId())
         .build(logger);
@@ -169,7 +171,32 @@ public class FragmentContext implements AutoCloseable, UdfUtilities {
     }
 
     stats = new FragmentStats(allocator, dbContext.getMetrics(), fragment.getAssignment());
-    bufferManager = new BufferManager(this.allocator, this);
+    bufferManager = new BufferManagerImpl(this.allocator);
+  }
+
+  private class AsLimitConsumer implements LimitConsumer {
+    final String identifier = QueryIdHelper.getFragmentId(fragment.getHandle());
+
+    @Override
+    public String getIdentifier() {
+      return identifier;
+    }
+
+    @Override
+    public long getAllocated() {
+      return allocator.getAllocatedMemory();
+    }
+
+    @Override
+    public long getLimit() {
+      return allocator.getLimit();
+    }
+
+    @Override
+    public void setLimit(long limit) {
+      allocator.setLimit(limit);
+    }
+
   }
 
   /**
@@ -270,6 +297,9 @@ public class FragmentContext implements AutoCloseable, UdfUtilities {
     return frag;
   }
 
+  public LimitConsumer asLimitConsumer() {
+    return new AsLimitConsumer();
+  }
 
 
   /**
@@ -287,7 +317,8 @@ public class FragmentContext implements AutoCloseable, UdfUtilities {
   public BufferAllocator getNewChildAllocator(final long initialReservation,
                                               final long maximumReservation,
                                               final boolean applyFragmentLimit) throws OutOfMemoryException {
-    return allocator.getChildAllocator(this, initialReservation, maximumReservation, applyFragmentLimit);
+    return allocator.getChildAllocator(new AsLimitConsumer(), initialReservation, maximumReservation,
+        applyFragmentLimit);
   }
 
   public <T> T getImplementationClass(final ClassGenerator<T> cg)
@@ -365,7 +396,7 @@ public class FragmentContext implements AutoCloseable, UdfUtilities {
   }
 
   public void setFragmentLimit(final long limit) {
-    allocator.setFragmentLimit(limit);
+    allocator.setLimit(limit);
   }
 
   public ExecutionControls getExecutionControls() {
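
With this change FragmentContext no longer passes itself to the allocator; it registers the small AsLimitConsumer adapter above instead. The sketch below shows what any other LimitConsumer could look like; the four methods are the ones AsLimitConsumer overrides, the interface is re-declared locally only so the example compiles on its own, and RecordingConsumer is purely illustrative.

    public class LimitConsumerSketch {

      // Re-declared locally so the example compiles on its own; the real interface is
      // org.apache.drill.exec.memory.LimitConsumer.
      interface LimitConsumer {
        String getIdentifier();
        long getAllocated();
        long getLimit();
        void setLimit(long limit);
      }

      // Purely illustrative consumer that just records the limit the root accountor assigns.
      static final class RecordingConsumer implements LimitConsumer {
        private final String identifier;
        private final long allocated;
        private long limit;

        RecordingConsumer(String identifier, long allocated) {
          this.identifier = identifier;
          this.allocated = allocated;
        }

        @Override public String getIdentifier() { return identifier; }
        @Override public long getAllocated() { return allocated; }
        @Override public long getLimit() { return limit; }
        @Override public void setLimit(long limit) { this.limit = limit; }
      }

      public static void main(String[] args) {
        LimitConsumer consumer = new RecordingConsumer("1:0", 256L << 20);
        consumer.setLimit(1L << 30);  // what resetFragmentLimits() would do for this fragment
        System.out.println(consumer.getIdentifier() + " allocated=" + consumer.getAllocated()
            + " limit=" + consumer.getLimit());
      }
    }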

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java
index a78ee23..a3ec6bd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java
@@ -17,28 +17,27 @@
  */
 package org.apache.drill.exec.ops;
 
-import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
 import io.netty.buffer.DrillBuf;
 
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+
 import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.memory.BufferAllocator;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
-
-import com.carrotsearch.hppc.LongObjectOpenHashMap;
-import org.apache.drill.exec.testing.ExecutionControls;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
+import org.apache.drill.exec.testing.ExecutionControls;
 import org.apache.drill.exec.work.WorkManager;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 
-import java.io.IOException;
-import java.security.PrivilegedExceptionAction;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
 
 class OperatorContextImpl extends OperatorContext implements AutoCloseable {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OperatorContextImpl.class);
@@ -46,9 +45,9 @@ class OperatorContextImpl extends OperatorContext implements AutoCloseable {
   private final BufferAllocator allocator;
   private final ExecutionControls executionControls;
   private boolean closed = false;
-  private PhysicalOperator popConfig;
-  private OperatorStats stats;
-  private LongObjectOpenHashMap<DrillBuf> managedBuffers = new LongObjectOpenHashMap<>();
+  private final PhysicalOperator popConfig;
+  private final OperatorStats stats;
+  private final BufferManager manager;
   private final boolean applyFragmentLimit;
   private DrillFileSystem fs;
   private final ExecutorService executor;
@@ -65,6 +64,7 @@ class OperatorContextImpl extends OperatorContext implements AutoCloseable {
     this.applyFragmentLimit=applyFragmentLimit;
     this.allocator = context.getNewChildAllocator(popConfig.getInitialAllocation(), popConfig.getMaxAllocation(), applyFragmentLimit);
     this.popConfig = popConfig;
+    this.manager = new BufferManagerImpl(allocator);
 
     OpProfileDef def = new OpProfileDef(popConfig.getOperatorId(), popConfig.getOperatorType(), getChildCount(popConfig));
     stats = context.getStats().newOperatorStats(def, allocator);
@@ -76,28 +76,22 @@ class OperatorContextImpl extends OperatorContext implements AutoCloseable {
     this.applyFragmentLimit=applyFragmentLimit;
     this.allocator = context.getNewChildAllocator(popConfig.getInitialAllocation(), popConfig.getMaxAllocation(), applyFragmentLimit);
     this.popConfig = popConfig;
+    this.manager = new BufferManagerImpl(allocator);
     this.stats     = stats;
     executionControls = context.getExecutionControls();
     executor = context.getDrillbitContext().getExecutor();
   }
 
   public DrillBuf replace(DrillBuf old, int newSize) {
-    if (managedBuffers.remove(old.memoryAddress()) == null) {
-      throw new IllegalStateException("Tried to remove unmanaged buffer.");
-    }
-    old.release();
-    return getManagedBuffer(newSize);
+    return manager.replace(old, newSize);
   }
 
   public DrillBuf getManagedBuffer() {
-    return getManagedBuffer(256);
+    return manager.getManagedBuffer();
   }
 
   public DrillBuf getManagedBuffer(int size) {
-    DrillBuf newBuf = allocator.buffer(size);
-    managedBuffers.put(newBuf.memoryAddress(), newBuf);
-    newBuf.setOperatorContext(this);
-    return newBuf;
+    return manager.getManagedBuffer(size);
   }
 
   public ExecutionControls getExecutionControls() {
@@ -123,13 +117,7 @@ class OperatorContextImpl extends OperatorContext implements AutoCloseable {
     }
     logger.debug("Closing context for {}", popConfig != null ? popConfig.getClass().getName() : null);
 
-    // release managed buffers.
-    Object[] buffers = ((LongObjectOpenHashMap<Object>)(Object)managedBuffers).values;
-    for (int i =0; i < buffers.length; i++) {
-      if (managedBuffers.allocated[i]) {
-        ((DrillBuf)buffers[i]).release();
-      }
-    }
+    manager.close();
 
     if (allocator != null) {
       allocator.close();

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
index b56c16c..3bcd111 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
@@ -17,23 +17,22 @@
  */
 package org.apache.drill.exec.ops;
 
+import io.netty.buffer.DrillBuf;
+
 import java.io.IOException;
 import java.util.Collection;
 import java.util.List;
 
-import com.google.common.collect.Lists;
-import io.netty.buffer.DrillBuf;
-import org.apache.calcite.schema.SchemaPlus;
 import org.apache.calcite.jdbc.SimpleCalciteSchema;
-
+import org.apache.calcite.schema.SchemaPlus;
 import org.apache.drill.common.AutoCloseables;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.LogicalPlanPersistence;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
-import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.exec.memory.BufferAllocator;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.planner.sql.DrillOperatorTable;
 import org.apache.drill.exec.proto.BitControl.QueryContextInformation;
@@ -51,6 +50,8 @@ import org.apache.drill.exec.testing.ExecutionControls;
 import org.apache.drill.exec.util.ImpersonationUtil;
 import org.apache.drill.exec.util.Utilities;
 
+import com.google.common.collect.Lists;
+
 // TODO except for a couple of tests, this is only created by Foreman
 // TODO the many methods that just return drillbitContext.getXxx() should be replaced with getDrillbitContext()
 // TODO - consider re-name to PlanningContext, as the query execution context actually appears
@@ -98,7 +99,7 @@ public class QueryContext implements AutoCloseable, OptimizerRulesContext {
       throw new DrillRuntimeException("Error creating off-heap allocator for planning context.",e);
     }
     // TODO(DRILL-1942) the new allocator has this capability built-in, so this can be removed once that is available
-    bufferManager = new BufferManager(this.allocator, null);
+    bufferManager = new BufferManagerImpl(this.allocator);
     viewExpansionContext = new ViewExpansionContext(this);
     schemaTreesToClose = Lists.newArrayList();
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/BaseRootExec.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/BaseRootExec.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/BaseRootExec.java
index 9301650..ee6475c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/BaseRootExec.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/BaseRootExec.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.physical.impl;
 import java.util.List;
 
 import org.apache.drill.common.DeferredException;
-import org.apache.drill.exec.memory.OutOfMemoryException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OpProfileDef;
 import org.apache.drill.exec.ops.OperatorContext;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/MergingReceiverCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/MergingReceiverCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/MergingReceiverCreator.java
index 1bf6c01..59e7448 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/MergingReceiverCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/MergingReceiverCreator.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.physical.impl;
 import java.util.List;
 
 import org.apache.drill.common.exceptions.ExecutionSetupException;
-import org.apache.drill.exec.memory.OutOfMemoryException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.MergingReceiverPOP;
 import org.apache.drill.exec.physical.impl.mergereceiver.MergingRecordBatch;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
index dbb5e00..6c763d6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
@@ -30,10 +30,9 @@ import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.TypeHelper;
-import org.apache.drill.exec.memory.OutOfMemoryException;
-import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
@@ -184,7 +183,7 @@ public class ScanBatch implements CloseableRecordBatch {
         injector.injectChecked(context.getExecutionControls(), "next-allocate", OutOfMemoryException.class);
 
         currentReader.allocate(fieldVectorMap);
-      } catch (OutOfMemoryException | OutOfMemoryRuntimeException e) {
+      } catch (OutOfMemoryException e) {
         logger.debug("Caught Out of Memory Exception", e);
         clearFieldVectorMap();
         return IterOutcome.OUT_OF_MEMORY;
@@ -260,7 +259,7 @@ public class ScanBatch implements CloseableRecordBatch {
       } else {
         return IterOutcome.OK;
       }
-    } catch (OutOfMemoryRuntimeException ex) {
+    } catch (OutOfMemoryException ex) {
       context.fail(UserException.memoryError(ex).build(logger));
       return IterOutcome.STOP;
     } catch (Exception ex) {

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScreenCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScreenCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScreenCreator.java
index a46ea35..3b90979 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScreenCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScreenCreator.java
@@ -20,8 +20,8 @@ package org.apache.drill.exec.physical.impl;
 import java.util.List;
 
 import org.apache.drill.common.exceptions.ExecutionSetupException;
-import org.apache.drill.exec.memory.OutOfMemoryException;
-import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.AccountingUserConnection;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.MetricDef;
@@ -35,6 +35,7 @@ import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.record.RecordBatch.IterOutcome;
 
 import com.google.common.base.Preconditions;
+
 import org.apache.drill.exec.testing.ControlsInjector;
 import org.apache.drill.exec.testing.ControlsInjectorFactory;
 
@@ -81,7 +82,7 @@ public class ScreenCreator implements RootCreator<Screen> {
       logger.trace("Screen Outcome {}", outcome);
       switch (outcome) {
       case OUT_OF_MEMORY:
-        throw new OutOfMemoryRuntimeException();
+        throw new OutOfMemoryException();
       case STOP:
         return false;
       case NONE:

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/SingleSenderCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/SingleSenderCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/SingleSenderCreator.java
index 248c8da..73f3f95 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/SingleSenderCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/SingleSenderCreator.java
@@ -20,8 +20,8 @@ package org.apache.drill.exec.physical.impl;
 import java.util.List;
 
 import org.apache.drill.common.exceptions.ExecutionSetupException;
-import org.apache.drill.exec.memory.OutOfMemoryException;
-import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.AccountingDataTunnel;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.MetricDef;
@@ -98,7 +98,7 @@ public class SingleSenderCreator implements RootCreator<SingleSender>{
 //      logger.debug("Outcome of sender next {}", out);
       switch (out) {
       case OUT_OF_MEMORY:
-        throw new OutOfMemoryRuntimeException();
+        throw new OutOfMemoryException();
       case STOP:
       case NONE:
         // if we didn't do anything yet, send an empty schema.

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
index aca7549..54d2839 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
@@ -30,6 +30,7 @@ import org.apache.drill.common.logical.data.Order.Ordering;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.ClassGenerator.HoldingContainer;
@@ -37,7 +38,6 @@ import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.expr.fn.FunctionGenerationHelper;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.TopN;
 import org.apache.drill.exec.physical.impl.sort.RecordBatchData;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
index 34c514e..7da8a16 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
-import org.apache.drill.exec.memory.OutOfMemoryException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.base.Writer;
 import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
index 89122c8..ee9a0ab 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
@@ -30,6 +30,7 @@ import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.compile.sig.GeneratorMapping;
 import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.ClassGenerator.HoldingContainer;
@@ -39,7 +40,6 @@ import org.apache.drill.exec.expr.HoldingContainerExpression;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.expr.ValueVectorWriteExpression;
 import org.apache.drill.exec.expr.fn.FunctionGenerationHelper;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.StreamingAggregate;
 import org.apache.drill.exec.physical.impl.aggregate.StreamingAggregator.AggOutcome;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/broadcastsender/BroadcastSenderRootExec.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/broadcastsender/BroadcastSenderRootExec.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/broadcastsender/BroadcastSenderRootExec.java
index 98db0cd..c731e38 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/broadcastsender/BroadcastSenderRootExec.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/broadcastsender/BroadcastSenderRootExec.java
@@ -19,8 +19,8 @@ package org.apache.drill.exec.physical.impl.broadcastsender;
 
 import java.util.List;
 
-import org.apache.drill.exec.memory.OutOfMemoryException;
-import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.AccountingDataTunnel;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.MetricDef;
@@ -97,7 +97,7 @@ public class BroadcastSenderRootExec extends BaseRootExec {
     logger.debug("Outcome of sender next {}", out);
     switch(out){
       case OUT_OF_MEMORY:
-        throw new OutOfMemoryRuntimeException();
+        throw new OutOfMemoryException();
       case STOP:
       case NONE:
         for (int i = 0; i < tunnels.length; ++i) {

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
index 17ac7b1..e435e79 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
@@ -26,11 +26,11 @@ import org.apache.drill.common.expression.ExpressionStringBuilder;
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.Filter;
 import org.apache.drill.exec.record.AbstractSingleRecordBatch;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterTemplate2.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterTemplate2.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterTemplate2.java
index 11d01d7..d3f9eda 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterTemplate2.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterTemplate2.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.physical.impl.filter;
 import javax.inject.Named;
 
 import org.apache.drill.exec.exception.SchemaChangeException;
-import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.record.RecordBatch;
@@ -65,7 +65,7 @@ public abstract class FilterTemplate2 implements Filterer{
       return;
     }
     if (! outgoingSelectionVector.allocateNewSafe(recordCount)) {
-      throw new OutOfMemoryRuntimeException("Unable to allocate filter batch");
+      throw new OutOfMemoryException("Unable to allocate filter batch");
     }
     switch(svMode){
     case NONE:
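 
The FilterTemplate2 hunk above converts a failed allocateNewSafe call into a thrown OutOfMemoryException with a message. A small sketch of that allocate-or-throw shape, under stated assumptions: AllocationTarget, allocateNewSafe and allocateOrThrow here are hypothetical stand-ins for the selection-vector call, not Drill's API.

    // Sketch only: turns a boolean "safe allocate" result into an unchecked
    // OutOfMemoryException that carries a useful message.
    public class AllocateOrThrowSketch {

      static class OutOfMemoryException extends RuntimeException {
        OutOfMemoryException(String message) { super(message); }
      }

      interface AllocationTarget {
        // Returns false instead of throwing when the allocation cannot be satisfied.
        boolean allocateNewSafe(int recordCount);
      }

      static void allocateOrThrow(AllocationTarget target, int recordCount) {
        if (!target.allocateNewSafe(recordCount)) {
          throw new OutOfMemoryException("Unable to allocate filter batch");
        }
      }

      public static void main(String[] args) {
        AllocationTarget alwaysFails = recordCount -> false;
        try {
          allocateOrThrow(alwaysFails, 4096);
        } catch (OutOfMemoryException e) {
          System.out.println(e.getMessage()); // Unable to allocate filter batch
        }
      }
    }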

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
index 3a8b735..c3b3f45 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
@@ -29,6 +29,7 @@ import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.logical.data.NamedExpression;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.CodeGenerator;
@@ -38,7 +39,6 @@ import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.expr.ValueVectorReadExpression;
 import org.apache.drill.exec.expr.ValueVectorWriteExpression;
 import org.apache.drill.exec.expr.fn.DrillComplexWriterFuncHolder;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.FlattenPOP;
 import org.apache.drill.exec.record.AbstractSingleRecordBatch;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
index 21ebd9a..2d0bd43 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
@@ -31,10 +31,10 @@ import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.compile.sig.GeneratorMapping;
 import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.CodeGenerator;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.MetricDef;
 import org.apache.drill.exec.physical.config.HashJoinPOP;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
index 96113e9..edafbfc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
@@ -34,13 +34,13 @@ import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.ClassGenerator.HoldingContainer;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
 import org.apache.drill.exec.expr.fn.FunctionGenerationHelper;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.MergeJoinPOP;
 import org.apache.drill.exec.physical.impl.join.JoinUtils.JoinComparator;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/NestedLoopJoinBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/NestedLoopJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/NestedLoopJoinBatch.java
index 8f2dad3..b390b8f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/NestedLoopJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/NestedLoopJoinBatch.java
@@ -25,10 +25,10 @@ import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.compile.sig.GeneratorMapping;
 import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.CodeGenerator;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.NestedLoopJoinPOP;
 import org.apache.drill.exec.physical.impl.sort.RecordBatchData;
@@ -40,6 +40,7 @@ import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.record.TypedFieldId;
 import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.vector.AllocationHelper;
+
 import com.google.common.base.Preconditions;
 import com.sun.codemodel.JExpr;
 import com.sun.codemodel.JExpression;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/limit/LimitRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/limit/LimitRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/limit/LimitRecordBatch.java
index 06dd699..176ee17 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/limit/LimitRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/limit/LimitRecordBatch.java
@@ -19,8 +19,8 @@ package org.apache.drill.exec.physical.impl.limit;
 
 import java.util.List;
 
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.Limit;
 import org.apache.drill.exec.record.AbstractSingleRecordBatch;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
index cd94274..1cc598a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
@@ -35,13 +35,13 @@ import org.apache.drill.common.logical.data.Order.Ordering;
 import org.apache.drill.exec.compile.sig.GeneratorMapping;
 import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.ClassGenerator.HoldingContainer;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
 import org.apache.drill.exec.expr.fn.FunctionGenerationHelper;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.MetricDef;
 import org.apache.drill.exec.physical.MinorFragmentEndpoint;
@@ -79,7 +79,6 @@ import org.apache.drill.exec.vector.ValueVector;
 import org.apache.calcite.rel.RelFieldCollation.Direction;
 
 import com.google.common.base.Preconditions;
-
 import com.google.common.collect.Lists;
 import com.sun.codemodel.JConditional;
 import com.sun.codemodel.JExpr;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
index 629a3e2..c9483ae 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
@@ -40,6 +40,7 @@ import org.apache.drill.exec.cache.DistributedMap;
 import org.apache.drill.exec.cache.DistributedMultiMap;
 import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.ClassGenerator.HoldingContainer;
@@ -49,7 +50,6 @@ import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.expr.ValueVectorReadExpression;
 import org.apache.drill.exec.expr.ValueVectorWriteExpression;
 import org.apache.drill.exec.expr.fn.FunctionGenerationHelper;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.OrderedPartitionSender;
 import org.apache.drill.exec.physical.impl.sort.SortBatch;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
index 6e49e78..5e9e84c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
@@ -27,12 +27,12 @@ import org.apache.drill.common.expression.ErrorCollectorImpl;
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
-import org.apache.drill.exec.memory.OutOfMemoryException;
-import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.AccountingDataTunnel;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.MetricDef;
@@ -169,7 +169,7 @@ public class PartitionSenderRootExec extends BaseRootExec {
         return false;
 
       case OUT_OF_MEMORY:
-        throw new OutOfMemoryRuntimeException();
+        throw new OutOfMemoryException();
 
       case STOP:
         if (partitioner != null) {

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
index 98ee320..e3033b4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
@@ -22,9 +22,9 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.LinkedBlockingDeque;
 
 import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.expr.TypeHelper;
-import org.apache.drill.exec.memory.OutOfMemoryException;
-import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.ProducerConsumer;
 import org.apache.drill.exec.physical.impl.sort.RecordBatchData;
@@ -81,7 +81,7 @@ public class ProducerConsumerBatch extends AbstractRecordBatch {
     } else if (wrapper.failed) {
       return IterOutcome.STOP;
     } else if (wrapper.outOfMemory) {
-      throw new OutOfMemoryRuntimeException();
+      throw new OutOfMemoryException();
     }
 
     recordCount = wrapper.batch.getRecordCount();
@@ -149,7 +149,7 @@ public class ProducerConsumerBatch extends AbstractRecordBatch {
               throw new UnsupportedOperationException();
           }
         }
-      } catch (final OutOfMemoryRuntimeException e) {
+      } catch (final OutOfMemoryException e) {
         try {
           queue.putFirst(RecordBatchDataWrapper.outOfMemory());
         } catch (final InterruptedException ex) {
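 
The ProducerConsumerBatch hunks above keep the same signalling shape with the renamed exception: the producer thread catches the out-of-memory failure and pushes a marker to the head of the queue so the consumer rethrows on its own thread. A self-contained sketch of that hand-off follows; Wrapper and its fields are hypothetical stand-ins for the operator's batch wrapper, not the real class.

    import java.util.concurrent.BlockingDeque;
    import java.util.concurrent.LinkedBlockingDeque;

    // Sketch of the producer/consumer signalling: the producer catches the
    // unchecked OutOfMemoryException and puts an "out of memory" marker at the
    // head of the deque so the consumer sees it before any queued data.
    public class ProducerConsumerSketch {

      static class OutOfMemoryException extends RuntimeException { }

      static class Wrapper {
        final String data;          // null for control markers
        final boolean outOfMemory;
        Wrapper(String data, boolean outOfMemory) {
          this.data = data;
          this.outOfMemory = outOfMemory;
        }
        static Wrapper outOfMemory() { return new Wrapper(null, true); }
      }

      public static void main(String[] args) throws InterruptedException {
        BlockingDeque<Wrapper> queue = new LinkedBlockingDeque<>();

        Thread producer = new Thread(() -> {
          try {
            // Simulate the upstream batch running out of memory mid-produce.
            throw new OutOfMemoryException();
          } catch (OutOfMemoryException e) {
            try {
              queue.putFirst(Wrapper.outOfMemory()); // failure jumps the queue
            } catch (InterruptedException ex) {
              Thread.currentThread().interrupt();
            }
          }
        });
        producer.start();
        producer.join();

        Wrapper next = queue.take();
        if (next.outOfMemory) {
          System.out.println("consumer rethrows on its own thread");
          // In the real operator this is where the exception would be rethrown.
        }
      }
    }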

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
index ce7c5ec..1ca5d1d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
@@ -40,6 +40,7 @@ import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.ClassGenerator.HoldingContainer;
@@ -51,7 +52,6 @@ import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.expr.ValueVectorReadExpression;
 import org.apache.drill.exec.expr.ValueVectorWriteExpression;
 import org.apache.drill.exec.expr.fn.DrillComplexWriterFuncHolder;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.Project;
 import org.apache.drill.exec.planner.StarColumnHelper;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortBatch.java
index cebefa5..fb17669 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortBatch.java
@@ -26,13 +26,13 @@ import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.logical.data.Order.Ordering;
 import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.ClassGenerator.HoldingContainer;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
 import org.apache.drill.exec.expr.fn.FunctionGenerationHelper;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.Sort;
 import org.apache.drill.exec.record.AbstractRecordBatch;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
index cb04244..fa6001b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
@@ -21,10 +21,10 @@ import java.io.IOException;
 import java.util.List;
 
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.memory.BufferAllocator;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.SelectionVectorRemover;
 import org.apache.drill.exec.record.AbstractSingleRecordBatch;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionAllRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionAllRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionAllRecordBatch.java
index 357269d..51d74cc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionAllRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionAllRecordBatch.java
@@ -30,14 +30,14 @@ import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
 import org.apache.drill.exec.expr.ValueVectorReadExpression;
 import org.apache.drill.exec.expr.ValueVectorWriteExpression;
-import org.apache.drill.exec.memory.OutOfMemoryException;
-import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.UnionAll;
 import org.apache.drill.exec.record.AbstractRecordBatch;
@@ -146,7 +146,7 @@ public class UnionAllRecordBatch extends AbstractRecordBatch<UnionAll> {
     for (ValueVector v : allocationVectors) {
       try {
         AllocationHelper.allocateNew(v, current.getRecordCount());
-      } catch (OutOfMemoryRuntimeException ex) {
+      } catch (OutOfMemoryException ex) {
         return false;
       }
     }
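 
The UnionAllRecordBatch hunk above shows the caller-side view of the same rename: allocation failure arrives as the unchecked OutOfMemoryException and is translated into a boolean result so the operator can back off. A short sketch of that loop, with Vector and allocateNew as hypothetical stand-ins for the value-vector allocation call:

    import java.util.Arrays;
    import java.util.List;

    // Sketch only: treat the unchecked OutOfMemoryException as a soft failure
    // and report false so the caller decides how to recover.
    public class AllocateVectorsSketch {

      static class OutOfMemoryException extends RuntimeException { }

      interface Vector {
        void allocateNew(int recordCount); // assumed to throw on failure
      }

      static boolean allocateAll(List<Vector> vectors, int recordCount) {
        for (Vector v : vectors) {
          try {
            v.allocateNew(recordCount);
          } catch (OutOfMemoryException ex) {
            return false; // e.g. flush what we have and retry later
          }
        }
        return true;
      }

      public static void main(String[] args) {
        Vector ok = count -> { };
        Vector broke = count -> { throw new OutOfMemoryException(); };
        System.out.println(allocateAll(Arrays.asList(ok, ok), 1024));    // true
        System.out.println(allocateAll(Arrays.asList(ok, broke), 1024)); // false
      }
    }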

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/unorderedreceiver/UnorderedReceiverBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/unorderedreceiver/UnorderedReceiverBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/unorderedreceiver/UnorderedReceiverBatch.java
index fafa14e..00cf295 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/unorderedreceiver/UnorderedReceiverBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/unorderedreceiver/UnorderedReceiverBatch.java
@@ -23,8 +23,8 @@ import java.io.IOException;
 import java.util.Iterator;
 
 import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.MetricDef;
 import org.apache.drill.exec.ops.OperatorContext;

http://git-wip-us.apache.org/repos/asf/drill/blob/4524fdbe/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/window/WindowFrameRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/window/WindowFrameRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/window/WindowFrameRecordBatch.java
index da13669..2261734 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/window/WindowFrameRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/window/WindowFrameRecordBatch.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.List;
 
 import com.google.common.collect.Iterables;
+
 import org.apache.drill.common.exceptions.DrillException;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.expression.FunctionCall;
@@ -30,12 +31,12 @@ import org.apache.drill.common.logical.data.Order;
 import org.apache.drill.exec.compile.sig.GeneratorMapping;
 import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
 import org.apache.drill.exec.expr.fn.FunctionGenerationHelper;
-import org.apache.drill.exec.memory.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.WindowPOP;
 import org.apache.drill.exec.record.AbstractRecordBatch;

