drill-commits mailing list archives

From s..@apache.org
Subject [3/3] drill git commit: DRILL-4382: Remove dependency on drill-logical from vector package
Date Tue, 16 Feb 2016 21:10:34 GMT
DRILL-4382: Remove dependency on drill-logical from vector package
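
The core of the change: MaterializedField now identifies a field by a plain
String name/path rather than a drill-logical SchemaPath, which is what lets
the vector package drop that dependency. A minimal before/after sketch,
assuming the post-commit signatures visible in the hunks below (the class
name and field name here are illustrative, not part of the commit):

  import org.apache.drill.common.expression.SchemaPath;
  import org.apache.drill.common.types.TypeProtos.MinorType;
  import org.apache.drill.common.types.Types;
  import org.apache.drill.exec.record.MaterializedField;

  public class FieldNameSketch {
    public static void main(String[] args) {
      // Before: MaterializedField.create(SchemaPath.getSimplePath("dummy"), type)
      // After:  the name is passed directly as a String.
      MaterializedField field =
          MaterializedField.create("dummy", Types.required(MinorType.INT));

      // getPath() now returns a String; callers wrap it only where a
      // SchemaPath is still required, e.g. VectorContainer.getValueVectorId().
      SchemaPath path = SchemaPath.getSimplePath(field.getPath());
      System.out.println(path.getRootSegment().getPath()); // prints: dummy
    }
  }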


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/9a3a5c4f
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/9a3a5c4f
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/9a3a5c4f

Branch: refs/heads/master
Commit: 9a3a5c4ff670a50a49f61f97dd838da59a12f976
Parents: 0a2518d
Author: Steven Phillips <steven@dremio.com>
Authored: Thu Feb 4 17:43:17 2016 -0800
Committer: Steven Phillips <steven@dremio.com>
Committed: Tue Feb 16 11:58:48 2016 -0800

----------------------------------------------------------------------
 .../exec/store/hbase/HBaseRecordReader.java     |   4 +-
 .../drill/exec/store/hive/HiveRecordReader.java |   4 +-
 .../org/apache/drill/exec/client/DumpCat.java   |   2 +-
 .../drill/exec/physical/impl/ScanBatch.java     |   8 +-
 .../impl/TopN/PriorityQueueTemplate.java        |   3 +-
 .../exec/physical/impl/WriterRecordBatch.java   |   4 +-
 .../physical/impl/aggregate/HashAggBatch.java   |   4 +-
 .../impl/aggregate/HashAggTemplate.java         |   2 +-
 .../impl/aggregate/StreamingAggBatch.java       |   4 +-
 .../physical/impl/common/ChainedHashTable.java  |   2 +-
 .../physical/impl/common/HashTableTemplate.java |   2 +-
 .../impl/flatten/FlattenRecordBatch.java        |   9 +-
 .../OrderedPartitionRecordBatch.java            |   4 +-
 .../impl/producer/ProducerConsumerBatch.java    |   5 +-
 .../impl/project/ProjectRecordBatch.java        |  26 +-
 .../impl/union/UnionAllRecordBatch.java         |  14 +-
 .../physical/impl/window/WindowFunction.java    |  14 +-
 .../physical/impl/xsort/ExternalSortBatch.java  |   3 +-
 .../logical/partition/PruneScanRule.java        |   2 +-
 .../drill/exec/record/RecordBatchLoader.java    |   6 +-
 .../apache/drill/exec/record/SchemaUtil.java    |   8 +-
 .../apache/drill/exec/record/TypedFieldId.java  | 273 +++++++++++++++++++
 .../drill/exec/record/VectorContainer.java      |   4 +-
 .../org/apache/drill/exec/schema/Field.java     |  10 -
 .../drill/exec/server/rest/QueryWrapper.java    |   2 +-
 .../drill/exec/store/AbstractRecordReader.java  |   4 +-
 .../apache/drill/exec/store/RecordReader.java   |   4 +-
 .../drill/exec/store/avro/AvroRecordReader.java |  24 +-
 .../sequencefile/SequenceFileRecordReader.java  |   8 +-
 .../compliant/CompliantTextRecordReader.java    |   6 +-
 .../text/compliant/RepeatedVarCharOutput.java   |   3 +-
 .../drill/exec/store/mock/MockRecordReader.java |   5 +-
 .../columnreaders/ParquetRecordReader.java      |  14 +-
 .../exec/store/parquet2/DrillParquetReader.java |   5 +-
 .../drill/exec/store/pojo/PojoRecordReader.java |   4 +-
 .../exec/store/text/DrillTextRecordReader.java  |   2 +-
 .../apache/drill/exec/util/BatchPrinter.java    |   6 +-
 .../org/apache/drill/exec/util/VectorUtil.java  |   8 +-
 .../drill/exec/vector/complex/FieldIdUtil.java  | 223 +++++++++++++++
 .../exec/vector/complex/fn/FieldSelection.java  |   2 +-
 .../java/org/apache/drill/DrillTestWrapper.java |  20 +-
 .../java/org/apache/drill/PlanTestBase.java     |   2 +-
 .../drill/exec/cache/TestWriteToDisk.java       |   8 +-
 .../exec/physical/impl/TestOptiqPlans.java      |  10 +-
 .../physical/impl/TestSimpleFragmentRun.java    |   4 +-
 .../exec/physical/impl/join/TestMergeJoin.java  |   8 +-
 .../impl/join/TestMergeJoinAdvanced.java        |   1 +
 .../impl/mergereceiver/TestMergingReceiver.java |   2 +-
 .../record/ExpressionTreeMaterializerTest.java  |   2 +-
 .../exec/record/TestMaterializedField.java      |   2 +-
 .../drill/exec/record/vector/TestLoad.java      |  10 +-
 .../exec/record/vector/TestValueVector.java     |   2 +-
 .../apache/drill/exec/server/TestBitRpc.java    |   2 +-
 .../store/parquet/ParquetResultListener.java    |  16 +-
 .../store/parquet/TestParquetPhysicalPlan.java  |   2 +-
 .../jdbc/impl/DrillColumnMetaDataList.java      |   2 +-
 .../drill/jdbc/DrillColumnMetaDataListTest.java |   4 +-
 exec/vector/pom.xml                             |   5 -
 .../src/main/codegen/includes/vv_imports.ftl    |   1 -
 .../main/codegen/templates/BasicTypeHelper.java |   6 +-
 .../codegen/templates/FixedValueVectors.java    |   4 +-
 .../codegen/templates/NullableValueVectors.java |   2 +-
 .../codegen/templates/RepeatedValueVectors.java |   2 +-
 .../src/main/codegen/templates/UnionVector.java |  23 +-
 .../templates/VariableLengthVectors.java        |   2 +-
 .../drill/exec/record/MaterializedField.java    | 199 ++++----------
 .../apache/drill/exec/record/TypedFieldId.java  | 273 -------------------
 .../drill/exec/vector/BaseValueVector.java      |   5 +-
 .../org/apache/drill/exec/vector/BitVector.java |   5 +-
 .../apache/drill/exec/vector/ObjectVector.java  |   3 +-
 .../apache/drill/exec/vector/ValueVector.java   |   3 +-
 .../drill/exec/vector/VectorDescriptor.java     |   3 +-
 .../apache/drill/exec/vector/ZeroVector.java    |   3 +-
 .../vector/complex/AbstractContainerVector.java |   8 +-
 .../exec/vector/complex/AbstractMapVector.java  |   2 +-
 .../drill/exec/vector/complex/FieldIdUtil.java  | 187 -------------
 .../drill/exec/vector/complex/ListVector.java   |  10 +-
 .../drill/exec/vector/complex/MapVector.java    |   8 +-
 .../exec/vector/complex/RepeatedListVector.java |  13 +-
 .../exec/vector/complex/RepeatedMapVector.java  |  32 +--
 .../vector/complex/impl/PromotableWriter.java   |   3 +-
 81 files changed, 750 insertions(+), 886 deletions(-)
----------------------------------------------------------------------
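
As the file list shows, the change is largely a move: TypedFieldId (273
lines) and FieldIdUtil migrate out of the exec/vector module into java-exec,
exec/vector/pom.xml drops the drill-logical dependency, and MaterializedField
is reworked to expose a String path (getPath()) instead of a SchemaPath. The
hunks that follow are the resulting mechanical updates at call sites.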


http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
index d093522..0af0009 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
@@ -145,7 +145,7 @@ public class HBaseRecordReader extends AbstractRecordReader implements DrillHBas
       // when creating reader (order of first appearance in query).
       for (SchemaPath column : getColumns()) {
         if (column.equals(ROW_KEY_PATH)) {
-          MaterializedField field = MaterializedField.create(column, ROW_KEY_TYPE);
+          MaterializedField field = MaterializedField.create(column.getAsNamePart().getName(), ROW_KEY_TYPE);
           rowKeyVector = outputMutator.addField(field, VarBinaryVector.class);
         } else {
           getOrCreateFamilyVector(column.getRootSegment().getPath(), false);
@@ -246,7 +246,7 @@ public class HBaseRecordReader extends AbstractRecordReader implements DrillHBas
       MapVector v = familyVectorMap.get(familyName);
       if(v == null) {
         SchemaPath column = SchemaPath.getSimplePath(familyName);
-        MaterializedField field = MaterializedField.create(column, COLUMN_FAMILY_TYPE);
+        MaterializedField field = MaterializedField.create(column.getAsNamePart().getName(), COLUMN_FAMILY_TYPE);
         v = outputMutator.addField(field, MapVector.class);
         if (allocateOnCreate) {
           v.allocateNew();

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
index 6fdca8f..79ca65f 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
@@ -270,14 +270,14 @@ public class HiveRecordReader extends AbstractRecordReader {
       final OptionManager options = fragmentContext.getOptions();
       for (int i = 0; i < selectedColumnNames.size(); i++) {
         MajorType type = HiveUtilities.getMajorTypeFromHiveTypeInfo(selectedColumnTypes.get(i), options);
-        MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(selectedColumnNames.get(i)), type);
+        MaterializedField field = MaterializedField.create(selectedColumnNames.get(i), type);
         Class vvClass = TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode());
         vectors.add(output.addField(field, vvClass));
       }
 
       for (int i = 0; i < selectedPartitionNames.size(); i++) {
         MajorType type = HiveUtilities.getMajorTypeFromHiveTypeInfo(selectedPartitionTypes.get(i), options);
-        MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(selectedPartitionNames.get(i)), type);
+        MaterializedField field = MaterializedField.create(selectedPartitionNames.get(i), type);
         Class vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode());
         pVectors.add(output.addField(field, vvClass));
       }

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java
index 54a61c8..cdbd3b4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java
@@ -253,7 +253,7 @@ public class DumpCat {
       for (final VectorWrapper w : vectorContainer) {
         final MaterializedField field = w.getValueVector().getField();
         System.out.println (String.format("name : %s, minor_type : %s, data_mode : %s",
-                                          field.toExpr(),
+                                          field.getPath(),
                                           field.getType().getMinorType().toString(),
                                           field.isNullable() ? "nullable":"non-nullable"
                           ));

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
index 73bebd1..c1cd469 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
@@ -70,7 +70,7 @@ public class ScanBatch implements CloseableRecordBatch {
   private final VectorContainer container = new VectorContainer();
 
   /** Fields' value vectors indexed by fields' keys. */
-  private final Map<MaterializedField.Key, ValueVector> fieldVectorMap =
+  private final Map<String, ValueVector> fieldVectorMap =
       Maps.newHashMap();
 
   private int recordCount;
@@ -281,7 +281,7 @@ public class ScanBatch implements CloseableRecordBatch {
       partitionVectors = Lists.newArrayList();
       for (int i : selectedPartitionColumns) {
         final MaterializedField field =
-            MaterializedField.create(SchemaPath.getSimplePath(partitionColumnDesignator + i),
+            MaterializedField.create(SchemaPath.getSimplePath(partitionColumnDesignator + i).getAsUnescapedPath(),
                                      Types.optional(MinorType.VARCHAR));
         final ValueVector v = mutator.addField(field, NullableVarCharVector.class);
         partitionVectors.add(v);
@@ -341,7 +341,7 @@ public class ScanBatch implements CloseableRecordBatch {
     public <T extends ValueVector> T addField(MaterializedField field,
                                               Class<T> clazz) throws SchemaChangeException {
       // Check if the field exists.
-      ValueVector v = fieldVectorMap.get(field.key());
+      ValueVector v = fieldVectorMap.get(field.getPath());
       if (v == null || v.getClass() != clazz) {
         // Field does not exist--add it to the map and the output container.
         v = TypeHelper.getNewVector(field, oContext.getAllocator(), callBack);
@@ -353,7 +353,7 @@ public class ScanBatch implements CloseableRecordBatch {
                   clazz.getSimpleName(), v.getClass().getSimpleName()));
         }
 
-        final ValueVector old = fieldVectorMap.put(field.key(), v);
+        final ValueVector old = fieldVectorMap.put(field.getPath(), v);
         if (old != null) {
           old.clear();
           container.remove(old);
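
One side effect, visible in ScanBatch above and again in RecordBatchLoader
below: vector maps are now keyed by the field's String path rather than by
MaterializedField.Key, so lookups reduce to plain string equality. A toy
sketch of the new keying (FieldMapSketch and the Object value are
illustrative stand-ins; the real code stores Drill ValueVectors):

  import java.util.HashMap;
  import java.util.Map;

  public class FieldMapSketch {
    public static void main(String[] args) {
      Map<String, Object> fieldVectorMap = new HashMap<>();
      String path = "columnA";                 // field.getPath() in the real code
      fieldVectorMap.put(path, new Object());  // was: fieldVectorMap.put(field.key(), v)
      System.out.println(fieldVectorMap.containsKey("columnA")); // true
    }
  }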

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
index 7f77ec5..149da25 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
@@ -23,6 +23,7 @@ import java.util.concurrent.TimeUnit;
 
 import javax.inject.Named;
 
+import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -66,7 +67,7 @@ public abstract class PriorityQueueTemplate implements PriorityQueue {
     BatchSchema schema = container.getSchema();
     VectorContainer newContainer = new VectorContainer();
     for (MaterializedField field : schema) {
-      int[] ids = container.getValueVectorId(field.getPath()).getFieldIds();
+      int[] ids = container.getValueVectorId(SchemaPath.getSimplePath(field.getPath())).getFieldIds();
       newContainer.add(container.getValueAccessorById(field.getValueClass(), ids).getValueVectors());
     }
     newContainer.buildSchema(BatchSchema.SelectionVectorMode.FOUR_BYTE);

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
index 7da8a16..e6c946c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WriterRecordBatch.java
@@ -157,9 +157,9 @@ public class WriterRecordBatch extends AbstractRecordBatch<Writer> {
       //   1. Fragment unique id.
       //   2. Summary: currently contains number of records written.
       final MaterializedField fragmentIdField =
-          MaterializedField.create(SchemaPath.getSimplePath("Fragment"), Types.required(MinorType.VARCHAR));
+          MaterializedField.create("Fragment", Types.required(MinorType.VARCHAR));
       final MaterializedField summaryField =
-          MaterializedField.create(SchemaPath.getSimplePath("Number of records written"),
+          MaterializedField.create("Number of records written",
               Types.required(MinorType.BIGINT));
 
       container.addOrGet(fragmentIdField);

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
index ffcb491..3595ecf 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
@@ -200,7 +200,7 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
         continue;
       }
 
-      final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
+      final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsNamePart().getName(), expr.getMajorType());
       ValueVector vv = TypeHelper.getNewVector(outputField, oContext.getAllocator());
 
       // add this group-by vector to the output container
@@ -224,7 +224,7 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
         continue;
       }
 
-      final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
+      final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsNamePart().getName(), expr.getMajorType());
       ValueVector vv = TypeHelper.getNewVector(outputField, oContext.getAllocator());
       aggrOutFieldIds[i] = container.add(vv);
 

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
index 8af1508..9ff874f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
@@ -263,7 +263,7 @@ public abstract class HashAggTemplate implements HashAggregator {
       FieldReference ref =
           new FieldReference("dummy", ExpressionPosition.UNKNOWN, valueFieldIds.get(0).getIntermediateType());
       for (TypedFieldId id : valueFieldIds) {
-        materializedValueFields[i++] = MaterializedField.create(ref, id.getIntermediateType());
+        materializedValueFields[i++] = MaterializedField.create(ref.getAsNamePart().getName(), id.getIntermediateType());
       }
     }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
index c084e39..19232f8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
@@ -273,7 +273,7 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
         continue;
       }
       keyExprs[i] = expr;
-      final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
+      final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
       final ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
       keyOutputIds[i] = container.add(vector);
     }
@@ -288,7 +288,7 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
         continue;
       }
 
-      final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
+      final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
       ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
       TypedFieldId id = container.add(vector);
       valueExprs[i] = new ValueVectorWriteExpression(id, expr, true);

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
index 08ccaa1..bd34d76 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
@@ -195,7 +195,7 @@ public class ChainedHashTable {
      */
     for (NamedExpression ne : htConfig.getKeyExprsBuild()) {
       LogicalExpression expr = keyExprsBuild[i];
-      final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
+      final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
       ValueVector vv = TypeHelper.getNewVector(outputField, allocator);
       htKeyFieldIds[i] = htContainerOrig.add(vv);
       i++;

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
index e0876af..7d9f568 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
@@ -467,7 +467,7 @@ public abstract class HashTableTemplate implements HashTable {
 
     threshold = (int) Math.ceil(tableSize * loadf);
 
-    dummyIntField = MaterializedField.create(SchemaPath.getSimplePath("dummy"), Types.required(MinorType.INT));
+    dummyIntField = MaterializedField.create("dummy", Types.required(MinorType.INT));
 
     startIndices = allocMetadataVector(tableSize, EMPTY_SLOT);
 

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
index f1d6dfc..d8211fd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
@@ -25,6 +25,7 @@ import org.apache.drill.common.expression.ErrorCollector;
 import org.apache.drill.common.expression.ErrorCollectorImpl;
 import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.data.NamedExpression;
 import org.apache.drill.exec.exception.ClassTransformationException;
 import org.apache.drill.exec.exception.OutOfMemoryException;
@@ -264,12 +265,12 @@ public class FlattenRecordBatch extends AbstractSingleRecordBatch<FlattenPOP> {
 
     TransferPair tp = null;
     if (flattenField instanceof RepeatedMapVector) {
-      tp = ((RepeatedMapVector)flattenField).getTransferPairToSingleMap(reference, oContext.getAllocator());
+      tp = ((RepeatedMapVector)flattenField).getTransferPairToSingleMap(reference.getAsNamePart().getName(), oContext.getAllocator());
     } else {
       final ValueVector vvIn = RepeatedValueVector.class.cast(flattenField).getDataVector();
       // vvIn may be null because of fast schema return for repeated list vectors
       if (vvIn != null) {
-        tp = vvIn.getTransferPair(reference, oContext.getAllocator());
+        tp = vvIn.getTransferPair(reference.getAsNamePart().getName(), oContext.getAllocator());
       }
     }
     return tp;
@@ -360,10 +361,10 @@ public class FlattenRecordBatch extends AbstractSingleRecordBatch<FlattenPOP> {
 
     List<NamedExpression> exprs = Lists.newArrayList();
     for (MaterializedField field : incoming.getSchema()) {
-      if (field.getPath().equals(popConfig.getColumn())) {
+      if (field.getPath().equals(popConfig.getColumn().getAsUnescapedPath())) {
         continue;
       }
-      exprs.add(new NamedExpression(field.getPath(), new FieldReference(field.getPath())));
+      exprs.add(new NamedExpression(SchemaPath.getSimplePath(field.getPath()), new FieldReference(field.getPath())));
     }
     return exprs;
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
index 64cfad0..897870c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionRecordBatch.java
@@ -150,7 +150,7 @@ public class OrderedPartitionRecordBatch extends AbstractRecordBatch<OrderedPart
     this.minorFragmentSampleCount = cache.getCounter(mapKey);
 
     SchemaPath outputPath = popConfig.getRef();
-    MaterializedField outputField = MaterializedField.create(outputPath, Types.required(TypeProtos.MinorType.INT));
+    MaterializedField outputField = MaterializedField.create(outputPath.getAsNamePart().getName(), Types.required(TypeProtos.MinorType.INT));
     this.partitionKeyVector = (IntVector) TypeHelper.getNewVector(outputField, oContext.getAllocator());
 
   }
@@ -430,7 +430,7 @@ public class OrderedPartitionRecordBatch extends AbstractRecordBatch<OrderedPart
       TypeProtos.MajorType.Builder builder = TypeProtos.MajorType.newBuilder().mergeFrom(expr.getMajorType())
           .clearMode().setMode(TypeProtos.DataMode.REQUIRED);
       TypeProtos.MajorType newType = builder.build();
-      MaterializedField outputField = MaterializedField.create(schemaPath, newType);
+      MaterializedField outputField = MaterializedField.create(schemaPath.getAsUnescapedPath(), newType);
       if (collector.hasErrors()) {
         throw new SchemaChangeException(String.format(
             "Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
index 38d08b6..85844c0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
@@ -21,6 +21,7 @@ import java.util.concurrent.BlockingDeque;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.LinkedBlockingDeque;
 
+import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.expr.TypeHelper;
@@ -99,9 +100,9 @@ public class ProducerConsumerBatch extends AbstractRecordBatch {
         final MaterializedField field = schema.getColumn(i);
         final MajorType type = field.getType();
         final ValueVector vOut = container.getValueAccessorById(TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()),
-                container.getValueVectorId(field.getPath()).getFieldIds()).getValueVector();
+                container.getValueVectorId(SchemaPath.getSimplePath(field.getPath())).getFieldIds()).getValueVector();
         final ValueVector vIn = newContainer.getValueAccessorById(TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()),
-                newContainer.getValueVectorId(field.getPath()).getFieldIds()).getValueVector();
+                newContainer.getValueVectorId(SchemaPath.getSimplePath(field.getPath())).getFieldIds()).getValueVector();
         final TransferPair tp = vIn.makeTransferPair(vOut);
         tp.transfer();
       }

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
index 56bfa79..5ba7b5a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
@@ -325,7 +325,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
                 continue;
               }
               final FieldReference ref = new FieldReference(name);
-              final ValueVector vvOut = container.addOrGet(MaterializedField.create(ref, vvIn.getField().getType()), callBack);
+              final ValueVector vvOut = container.addOrGet(MaterializedField.create(ref.getAsNamePart().getName(), vvIn.getField().getType()), callBack);
               final TransferPair tp = vvIn.makeTransferPair(vvOut);
               transfers.add(tp);
             }
@@ -333,7 +333,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
             int k = 0;
             for (final VectorWrapper<?> wrapper : incoming) {
               final ValueVector vvIn = wrapper.getValueVector();
-              final SchemaPath originalPath = vvIn.getField().getPath();
+              final SchemaPath originalPath = SchemaPath.getSimplePath(vvIn.getField().getPath());
               if (k > result.outputNames.size()-1) {
                 assert false;
               }
@@ -350,7 +350,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
               final MaterializedField outputField = MaterializedField.create(name, expr.getMajorType());
               final ValueVector vv = container.addOrGet(outputField, callBack);
               allocationVectors.add(vv);
-              final TypedFieldId fid = container.getValueVectorId(outputField.getPath());
+              final TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
               final ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
               final HoldingContainer hc = cg.addExpr(write, false);
             }
@@ -400,7 +400,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
         Preconditions.checkNotNull(incoming);
 
         final FieldReference ref = getRef(namedExpression);
-        final ValueVector vvOut = container.addOrGet(MaterializedField.create(ref, vectorRead.getMajorType()), callBack);
+        final ValueVector vvOut = container.addOrGet(MaterializedField.create(ref.getAsUnescapedPath(), vectorRead.getMajorType()), callBack);
         final TransferPair tp = vvIn.makeTransferPair(vvOut);
         transfers.add(tp);
         transferFieldIds.add(vectorRead.getFieldId().getFieldIds()[0]);
@@ -421,7 +421,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
         // need to do evaluation.
         final ValueVector vector = container.addOrGet(outputField, callBack);
         allocationVectors.add(vector);
-        final TypedFieldId fid = container.getValueVectorId(outputField.getPath());
+        final TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
         final boolean useSetSafe = !(vector instanceof FixedWidthVector);
         final ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, useSetSafe);
         final HoldingContainer hc = cg.addExpr(write, false);
@@ -461,7 +461,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
     final List<NamedExpression> exprs = Lists.newArrayList();
     for (final MaterializedField field : incoming.getSchema()) {
       if (Types.isComplex(field.getType()) || Types.isRepeated(field.getType())) {
-        final LogicalExpression convertToJson = FunctionCallFactory.createConvert(ConvertExpression.CONVERT_TO, "JSON", field.getPath(), ExpressionPosition.UNKNOWN);
+        final LogicalExpression convertToJson = FunctionCallFactory.createConvert(ConvertExpression.CONVERT_TO, "JSON", SchemaPath.getSimplePath(field.getPath()), ExpressionPosition.UNKNOWN);
         final String castFuncName = CastFunctions.getCastFunc(MinorType.VARCHAR);
         final List<LogicalExpression> castArgs = Lists.newArrayList();
         castArgs.add(convertToJson);  //input_expr
@@ -474,7 +474,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
         final FunctionCall castCall = new FunctionCall(castFuncName, castArgs, ExpressionPosition.UNKNOWN);
         exprs.add(new NamedExpression(castCall, new FieldReference(field.getPath())));
       } else {
-        exprs.add(new NamedExpression(field.getPath(), new FieldReference(field.getPath())));
+        exprs.add(new NamedExpression(SchemaPath.getSimplePath(field.getPath()), new FieldReference(field.getPath())));
       }
     }
     return exprs;
@@ -590,7 +590,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
       result.outputNames = Lists.newArrayList();
       for(final VectorWrapper<?> wrapper : incoming) {
         final ValueVector vvIn = wrapper.getValueVector();
-        final String name = vvIn.getField().getPath().getRootSegment().getPath();
+        final String name = vvIn.getField().getPath();
 
         // add the prefix to the incoming column name
         final String newName = prefix + StarColumnHelper.PREFIX_DELIMITER + name;
@@ -610,7 +610,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
 
         for (final VectorWrapper<?> wrapper : incoming) {
           final ValueVector vvIn = wrapper.getValueVector();
-          final String incomingName = vvIn.getField().getPath().getRootSegment().getPath();
+          final String incomingName = vvIn.getField().getPath();
           // get the prefix of the name
           final String[] nameComponents = incomingName.split(StarColumnHelper.PREFIX_DELIMITER, 2);
           // if incoming valuevector does not have a prefix, ignore it since this expression is not referencing it
@@ -633,7 +633,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
         if (exprContainsStar) {
           for (final VectorWrapper<?> wrapper : incoming) {
             final ValueVector vvIn = wrapper.getValueVector();
-            final String incomingName = vvIn.getField().getPath().getRootSegment().getPath();
+            final String incomingName = vvIn.getField().getPath();
             if (refContainsStar) {
               addToResultMaps(incomingName, result, true); // allow dups since this is likely top-level project
             } else {
@@ -656,7 +656,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
       result.outputNames = Lists.newArrayList();
       for (final VectorWrapper<?> wrapper : incoming) {
         final ValueVector vvIn = wrapper.getValueVector();
-        final String incomingName = vvIn.getField().getPath().getRootSegment().getPath();
+        final String incomingName = vvIn.getField().getPath();
         addToResultMaps(incomingName, result, true); // allow dups since this is likely top-level project
       }
     }
@@ -677,7 +677,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
 
       for (final VectorWrapper<?> wrapper : incoming) {
         final ValueVector vvIn = wrapper.getValueVector();
-        final String name = vvIn.getField().getPath().getRootSegment().getPath();
+        final String name = vvIn.getField().getPath();
         final String[] components = name.split(StarColumnHelper.PREFIX_DELIMITER, 2);
         if (components.length <= 1)  {
           k++;
@@ -714,7 +714,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
       result.outputNames = Lists.newArrayList();
       for (final VectorWrapper<?> wrapper : incoming) {
         final ValueVector vvIn = wrapper.getValueVector();
-        final String incomingName = vvIn.getField().getPath().getRootSegment().getPath();
+        final String incomingName = vvIn.getField().getPath();
         if (expr.getPath().equalsIgnoreCase(incomingName)) {  // case insensitive matching of field name.
           final String newName = ref.getPath();
           addToResultMaps(newName, result, true);
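
Also worth noting in the ProjectRecordBatch hunks above: call sites that
previously unwrapped the root segment with
vvIn.getField().getPath().getRootSegment().getPath() now call
vvIn.getField().getPath() directly, since getPath() already returns the
plain String name after this change.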

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionAllRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionAllRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionAllRecordBatch.java
index 55b9afe..a8b8aeb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionAllRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/union/UnionAllRecordBatch.java
@@ -167,9 +167,9 @@ public class UnionAllRecordBatch extends AbstractRecordBatch<UnionAll> {
     for(VectorWrapper<?> vw : current) {
       ValueVector vvIn = vw.getValueVector();
       // get the original input column names
-      SchemaPath inputPath = vvIn.getField().getPath();
+      SchemaPath inputPath = SchemaPath.getSimplePath(vvIn.getField().getPath());
       // get the renamed column names
-      SchemaPath outputPath = outputFields.get(index).getPath();
+      SchemaPath outputPath = SchemaPath.getSimplePath(outputFields.get(index).getPath());
 
       final ErrorCollector collector = new ErrorCollectorImpl();
       // According to input data names, Minortypes, Datamodes, choose to
@@ -185,7 +185,7 @@ public class UnionAllRecordBatch extends AbstractRecordBatch<UnionAll> {
           }
 
           ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) expr;
-          ValueVector vvOut = container.addOrGet(MaterializedField.create(outputPath, vectorRead.getMajorType()));
+          ValueVector vvOut = container.addOrGet(MaterializedField.create(outputPath.getAsUnescapedPath(), vectorRead.getMajorType()));
           TransferPair tp = vvIn.makeTransferPair(vvOut);
           transfers.add(tp);
         // Copy data in order to rename the column
@@ -195,10 +195,10 @@ public class UnionAllRecordBatch extends AbstractRecordBatch<UnionAll> {
             throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
           }
 
-          MaterializedField outputField = MaterializedField.create(outputPath, expr.getMajorType());
+          MaterializedField outputField = MaterializedField.create(outputPath.getAsUnescapedPath(), expr.getMajorType());
           ValueVector vv = container.addOrGet(outputField, callBack);
           allocationVectors.add(vv);
-          TypedFieldId fid = container.getValueVectorId(outputField.getPath());
+          TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
           ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
           cg.addExpr(write);
         }
@@ -228,10 +228,10 @@ public class UnionAllRecordBatch extends AbstractRecordBatch<UnionAll> {
           }
         }
 
-        final MaterializedField outputField = MaterializedField.create(outputPath, expr.getMajorType());
+        final MaterializedField outputField = MaterializedField.create(outputPath.getAsUnescapedPath(), expr.getMajorType());
         ValueVector vector = container.addOrGet(outputField, callBack);
         allocationVectors.add(vector);
-        TypedFieldId fid = container.getValueVectorId(outputField.getPath());
+        TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
 
         boolean useSetSafe = !(vector instanceof FixedWidthVector);
         ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, useSetSafe);

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/window/WindowFunction.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/window/WindowFunction.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/window/WindowFunction.java
index 1c71297..a983df5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/window/WindowFunction.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/window/WindowFunction.java
@@ -136,7 +136,7 @@ public abstract class WindowFunction {
       }
 
       // add corresponding ValueVector to container
-      final MaterializedField output = MaterializedField.create(ne.getRef(), aggregate.getMajorType());
+      final MaterializedField output = MaterializedField.create(ne.getRef().getAsNamePart().getName(), aggregate.getMajorType());
       batch.addOrGet(output).allocateNew();
       TypedFieldId outputId = batch.getValueVectorId(ne.getRef());
       writeAggregationToOutput = new ValueVectorWriteExpression(outputId, aggregate, true);
@@ -205,7 +205,7 @@ public abstract class WindowFunction {
     @Override
     boolean materialize(final NamedExpression ne, final VectorContainer batch, FunctionLookupContext registry)
         throws SchemaChangeException {
-      final MaterializedField outputField = MaterializedField.create(ne.getRef(), getMajorType());
+      final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsNamePart().getName(), getMajorType());
       batch.addOrGet(outputField).allocateNew();
       fieldId = batch.getValueVectorId(ne.getRef());
       return true;
@@ -256,7 +256,7 @@ public abstract class WindowFunction {
         throws SchemaChangeException {
       final FunctionCall call = (FunctionCall) ne.getExpr();
       final LogicalExpression argument = call.args.get(0);
-      final MaterializedField outputField = MaterializedField.create(ne.getRef(), argument.getMajorType());
+      final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsNamePart().getName(), argument.getMajorType());
       batch.addOrGet(outputField).allocateNew();
       fieldId = batch.getValueVectorId(ne.getRef());
 
@@ -310,7 +310,7 @@ public abstract class WindowFunction {
       }
 
       // add corresponding ValueVector to container
-      final MaterializedField output = MaterializedField.create(ne.getRef(), majorType);
+      final MaterializedField output = MaterializedField.create(ne.getRef().getAsNamePart().getName(), majorType);
       batch.addOrGet(output).allocateNew();
       final TypedFieldId outputId =  batch.getValueVectorId(ne.getRef());
 
@@ -358,7 +358,7 @@ public abstract class WindowFunction {
       }
 
       // add lag output ValueVector to container
-      final MaterializedField output = MaterializedField.create(ne.getRef(), majorType);
+      final MaterializedField output = MaterializedField.create(ne.getRef().getAsNamePart().getName(), majorType);
       batch.addOrGet(output).allocateNew();
       final TypedFieldId outputId = batch.getValueVectorId(ne.getRef());
 
@@ -422,7 +422,7 @@ public abstract class WindowFunction {
         return false;
       }
 
-      final MaterializedField output = MaterializedField.create(ne.getRef(), input.getMajorType());
+      final MaterializedField output = MaterializedField.create(ne.getRef().getAsNamePart().getName(), input.getMajorType());
       batch.addOrGet(output).allocateNew();
       final TypedFieldId outputId = batch.getValueVectorId(ne.getRef());
 
@@ -483,7 +483,7 @@ public abstract class WindowFunction {
         return false;
       }
 
-      final MaterializedField output = MaterializedField.create(ne.getRef(), input.getMajorType());
+      final MaterializedField output = MaterializedField.create(ne.getRef().getAsNamePart().getName(), input.getMajorType());
       batch.addOrGet(output).allocateNew();
       final TypedFieldId outputId = batch.getValueVectorId(ne.getRef());
 

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
index b2aa305..7797339 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
@@ -31,6 +31,7 @@ import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.expression.ErrorCollector;
 import org.apache.drill.common.expression.ErrorCollectorImpl;
 import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.data.Order.Ordering;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.compile.sig.GeneratorMapping;
@@ -628,7 +629,7 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
       for (BatchGroup group : batchGroupList) {
         vectors[i++] = group.getValueAccessorById(
             field.getValueClass(),
-            group.getValueVectorId(field.getPath()).getFieldIds())
+            group.getValueVectorId(SchemaPath.getSimplePath(field.getPath())).getFieldIds())
             .getValueVector();
       }
       cont.add(vectors);

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
index 5ef35d3..6a365e8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
@@ -235,7 +235,7 @@ public abstract class PruneScanRule extends StoragePluginOptimizerRule {
         for (int partitionColumnIndex : BitSets.toIter(partitionColumnBitSet)) {
           SchemaPath column = SchemaPath.getSimplePath(fieldNameMap.get(partitionColumnIndex));
           MajorType type = descriptor.getVectorType(column, settings);
-          MaterializedField field = MaterializedField.create(column, type);
+          MaterializedField field = MaterializedField.create(column.getAsUnescapedPath(), type);
           ValueVector v = TypeHelper.getNewVector(field, allocator);
           v.allocateNew();
           vectors[partitionColumnIndex] = v;

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
index 13164b9..84c616b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
@@ -83,10 +83,10 @@ public class RecordBatchLoader implements VectorAccessible, Iterable<VectorWrapp
     // the schema has changed since the previous call.
 
     // Set up to recognize previous fields that no longer exist.
-    final Map<MaterializedField, ValueVector> oldFields = Maps.newHashMap();
+    final Map<String, ValueVector> oldFields = Maps.newHashMap();
     for(final VectorWrapper<?> wrapper : container) {
       final ValueVector vector = wrapper.getValueVector();
-      oldFields.put(vector.getField(), vector);
+      oldFields.put(vector.getField().getPath(), vector);
     }
 
     final VectorContainer newVectors = new VectorContainer();
@@ -95,7 +95,7 @@ public class RecordBatchLoader implements VectorAccessible, Iterable<VectorWrapp
       int bufOffset = 0;
       for(final SerializedField field : fields) {
         final MaterializedField fieldDef = MaterializedField.create(field);
-        ValueVector vector = oldFields.remove(fieldDef);
+        ValueVector vector = oldFields.remove(fieldDef.getPath());
 
         if (vector == null) {
           // Field did not exist previously--is schema change.

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java
index e13e742..d6a8a40 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java
@@ -54,7 +54,7 @@ public class SchemaUtil {
 
     for (BatchSchema s : schemas) {
       for (MaterializedField field : s) {
-        SchemaPath path = field.getPath();
+        SchemaPath path = SchemaPath.getSimplePath(field.getPath());
         Set<MinorType> currentTypes = typeSetMap.get(path);
         if (currentTypes == null) {
           currentTypes = Sets.newHashSet();
@@ -83,10 +83,10 @@ public class SchemaUtil {
         for (MinorType t : types) {
           builder.addSubType(t);
         }
-        MaterializedField field = MaterializedField.create(path, builder.build());
+        MaterializedField field = MaterializedField.create(path.getAsUnescapedPath(), builder.build());
         fields.add(field);
       } else {
-        MaterializedField field = MaterializedField.create(path, Types.optional(types.iterator().next()));
+        MaterializedField field = MaterializedField.create(path.getAsUnescapedPath(), Types.optional(types.iterator().next()));
         fields.add(field);
       }
     }
@@ -153,7 +153,7 @@ public class SchemaUtil {
   public static VectorContainer coerceContainer(VectorAccessible in, BatchSchema toSchema, OperatorContext context) {
     int recordCount = in.getRecordCount();
     boolean isHyper = false;
-    Map<SchemaPath, Object> vectorMap = Maps.newHashMap();
+    Map<String, Object> vectorMap = Maps.newHashMap();
     for (VectorWrapper w : in) {
       if (w.isHyper()) {
         isHyper = true;

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java
new file mode 100644
index 0000000..a322f72
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java
@@ -0,0 +1,273 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.record;
+
+import java.util.Arrays;
+
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.drill.common.expression.PathSegment;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.exec.expr.BasicTypeHelper;
+import org.apache.drill.exec.vector.ValueVector;
+
+import com.carrotsearch.hppc.IntArrayList;
+import com.google.common.base.Preconditions;
+
+public class TypedFieldId {
+  final MajorType finalType;
+  final MajorType secondaryFinal;
+  final MajorType intermediateType;
+  final int[] fieldIds;
+  final boolean isHyperReader;
+  final boolean isListVector;
+  final PathSegment remainder;
+
+  public TypedFieldId(MajorType type, int... fieldIds) {
+    this(type, type, type, false, null, fieldIds);
+  }
+
+  public TypedFieldId(MajorType type, IntArrayList breadCrumb, PathSegment remainder) {
+    this(type, type, type, false, remainder, breadCrumb.toArray());
+  }
+
+  public TypedFieldId(MajorType type, boolean isHyper, int... fieldIds) {
+    this(type, type, type, isHyper, null, fieldIds);
+  }
+
+  public TypedFieldId(MajorType intermediateType, MajorType secondaryFinal, MajorType finalType, boolean isHyper, PathSegment remainder, int... fieldIds) {
+    this(intermediateType, secondaryFinal, finalType, isHyper, false, remainder, fieldIds);
+  }
+
+  public TypedFieldId(MajorType intermediateType, MajorType secondaryFinal, MajorType finalType, boolean isHyper, boolean isListVector, PathSegment remainder, int... fieldIds) {
+    super();
+    this.intermediateType = intermediateType;
+    this.finalType = finalType;
+    this.secondaryFinal = secondaryFinal;
+    this.fieldIds = fieldIds;
+    this.isHyperReader = isHyper;
+    this.isListVector = isListVector;
+    this.remainder = remainder;
+  }
+
+  public TypedFieldId cloneWithChild(int id) {
+    int[] fieldIds = ArrayUtils.add(this.fieldIds, id);
+    return new TypedFieldId(intermediateType, secondaryFinal, finalType, isHyperReader, remainder, fieldIds);
+  }
+
+  public PathSegment getLastSegment() {
+    if (remainder == null) {
+      return null;
+    }
+    PathSegment seg = remainder;
+    while (seg.getChild() != null) {
+      seg = seg.getChild();
+    }
+    return seg;
+  }
+
+  public TypedFieldId cloneWithRemainder(PathSegment remainder) {
+    return new TypedFieldId(intermediateType, secondaryFinal, finalType, isHyperReader, isListVector, remainder, fieldIds);
+  }
+
+  public boolean hasRemainder() {
+    return remainder != null;
+  }
+
+  public PathSegment getRemainder() {
+    return remainder;
+  }
+
+  public boolean isHyperReader() {
+    return isHyperReader;
+  }
+
+  public boolean isListVector() {
+    return isListVector;
+  }
+
+  public MajorType getIntermediateType() {
+    return intermediateType;
+  }
+
+  public Class<? extends ValueVector> getIntermediateClass() {
+    return (Class<? extends ValueVector>) BasicTypeHelper.getValueVectorClass(intermediateType.getMinorType(),
+        intermediateType.getMode());
+  }
+
+  public MajorType getFinalType() {
+    return finalType;
+  }
+
+  public int[] getFieldIds() {
+    return fieldIds;
+  }
+
+  public MajorType getSecondaryFinal() {
+    return secondaryFinal;
+  }
+
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  public static class Builder {
+    final IntArrayList ids = new IntArrayList();
+    MajorType finalType;
+    MajorType intermediateType;
+    MajorType secondaryFinal;
+    PathSegment remainder;
+    boolean hyperReader = false;
+    boolean withIndex = false;
+    boolean isListVector = false;
+
+    public Builder addId(int id) {
+      ids.add(id);
+      return this;
+    }
+
+    public Builder withIndex() {
+      withIndex = true;
+      return this;
+    }
+
+    public Builder remainder(PathSegment remainder) {
+      this.remainder = remainder;
+      return this;
+    }
+
+    public Builder hyper() {
+      this.hyperReader = true;
+      return this;
+    }
+
+    public Builder listVector() {
+      this.isListVector = true;
+      return this;
+    }
+
+    public Builder finalType(MajorType finalType) {
+      this.finalType = finalType;
+      return this;
+    }
+
+    public Builder secondaryFinal(MajorType secondaryFinal) {
+      this.secondaryFinal = secondaryFinal;
+      return this;
+    }
+
+    public Builder intermediateType(MajorType intermediateType) {
+      this.intermediateType = intermediateType;
+      return this;
+    }
+
+    public TypedFieldId build() {
+      Preconditions.checkNotNull(finalType);
+
+      // Default the intermediate and secondary types to the final type when unset.
+      if (intermediateType == null) {
+        intermediateType = finalType;
+      }
+      if (secondaryFinal == null) {
+        secondaryFinal = finalType;
+      }
+
+      MajorType actualFinalType = finalType;
+
+      // Two mode conversions are left disabled here (TODO: there is a known bug
+      // with these conversions):
+      // 1. When this id has an index and intermediateType == finalType, the output
+      //    could switch to required mode: finalType.toBuilder().setMode(DataMode.REQUIRED).build()
+      // 2. When access is indirect (intermediateType != finalType), the final type
+      //    could switch to nullable, as offsets may be null:
+      //    finalType.toBuilder().setMode(DataMode.OPTIONAL).build()
+
+      return new TypedFieldId(intermediateType, secondaryFinal, actualFinalType, hyperReader, isListVector, remainder, ids.toArray());
+    }
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + Arrays.hashCode(fieldIds);
+    result = prime * result + ((finalType == null) ? 0 : finalType.hashCode());
+    result = prime * result + ((intermediateType == null) ? 0 : intermediateType.hashCode());
+    result = prime * result + (isHyperReader ? 1231 : 1237);
+    result = prime * result + (isListVector ? 1231 : 1237);
+    result = prime * result + ((remainder == null) ? 0 : remainder.hashCode());
+    result = prime * result + ((secondaryFinal == null) ? 0 : secondaryFinal.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    TypedFieldId other = (TypedFieldId) obj;
+    if (!Arrays.equals(fieldIds, other.fieldIds)) {
+      return false;
+    }
+    if (finalType == null) {
+      if (other.finalType != null) {
+        return false;
+      }
+    } else if (!finalType.equals(other.finalType)) {
+      return false;
+    }
+    if (intermediateType == null) {
+      if (other.intermediateType != null) {
+        return false;
+      }
+    } else if (!intermediateType.equals(other.intermediateType)) {
+      return false;
+    }
+    if (isHyperReader != other.isHyperReader) {
+      return false;
+    }
+    if (isListVector != other.isListVector) {
+      return false;
+    }
+    if (remainder == null) {
+      if (other.remainder != null) {
+        return false;
+      }
+    } else if (!remainder.equals(other.remainder)) {
+      return false;
+    }
+    if (secondaryFinal == null) {
+      if (other.secondaryFinal != null) {
+        return false;
+      }
+    } else if (!secondaryFinal.equals(other.secondaryFinal)) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public String toString() {
+    final int maxLen = 10;
+    return "TypedFieldId [fieldIds="
+        + (fieldIds != null ? Arrays.toString(Arrays.copyOf(fieldIds, Math.min(fieldIds.length, maxLen))) : null)
+        + ", remainder=" + remainder + "]";
+  }
+
+}
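
For orientation, a minimal sketch of how the class added above is assembled through its Builder; the positions and types below are illustrative, not taken from this commit:

    import org.apache.drill.common.types.TypeProtos.MinorType;
    import org.apache.drill.common.types.Types;
    import org.apache.drill.exec.record.TypedFieldId;

    public class TypedFieldIdSketch {
      public static void main(String[] args) {
        // Address the value vector at position 2 inside the map vector at
        // position 0; the intermediate type describes the map hop, while the
        // final type describes the leaf (secondaryFinal defaults to it).
        TypedFieldId id = TypedFieldId.newBuilder()
            .addId(0)
            .addId(2)
            .intermediateType(Types.required(MinorType.MAP))
            .finalType(Types.optional(MinorType.INT))
            .build();
        System.out.println(id);  // TypedFieldId [fieldIds=[0, 2], remainder=null]
      }
    }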

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
index 33351ba..663cf22 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
@@ -116,7 +116,7 @@ public class VectorContainer implements Iterable<VectorWrapper<?>>, VectorAccess
   }
 
   public <T extends ValueVector> T addOrGet(final MaterializedField field, final SchemaChangeCallBack callBack) {
-    final TypedFieldId id = getValueVectorId(field.getPath());
+    final TypedFieldId id = getValueVectorId(SchemaPath.getSimplePath(field.getPath()));
     final ValueVector vector;
     final Class<?> clazz = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getType().getMode());
     if (id != null) {
@@ -183,7 +183,7 @@ public class VectorContainer implements Iterable<VectorWrapper<?>>, VectorAccess
     // Sort list of VectorWrapper alphabetically based on SchemaPath.
     Collections.sort(canonicalWrappers, new Comparator<VectorWrapper<?>>() {
       public int compare(VectorWrapper<?> v1, VectorWrapper<?> v2) {
-        return v1.getField().getPath().toExpr().compareTo(v2.getField().getPath().toExpr());
+        return v1.getField().getPath().compareTo(v2.getField().getPath());
       }
     });
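
Both hunks above follow from the same API shift: MaterializedField.getPath() now returns a plain String rather than a SchemaPath, so containers compare paths directly and lift them back into SchemaPath only where the expression machinery needs one. A minimal sketch (the column names are illustrative):

    import org.apache.drill.common.expression.SchemaPath;

    public class PathSketch {
      public static void main(String[] args) {
        String path = "employee_id";            // what getPath() now yields
        SchemaPath schemaPath = SchemaPath.getSimplePath(path);
        // Plain Strings sort naturally, which is all the canonicalizing
        // comparator above requires.
        System.out.println(path.compareTo("full_name") < 0);  // true
      }
    }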
 

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Field.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Field.java b/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Field.java
index 35732ac..fd5698a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Field.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/schema/Field.java
@@ -38,16 +38,6 @@ public abstract class Field {
     this.parentSchema = parentSchema;
   }
 
-  public MaterializedField getAsMaterializedField(FieldReference ref) {
-    assert (ref!=null);
-
-    return MaterializedField.create(ref.getChild(getFieldName()), fieldType);
-  }
-
-  public MaterializedField getAsMaterializedField() {
-      return MaterializedField.create(SchemaPath.getCompoundPath(getFieldName().split("\\.")), fieldType);
-  }
-
   public abstract String getFieldName();
 
   public String getFullFieldName() {

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryWrapper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryWrapper.java
index f67c002..6784b82 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryWrapper.java
@@ -147,7 +147,7 @@ public class QueryWrapper {
             // TODO:  Clean:  DRILL-2933:  That load(...) no longer throws
             // SchemaChangeException, so check/clean catch clause below.
             for (int i = 0; i < loader.getSchema().getFieldCount(); ++i) {
-              columns.add(loader.getSchema().getColumn(i).getPath().getAsUnescapedPath());
+              columns.add(loader.getSchema().getColumn(i).getPath());
             }
             for (int i = 0; i < rows; ++i) {
               final Map<String, String> record = Maps.newHashMap();

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractRecordReader.java
index b25eae1..2ddcbaa 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractRecordReader.java
@@ -26,7 +26,7 @@ import org.apache.drill.common.expression.PathSegment;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.physical.base.GroupScan;
-import org.apache.drill.exec.record.MaterializedField.Key;
+import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.vector.ValueVector;
 
 import com.google.common.base.Preconditions;
@@ -102,7 +102,7 @@ public abstract class AbstractRecordReader implements RecordReader {
   }
 
   @Override
-  public void allocate(Map<Key, ValueVector> vectorMap) throws OutOfMemoryException {
+  public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
     for (final ValueVector v : vectorMap.values()) {
       v.allocateNew();
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java
index 4df1772..11a8403 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java
@@ -23,7 +23,7 @@ import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.impl.OutputMutator;
-import org.apache.drill.exec.record.MaterializedField.Key;
+import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.vector.ValueVector;
 
 public interface RecordReader extends AutoCloseable {
@@ -41,7 +41,7 @@ public interface RecordReader extends AutoCloseable {
    */
   void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException;
 
-  void allocate(Map<Key, ValueVector> vectorMap) throws OutOfMemoryException;
+  void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException;
 
   /**
    * Increments this record reader forward, writing via the provided output
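
As a sketch of the revised contract: the vector map handed to allocate() is now keyed by each field's String path instead of MaterializedField.Key, and an implementation only iterates the values, as AbstractRecordReader does above:

    import java.util.Map;
    import org.apache.drill.exec.exception.OutOfMemoryException;
    import org.apache.drill.exec.vector.ValueVector;

    public class AllocateSketch {
      // Mirrors the new allocate(Map<String, ValueVector>) shape shown above.
      static void allocateAll(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
        for (final ValueVector v : vectorMap.values()) {
          v.allocateNew();  // keys (field paths) play no role in allocation
        }
      }
    }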

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java
index 42de8b0..84a584f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java
@@ -47,6 +47,7 @@ import org.apache.drill.exec.physical.impl.OutputMutator;
 import org.apache.drill.exec.store.AbstractRecordReader;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.util.ImpersonationUtil;
+import org.apache.drill.exec.vector.complex.fn.FieldSelection;
 import org.apache.drill.exec.vector.complex.impl.MapOrListWriterImpl;
 import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
 import org.apache.hadoop.fs.FileSystem;
@@ -69,6 +70,7 @@ public class AvroRecordReader extends AbstractRecordReader {
   private final Path hadoop;
   private final long start;
   private final long end;
+  private final FieldSelection fieldSelection;
   private DrillBuf buffer;
   private VectorContainerWriter writer;
 
@@ -97,6 +99,7 @@ public class AvroRecordReader extends AbstractRecordReader {
     this.opUserName = userName;
     this.queryUserName = fragmentContext.getQueryUserName();
     setColumns(projectedColumns);
+    this.fieldSelection = FieldSelection.getFieldSelection(projectedColumns);
   }
 
   private DataFileReader getReader(final Path hadoop, final FileSystem fs) throws ExecutionSetupException {
@@ -168,14 +171,14 @@ public class AvroRecordReader extends AbstractRecordReader {
 
     switch (type) {
       case RECORD:
-        process(container, schema, null, new MapOrListWriterImpl(writer.rootAsMap()));
+        process(container, schema, null, new MapOrListWriterImpl(writer.rootAsMap()), fieldSelection);
         break;
       default:
         throw new DrillRuntimeException("Root object must be record type. Found: " + type);
     }
   }
 
-  private void process(final Object value, final Schema schema, final String fieldName, MapOrListWriterImpl writer) {
+  private void process(final Object value, final Schema schema, final String fieldName, MapOrListWriterImpl writer, FieldSelection fieldSelection) {
     if (value == null) {
       return;
     }
@@ -194,7 +197,7 @@ public class AvroRecordReader extends AbstractRecordReader {
               _writer = (MapOrListWriterImpl) writer.map(field.name());
           }
 
-          process(((GenericRecord) value).get(field.name()), field.schema(), field.name(), _writer);
+          process(((GenericRecord) value).get(field.name()), field.schema(), field.name(), _writer, fieldSelection.getChild(field.name()));
         }
         break;
       case ARRAY:
@@ -209,7 +212,7 @@ public class AvroRecordReader extends AbstractRecordReader {
         }
         writer.start();
         for (final Object o : array) {
-          process(o, elementSchema, fieldName, writer);
+          process(o, elementSchema, fieldName, writer, fieldSelection.getChild(fieldName));
         }
         writer.end();
         break;
@@ -218,7 +221,7 @@ public class AvroRecordReader extends AbstractRecordReader {
         if (schema.getTypes().get(0).getType() != Schema.Type.NULL) {
           throw new UnsupportedOperationException("Avro union type must be of the format : [\"null\", \"some-type\"]");
         }
-        process(value, schema.getTypes().get(1), fieldName, writer);
+        process(value, schema.getTypes().get(1), fieldName, writer, fieldSelection.getChild(fieldName));
         break;
       case MAP:
         @SuppressWarnings("unchecked")
@@ -227,7 +230,7 @@ public class AvroRecordReader extends AbstractRecordReader {
         writer = (MapOrListWriterImpl) writer.map(fieldName);
         writer.start();
         for (Entry<Object, Object> entry : map.entrySet()) {
-          process(entry.getValue(), valueSchema, entry.getKey().toString(), writer);
+          process(entry.getValue(), valueSchema, entry.getKey().toString(), writer, fieldSelection.getChild(entry.getKey().toString()));
         }
         writer.end();
         break;
@@ -239,14 +242,7 @@ public class AvroRecordReader extends AbstractRecordReader {
         assert fieldName != null;
 
         if (writer.isMapWriter()) {
-          SchemaPath path;
-          if (writer.map.getField().getPath().getRootSegment().getPath().equals("")) {
-            path = new SchemaPath(new PathSegment.NameSegment(fieldName));
-          } else {
-            path = writer.map.getField().getPath().getChild(fieldName);
-          }
-
-          if (!selected(path)) {
+          if (fieldSelection.isNeverValid()) {
             break;
           }
         }
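
The change above threads a FieldSelection tree down the Avro structure in place of rebuilding SchemaPath objects per field. A sketch of the traversal, assuming FieldSelection prunes children that were not projected (the paths are illustrative):

    import java.util.Collections;
    import java.util.List;
    import org.apache.drill.common.expression.SchemaPath;
    import org.apache.drill.exec.vector.complex.fn.FieldSelection;

    public class FieldSelectionSketch {
      public static void main(String[] args) {
        // Project only a.b; descending the selection tree mirrors descending
        // the Avro record, one getChild() per nesting level.
        List<SchemaPath> projected =
            Collections.singletonList(SchemaPath.getCompoundPath("a", "b"));
        FieldSelection root = FieldSelection.getFieldSelection(projected);
        FieldSelection a = root.getChild("a");
        System.out.println(a.getChild("b").isNeverValid());  // expected false: projected
        System.out.println(a.getChild("c").isNeverValid());  // expected true: pruned
      }
    }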

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/sequencefile/SequenceFileRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/sequencefile/SequenceFileRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/sequencefile/SequenceFileRecordReader.java
index 2a131ba..a7f8c7d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/sequencefile/SequenceFileRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/sequencefile/SequenceFileRecordReader.java
@@ -56,8 +56,8 @@ public class SequenceFileRecordReader extends AbstractRecordReader {
   private static final MajorType KEY_TYPE = Types.optional(TypeProtos.MinorType.VARBINARY);
   private static final MajorType VALUE_TYPE = Types.optional(TypeProtos.MinorType.VARBINARY);
 
-  private final SchemaPath keySchema = SchemaPath.getSimplePath("binary_key");
-  private final SchemaPath valueSchema = SchemaPath.getSimplePath("binary_value");
+  private final String keySchema = "binary_key";
+  private final String valueSchema = "binary_value";
 
   private NullableVarBinaryVector keyVector;
   private NullableVarBinaryVector valueVector;
@@ -74,8 +74,8 @@ public class SequenceFileRecordReader extends AbstractRecordReader {
                                   final String queryUserName,
                                   final String opUserName) {
     final List<SchemaPath> columns = new ArrayList<>();
-    columns.add(keySchema);
-    columns.add(valueSchema);
+    columns.add(SchemaPath.getSimplePath(keySchema));
+    columns.add(SchemaPath.getSimplePath(valueSchema));
     setColumns(columns);
     this.dfs = dfs;
     this.split = split;

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/CompliantTextRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/CompliantTextRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/CompliantTextRecordReader.java
index cf2359f..d324270 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/CompliantTextRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/CompliantTextRecordReader.java
@@ -228,11 +228,11 @@ public class CompliantTextRecordReader extends AbstractRecordReader {
    * This class provides OutputMutator for header extraction.
    */
   private class HeaderOutputMutator implements OutputMutator {
-    private final Map<MaterializedField.Key, ValueVector> fieldVectorMap = Maps.newHashMap();
+    private final Map<String, ValueVector> fieldVectorMap = Maps.newHashMap();
 
     @Override
     public <T extends ValueVector> T addField(MaterializedField field, Class<T> clazz) throws SchemaChangeException {
-      ValueVector v = fieldVectorMap.get(field.key());
+      ValueVector v = fieldVectorMap.get(field.getPath()); // map is keyed by the field's String path
       if (v == null || v.getClass() != clazz) {
         // Field does not exist add it to the map
         v = TypeHelper.getNewVector(field, oContext.getAllocator());
@@ -240,7 +240,7 @@ public class CompliantTextRecordReader extends AbstractRecordReader {
           throw new SchemaChangeException(String.format(
               "Class %s was provided, expected %s.", clazz.getSimpleName(), v.getClass().getSimpleName()));
         }
-        fieldVectorMap.put(field.key(), v);
+        fieldVectorMap.put(field.getPath(), v);
       }
       return clazz.cast(v);
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/RepeatedVarCharOutput.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/RepeatedVarCharOutput.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/RepeatedVarCharOutput.java
index ca02e97..2ec662e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/RepeatedVarCharOutput.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/RepeatedVarCharOutput.java
@@ -47,7 +47,6 @@ class RepeatedVarCharOutput extends TextOutput {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RepeatedVarCharOutput.class);
 
   static final String COL_NAME = "columns";
-  static final FieldReference REF = new FieldReference(COL_NAME);
   static final SchemaPath COLUMNS = SchemaPath.getSimplePath("columns");
   public static final int MAXIMUM_NUMBER_COLUMNS = 64 * 1024;
 
@@ -122,7 +121,7 @@ class RepeatedVarCharOutput extends TextOutput {
   public RepeatedVarCharOutput(OutputMutator outputMutator, Collection<SchemaPath> columns, boolean isStarQuery) throws SchemaChangeException {
     super();
 
-    MaterializedField field = MaterializedField.create(REF, Types.repeated(TypeProtos.MinorType.VARCHAR));
+    MaterializedField field = MaterializedField.create(COL_NAME, Types.repeated(TypeProtos.MinorType.VARCHAR));
     this.vector = outputMutator.addField(field, RepeatedVarCharVector.class);
 
     this.mutator = vector.getMutator();
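
The pattern above recurs through the rest of the commit: MaterializedField.create now takes the bare column name, dropping the FieldReference/SchemaPath indirection. A minimal sketch:

    import org.apache.drill.common.types.TypeProtos.MinorType;
    import org.apache.drill.common.types.Types;
    import org.apache.drill.exec.record.MaterializedField;

    public class CreateFieldSketch {
      public static void main(String[] args) {
        // Name-first creation, as used for the "columns" vector above.
        MaterializedField field =
            MaterializedField.create("columns", Types.repeated(MinorType.VARCHAR));
        System.out.println(field.getPath());  // columns
      }
    }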

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
index d2c2425..2f69155 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
@@ -29,7 +29,6 @@ import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.impl.OutputMutator;
 import org.apache.drill.exec.record.MaterializedField;
-import org.apache.drill.exec.record.MaterializedField.Key;
 import org.apache.drill.exec.store.AbstractRecordReader;
 import org.apache.drill.exec.store.mock.MockGroupScanPOP.MockColumn;
 import org.apache.drill.exec.store.mock.MockGroupScanPOP.MockScanEntry;
@@ -62,7 +61,7 @@ public class MockRecordReader extends AbstractRecordReader {
 
   private MaterializedField getVector(String name, MajorType type, int length) {
     assert context != null : "Context shouldn't be null.";
-    final MaterializedField f = MaterializedField.create(SchemaPath.getSimplePath(name), type);
+    final MaterializedField f = MaterializedField.create(name, type);
     return f;
   }
 
@@ -101,7 +100,7 @@ public class MockRecordReader extends AbstractRecordReader {
   }
 
   @Override
-  public void allocate(Map<Key, ValueVector> vectorMap) throws OutOfMemoryException {
+  public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
     try {
       for (final ValueVector v : vectorMap.values()) {
         AllocationHelper.allocate(v, Character.MAX_VALUE, 50, 10);

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java
index 61e05db..da3b067 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java
@@ -35,11 +35,9 @@ import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.ops.FragmentContext;
-import org.apache.drill.exec.ops.MetricDef;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.impl.OutputMutator;
 import org.apache.drill.exec.record.MaterializedField;
-import org.apache.drill.exec.record.MaterializedField.Key;
 import org.apache.drill.exec.store.AbstractRecordReader;
 import org.apache.drill.exec.store.parquet.ParquetReaderStats;
 import org.apache.drill.exec.vector.AllocationHelper;
@@ -196,7 +194,7 @@ public class ParquetRecordReader extends AbstractRecordReader {
 
     int i = 0;
     for (SchemaPath expr : getColumns()) {
-      if ( field.matches(expr)) {
+      if (field.getPath().equalsIgnoreCase(expr.getAsUnescapedPath())) {
         columnsFound[i] = true;
         return true;
       }
@@ -248,7 +246,7 @@ public class ParquetRecordReader extends AbstractRecordReader {
       SchemaElement se = schemaElements.get(column.getPath()[0]);
       MajorType mt = ParquetToDrillTypeConverter.toMajorType(column.getType(), se.getType_length(),
           getDataMode(column), se, fragmentContext.getOptions());
-      field = MaterializedField.create(toFieldName(column.getPath()),mt);
+      field = MaterializedField.create(toFieldName(column.getPath()), mt);
       if ( ! fieldSelected(field)) {
         continue;
       }
@@ -336,7 +334,7 @@ public class ParquetRecordReader extends AbstractRecordReader {
           col = projectedColumns.get(i);
           assert col!=null;
           if ( ! columnsFound[i] && !col.equals(STAR_COLUMN)) {
-            nullFilledVectors.add((NullableIntVector)output.addField(MaterializedField.create(col,
+            nullFilledVectors.add((NullableIntVector)output.addField(MaterializedField.create(col.getAsUnescapedPath(),
                     Types.optional(TypeProtos.MinorType.INT)),
                 (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(TypeProtos.MinorType.INT, DataMode.OPTIONAL)));
 
@@ -355,7 +353,7 @@ public class ParquetRecordReader extends AbstractRecordReader {
   }
 
   @Override
-  public void allocate(Map<Key, ValueVector> vectorMap) throws OutOfMemoryException {
+  public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
     try {
       for (final ValueVector v : vectorMap.values()) {
         AllocationHelper.allocate(v, recordsPerBatch, 50, 10);
@@ -366,8 +364,8 @@ public class ParquetRecordReader extends AbstractRecordReader {
   }
 
 
-  private SchemaPath toFieldName(String[] paths) {
-    return SchemaPath.getCompoundPath(paths);
+  private String toFieldName(String[] paths) {
+    return SchemaPath.getCompoundPath(paths).getAsUnescapedPath();
   }
 
   private TypeProtos.DataMode getDataMode(ColumnDescriptor column) {
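
A short sketch of the flattened naming introduced in toFieldName() above, assuming getAsUnescapedPath() dot-joins the segments (the column path is illustrative):

    import org.apache.drill.common.expression.SchemaPath;

    public class FieldNameSketch {
      public static void main(String[] args) {
        String[] parquetPath = {"address", "zip"};
        String name = SchemaPath.getCompoundPath(parquetPath).getAsUnescapedPath();
        System.out.println(name);  // address.zip, matched case-insensitively above
      }
    }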

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
index 27536d3..224d6eb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
@@ -39,7 +39,6 @@ import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.impl.OutputMutator;
 import org.apache.drill.exec.record.MaterializedField;
-import org.apache.drill.exec.record.MaterializedField.Key;
 import org.apache.drill.exec.store.AbstractRecordReader;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.drill.exec.store.parquet.ParquetDirectByteBufferAllocator;
@@ -189,7 +188,7 @@ public class DrillParquetReader extends AbstractRecordReader {
   }
 
   @Override
-  public void allocate(Map<Key, ValueVector> vectorMap) throws OutOfMemoryException {
+  public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
     try {
       for (final ValueVector v : vectorMap.values()) {
         AllocationHelper.allocate(v, Character.MAX_VALUE, 50, 10);
@@ -219,7 +218,7 @@ public class DrillParquetReader extends AbstractRecordReader {
           nullFilledVectors = new ArrayList<>();
           for(SchemaPath col: columnsNotFound){
             nullFilledVectors.add(
-              (NullableIntVector)output.addField(MaterializedField.create(col,
+              (NullableIntVector)output.addField(MaterializedField.create(col.getAsUnescapedPath(),
                   org.apache.drill.common.types.Types.optional(TypeProtos.MinorType.INT)),
                 (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(TypeProtos.MinorType.INT,
                   TypeProtos.DataMode.OPTIONAL)));

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
index 22f6bc3..7feb303 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
@@ -29,7 +29,7 @@ import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.impl.OutputMutator;
-import org.apache.drill.exec.record.MaterializedField.Key;
+import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.store.AbstractRecordReader;
 import org.apache.drill.exec.store.pojo.Writers.BitWriter;
 import org.apache.drill.exec.store.pojo.Writers.DoubleWriter;
@@ -122,7 +122,7 @@ public class PojoRecordReader<T> extends AbstractRecordReader {
   }
 
   @Override
-  public void allocate(Map<Key, ValueVector> vectorMap) throws OutOfMemoryException {
+  public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
     for (final ValueVector v : vectorMap.values()) {
       AllocationHelper.allocate(v, Character.MAX_VALUE, 50, 10);
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/9a3a5c4f/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordReader.java
index d97fc58..2af5c8b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/text/DrillTextRecordReader.java
@@ -124,7 +124,7 @@ public class DrillTextRecordReader extends AbstractRecordReader {
 
   @Override
   public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException {
-    MaterializedField field = MaterializedField.create(ref, Types.repeated(TypeProtos.MinorType.VARCHAR));
+    MaterializedField field = MaterializedField.create(ref.getAsNamePart().getName(), Types.repeated(TypeProtos.MinorType.VARCHAR));
     try {
       vector = output.addField(field, RepeatedVarCharVector.class);
     } catch (Exception e) {
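
Finally, a sketch of the name extraction used above, assuming a simple single-segment FieldReference:

    import org.apache.drill.common.expression.FieldReference;

    public class RefNameSketch {
      public static void main(String[] args) {
        FieldReference ref = new FieldReference("columns");
        // getAsNamePart() exposes the root segment as a protobuf NamePart,
        // whose bare name feeds MaterializedField.create.
        System.out.println(ref.getAsNamePart().getName());  // columns
      }
    }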

