drill-commits mailing list archives

From sudhe...@apache.org
Subject [1/3] drill git commit: DRILL-5318: Sub-operator test fixture
Date Fri, 21 Apr 2017 23:32:28 GMT
Repository: drill
Updated Branches:
  refs/heads/master 381eab668 -> 3e8b01d5b


DRILL-5318: Sub-operator test fixture

This commit depends on:

* DRILL-5323

This PR cannot be accepted (or built) until the above is pulled and
this PR is rebased on top of it. The PR is issued now so that reviews
can be done in parallel.

Provides the following:

* A new OperatorFixture to set up all the objects needed to test at the
sub-operator level. This relies on the refactoring to create the
required interfaces.
* Pulls the config builder code out of the cluster fixture builder so
that configs can be built for sub-operator tests.
* Modifies the QueryBuilder test tool to run a query and get back one
of the new row set objects, allowing direct inspection of the data
returned from a query (see the sketch after this list).
* Modifies the cluster fixture to create a JDBC connection to the test
cluster. (Using it requires putting the Drill JDBC project on the test
class path, since exec does not depend on JDBC.)
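
A hedged sketch of the row-set inspection flow (the test class name and
query are illustrative, not part of this commit; the fixture calls follow
the classes changed below, and FixtureBuilder.build() is assumed from the
existing cluster fixture):

import org.apache.drill.test.ClientFixture;
import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.FixtureBuilder;
import org.apache.drill.test.rowSet.DirectRowSet;
import org.apache.drill.test.rowSet.RowSet.RowSetReader;
import org.junit.Test;

public class ExampleRowSetTest {
  @Test
  public void exampleRowSetQuery() throws Exception {
    FixtureBuilder builder = ClusterFixture.builder();
    try (ClusterFixture cluster = builder.build();
         ClientFixture client = cluster.clientBuilder().build()) {
      // Run a query and capture the first non-empty batch as a row set.
      DirectRowSet rowSet = client.queryBuilder()
          .sql("SELECT employee_id FROM cp.`employee.json` LIMIT 1")
          .rowSet();
      // Inspect the returned data directly rather than via printed output.
      RowSetReader reader = rowSet.reader();
      reader.next();
      System.out.println(reader.column(0).getLong());
      rowSet.clear();
    }
  }
}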

Created a common base class for the cluster and operator fixtures to
abstract out the allocator and config. Also provides temp directory
support to the operator fixture.

Merged with DRILL-5415 (Improve Fixture Builder to configure client
properties)

Moved row set tests here from DRILL-5323 so that DRILL-5323 is
self-contained. (The tests depend on the fixtures defined here.)

Added comments where needed.

Puts code back as it was prior to a code review comment. The code is
redundant, but necessarily so because it is specific to several
primitive types.

closes #788


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/3e8b01d5
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/3e8b01d5
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/3e8b01d5

Branch: refs/heads/master
Commit: 3e8b01d5b0d3013e3811913f0fd6028b22c1ac3f
Parents: 095a660
Author: Paul Rogers <progers@maprtech.com>
Authored: Tue Mar 14 16:18:24 2017 -0700
Committer: Sudheesh Katkam <sudheesh@apache.org>
Committed: Fri Apr 21 14:51:36 2017 -0700

----------------------------------------------------------------------
 .../drill/exec/memory/RootAllocatorFactory.java |  11 +-
 .../apache/drill/exec/record/BatchSchema.java   |   2 +-
 .../drill/exec/record/RecordBatchLoader.java    |  15 +-
 .../exec/record/selection/SelectionVector2.java |  21 +-
 .../java/org/apache/drill/test/BaseFixture.java |  61 +++
 .../org/apache/drill/test/ClientFixture.java    |  45 ++-
 .../org/apache/drill/test/ClusterFixture.java   | 114 +++---
 .../org/apache/drill/test/ConfigBuilder.java    | 153 +++++++
 .../org/apache/drill/test/FixtureBuilder.java   |  31 +-
 .../org/apache/drill/test/OperatorFixture.java  | 331 +++++++++++++++
 .../org/apache/drill/test/QueryBuilder.java     | 118 ++++++
 .../drill/test/rowSet/HyperRowSetImpl.java      |   3 +
 .../apache/drill/test/rowSet/RowSetBuilder.java |   1 +
 .../drill/test/rowSet/test/RowSetTest.java      | 400 +++++++++++++++++++
 .../drill/test/rowSet/test/package-info.java    |  21 +
 15 files changed, 1205 insertions(+), 122 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/main/java/org/apache/drill/exec/memory/RootAllocatorFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/RootAllocatorFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/RootAllocatorFactory.java
index 4fad668..5b42b3d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/RootAllocatorFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/RootAllocatorFactory.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,6 +19,8 @@ package org.apache.drill.exec.memory;
 
 import org.apache.drill.common.config.DrillConfig;
 
+import com.google.common.annotations.VisibleForTesting;
+
 public class RootAllocatorFactory {
 
   public static final String TOP_LEVEL_MAX_ALLOC = "drill.memory.top.max";
@@ -35,6 +37,11 @@ public class RootAllocatorFactory {
    * @return a new root allocator
    */
   public static BufferAllocator newRoot(final DrillConfig drillConfig) {
-    return new RootAllocator(Math.min(DrillConfig.getMaxDirectMemory(), drillConfig.getLong(TOP_LEVEL_MAX_ALLOC)));
+    return newRoot(drillConfig.getLong(TOP_LEVEL_MAX_ALLOC));
+  }
+
+  @VisibleForTesting
+  public static BufferAllocator newRoot(long maxAlloc) {
+    return new RootAllocator(Math.min(DrillConfig.getMaxDirectMemory(), maxAlloc));
   }
 }

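A minimal sketch of how a sub-operator test might use the new
@VisibleForTesting overload to get a root allocator without building a
DrillConfig (the 10 MB limit is arbitrary):

import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;

// Size the root allocator directly; no DrillConfig is needed in the test.
BufferAllocator allocator = RootAllocatorFactory.newRoot(10 * 1024 * 1024);
try {
  // ... exercise the code under test with this allocator ...
} finally {
  allocator.close();
}
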
http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
index 168995d..e9dcd28 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
index ea99fcb..3801cb5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
@@ -70,6 +70,7 @@ public class RecordBatchLoader implements VectorAccessible, Iterable<VectorWrapp
    * @throws SchemaChangeException
    *   TODO:  Clean:  DRILL-2933  load(...) never actually throws SchemaChangeException.
    */
+  @SuppressWarnings("resource")
   public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeException {
     if (logger.isTraceEnabled()) {
       logger.trace("Loading record batch with def {} and data {}", def, buf);
@@ -169,9 +170,9 @@ public class RecordBatchLoader implements VectorAccessible, Iterable<VectorWrapp
 //  }
 
   @Override
-  public int getRecordCount() {
-    return valueCount;
-  }
+  public int getRecordCount() { return valueCount; }
+
+  public VectorContainer getContainer() { return container; }
 
   @Override
   public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids){
@@ -199,13 +200,9 @@ public class RecordBatchLoader implements VectorAccessible, Iterable<VectorWrapp
   }
 
   @Override
-  public BatchSchema getSchema() {
-    return schema;
-  }
+  public BatchSchema getSchema() { return schema; }
 
-  public void resetRecordCount() {
-    valueCount = 0;
-  }
+  public void resetRecordCount() { valueCount = 0; }
 
   /**
    * Clears this loader, which clears the internal vector container (see

http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
index dcf9a7d..1a31625 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -133,4 +133,23 @@ public class SelectionVector2 implements AutoCloseable {
   public void close() {
     clear();
   }
+
+  @Override
+  public String toString() {
+    StringBuilder buf = new StringBuilder();
+    buf.append("[SV2: recs=");
+    buf.append(recordCount);
+    buf.append(" - ");
+    int n = Math.min(20, recordCount);
+    for (int i = 0; i < n; i++) {
+      if (i > 0) { buf.append("," ); }
+      buf.append((int) getIndex(i));
+    }
+    if (recordCount > n) {
+      buf.append("...");
+      buf.append((int) getIndex(recordCount-1));
+    }
+    buf.append("]");
+    return buf.toString();
+  }
 }

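For reference, the new toString() prints the record count, then up to the
first 20 indexes, then "..." followed by the last index for longer
vectors. For example (index values illustrative):

[SV2: recs=4 - 0,2,5,7]
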
http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/BaseFixture.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/BaseFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/BaseFixture.java
new file mode 100644
index 0000000..02cdbef
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/BaseFixture.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.test;
+
+import java.io.File;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.exec.memory.BufferAllocator;
+
+import com.google.common.io.Files;
+
+/**
+ * Base class for "fixtures." Provides the basics such as the Drill
+ * configuration, a memory allocator and so on.
+ */
+
+public class BaseFixture {
+
+  protected DrillConfig config;
+  protected BufferAllocator allocator;
+
+  /**
+   * Create a temp directory to store the given <i>dirName</i>. Directory will
+   * be deleted on exit. Directory is created if it does not exist.
+   *
+   * @param dirName directory name
+   * @return Full path including temp parent directory and given directory name.
+   */
+
+  public static File getTempDir(final String dirName) {
+    final File dir = Files.createTempDir();
+    Runtime.getRuntime().addShutdownHook(new Thread() {
+      @Override
+      public void run() {
+        FileUtils.deleteQuietly(dir);
+      }
+    });
+    File tempDir = new File(dir, dirName);
+    tempDir.mkdirs();
+    return tempDir;
+  }
+
+  public BufferAllocator allocator() { return allocator; }
+  public DrillConfig config() { return config; }
+}

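A hedged usage sketch for the temp-directory helper (the directory name
is illustrative):

import java.io.File;
import org.apache.drill.test.BaseFixture;

// Creates a fresh temporary directory, then a "spill" subdirectory
// inside it; the whole tree is deleted at JVM exit.
File spillDir = BaseFixture.getTempDir("spill");
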
http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
index 25dab4f..a63a287 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
@@ -34,6 +34,13 @@ import org.apache.drill.exec.testing.ControlsInjectionUtil;
 import org.apache.drill.test.ClusterFixture.FixtureTestServices;
 import org.apache.drill.test.QueryBuilder.QuerySummary;
 
+/**
+ * Represents a Drill client. Provides many useful test-specific operations such
+ * as setting system options, running queries, and using the @{link TestBuilder}
+ * class.
+ * @see ExampleTest ExampleTest for usage examples
+ */
+
 public class ClientFixture implements AutoCloseable {
 
   public static class ClientBuilder {
@@ -45,21 +52,23 @@ public class ClientFixture implements AutoCloseable {
       this.cluster = cluster;
       clientProps = cluster.getClientProps();
     }
+
     /**
      * Specify an optional client property.
      * @param key property name
      * @param value property value
      * @return this builder
      */
-    public ClientBuilder property( String key, Object value ) {
-      if ( clientProps == null ) {
-        clientProps = new Properties( );
+
+    public ClientBuilder property(String key, Object value) {
+      if (clientProps == null) {
+        clientProps = new Properties();
       }
       clientProps.put(key, value);
       return this;
     }
 
-    public ClientFixture build( ) {
+    public ClientFixture build() {
       try {
         return new ClientFixture(this);
       } catch (RpcException e) {
@@ -81,17 +90,17 @@ public class ClientFixture implements AutoCloseable {
     // Create a client.
 
     if (cluster.usesZK()) {
-      client = new DrillClient(cluster.config( ));
+      client = new DrillClient(cluster.config());
     } else {
-      client = new DrillClient(cluster.config( ), cluster.serviceSet( ).getCoordinator());
+      client = new DrillClient(cluster.config(), cluster.serviceSet().getCoordinator());
     }
     client.connect(builder.clientProps);
     cluster.clients.add(this);
   }
 
   public DrillClient client() { return client; }
-  public ClusterFixture cluster( ) { return cluster; }
-  public BufferAllocator allocator( ) { return cluster.allocator( ); }
+  public ClusterFixture cluster() { return cluster; }
+  public BufferAllocator allocator() { return client.getAllocator(); }
 
   /**
    * Set a runtime option.
@@ -101,14 +110,14 @@ public class ClientFixture implements AutoCloseable {
    * @throws RpcException
    */
 
-  public void alterSession(String key, Object value ) {
-    String sql = "ALTER SESSION SET `" + key + "` = " + ClusterFixture.stringify( value );
-    runSqlSilently( sql );
+  public void alterSession(String key, Object value) {
+    String sql = "ALTER SESSION SET `" + key + "` = " + ClusterFixture.stringify(value);
+    runSqlSilently(sql);
   }
 
-  public void alterSystem(String key, Object value ) {
-    String sql = "ALTER SYSTEM SET `" + key + "` = " + ClusterFixture.stringify( value );
-    runSqlSilently( sql );
+  public void alterSystem(String key, Object value) {
+    String sql = "ALTER SYSTEM SET `" + key + "` = " + ClusterFixture.stringify(value);
+    runSqlSilently(sql);
   }
 
   /**
@@ -160,17 +169,17 @@ public class ClientFixture implements AutoCloseable {
       if (trimmedQuery.isEmpty()) {
         continue;
       }
-      queryBuilder( ).sql(trimmedQuery).print();
+      queryBuilder().sql(trimmedQuery).print();
     }
   }
 
   @Override
-  public void close( ) {
+  public void close() {
     if (client == null) {
       return;
     }
     try {
-      client.close( );
+      client.close();
     } finally {
       client = null;
       cluster.clients.remove(this);
@@ -198,7 +207,7 @@ public class ClientFixture implements AutoCloseable {
    */
 
   public ProfileParser parseProfile(String queryId) throws IOException {
-    File file = new File(cluster.getProfileDir(), queryId + ".sys.drill" );
+    File file = new File(cluster.getProfileDir(), queryId + ".sys.drill");
     return new ProfileParser(file);
   }
 

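A short sketch of typical ClientFixture use inside a test method (query
and option values are illustrative):

import org.apache.drill.exec.ExecConstants;
import org.apache.drill.test.ClientFixture;
import org.apache.drill.test.ClusterFixture;

try (ClusterFixture cluster = ClusterFixture.builder().build();
     ClientFixture client = cluster.clientBuilder().build()) {
  // Session options are applied via generated ALTER SESSION statements.
  client.alterSession(ExecConstants.SLICE_TARGET, 10);
  // Run a query and print the results for eyeball verification.
  client.queryBuilder()
        .sql("SELECT * FROM cp.`employee.json` LIMIT 3")
        .print();
}
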
http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
index 90ce206..0ce337d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
@@ -20,27 +20,28 @@ package org.apache.drill.test;
 import java.io.File;
 import java.io.IOException;
 import java.net.URL;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Properties;
 
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigValueFactory;
 import org.apache.commons.io.FileUtils;
 import org.apache.drill.BaseTestQuery;
 import org.apache.drill.DrillTestWrapper.TestServices;
 import org.apache.drill.QueryTestUtil;
-import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.TestBuilder;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ZookeeperHelper;
 import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.memory.RootAllocatorFactory;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
 import org.apache.drill.exec.proto.UserBitShared.QueryType;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.server.Drillbit;
@@ -57,7 +58,6 @@ import org.apache.drill.exec.util.TestUtilities;
 
 import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
-import com.google.common.io.Files;
 import com.google.common.io.Resources;
 
 /**
@@ -67,7 +67,7 @@ import com.google.common.io.Resources;
  * creates the requested Drillbit and client.
  */
 
-public class ClusterFixture implements AutoCloseable {
+public class ClusterFixture extends BaseFixture implements AutoCloseable {
   // private static final org.slf4j.Logger logger =
   // org.slf4j.LoggerFactory.getLogger(ClientFixture.class);
   public static final String ENABLE_FULL_CACHE = "drill.exec.test.use-full-cache";
@@ -123,10 +123,8 @@ public class ClusterFixture implements AutoCloseable {
 
   public static final String DEFAULT_BIT_NAME = "drillbit";
 
-  private DrillConfig config;
   private Map<String, Drillbit> bits = new HashMap<>();
   private Drillbit defaultDrillbit;
-  private BufferAllocator allocator;
   private boolean ownsZK;
   private ZookeeperHelper zkHelper;
   private RemoteServiceSet serviceSet;
@@ -199,7 +197,7 @@ public class ClusterFixture implements AutoCloseable {
       // combining locally-set properties and a config file: it is one
       // or the other.
 
-      if (builder.configProps == null) {
+      if (builder.configBuilder().hasResource()) {
         throw new IllegalArgumentException("Cannot specify a local ZK while using an external config file.");
       }
       builder.configProperty(ExecConstants.ZK_CONNECTION, zkConnect);
@@ -216,13 +214,7 @@ public class ClusterFixture implements AutoCloseable {
     // Because of the way DrillConfig works, we can set the ZK
     // connection string only if a property set is provided.
 
-    if (builder.configResource != null) {
-      config = DrillConfig.create(builder.configResource);
-    } else if (builder.configProps != null) {
-      config = configProperties(builder.configProps);
-    } else {
-      throw new IllegalStateException("Configuration was not provided.");
-    }
+    config = builder.configBuilder.build();
 
     if (builder.usingZk) {
       // Distribute drillbit using ZK (in-process or external)
@@ -341,39 +333,10 @@ public class ClusterFixture implements AutoCloseable {
     }
   }
 
-  private DrillConfig configProperties(Properties configProps) {
-    Properties stringProps = new Properties();
-    Properties collectionProps = new Properties();
-
-    // Filter out the collection type configs and other configs which can be converted to string.
-    for(Entry<Object, Object> entry : configProps.entrySet()) {
-      if(entry.getValue() instanceof Collection<?>) {
-        collectionProps.put(entry.getKey(), entry.getValue());
-      } else {
-        stringProps.setProperty(entry.getKey().toString(), entry.getValue().toString());
-      }
-    }
-
-    // First create a DrillConfig based on string properties.
-    Config drillConfig = DrillConfig.create(stringProps);
-
-    // Then add the collection properties inside the DrillConfig. Below call to withValue returns
-    // a new reference. Considering mostly properties will be of string type, doing this
-    // later will be less expensive as compared to doing it for all the properties.
-    for(Entry<Object, Object> entry : collectionProps.entrySet()) {
-      drillConfig = drillConfig.withValue(entry.getKey().toString(),
-        ConfigValueFactory.fromAnyRef(entry.getValue()));
-    }
-
-    return new DrillConfig(drillConfig, true);
-  }
-
   public Drillbit drillbit() { return defaultDrillbit; }
   public Drillbit drillbit(String name) { return bits.get(name); }
   public Collection<Drillbit> drillbits() { return bits.values(); }
   public RemoteServiceSet serviceSet() { return serviceSet; }
-  public BufferAllocator allocator() { return allocator; }
-  public DrillConfig config() { return config; }
   public File getDfsTestTmpDir() { return dfsTestTempDir; }
 
   public ClientFixture.ClientBuilder clientBuilder() {
@@ -392,6 +355,39 @@ public class ClusterFixture implements AutoCloseable {
   }
 
   /**
+   * Return a JDBC connection to the default (first) Drillbit.
+   * Note that this code requires special setup of the test code.
+   * Tests in the "exec" package do not normally have visibility
+   * to the Drill JDBC driver. So, the test must put that code
+   * on the class path manually in order for this code to load the
+   * JDBC classes. The caller is responsible for closing the JDBC
+   * connection before closing the cluster. (An enhancement is to
+   * do the close automatically as is done for clients.)
+   *
+   * @return a JDBC connection to the default Drillbit
+   */
+
+  public Connection jdbcConnection() {
+    try {
+      Class.forName("org.apache.drill.jdbc.Driver");
+    } catch (ClassNotFoundException e) {
+      throw new IllegalStateException(e);
+    }
+    String connStr = "jdbc:drill:";
+    if (usesZK()) {
+      connStr += "zk=" + zkHelper.getConnectionString();
+    } else {
+      DrillbitEndpoint ep = drillbit().getContext().getEndpoint();
+      connStr += "drillbit=" + ep.getAddress() + ":" + ep.getUserPort();
+    }
+    try {
+      return DriverManager.getConnection(connStr);
+    } catch (SQLException e) {
+      throw new IllegalStateException(e);
+    }
+  }
+
+  /**
    * Close the clients, Drillbits, allocator and
    * Zookeeper. Checks for exceptions. If an exception occurs,
    * continues closing, suppresses subsequent exceptions, and
@@ -562,10 +558,13 @@ public class ClusterFixture implements AutoCloseable {
   public static final String EXPLAIN_PLAN_JSON = "json";
 
   public static FixtureBuilder builder() {
-     return new FixtureBuilder()
-         .configProps(FixtureBuilder.defaultProps())
+    FixtureBuilder builder = new FixtureBuilder()
          .sessionOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, MAX_WIDTH_PER_NODE)
          ;
+    Properties props = new Properties();
+    props.putAll(ClusterFixture.TEST_CONFIGURATIONS);
+    builder.configBuilder.configProps(props);
+    return builder;
   }
 
   /**
@@ -689,27 +688,6 @@ public class ClusterFixture implements AutoCloseable {
   }
 
   /**
-   * Create a temp directory to store the given <i>dirName</i>. Directory will
-   * be deleted on exit. Directory is created if it does not exist.
-   *
-   * @param dirName directory name
-   * @return Full path including temp parent directory and given directory name.
-   */
-
-  public static File getTempDir(final String dirName) {
-    final File dir = Files.createTempDir();
-    Runtime.getRuntime().addShutdownHook(new Thread() {
-      @Override
-      public void run() {
-        FileUtils.deleteQuietly(dir);
-      }
-    });
-    File tempDir = new File(dir, dirName);
-    tempDir.mkdirs();
-    return tempDir;
-  }
-
-  /**
    * Create a temporary directory which will be removed when the
    * cluster closes.
    *

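A hedged sketch of the new JDBC path (requires the Drill JDBC project on
the test class path, as noted in the javadoc above; the query is
illustrative). The try-with-resources order closes the connection before
the cluster:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import org.apache.drill.test.ClusterFixture;

try (ClusterFixture cluster = ClusterFixture.builder().build();
     Connection conn = cluster.jdbcConnection();
     Statement stmt = conn.createStatement();
     ResultSet rs = stmt.executeQuery("SELECT full_name FROM cp.`employee.json` LIMIT 1")) {
  while (rs.next()) {
    System.out.println(rs.getString(1));
  }
}
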
http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/ConfigBuilder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ConfigBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/ConfigBuilder.java
new file mode 100644
index 0000000..82f6196
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ConfigBuilder.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.test;
+
+import java.util.Collection;
+import java.util.Properties;
+import java.util.Map.Entry;
+
+import org.apache.drill.common.config.DrillConfig;
+
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigValueFactory;
+
+/**
+ * Builds a {@link DrillConfig} for use in tests. Use this when a config
+ * is needed by itself, separate from an embedded Drillbit.
+ */
+public class ConfigBuilder {
+
+  protected String configResource;
+  protected Properties configProps;
+
+  /**
+   * Use the given configuration properties as overrides.
+   * @param configProps a collection of config properties
+   * @return this builder
+   * @see {@link #configProperty(String, Object)}
+   */
+
+  public ConfigBuilder configProps(Properties configProps) {
+    if (hasResource()) {
+      // Drill provides no constructor for this use case.
+      throw new IllegalArgumentException( "Cannot provide both a config resource and config properties.");
+    }
+    if (this.configProps == null) {
+      this.configProps = configProps;
+    } else {
+      this.configProps.putAll(configProps);
+    }
+    return this;
+  }
+
+  /**
+   * Use the given configuration file, stored as a resource, to initialize
+   * the Drill config. Note that the resource file should have the two
+   * following settings to work as a config for an embedded Drillbit:
+   * <pre><code>
+   * drill.exec.sys.store.provider.local.write : false,
+   * drill.exec.http.enabled : false
+   * </code></pre>
+   * It may be more convenient to add your settings to the default
+   * config settings with {@link #configProperty(String, Object)}.
+   * @param configResource path to the file that contains the
+   * config file to be read
+   * @return this builder
+   * @see {@link #configProperty(String, Object)}
+   */
+
+  public ConfigBuilder resource(String configResource) {
+
+    if (configProps != null) {
+      // Drill provides no constructor for this use case.
+      throw new IllegalArgumentException( "Cannot provide both a config resource and config properties.");
+    }
+
+    // TypeSafe gets unhappy about a leading slash, but other functions
+    // require it. Silently discard the leading slash if given to
+    // preserve the test writer's sanity.
+
+    this.configResource = ClusterFixture.trimSlash(configResource);
+    return this;
+  }
+
+  /**
+   * Add an additional boot-time property for the embedded Drillbit.
+   * @param key config property name
+   * @param value property value
+   * @return this builder
+   */
+
+  public ConfigBuilder put(String key, Object value) {
+    if (hasResource()) {
+      // Drill provides no constructor for this use case.
+      throw new IllegalArgumentException( "Cannot provide both a config resource and config properties.");
+    }
+    if (configProps == null) {
+      configProps = new Properties();
+    }
+    configProps.put(key, value.toString());
+    return this;
+  }
+
+  public DrillConfig build() {
+
+    // Create a config
+    // Because of the way DrillConfig works, we can set the ZK
+    // connection string only if a property set is provided.
+
+    if (hasResource()) {
+      return DrillConfig.create(configResource);
+    } else if (configProps != null) {
+      return constructConfig();
+    } else {
+      return DrillConfig.create();
+    }
+  }
+
+  private DrillConfig constructConfig() {
+    Properties stringProps = new Properties();
+    Properties collectionProps = new Properties();
+
+    // Filter out the collection type configs and other configs which can be converted to string.
+    for(Entry<Object, Object> entry : configProps.entrySet()) {
+      if(entry.getValue() instanceof Collection<?>) {
+        collectionProps.put(entry.getKey(), entry.getValue());
+      } else {
+        stringProps.setProperty(entry.getKey().toString(), entry.getValue().toString());
+      }
+    }
+
+    // First create a DrillConfig based on string properties.
+    Config drillConfig = DrillConfig.create(stringProps);
+
+    // Then add the collection properties inside the DrillConfig. Below call to withValue returns
+    // a new reference. Considering mostly properties will be of string type, doing this
+    // later will be less expensive as compared to doing it for all the properties.
+    for(Entry<Object, Object> entry : collectionProps.entrySet()) {
+      drillConfig = drillConfig.withValue(entry.getKey().toString(),
+        ConfigValueFactory.fromAnyRef(entry.getValue()));
+    }
+
+    return new DrillConfig(drillConfig, true);
+  }
+
+  public boolean hasResource() {
+    return configResource != null;
+  }
+}

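A minimal sketch of standalone ConfigBuilder use (the overridden property
value is arbitrary):

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.test.ConfigBuilder;

// build() uses the given properties; with nothing set it falls back
// to DrillConfig.create().
DrillConfig config = new ConfigBuilder()
    .put("drill.memory.top.max", 10_000_000)
    .build();
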
http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/FixtureBuilder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/FixtureBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/FixtureBuilder.java
index f6106ff..b305609 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/FixtureBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/FixtureBuilder.java
@@ -19,7 +19,6 @@ package org.apache.drill.test;
 
 import java.io.File;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.List;
 import java.util.Properties;
 
@@ -51,14 +50,7 @@ public class FixtureBuilder {
   public static final int DEFAULT_SERVER_RPC_THREADS = 10;
   public static final int DEFAULT_SCAN_THREADS = 8;
 
-  public static Properties defaultProps() {
-    Properties props = new Properties();
-    props.putAll(ClusterFixture.TEST_CONFIGURATIONS);
-    return props;
-  }
-
-  protected String configResource;
-  protected Properties configProps;
+  protected ConfigBuilder configBuilder = new ConfigBuilder();
   protected List<RuntimeOption> sessionOptions;
   protected List<RuntimeOption> systemOptions;
   protected int bitCount = 1;
@@ -71,16 +63,12 @@ public class FixtureBuilder {
   protected Properties clientProps;
 
   /**
-   * Use the given configuration properties to start the embedded Drillbit.
-   * @param configProps a collection of config properties
-   * @return this builder
-   * @see {@link #configProperty(String, Object)}
+   * The configuration builder which this fixture builder uses.
+   * @return the configuration builder for use in setting "advanced"
+   * configuration options.
    */
 
-  public FixtureBuilder configProps(Properties configProps) {
-    this.configProps = configProps;
-    return this;
-  }
+  public ConfigBuilder configBuilder() { return configBuilder; }
 
   /**
    * Use the given configuration file, stored as a resource, to start the
@@ -104,7 +92,7 @@ public class FixtureBuilder {
     // require it. Silently discard the leading slash if given to
     // preserve the test writer's sanity.
 
-    this.configResource = ClusterFixture.trimSlash(configResource);
+    configBuilder.resource(ClusterFixture.trimSlash(configResource));
     return this;
   }
 
@@ -116,10 +104,7 @@ public class FixtureBuilder {
    */
 
   public FixtureBuilder configProperty(String key, Object value) {
-    if (configProps == null) {
-      configProps = defaultProps();
-    }
-    configProps.put(key, value);
+    configBuilder.put(key, value.toString());
     return this;
   }
 
@@ -131,7 +116,7 @@ public class FixtureBuilder {
    * @return this builder
    */
   public FixtureBuilder configClientProperty(String key, Object value) {
-    if(clientProps == null) {
+    if (clientProps == null) {
       clientProps = new Properties();
     }
     clientProps.put(key, value.toString());

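A short sketch of how boot-time and session options now flow through the
builder (the config keys come from the ConfigBuilder javadoc above;
values are illustrative):

import org.apache.drill.exec.ExecConstants;
import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.FixtureBuilder;

FixtureBuilder builder = ClusterFixture.builder()
    // Simple case: configProperty() routes through the embedded ConfigBuilder.
    .configProperty("drill.exec.http.enabled", false)
    .sessionOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, 2);
// "Advanced" case: reach the config builder directly.
builder.configBuilder().put("drill.exec.sys.store.provider.local.write", false);
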
http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/OperatorFixture.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/OperatorFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/OperatorFixture.java
new file mode 100644
index 0000000..2c72c3c
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/OperatorFixture.java
@@ -0,0 +1,331 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.test;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.scanner.ClassPathScanner;
+import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.compile.CodeCompiler;
+import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.expr.ClassGenerator;
+import org.apache.drill.exec.expr.CodeGenerator;
+import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
+import org.apache.drill.exec.memory.RootAllocatorFactory;
+import org.apache.drill.exec.ops.FragmentExecContext;
+import org.apache.drill.exec.ops.MetricDef;
+import org.apache.drill.exec.ops.OperExecContext;
+import org.apache.drill.exec.ops.OperExecContextImpl;
+import org.apache.drill.exec.ops.OperatorStatReceiver;
+import org.apache.drill.exec.physical.base.PhysicalOperator;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.VectorContainer;
+import org.apache.drill.exec.server.options.BaseOptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
+import org.apache.drill.exec.server.options.OptionValue;
+import org.apache.drill.exec.server.options.OptionValue.OptionType;
+import org.apache.drill.exec.testing.ExecutionControls;
+import org.apache.drill.test.rowSet.DirectRowSet;
+import org.apache.drill.test.rowSet.HyperRowSetImpl;
+import org.apache.drill.test.rowSet.IndirectRowSet;
+import org.apache.drill.test.rowSet.RowSet;
+import org.apache.drill.test.rowSet.RowSet.ExtendableRowSet;
+import org.apache.drill.test.rowSet.RowSet.SingleRowSet;
+import org.apache.drill.test.rowSet.RowSetBuilder;
+
+/**
+ * Test fixture for operator and (especially) "sub-operator" tests.
+ * These are tests that are done without the full Drillbit server.
+ * Instead, this fixture creates a test fixture runtime environment
+ * that provides "real" implementations of the classes required by
+ * operator internals, but with implementations tuned to the test
+ * environment. The services available from this fixture are:
+ * <ul>
+ * <li>Configuration (DrillConfig)</li>
+ * <li>Memory allocator</li>
+ * <li>Code generation (compilers, code cache, etc.)</li>
+ * <li>Read-only version of system and session options (which
+ * are set when creating the fixture).</li>
+ * <li>Write-only version of operator stats (which are easy to
+ * read and verify in tests).</li>
+ * </ul>
+ * What is <b>not</b> provided is anything that depends on a live server:
+ * <ul>
+ * <li>Network endpoints.</li>
+ * <li>Persistent storage.</li>
+ * <li>ZK access.</li>
+ * <li>Multiple threads of execution.</li>
+ * </ul>
+ */
+
+public class OperatorFixture extends BaseFixture implements AutoCloseable {
+
+  /**
+   * Builds an operator fixture based on a set of config options and system/session
+   * options.
+   */
+
+  public static class OperatorFixtureBuilder
+  {
+    ConfigBuilder configBuilder = new ConfigBuilder();
+    TestOptionSet options = new TestOptionSet();
+
+    public ConfigBuilder configBuilder() {
+      return configBuilder;
+    }
+
+    public TestOptionSet options() {
+      return options;
+    }
+
+    public OperatorFixture build() {
+      return new OperatorFixture(this);
+    }
+  }
+
+  /**
+   * Test-time implementation of the system and session options. Provides
+   * a simple storage and a simple set interface, then implements the standard
+   * system and session option read interface.
+   */
+
+  public static class TestOptionSet extends BaseOptionManager {
+
+    private Map<String,OptionValue> values = new HashMap<>();
+
+    public TestOptionSet() {
+      // Crashes in FunctionImplementationRegistry if not set
+      set(ExecConstants.CAST_TO_NULLABLE_NUMERIC, false);
+      // Crashes in the Dynamic UDF code if not disabled
+      set(ExecConstants.USE_DYNAMIC_UDFS_KEY, false);
+//      set(ExecConstants.CODE_GEN_EXP_IN_METHOD_SIZE_VALIDATOR, false);
+    }
+
+    public void set(String key, int value) {
+      set(key, (long) value);
+    }
+
+    public void set(String key, long value) {
+      values.put(key, OptionValue.createLong(OptionType.SYSTEM, key, value));
+    }
+
+    public void set(String key, boolean value) {
+      values.put(key, OptionValue.createBoolean(OptionType.SYSTEM, key, value));
+    }
+
+    public void set(String key, double value) {
+      values.put(key, OptionValue.createDouble(OptionType.SYSTEM, key, value));
+    }
+
+    public void set(String key, String value) {
+      values.put(key, OptionValue.createString(OptionType.SYSTEM, key, value));
+    }
+
+    @Override
+    public OptionValue getOption(String name) {
+      return values.get(name);
+    }
+  }
+
+  /**
+   * Provide a simplified test-time code generation context that
+   * uses the same code generation mechanism as the full Drill, but
+   * provides test-specific versions of various other services.
+   */
+
+  public static class TestCodeGenContext implements FragmentExecContext {
+
+    private final DrillConfig config;
+    private final OptionSet options;
+    private final CodeCompiler compiler;
+    private final FunctionImplementationRegistry functionRegistry;
+    private ExecutionControls controls;
+
+    public TestCodeGenContext(DrillConfig config, OptionSet options) {
+      this.config = config;
+      this.options = options;
+      ScanResult classpathScan = ClassPathScanner.fromPrescan(config);
+      functionRegistry = new FunctionImplementationRegistry(config, classpathScan, options);
+      compiler = new CodeCompiler(config, options);
+    }
+
+    public void setExecutionControls(ExecutionControls controls) {
+      this.controls = controls;
+    }
+
+    @Override
+    public FunctionImplementationRegistry getFunctionRegistry() {
+      return functionRegistry;
+    }
+
+    @Override
+    public OptionSet getOptionSet() {
+      return options;
+    }
+
+    @Override
+    public <T> T getImplementationClass(final ClassGenerator<T> cg)
+        throws ClassTransformationException, IOException {
+      return getImplementationClass(cg.getCodeGenerator());
+    }
+
+    @Override
+    public <T> T getImplementationClass(final CodeGenerator<T> cg)
+        throws ClassTransformationException, IOException {
+      return compiler.createInstance(cg);
+    }
+
+    @Override
+    public <T> List<T> getImplementationClass(final ClassGenerator<T> cg, final int instanceCount) throws ClassTransformationException, IOException {
+      return getImplementationClass(cg.getCodeGenerator(), instanceCount);
+    }
+
+    @Override
+    public <T> List<T> getImplementationClass(final CodeGenerator<T> cg, final int instanceCount) throws ClassTransformationException, IOException {
+      return compiler.createInstances(cg, instanceCount);
+    }
+
+    @Override
+    public boolean shouldContinue() {
+      return true;
+    }
+
+    @Override
+    public ExecutionControls getExecutionControls() {
+      return controls;
+    }
+
+    @Override
+    public DrillConfig getConfig() {
+      return config;
+    }
+  }
+
+  /**
+   * Implements a write-only version of the stats collector for use by operators,
+   * then provides simplified test-time accessors to get the stats values when
+   * validating code in tests.
+   */
+
+  public static class MockStats implements OperatorStatReceiver {
+
+    public Map<Integer,Double> stats = new HashMap<>();
+
+    @Override
+    public void addLongStat(MetricDef metric, long value) {
+      setStat(metric, getStat(metric) + value);
+    }
+
+    @Override
+    public void addDoubleStat(MetricDef metric, double value) {
+      setStat(metric, getStat(metric) + value);
+    }
+
+    @Override
+    public void setLongStat(MetricDef metric, long value) {
+      setStat(metric, value);
+    }
+
+    @Override
+    public void setDoubleStat(MetricDef metric, double value) {
+      setStat(metric, value);
+    }
+
+    public double getStat(MetricDef metric) {
+      return getStat(metric.metricId());
+    }
+
+    private double getStat(int metricId) {
+      Double value = stats.get(metricId);
+      return value == null ? 0 : value;
+    }
+
+    private void setStat(MetricDef metric, double value) {
+      setStat(metric.metricId(), value);
+    }
+
+    private void setStat(int metricId, double value) {
+      stats.put(metricId, value);
+    }
+  }
+
+  private final TestOptionSet options;
+  private final TestCodeGenContext context;
+  private final OperatorStatReceiver stats;
+
+  protected OperatorFixture(OperatorFixtureBuilder builder) {
+    config = builder.configBuilder().build();
+    allocator = RootAllocatorFactory.newRoot(config);
+    options = builder.options();
+    context = new TestCodeGenContext(config, options);
+    stats = new MockStats();
+   }
+
+  public TestOptionSet options() { return options; }
+
+
+  public FragmentExecContext codeGenContext() { return context; }
+
+  @Override
+  public void close() throws Exception {
+    allocator.close();
+  }
+
+  public static OperatorFixtureBuilder builder() {
+    OperatorFixtureBuilder builder = new OperatorFixtureBuilder();
+    builder.configBuilder()
+      // Required to avoid Dynamic UDF calls for missing or
+      // ambiguous functions.
+      .put(ExecConstants.UDF_DISABLE_DYNAMIC, true);
+    return builder;
+  }
+
+  public static OperatorFixture standardFixture() {
+    return builder().build();
+  }
+
+  public OperExecContext newOperExecContext(PhysicalOperator opDefn) {
+    return new OperExecContextImpl(context, allocator, stats, opDefn, null);
+  }
+
+  public RowSetBuilder rowSetBuilder(BatchSchema schema) {
+    return new RowSetBuilder(allocator, schema);
+  }
+
+  public ExtendableRowSet rowSet(BatchSchema schema) {
+    return new DirectRowSet(allocator, schema);
+  }
+
+  public RowSet wrap(VectorContainer container) {
+    switch (container.getSchema().getSelectionVectorMode()) {
+    case FOUR_BYTE:
+      return new HyperRowSetImpl(allocator(), container, container.getSelectionVector4());
+    case NONE:
+      return new DirectRowSet(allocator(), container);
+    case TWO_BYTE:
+      return new IndirectRowSet(allocator(), container);
+    default:
+      throw new IllegalStateException( "Unexpected selection mode" );
+    }
+  }
+}

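A hedged sketch of a sub-operator test using the fixture together with
the DRILL-5323 row-set classes (the schema, values, and the RowSetBuilder
add()/build() calls are assumed from that change):

import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.test.OperatorFixture;
import org.apache.drill.test.rowSet.RowSet.SingleRowSet;
import org.apache.drill.test.rowSet.SchemaBuilder;

try (OperatorFixture fixture = OperatorFixture.standardFixture()) {
  // Describe the batch schema, then build a small input row set
  // for the operator code under test.
  BatchSchema schema = new SchemaBuilder()
      .add("id", MinorType.INT)
      .addNullable("name", MinorType.VARCHAR)
      .build();
  SingleRowSet rows = fixture.rowSetBuilder(schema)
      .add(1, "wilma")
      .add(2, "betty")
      .build();
  // ... feed the row set into the operator being tested ...
  rows.clear();
}
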
http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java
index f9837ff..f2a27c8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java
@@ -31,11 +31,13 @@ import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.client.PrintingResultsListener;
 import org.apache.drill.exec.client.QuerySubmitter.Format;
+import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.proto.UserBitShared.QueryId;
 import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState;
 import org.apache.drill.exec.proto.UserBitShared.QueryType;
 import org.apache.drill.exec.proto.helper.QueryIdHelper;
 import org.apache.drill.exec.record.RecordBatchLoader;
+import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.rpc.ConnectionThrottle;
 import org.apache.drill.exec.rpc.RpcException;
@@ -46,6 +48,9 @@ import org.apache.drill.exec.util.VectorUtil;
 import org.apache.drill.exec.vector.NullableVarCharVector;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.test.BufferingQueryEventListener.QueryEvent;
+import org.apache.drill.test.rowSet.DirectRowSet;
+import org.apache.drill.test.rowSet.RowSet;
+import org.apache.drill.test.rowSet.RowSet.RowSetReader;
 
 import com.google.common.base.Preconditions;
 
@@ -271,6 +276,119 @@ public class QueryBuilder {
   }
 
   /**
+   * Run the query and return the first result set as a
+   * {@link DirectRowSet} object that can be inspected directly
+   * by the code using a {@link RowSetReader}.
+   * <p>
+   * An enhancement is to provide a way to read a series of result
+   * batches as row sets.
+   * @return a row set that represents the first batch returned from
+   * the query
+   * @throws RpcException if anything goes wrong
+   */
+
+  public DirectRowSet rowSet() throws RpcException {
+
+    // Ignore all but the first non-empty batch.
+
+    QueryDataBatch dataBatch = null;
+    for (QueryDataBatch batch : results()) {
+      if (dataBatch == null  &&  batch.getHeader().getRowCount() != 0) {
+        dataBatch = batch;
+      } else {
+        batch.release();
+      }
+    }
+
+    // No results?
+
+    if (dataBatch == null) {
+      return null;
+    }
+
+    // Unload the batch and convert to a row set.
+
+    final RecordBatchLoader loader = new RecordBatchLoader(client.allocator());
+    try {
+      loader.load(dataBatch.getHeader().getDef(), dataBatch.getData());
+      dataBatch.release();
+      VectorContainer container = loader.getContainer();
+      container.setRecordCount(loader.getRecordCount());
+      return new DirectRowSet(client.allocator(), container);
+    } catch (SchemaChangeException e) {
+      throw new IllegalStateException(e);
+    }
+  }
+
+  /**
+   * Run the query that is expected to return (at least) one row
+   * with the only (or first) column returning a long value.
+   * The long value cannot be null.
+   *
+   * @return the value of the first column of the first row
+   * @throws RpcException if anything goes wrong
+   */
+
+  public long singletonLong() throws RpcException {
+    RowSet rowSet = rowSet();
+    if (rowSet == null) {
+      throw new IllegalStateException("No rows returned");
+    }
+    RowSetReader reader = rowSet.reader();
+    reader.next();
+    long value = reader.column(0).getLong();
+    rowSet.clear();
+    return value;
+  }
+
+  /**
+   * Run the query that is expected to return (at least) one row
+   * with the only (or first) column returning an int value.
+   * The int value cannot be null.
+   *
+   * @return the value of the first column of the first row
+   * @throws RpcException if anything goes wrong
+   */
+
+  public int singletonInt() throws RpcException {
+    RowSet rowSet = rowSet();
+    if (rowSet == null) {
+      throw new IllegalStateException("No rows returned");
+    }
+    RowSetReader reader = rowSet.reader();
+    reader.next();
+    int value = reader.column(0).getInt();
+    rowSet.clear();
+    return value;
+  }
+
+  /**
+   * Run the query that is expected to return (at least) one row
+   * with the only (or first) column returning a string value.
+   * The value may be null, in which case a null string is returned.
+   *
+   * @return the value of the first column of the first row
+   * @throws RpcException if anything goes wrong
+   */
+
+  public String singletonString() throws RpcException {
+    RowSet rowSet = rowSet();
+    if (rowSet == null) {
+      throw new IllegalStateException("No rows returned");
+    }
+    RowSetReader reader = rowSet.reader();
+    reader.next();
+    String value;
+    if (reader.column(0).isNull()) {
+      value = null;
+    } else {
+      value = reader.column(0).getString();
+    }
+    rowSet.clear();
+    return value;
+  }
+
+  /**
    * Run the query with the listener provided. Use when the result
    * count will be large, or you don't need the results.
    *

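The new singleton helpers make scalar checks concise; a hedged sketch
assuming a ClientFixture named "client" (queries are illustrative):

long rowCount = client.queryBuilder()
    .sql("SELECT COUNT(*) FROM cp.`employee.json`")
    .singletonLong();

String name = client.queryBuilder()
    .sql("SELECT first_name FROM cp.`employee.json` LIMIT 1")
    .singletonString();
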
http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/HyperRowSetImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/HyperRowSetImpl.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/HyperRowSetImpl.java
index 9df0b23..c7cb1b2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/HyperRowSetImpl.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/HyperRowSetImpl.java
@@ -228,12 +228,15 @@ public class HyperRowSetImpl extends AbstractRowSet implements HyperRowSet {
   /**
    * Selection vector that indexes into the hyper vectors.
    */
+
   private final SelectionVector4 sv4;
+
   /**
    * Collection of hyper vectors in flattened order: a left-to-right,
    * depth first ordering of vectors in maps. Order here corresponds to
    * the order used for column indexes in the row set reader.
    */
+
   private final HyperVectorWrapper<ValueVector> hvw[];
 
   public HyperRowSetImpl(BufferAllocator allocator, VectorContainer container, SelectionVector4 sv4) {

http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetBuilder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetBuilder.java
index a5b03c8..74e9356 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetBuilder.java
@@ -70,6 +70,7 @@ public final class RowSetBuilder {
    *
    * @return this builder
    */
+
   public RowSetBuilder withSv2() {
     withSv2 = true;
     return this;

http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/RowSetTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/RowSetTest.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/RowSetTest.java
new file mode 100644
index 0000000..8d9179b
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/RowSetTest.java
@@ -0,0 +1,400 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.test.rowSet.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.drill.common.types.TypeProtos.DataMode;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.vector.accessor.ArrayReader;
+import org.apache.drill.exec.vector.accessor.ArrayWriter;
+import org.apache.drill.exec.vector.accessor.TupleAccessor.TupleSchema;
+import org.apache.drill.test.OperatorFixture;
+import org.apache.drill.test.rowSet.RowSet.ExtendableRowSet;
+import org.apache.drill.test.rowSet.RowSet.RowSetReader;
+import org.apache.drill.test.rowSet.RowSet.RowSetWriter;
+import org.apache.drill.test.rowSet.RowSet.SingleRowSet;
+import org.apache.drill.test.rowSet.RowSetComparison;
+import org.apache.drill.test.rowSet.RowSetSchema;
+import org.apache.drill.test.rowSet.RowSetSchema.FlattenedSchema;
+import org.apache.drill.test.rowSet.RowSetSchema.PhysicalSchema;
+import org.apache.drill.test.rowSet.SchemaBuilder;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.common.base.Splitter;
+
+public class RowSetTest {
+
+  private static OperatorFixture fixture;
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    fixture = OperatorFixture.standardFixture();
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    fixture.close();
+  }
+
+  /**
+   * Test a simple physical schema with no maps.
+   */
+
+  @Test
+  public void testSchema() {
+    BatchSchema batchSchema = new SchemaBuilder()
+        .add("c", MinorType.INT)
+        .add("a", MinorType.INT, DataMode.REPEATED)
+        .addNullable("b", MinorType.VARCHAR)
+        .build();
+
+    assertEquals("c", batchSchema.getColumn(0).getName());
+    assertEquals("a", batchSchema.getColumn(1).getName());
+    assertEquals("b", batchSchema.getColumn(2).getName());
+
+    RowSetSchema schema = new RowSetSchema(batchSchema);
+    TupleSchema access = schema.hierarchicalAccess();
+    assertEquals(3, access.count());
+
+    crossCheck(access, 0, "c", MinorType.INT);
+    assertEquals(DataMode.REQUIRED, access.column(0).getDataMode());
+    assertEquals(DataMode.REQUIRED, access.column(0).getType().getMode());
+    assertFalse(access.column(0).isNullable());
+
+    crossCheck(access, 1, "a", MinorType.INT);
+    assertEquals(DataMode.REPEATED, access.column(1).getDataMode());
+    assertEquals(DataMode.REPEATED, access.column(1).getType().getMode());
+    assertFalse(access.column(1).isNullable());
+
+    crossCheck(access, 2, "b", MinorType.VARCHAR);
+    assertEquals(MinorType.VARCHAR, access.column(2).getType().getMinorType());
+    assertEquals(DataMode.OPTIONAL, access.column(2).getDataMode());
+    assertEquals(DataMode.OPTIONAL, access.column(2).getType().getMode());
+    assertTrue(access.column(2).isNullable());
+
+    // No maps: physical schema is the same as access schema.
+
+    PhysicalSchema physical = schema.physical();
+    assertEquals(3, physical.count());
+    assertEquals("c", physical.column(0).field().getName());
+    assertEquals("a", physical.column(1).field().getName());
+    assertEquals("b", physical.column(2).field().getName());
+  }
+
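+  /**
+   * Verify that the column at the given index can also be found by its
+   * full dotted name, and that it has the expected type.
+   */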
+  private void crossCheck(TupleSchema schema, int index, String fullName, MinorType type) {
+    String name = null;
+    for (String part : Splitter.on(".").split(fullName)) {
+      name = part;
+    }
+    assertEquals(name, schema.column(index).getName());
+    assertEquals(index, schema.columnIndex(fullName));
+    assertSame(schema.column(index), schema.column(fullName));
+    assertEquals(type, schema.column(index).getType().getMinorType());
+  }
+
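+  /**
+   * Verify that a schema with nested maps is exposed both as a flattened
+   * access schema and as a hierarchical physical schema.
+   */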
+  @Test
+  public void testMapSchema() {
+    BatchSchema batchSchema = new SchemaBuilder()
+        .add("c", MinorType.INT)
+        .addMap("a")
+          .addNullable("b", MinorType.VARCHAR)
+          .add("d", MinorType.INT)
+          .addMap("e")
+            .add("f", MinorType.VARCHAR)
+            .buildMap()
+          .add("g", MinorType.INT)
+          .buildMap()
+        .add("h", MinorType.BIGINT)
+        .build();
+
+    RowSetSchema schema = new RowSetSchema(batchSchema);
+
+    // Access schema: flattened with maps removed
+
+    FlattenedSchema access = schema.flatAccess();
+    assertEquals(6, access.count());
+    crossCheck(access, 0, "c", MinorType.INT);
+    crossCheck(access, 1, "a.b", MinorType.VARCHAR);
+    crossCheck(access, 2, "a.d", MinorType.INT);
+    crossCheck(access, 3, "a.e.f", MinorType.VARCHAR);
+    crossCheck(access, 4, "a.g", MinorType.INT);
+    crossCheck(access, 5, "h", MinorType.BIGINT);
+
+    // Should have two maps.
+
+    assertEquals(2, access.mapCount());
+    assertEquals("a", access.map(0).getName());
+    assertEquals("e", access.map(1).getName());
+    assertEquals(0, access.mapIndex("a"));
+    assertEquals(1, access.mapIndex("a.e"));
+
+    // Verify physical schema: should mirror the schema created above.
+
+    PhysicalSchema physical = schema.physical();
+    assertEquals(3, physical.count());
+    assertEquals("c", physical.column(0).field().getName());
+    assertEquals("c", physical.column(0).fullName());
+    assertFalse(physical.column(0).isMap());
+    assertNull(physical.column(0).mapSchema());
+
+    assertEquals("a", physical.column(1).field().getName());
+    assertEquals("a", physical.column(1).fullName());
+    assertTrue(physical.column(1).isMap());
+    assertNotNull(physical.column(1).mapSchema());
+
+    assertEquals("h", physical.column(2).field().getName());
+    assertEquals("h", physical.column(2).fullName());
+    assertFalse(physical.column(2).isMap());
+    assertNull(physical.column(2).mapSchema());
+
+    PhysicalSchema aSchema = physical.column(1).mapSchema();
+    assertEquals(4, aSchema.count());
+    assertEquals("b", aSchema.column(0).field().getName());
+    assertEquals("a.b", aSchema.column(0).fullName());
+    assertEquals("d", aSchema.column(1).field().getName());
+    assertEquals("e", aSchema.column(2).field().getName());
+    assertEquals("g", aSchema.column(3).field().getName());
+
+    PhysicalSchema eSchema = aSchema.column(2).mapSchema();
+    assertEquals(1, eSchema.count());
+    assertEquals("f", eSchema.column(0).field().getName());
+    assertEquals("a.e.f", eSchema.column(0).fullName());
+  }
+
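+  /**
+   * Verify the scalar writers and readers for several numeric types. Each
+   * helper builds a one-column row set, writes the type's boundary values,
+   * and reads them back.
+   */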
+  @Test
+  public void testScalarReaderWriter() {
+    testTinyIntRW();
+    testSmallIntRW();
+    testIntRW();
+    testLongRW();
+    testFloatRW();
+    testDoubleRW();
+  }
+
+  private void testTinyIntRW() {
+    BatchSchema batchSchema = new SchemaBuilder()
+        .add("col", MinorType.TINYINT)
+        .build();
+    SingleRowSet rs = fixture.rowSetBuilder(batchSchema)
+        .add(0)
+        .add(Byte.MAX_VALUE)
+        .add(Byte.MIN_VALUE)
+        .build();
+    RowSetReader reader = rs.reader();
+    assertTrue(reader.next());
+    assertEquals(0, reader.column(0).getInt());
+    assertTrue(reader.next());
+    assertEquals(Byte.MAX_VALUE, reader.column(0).getInt());
+    assertTrue(reader.next());
+    assertEquals(Byte.MIN_VALUE, reader.column(0).getInt());
+    assertFalse(reader.next());
+    rs.clear();
+  }
+
+  private void testSmallIntRW() {
+    BatchSchema batchSchema = new SchemaBuilder()
+        .add("col", MinorType.SMALLINT)
+        .build();
+    SingleRowSet rs = fixture.rowSetBuilder(batchSchema)
+        .add(0)
+        .add(Short.MAX_VALUE)
+        .add(Short.MIN_VALUE)
+        .build();
+    RowSetReader reader = rs.reader();
+    assertTrue(reader.next());
+    assertEquals(0, reader.column(0).getInt());
+    assertTrue(reader.next());
+    assertEquals(Short.MAX_VALUE, reader.column(0).getInt());
+    assertTrue(reader.next());
+    assertEquals(Short.MIN_VALUE, reader.column(0).getInt());
+    assertFalse(reader.next());
+    rs.clear();
+  }
+
+  private void testIntRW() {
+    BatchSchema batchSchema = new SchemaBuilder()
+        .add("col", MinorType.INT)
+        .build();
+    SingleRowSet rs = fixture.rowSetBuilder(batchSchema)
+        .add(0)
+        .add(Integer.MAX_VALUE)
+        .add(Integer.MIN_VALUE)
+        .build();
+    RowSetReader reader = rs.reader();
+    assertTrue(reader.next());
+    assertEquals(0, reader.column(0).getInt());
+    assertTrue(reader.next());
+    assertEquals(Integer.MAX_VALUE, reader.column(0).getInt());
+    assertTrue(reader.next());
+    assertEquals(Integer.MIN_VALUE, reader.column(0).getInt());
+    assertFalse(reader.next());
+    rs.clear();
+  }
+
+  private void testLongRW() {
+    BatchSchema batchSchema = new SchemaBuilder()
+        .add("col", MinorType.BIGINT)
+        .build();
+    SingleRowSet rs = fixture.rowSetBuilder(batchSchema)
+        .add(0L)
+        .add(Long.MAX_VALUE)
+        .add(Long.MIN_VALUE)
+        .build();
+    RowSetReader reader = rs.reader();
+    assertTrue(reader.next());
+    assertEquals(0, reader.column(0).getLong());
+    assertTrue(reader.next());
+    assertEquals(Long.MAX_VALUE, reader.column(0).getLong());
+    assertTrue(reader.next());
+    assertEquals(Long.MIN_VALUE, reader.column(0).getLong());
+    assertFalse(reader.next());
+    rs.clear();
+  }
+
+  private void testFloatRW() {
+    BatchSchema batchSchema = new SchemaBuilder()
+        .add("col", MinorType.FLOAT4)
+        .build();
+    SingleRowSet rs = fixture.rowSetBuilder(batchSchema)
+        .add(0F)
+        .add(Float.MAX_VALUE)
+        .add(Float.MIN_VALUE)
+        .build();
+    RowSetReader reader = rs.reader();
+    assertTrue(reader.next());
+    assertEquals(0, reader.column(0).getDouble(), 0.000001);
+    assertTrue(reader.next());
+    assertEquals(Float.MAX_VALUE, reader.column(0).getDouble(), 0.000001);
+    assertTrue(reader.next());
+    assertEquals(Float.MIN_VALUE, reader.column(0).getDouble(), 0.000001);
+    assertFalse(reader.next());
+    rs.clear();
+  }
+
+  private void testDoubleRW() {
+    BatchSchema batchSchema = new SchemaBuilder()
+        .add("col", MinorType.FLOAT8)
+        .build();
+    SingleRowSet rs = fixture.rowSetBuilder(batchSchema)
+        .add(0D)
+        .add(Double.MAX_VALUE)
+        .add(Double.MIN_VALUE)
+        .build();
+    RowSetReader reader = rs.reader();
+    assertTrue(reader.next());
+    assertEquals(0, reader.column(0).getDouble(), 0.000001);
+    assertTrue(reader.next());
+    assertEquals(Double.MAX_VALUE, reader.column(0).getDouble(), 0.000001);
+    assertTrue(reader.next());
+    assertEquals(Double.MIN_VALUE, reader.column(0).getDouble(), 0.000001);
+    assertFalse(reader.next());
+    rs.clear();
+  }
+
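+  /**
+   * Verify that map members appear as flattened columns which can be read
+   * both by position and by dotted name (such as "b.d").
+   */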
+  @Test
+  public void testMap() {
+    BatchSchema batchSchema = new SchemaBuilder()
+        .add("a", MinorType.INT)
+        .addMap("b")
+          .add("c", MinorType.INT)
+          .add("d", MinorType.INT)
+          .buildMap()
+        .build();
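+
+    // The row set builder takes values in flattened column order: a, b.c, b.d.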
+    SingleRowSet rs = fixture.rowSetBuilder(batchSchema)
+        .add(10, 20, 30)
+        .add(40, 50, 60)
+        .build();
+    RowSetReader reader = rs.reader();
+    assertTrue(reader.next());
+    assertEquals(10, reader.column(0).getInt());
+    assertEquals(20, reader.column(1).getInt());
+    assertEquals(30, reader.column(2).getInt());
+    assertEquals(10, reader.column("a").getInt());
+    assertEquals(30, reader.column("b.d").getInt());
+    assertTrue(reader.next());
+    assertEquals(40, reader.column(0).getInt());
+    assertEquals(50, reader.column(1).getInt());
+    assertEquals(60, reader.column(2).getInt());
+    assertFalse(reader.next());
+    rs.clear();
+  }
+
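+  /**
+   * Verify writing and reading a repeated (array) column, including a row
+   * with an empty array, and check the result against a row set produced
+   * by the builder.
+   */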
+  @Test
+  public void testTopScalarArray() {
+    BatchSchema batchSchema = new SchemaBuilder()
+        .add("c", MinorType.INT)
+        .addArray("a", MinorType.INT)
+        .build();
+
+    ExtendableRowSet rs1 = fixture.rowSet(batchSchema);
+    RowSetWriter writer = rs1.writer();
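+
+    // Write three rows: set the scalar column, append zero or more array
+    // values, then save() the row. done() finishes the batch.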
+    writer.column(0).setInt(10);
+    ArrayWriter array = writer.column(1).array();
+    array.setInt(100);
+    array.setInt(110);
+    writer.save();
+    writer.column(0).setInt(20);
+    array = writer.column(1).array();
+    array.setInt(200);
+    array.setInt(120);
+    array.setInt(220);
+    writer.save();
+    writer.column(0).setInt(30);
+    writer.save();
+    writer.done();
+
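+    // Read the batch back and verify each row, including the empty array
+    // in the final row.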
+    RowSetReader reader = rs1.reader();
+    assertTrue(reader.next());
+    assertEquals(10, reader.column(0).getInt());
+    ArrayReader arrayReader = reader.column(1).array();
+    assertEquals(2, arrayReader.size());
+    assertEquals(100, arrayReader.getInt(0));
+    assertEquals(110, arrayReader.getInt(1));
+    assertTrue(reader.next());
+    assertEquals(20, reader.column(0).getInt());
+    arrayReader = reader.column(1).array();
+    assertEquals(3, arrayReader.size());
+    assertEquals(200, arrayReader.getInt(0));
+    assertEquals(120, arrayReader.getInt(1));
+    assertEquals(220, arrayReader.getInt(2));
+    assertTrue(reader.next());
+    assertEquals(30, reader.column(0).getInt());
+    arrayReader = reader.column(1).array();
+    assertEquals(0, arrayReader.size());
+    assertFalse(reader.next());
+
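+    // Build the same data with the row set builder, then compare the two
+    // row sets.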
+    SingleRowSet rs2 = fixture.rowSetBuilder(batchSchema)
+      .add(10, new int[] {100, 110})
+      .add(20, new int[] {200, 120, 220})
+      .add(30, null)
+      .build();
+
+    new RowSetComparison(rs1)
+      .verifyAndClear(rs2);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/3e8b01d5/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/package-info.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/package-info.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/package-info.java
new file mode 100644
index 0000000..4d44275
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Tests for the row set test fixture. Yes, very meta.
+ */
+package org.apache.drill.test.rowSet.test;

