hive-commits mailing list archives

From: ser...@apache.org
Subject: svn commit: r1669718 [8/29] - in /hive/branches/llap: ./ ant/src/org/apache/hadoop/hive/ant/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ common/src/java...
Date: Sat, 28 Mar 2015 00:22:27 GMT
Modified: hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1669718&r1=1669717&r2=1669718&view=diff
==============================================================================
--- hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Sat Mar 28 00:22:15 2015
@@ -66,6 +66,7 @@ import org.apache.hadoop.hive.common.met
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
+import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions;
 import org.apache.hadoop.hive.metastore.api.AddPartitionsRequest;
 import org.apache.hadoop.hive.metastore.api.AddPartitionsResult;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
@@ -5566,6 +5567,12 @@ public class HiveMetaStore extends Thrif
     }
 
     @Override
+    public void add_dynamic_partitions(AddDynamicPartitions rqst)
+        throws NoSuchTxnException, TxnAbortedException, TException {
+      getTxnHandler().addDynamicPartitions(rqst);
+    }
+
+    @Override
     public GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleRequest request)
         throws MetaException, TException {
 

Modified: hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1669718&r1=1669717&r2=1669718&view=diff
==============================================================================
--- hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Sat Mar 28 00:22:15 2015
@@ -54,6 +54,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.conf.HiveConfUtil;
 import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
+import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions;
 import org.apache.hadoop.hive.metastore.api.AddPartitionsRequest;
 import org.apache.hadoop.hive.metastore.api.AddPartitionsResult;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
@@ -1915,6 +1916,12 @@ public class HiveMetaStoreClient impleme
   }
 
   @Override
+  public void addDynamicPartitions(long txnId, String dbName, String tableName,
+                                   List<String> partNames) throws TException {
+    client.add_dynamic_partitions(new AddDynamicPartitions(txnId, dbName, tableName, partNames));
+  }
+
+  @Override
   public NotificationEventResponse getNextNotification(long lastEventId, int maxEvents,
                                                        NotificationFilter filter) throws TException {
     NotificationEventRequest rqst = new NotificationEventRequest(lastEventId);

Modified: hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=1669718&r1=1669717&r2=1669718&view=diff
==============================================================================
--- hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Sat Mar 28 00:22:15 2015
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.metastore
 
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions;
 import org.apache.hadoop.hive.metastore.api.CompactionType;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
@@ -1351,6 +1352,18 @@ public interface IMetaStoreClient {
   ShowCompactResponse showCompactions() throws TException;
 
   /**
+   * Send a list of partitions to the metastore to indicate which partitions were loaded
+   * dynamically.
+   * @param txnId id of the transaction
+   * @param dbName database name
+   * @param tableName table name
+   * @param partNames partition names, as constructed by Warehouse.makePartName
+   * @throws TException
+   */
+  void addDynamicPartitions(long txnId, String dbName, String tableName, List<String> partNames)
+    throws TException;
+
+  /**
    * A filter provided by the client that determines if a given notification event should be
    * returned.
    */

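For context, a minimal caller-side sketch of the new client API (the variable msc, the table, and the partition values are illustrative; openTxn and commitTxn are assumed from the existing IMetaStoreClient transaction API):

    import java.util.Arrays;

    // msc is an IMetaStoreClient, e.g. a HiveMetaStoreClient built from the conf.
    long txnId = msc.openTxn("hive");
    // ... a dynamic-partition load writes deltas for ds=2015-03-27 and ds=2015-03-28 ...
    msc.addDynamicPartitions(txnId, "default", "clicks",
        Arrays.asList("ds=2015-03-27", "ds=2015-03-28"));
    msc.commitTxn(txnId);
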
Modified: hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java?rev=1669718&r1=1669717&r2=1669718&view=diff
==============================================================================
--- hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java (original)
+++ hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java Sat Mar 28 00:22:15 2015
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.metastore.
 /**
  * Information on a possible or running compaction.
  */
-public class CompactionInfo {
+public class CompactionInfo implements Comparable<CompactionInfo> {
   public long id;
   public String dbname;
   public String tableName;
@@ -68,4 +68,9 @@ public class CompactionInfo {
   public boolean isMajorCompaction() {
     return CompactionType.MAJOR == type;
   }
+
+  @Override
+  public int compareTo(CompactionInfo o) {
+    return getFullPartitionName().compareTo(o.getFullPartitionName());
+  }
 }

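Implementing Comparable, keyed on the full partition name, gives callers a deterministic order over the otherwise unordered Set returned by findPotentialCompactions; for example (mirroring the new test further below):

    SortedSet<CompactionInfo> sorted =
        new TreeSet<CompactionInfo>(txnHandler.findPotentialCompactions(1000));
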
Modified: hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java?rev=1669718&r1=1669717&r2=1669718&view=diff
==============================================================================
--- hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java (original)
+++ hive/branches/llap/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java Sat Mar 28 00:22:15 2015
@@ -783,6 +783,48 @@ public class TxnHandler {
     }
   }
 
+  public void addDynamicPartitions(AddDynamicPartitions rqst)
+      throws NoSuchTxnException, TxnAbortedException, MetaException {
+    Connection dbConn = null;
+    Statement stmt = null;
+    try {
+      try {
+        dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
+        stmt = dbConn.createStatement();
+        // Heartbeat this first to make sure the transaction is still valid.
+        heartbeatTxn(dbConn, rqst.getTxnid());
+        for (String partName : rqst.getPartitionnames()) {
+          StringBuilder buff = new StringBuilder();
+          buff.append("insert into TXN_COMPONENTS (tc_txnid, tc_database, tc_table, tc_partition) values (");
+          buff.append(rqst.getTxnid());
+          buff.append(", '");
+          buff.append(rqst.getDbname());
+          buff.append("', '");
+          buff.append(rqst.getTablename());
+          buff.append("', '");
+          buff.append(partName);
+          buff.append("')");
+          String s = buff.toString();
+          LOG.debug("Going to execute update <" + s + ">");
+          stmt.executeUpdate(s);
+        }
+        LOG.debug("Going to commit");
+        dbConn.commit();
+      } catch (SQLException e) {
+        LOG.debug("Going to rollback");
+        rollbackDBConn(dbConn);
+        checkRetryable(dbConn, e, "addDynamicPartitions");
+        throw new MetaException("Unable to insert into transaction database " +
+          StringUtils.stringifyException(e));
+      } finally {
+        closeStmt(stmt);
+        closeDbConn(dbConn);
+      }
+    } catch (RetryException e) {
+      addDynamicPartitions(rqst);
+    }
+  }
+
   /**
    * For testing only, do not use.
    */

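As a concrete illustration of the statement assembled above: for a request with txnid 7 against default.adp_table and partition ds=today (values picked for illustration), the loop executes

    insert into TXN_COMPONENTS (tc_txnid, tc_database, tc_table, tc_partition)
      values (7, 'default', 'adp_table', 'ds=today')

one such insert per partition name in the request.
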
Modified: hive/branches/llap/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java?rev=1669718&r1=1669717&r2=1669718&view=diff
==============================================================================
--- hive/branches/llap/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java (original)
+++ hive/branches/llap/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java Sat Mar 28 00:22:15 2015
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hive.metastore.txn;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.log4j.Level;
@@ -26,8 +28,11 @@ import org.junit.Before;
 import org.junit.Test;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
 
 import static junit.framework.Assert.*;
 
@@ -38,6 +43,7 @@ public class TestCompactionTxnHandler {
 
   private HiveConf conf = new HiveConf();
   private CompactionTxnHandler txnHandler;
+  private static final Log LOG = LogFactory.getLog(TestCompactionTxnHandler.class);
 
   public TestCompactionTxnHandler() throws Exception {
     TxnDbUtil.setConfValues(conf);
@@ -417,6 +423,40 @@ public class TestCompactionTxnHandler {
     assertEquals(3, txnList.getOpen_txnsSize());
   }
 
+  @Test
+  public void addDynamicPartitions() throws Exception {
+    String dbName = "default";
+    String tableName = "adp_table";
+    OpenTxnsResponse openTxns = txnHandler.openTxns(new OpenTxnRequest(1, "me", "localhost"));
+    long txnId = openTxns.getTxn_ids().get(0);
+    // lock a table, as in dynamic partitions
+    LockComponent lc = new LockComponent(LockType.SHARED_WRITE, LockLevel.TABLE, dbName);
+    lc.setTablename(tableName);
+    LockRequest lr = new LockRequest(Arrays.asList(lc), "me", "localhost");
+    lr.setTxnid(txnId);
+    LockResponse lock = txnHandler.lock(lr);
+    assertEquals(LockState.ACQUIRED, lock.getState());
+
+    txnHandler.addDynamicPartitions(new AddDynamicPartitions(txnId, dbName, tableName,
+        Arrays.asList("ds=yesterday", "ds=today")));
+    txnHandler.commitTxn(new CommitTxnRequest(txnId));
+
+    Set<CompactionInfo> potentials = txnHandler.findPotentialCompactions(1000);
+    assertEquals(2, potentials.size());
+    SortedSet<CompactionInfo> sorted = new TreeSet<CompactionInfo>(potentials);
+
+    int i = 0;
+    for (CompactionInfo ci : sorted) {
+      assertEquals(dbName, ci.dbname);
+      assertEquals(tableName, ci.tableName);
+      switch (i++) {
+      case 0: assertEquals("ds=today", ci.partName); break;
+      case 1: assertEquals("ds=yesterday", ci.partName); break;
+      default: throw new RuntimeException("Unexpected extra compaction entry");
+      }
+    }
+  }
+
   @Before
   public void setUp() throws Exception {
     TxnDbUtil.prepDb();

Modified: hive/branches/llap/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java?rev=1669718&r1=1669717&r2=1669718&view=diff
==============================================================================
--- hive/branches/llap/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java (original)
+++ hive/branches/llap/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java Sat Mar 28 00:22:15 2015
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.metastore
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreThread;
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;

Modified: hive/branches/llap/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/pom.xml?rev=1669718&r1=1669717&r2=1669718&view=diff
==============================================================================
--- hive/branches/llap/pom.xml (original)
+++ hive/branches/llap/pom.xml Sat Mar 28 00:22:15 2015
@@ -102,7 +102,7 @@
     <antlr.version>3.4</antlr.version>
     <avro.version>1.7.5</avro.version>
     <bonecp.version>0.8.0.RELEASE</bonecp.version>
-    <calcite.version>1.0.0-incubating</calcite.version>
+    <calcite.version>1.1.0-incubating</calcite.version>
     <datanucleus-api-jdo.version>3.2.6</datanucleus-api-jdo.version>
     <datanucleus-core.version>3.2.10</datanucleus-core.version>
     <datanucleus-rdbms.version>3.2.9</datanucleus-rdbms.version>
@@ -174,49 +174,7 @@
 
   <repositories>
    <!-- This needs to be removed before checking in-->
-    <repository>
-      <id>datanucleus</id>
-      <name>datanucleus maven repository</name>
-      <url>http://www.datanucleus.org/downloads/maven2</url>
-      <layout>default</layout>
-      <releases>
-        <enabled>true</enabled>
-        <checksumPolicy>warn</checksumPolicy>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <repository>
-      <id>glassfish-repository</id>
-      <url>http://maven.glassfish.org/content/groups/glassfish</url>
-      <releases>
-        <enabled>false</enabled>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <repository>
-      <id>glassfish-repo-archive</id>
-      <url>http://maven.glassfish.org/content/groups/glassfish</url>
-      <releases>
-        <enabled>false</enabled>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-     </repository>
-     <repository>
-       <id>sonatype-snapshot</id>
-       <url>https://oss.sonatype.org/content/repositories/snapshots</url>
-       <releases>
-         <enabled>false</enabled>
-       </releases>
-       <snapshots>
-         <enabled>false</enabled>
-       </snapshots>
-    </repository>
+
   </repositories>
 
   <!-- Hadoop dependency management is done at the bottom under profiles -->

Modified: hive/branches/llap/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java?rev=1669718&r1=1669717&r2=1669718&view=diff
==============================================================================
--- hive/branches/llap/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java (original)
+++ hive/branches/llap/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java Sat Mar 28 00:22:15 2015
@@ -10603,6 +10603,21 @@ public final class OrcProto {
      */
     org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnEncodingOrBuilder getColumnsOrBuilder(
         int index);
+
+    // optional string writerTimezone = 3;
+    /**
+     * <code>optional string writerTimezone = 3;</code>
+     */
+    boolean hasWriterTimezone();
+    /**
+     * <code>optional string writerTimezone = 3;</code>
+     */
+    java.lang.String getWriterTimezone();
+    /**
+     * <code>optional string writerTimezone = 3;</code>
+     */
+    com.google.protobuf.ByteString
+        getWriterTimezoneBytes();
   }
   /**
    * Protobuf type {@code orc.proto.StripeFooter}
@@ -10671,6 +10686,11 @@ public final class OrcProto {
               columns_.add(input.readMessage(org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnEncoding.PARSER, extensionRegistry));
               break;
             }
+            case 26: {
+              bitField0_ |= 0x00000001;
+              writerTimezone_ = input.readBytes();
+              break;
+            }
           }
         }
       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -10716,6 +10736,7 @@ public final class OrcProto {
       return PARSER;
     }
 
+    private int bitField0_;
     // repeated .orc.proto.Stream streams = 1;
     public static final int STREAMS_FIELD_NUMBER = 1;
     private java.util.List<org.apache.hadoop.hive.ql.io.orc.OrcProto.Stream> streams_;
@@ -10788,9 +10809,53 @@ public final class OrcProto {
       return columns_.get(index);
     }
 
+    // optional string writerTimezone = 3;
+    public static final int WRITERTIMEZONE_FIELD_NUMBER = 3;
+    private java.lang.Object writerTimezone_;
+    /**
+     * <code>optional string writerTimezone = 3;</code>
+     */
+    public boolean hasWriterTimezone() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional string writerTimezone = 3;</code>
+     */
+    public java.lang.String getWriterTimezone() {
+      java.lang.Object ref = writerTimezone_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          writerTimezone_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string writerTimezone = 3;</code>
+     */
+    public com.google.protobuf.ByteString
+        getWriterTimezoneBytes() {
+      java.lang.Object ref = writerTimezone_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        writerTimezone_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
     private void initFields() {
       streams_ = java.util.Collections.emptyList();
       columns_ = java.util.Collections.emptyList();
+      writerTimezone_ = "";
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
@@ -10810,6 +10875,9 @@ public final class OrcProto {
       for (int i = 0; i < columns_.size(); i++) {
         output.writeMessage(2, columns_.get(i));
       }
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(3, getWriterTimezoneBytes());
+      }
       getUnknownFields().writeTo(output);
     }
 
@@ -10827,6 +10895,10 @@ public final class OrcProto {
         size += com.google.protobuf.CodedOutputStream
           .computeMessageSize(2, columns_.get(i));
       }
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(3, getWriterTimezoneBytes());
+      }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
       return size;
@@ -10957,6 +11029,8 @@ public final class OrcProto {
         } else {
           columnsBuilder_.clear();
         }
+        writerTimezone_ = "";
+        bitField0_ = (bitField0_ & ~0x00000004);
         return this;
       }
 
@@ -10984,6 +11058,7 @@ public final class OrcProto {
       public org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeFooter buildPartial() {
         org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeFooter result = new org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeFooter(this);
         int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
         if (streamsBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001)) {
             streams_ = java.util.Collections.unmodifiableList(streams_);
@@ -11002,6 +11077,11 @@ public final class OrcProto {
         } else {
           result.columns_ = columnsBuilder_.build();
         }
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.writerTimezone_ = writerTimezone_;
+        result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
       }
@@ -11069,6 +11149,11 @@ public final class OrcProto {
             }
           }
         }
+        if (other.hasWriterTimezone()) {
+          bitField0_ |= 0x00000004;
+          writerTimezone_ = other.writerTimezone_;
+          onChanged();
+        }
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
@@ -11576,6 +11661,80 @@ public final class OrcProto {
         return columnsBuilder_;
       }
 
+      // optional string writerTimezone = 3;
+      private java.lang.Object writerTimezone_ = "";
+      /**
+       * <code>optional string writerTimezone = 3;</code>
+       */
+      public boolean hasWriterTimezone() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * <code>optional string writerTimezone = 3;</code>
+       */
+      public java.lang.String getWriterTimezone() {
+        java.lang.Object ref = writerTimezone_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          writerTimezone_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string writerTimezone = 3;</code>
+       */
+      public com.google.protobuf.ByteString
+          getWriterTimezoneBytes() {
+        java.lang.Object ref = writerTimezone_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          writerTimezone_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string writerTimezone = 3;</code>
+       */
+      public Builder setWriterTimezone(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000004;
+        writerTimezone_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string writerTimezone = 3;</code>
+       */
+      public Builder clearWriterTimezone() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        writerTimezone_ = getDefaultInstance().getWriterTimezone();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string writerTimezone = 3;</code>
+       */
+      public Builder setWriterTimezoneBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000004;
+        writerTimezone_ = value;
+        onChanged();
+        return this;
+      }
+
       // @@protoc_insertion_point(builder_scope:orc.proto.StripeFooter)
     }
 
@@ -18921,40 +19080,41 @@ public final class OrcProto {
       "ng\022,\n\004kind\030\001 \001(\0162\036.orc.proto.ColumnEncod" +
       "ing.Kind\022\026\n\016dictionarySize\030\002 \001(\r\"D\n\004Kind" +
       "\022\n\n\006DIRECT\020\000\022\016\n\nDICTIONARY\020\001\022\r\n\tDIRECT_V",
-      "2\020\002\022\021\n\rDICTIONARY_V2\020\003\"^\n\014StripeFooter\022\"" +
+      "2\020\002\022\021\n\rDICTIONARY_V2\020\003\"v\n\014StripeFooter\022\"" +
       "\n\007streams\030\001 \003(\0132\021.orc.proto.Stream\022*\n\007co" +
-      "lumns\030\002 \003(\0132\031.orc.proto.ColumnEncoding\"\341" +
-      "\002\n\004Type\022\"\n\004kind\030\001 \001(\0162\024.orc.proto.Type.K" +
-      "ind\022\024\n\010subtypes\030\002 \003(\rB\002\020\001\022\022\n\nfieldNames\030" +
-      "\003 \003(\t\022\025\n\rmaximumLength\030\004 \001(\r\022\021\n\tprecisio" +
-      "n\030\005 \001(\r\022\r\n\005scale\030\006 \001(\r\"\321\001\n\004Kind\022\013\n\007BOOLE" +
-      "AN\020\000\022\010\n\004BYTE\020\001\022\t\n\005SHORT\020\002\022\007\n\003INT\020\003\022\010\n\004LO" +
-      "NG\020\004\022\t\n\005FLOAT\020\005\022\n\n\006DOUBLE\020\006\022\n\n\006STRING\020\007\022" +
-      "\n\n\006BINARY\020\010\022\r\n\tTIMESTAMP\020\t\022\010\n\004LIST\020\n\022\007\n\003",
-      "MAP\020\013\022\n\n\006STRUCT\020\014\022\t\n\005UNION\020\r\022\013\n\007DECIMAL\020" +
-      "\016\022\010\n\004DATE\020\017\022\013\n\007VARCHAR\020\020\022\010\n\004CHAR\020\021\"x\n\021St" +
-      "ripeInformation\022\016\n\006offset\030\001 \001(\004\022\023\n\013index" +
-      "Length\030\002 \001(\004\022\022\n\ndataLength\030\003 \001(\004\022\024\n\014foot" +
-      "erLength\030\004 \001(\004\022\024\n\014numberOfRows\030\005 \001(\004\"/\n\020" +
-      "UserMetadataItem\022\014\n\004name\030\001 \001(\t\022\r\n\005value\030" +
-      "\002 \001(\014\"A\n\020StripeStatistics\022-\n\010colStats\030\001 " +
-      "\003(\0132\033.orc.proto.ColumnStatistics\"<\n\010Meta" +
-      "data\0220\n\013stripeStats\030\001 \003(\0132\033.orc.proto.St" +
-      "ripeStatistics\"\222\002\n\006Footer\022\024\n\014headerLengt",
-      "h\030\001 \001(\004\022\025\n\rcontentLength\030\002 \001(\004\022-\n\007stripe" +
-      "s\030\003 \003(\0132\034.orc.proto.StripeInformation\022\036\n" +
-      "\005types\030\004 \003(\0132\017.orc.proto.Type\022-\n\010metadat" +
-      "a\030\005 \003(\0132\033.orc.proto.UserMetadataItem\022\024\n\014" +
-      "numberOfRows\030\006 \001(\004\022/\n\nstatistics\030\007 \003(\0132\033" +
-      ".orc.proto.ColumnStatistics\022\026\n\016rowIndexS" +
-      "tride\030\010 \001(\r\"\305\001\n\nPostScript\022\024\n\014footerLeng" +
-      "th\030\001 \001(\004\022/\n\013compression\030\002 \001(\0162\032.orc.prot" +
-      "o.CompressionKind\022\034\n\024compressionBlockSiz" +
-      "e\030\003 \001(\004\022\023\n\007version\030\004 \003(\rB\002\020\001\022\026\n\016metadata",
-      "Length\030\005 \001(\004\022\025\n\rwriterVersion\030\006 \001(\r\022\016\n\005m" +
-      "agic\030\300> \001(\t*:\n\017CompressionKind\022\010\n\004NONE\020\000" +
-      "\022\010\n\004ZLIB\020\001\022\n\n\006SNAPPY\020\002\022\007\n\003LZO\020\003B\"\n org.a" +
-      "pache.hadoop.hive.ql.io.orc"
+      "lumns\030\002 \003(\0132\031.orc.proto.ColumnEncoding\022\026" +
+      "\n\016writerTimezone\030\003 \001(\t\"\341\002\n\004Type\022\"\n\004kind\030" +
+      "\001 \001(\0162\024.orc.proto.Type.Kind\022\024\n\010subtypes\030" +
+      "\002 \003(\rB\002\020\001\022\022\n\nfieldNames\030\003 \003(\t\022\025\n\rmaximum" +
+      "Length\030\004 \001(\r\022\021\n\tprecision\030\005 \001(\r\022\r\n\005scale" +
+      "\030\006 \001(\r\"\321\001\n\004Kind\022\013\n\007BOOLEAN\020\000\022\010\n\004BYTE\020\001\022\t" +
+      "\n\005SHORT\020\002\022\007\n\003INT\020\003\022\010\n\004LONG\020\004\022\t\n\005FLOAT\020\005\022" +
+      "\n\n\006DOUBLE\020\006\022\n\n\006STRING\020\007\022\n\n\006BINARY\020\010\022\r\n\tT",
+      "IMESTAMP\020\t\022\010\n\004LIST\020\n\022\007\n\003MAP\020\013\022\n\n\006STRUCT\020" +
+      "\014\022\t\n\005UNION\020\r\022\013\n\007DECIMAL\020\016\022\010\n\004DATE\020\017\022\013\n\007V" +
+      "ARCHAR\020\020\022\010\n\004CHAR\020\021\"x\n\021StripeInformation\022" +
+      "\016\n\006offset\030\001 \001(\004\022\023\n\013indexLength\030\002 \001(\004\022\022\n\n" +
+      "dataLength\030\003 \001(\004\022\024\n\014footerLength\030\004 \001(\004\022\024" +
+      "\n\014numberOfRows\030\005 \001(\004\"/\n\020UserMetadataItem" +
+      "\022\014\n\004name\030\001 \001(\t\022\r\n\005value\030\002 \001(\014\"A\n\020StripeS" +
+      "tatistics\022-\n\010colStats\030\001 \003(\0132\033.orc.proto." +
+      "ColumnStatistics\"<\n\010Metadata\0220\n\013stripeSt" +
+      "ats\030\001 \003(\0132\033.orc.proto.StripeStatistics\"\222",
+      "\002\n\006Footer\022\024\n\014headerLength\030\001 \001(\004\022\025\n\rconte" +
+      "ntLength\030\002 \001(\004\022-\n\007stripes\030\003 \003(\0132\034.orc.pr" +
+      "oto.StripeInformation\022\036\n\005types\030\004 \003(\0132\017.o" +
+      "rc.proto.Type\022-\n\010metadata\030\005 \003(\0132\033.orc.pr" +
+      "oto.UserMetadataItem\022\024\n\014numberOfRows\030\006 \001" +
+      "(\004\022/\n\nstatistics\030\007 \003(\0132\033.orc.proto.Colum" +
+      "nStatistics\022\026\n\016rowIndexStride\030\010 \001(\r\"\305\001\n\n" +
+      "PostScript\022\024\n\014footerLength\030\001 \001(\004\022/\n\013comp" +
+      "ression\030\002 \001(\0162\032.orc.proto.CompressionKin" +
+      "d\022\034\n\024compressionBlockSize\030\003 \001(\004\022\023\n\007versi",
+      "on\030\004 \003(\rB\002\020\001\022\026\n\016metadataLength\030\005 \001(\004\022\025\n\r" +
+      "writerVersion\030\006 \001(\r\022\016\n\005magic\030\300> \001(\t*:\n\017C" +
+      "ompressionKind\022\010\n\004NONE\020\000\022\010\n\004ZLIB\020\001\022\n\n\006SN" +
+      "APPY\020\002\022\007\n\003LZO\020\003B\"\n org.apache.hadoop.hiv" +
+      "e.ql.io.orc"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -19056,7 +19216,7 @@ public final class OrcProto {
           internal_static_orc_proto_StripeFooter_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_orc_proto_StripeFooter_descriptor,
-              new java.lang.String[] { "Streams", "Columns", });
+              new java.lang.String[] { "Streams", "Columns", "WriterTimezone", });
           internal_static_orc_proto_Type_descriptor =
             getDescriptor().getMessageTypes().get(16);
           internal_static_orc_proto_Type_fieldAccessorTable = new

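The new field travels through the standard generated accessors; a minimal sketch of setting and reading it (the timezone value shown is illustrative):

    import java.util.TimeZone;
    import org.apache.hadoop.hive.ql.io.orc.OrcProto;

    // Record the writer's timezone in a stripe footer and read it back.
    OrcProto.StripeFooter footer = OrcProto.StripeFooter.newBuilder()
        .setWriterTimezone(TimeZone.getDefault().getID())
        .build();
    if (footer.hasWriterTimezone()) {
      String writerTz = footer.getWriterTimezone();  // e.g. "America/Los_Angeles"
    }
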
Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnWithConvert.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnWithConvert.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnWithConvert.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnWithConvert.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,173 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template ColumnArithmeticColumnWithConvert.txt, which covers binary arithmetic 
+ * expressions between columns.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+  
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    <VectorOperandType1>[] vector1 = inputColVector1.vector;
+    <VectorOperandType2>[] vector2 = inputColVector2.vector;
+    <VectorReturnType>[] outputVector = outputColVector.vector;
+    
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+    
+    outputColVector.isRepeating = 
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+    
+    // Handle nulls first  
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+          
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or 
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */ 
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) { 
+      outputVector[0] = <TypeConversion1>(vector1[0]) <OperatorSymbol> <TypeConversion2>(vector2[0]);
+    } else if (inputColVector1.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion1>(vector1[0]) <OperatorSymbol> <TypeConversion2>(vector2[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion1>(vector1[0]) <OperatorSymbol> <TypeConversion2>(vector2[i]);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion1>(vector1[i]) <OperatorSymbol> <TypeConversion2>(vector2[0]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion1>(vector1[i]) <OperatorSymbol> <TypeConversion2>(vector2[0]);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion1>(vector1[i]) <OperatorSymbol> <TypeConversion2>(vector2[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion1>(vector1[i]) <OperatorSymbol> <TypeConversion2>(vector2[i]);
+        }
+      }
+    }
+    
+    /* For the case when the output can have null values, follow 
+     * the convention that the data values must be 1 for long and 
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntries<CamelReturnType>(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<VectorReturnType>";
+  }
+  
+  public int getColNum1() {
+    return colNum1;
+  }
+
+  public void setColNum1(int colNum1) {
+    this.colNum1 = colNum1;
+  }
+
+  public int getColNum2() {
+    return colNum2;
+  }
+
+  public void setColNum2(int colNum2) {
+    this.colNum2 = colNum2;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+  
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+

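To make the template parameters concrete, here is a hypothetical expansion of one inner-loop line. The binding names below, including toMillis, are illustrative only and not taken from the generator's actual configuration:

    // hypothetical expansion with <OperatorSymbol> bound to + and
    // <TypeConversion1>/<TypeConversion2> bound to an illustrative toMillis()
    outputVector[i] = toMillis(vector1[i]) + toMillis(vector2[i]);
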
Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarWithConvert.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarWithConvert.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarWithConvert.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarWithConvert.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.<InputColumnVectorType>;
+import org.apache.hadoop.hive.ql.exec.vector.<OutputColumnVectorType>;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+/**
+ * Generated from template ColumnArithmeticScalarWithConvert.txt, which covers binary arithmetic 
+ * expressions between a column and a scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <VectorOperandType2> value;
+  private int outputColumn;
+
+  public <ClassName>(int colNum, <VectorOperandType2> value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = <TypeConversion2>(value);
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    outputColVector.isRepeating = inputColVector.isRepeating;
+    int n = batch.size;
+    <VectorOperandType1>[] vector = inputColVector.vector;
+    <VectorReturnType>[] outputVector = outputColVector.vector;
+    
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      outputVector[0] = <TypeConversion1>(vector[0]) <OperatorSymbol> value;
+      
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0]; 
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion1>(vector[i]) <OperatorSymbol> value;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion1>(vector[i]) <OperatorSymbol> value;
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion1>(vector[i]) <OperatorSymbol> value;
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion1>(vector[i]) <OperatorSymbol> value;
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+    
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+  
+  @Override
+  public String getOutputType() {
+    return "<VectorReturnType>";
+  }
+  
+  public int getColNum() {
+    return colNum;
+  }
+  
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public <VectorOperandType2> getValue() {
+    return value;
+  }
+
+  public void setValue(<VectorOperandType2> value) {
+    this.value = value;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}

Modified: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt?rev=1669718&r1=1669717&r2=1669718&view=diff
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt (original)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt Sat Mar 28 00:22:15 2015
@@ -130,7 +130,7 @@ public class <ClassName> extends VectorE
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
   }

Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.udf.UDFToString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.io.LongWritable;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template DTIColumnArithmeticDTIColumnNoConvert.txt, which covers arithmetic 
+ * expressions between a datetime/interval column and a datetime/interval column.
+ * No type conversion is needed; the operations can be performed directly on the vectorized long values.
+ */
+public class <ClassName> extends LongCol<OperatorName>LongColumn {
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    super(colNum1, colNum2, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
+

Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+
+/**
+ * Generated from template DTIColumnArithmeticDTIScalarNoConvert.txt, which covers arithmetic
+ * expressions between a datetime/interval column and a datetime/interval scalar.
+ * No type conversion is needed; the operations can be performed directly on the vectorized long values.
+ */
+public class <ClassName> extends LongCol<OperatorName>LongScalar {
+
+  public <ClassName>(int colNum, long value, int outputColumn) {
+    super(colNum, value, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
+

Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+
+/**
+ * Generated from template DTIColumnCompareScalar.txt, which covers comparison 
+ * expressions between a datetime/interval column and a scalar of the same type. The boolean output
+ * is stored in a separate boolean column.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  public <ClassName>(int colNum, long value, int outputColumn) {
+    super(colNum, value, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}

Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.udf.UDFToString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.io.LongWritable;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template DTIScalarArithmeticDTIColumnNoConvert.txt, which covers arithmetic 
+ * expressions between a datetime/interval scalar and a datetime/interval column.
+ * No type conversion is needed; the operations can be performed directly on the vectorized long values.
+ */
+public class <ClassName> extends LongScalar<OperatorName>LongColumn {
+
+  public <ClassName>(long value, int colNum, int outputColumn) {
+    super(value, colNum, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+

Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.udf.UDFToString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.io.LongWritable;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template DTIScalarCompareColumn.txt, which covers comparison 
+ * expressions between a datetime/interval scalar and a column of the same type. The boolean output
+ * is stored in a separate boolean column.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  public <ClassName>(long value, int colNum, int outputColumn) {
+    super(value, colNum, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
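A hedged usage sketch for a class generated from this template follows. The concrete class name, its base class, and the months-as-long encoding are assumptions for illustration; the real classes come out of the build's template expansion.

    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    // Assumed generated class (hypothetical name):
    import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IntervalYearMonthScalarEqualIntervalYearMonthColumn;

    public class ScalarCompareColumnDemo {
      public static void main(String[] args) {
        // One input column (index 0) and one output column (index 1).
        VectorizedRowBatch batch = new VectorizedRowBatch(2, 3);
        LongColumnVector col = new LongColumnVector(3);
        col.vector[0] = 12;   // interval_year_month encoded as total months (assumed)
        col.vector[1] = 24;
        col.vector[2] = 12;
        batch.cols[0] = col;
        batch.cols[1] = new LongColumnVector(3);
        batch.size = 3;

        // scalar == column, writing 0/1 into the long-backed boolean column
        IntervalYearMonthScalarEqualIntervalYearMonthColumn expr =
            new IntervalYearMonthScalarEqualIntervalYearMonthColumn(12, 0, 1);
        expr.evaluate(batch);

        long[] out = ((LongColumnVector) batch.cols[1]).vector;
        // out is now {1, 0, 1} -- the "separate boolean column" described above.
      }
    }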

Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template DateTimeColumnArithmeticIntervalColumnWithConvert.txt, which covers
+ * binary arithmetic expressions between a datetime column and an interval column, converting
+ * the datetime values as needed before the operation.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+  
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    <VectorOperandType1>[] vector1 = inputColVector1.vector;
+    <VectorOperandType2>[] vector2 = inputColVector2.vector;
+    <VectorReturnType>[] outputVector = outputColVector.vector;
+    
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+    
+    outputColVector.isRepeating = 
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+    
+    // Handle nulls first  
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+          
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or 
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */ 
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) { 
+      outputVector[0] = <OperatorFunction>(<TypeConversionToMillis>(vector1[0]), <OperatorSymbol> (int) vector2[0]);
+    } else if (inputColVector1.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[0]), <OperatorSymbol> (int) vector2[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[0]), <OperatorSymbol> (int) vector2[i]);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[i]), <OperatorSymbol> (int) vector2[0]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[i]), <OperatorSymbol> (int) vector2[0]);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[i]), <OperatorSymbol> (int) vector2[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[i]), <OperatorSymbol> (int) vector2[i]);
+        }
+      }
+    }
+    
+    /* For the case when the output can have null values, follow 
+     * the convention that the data values must be 1 for long and 
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntries<CamelReturnType>(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<VectorReturnType>";
+  }
+  
+  public int getColNum1() {
+    return colNum1;
+  }
+
+  public void setColNum1(int colNum1) {
+    this.colNum1 = colNum1;
+  }
+
+  public int getColNum2() {
+    return colNum2;
+  }
+
+  public void setColNum2(int colNum2) {
+    this.colNum2 = colNum2;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+  
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
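To make the repeating/null conventions above concrete, here is a hedged driver sketch. DateColAddIntervalYearMonthColumn is an assumed product of this template, and the long encodings (epoch days for dates, total months for intervals) are assumptions as well.

    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    // Assumed generated class (hypothetical name):
    import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DateColAddIntervalYearMonthColumn;

    public class ColColDemo {
      public static void main(String[] args) {
        int n = 3;
        VectorizedRowBatch batch = new VectorizedRowBatch(3, n);
        LongColumnVector dates = new LongColumnVector(n);
        LongColumnVector intervals = new LongColumnVector(n);
        LongColumnVector out = new LongColumnVector(n);
        batch.cols[0] = dates;
        batch.cols[1] = intervals;
        batch.cols[2] = out;
        batch.size = n;

        // A repeating left input: only entry 0 is meaningful.
        dates.isRepeating = true;
        dates.vector[0] = 16000L;      // epoch days (assumed encoding)

        intervals.vector[0] = 1;       // months (assumed encoding)
        intervals.vector[1] = 2;
        intervals.noNulls = false;
        intervals.isNull[2] = true;    // one null input row

        DateColAddIntervalYearMonthColumn expr =
            new DateColAddIntervalYearMonthColumn(0, 1, 2);
        expr.evaluate(batch);

        // After evaluate(): out.isNull[2] is true (propagated), and out.vector[2]
        // holds the placeholder 1 rather than a computed value, per the
        // null-data convention noted in the template above.
      }
    }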

Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.<InputColumnVectorType>;
+import org.apache.hadoop.hive.ql.exec.vector.<OutputColumnVectorType>;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template DateTimeColumnArithmeticIntervalScalarWithConvert.txt, which covers
+ * binary arithmetic expressions between a datetime column and an interval scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <VectorOperandType2> value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum, <VectorOperandType2> value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    outputColVector.isRepeating = inputColVector.isRepeating;
+    int n = batch.size;
+    <VectorOperandType1>[] vector = inputColVector.vector;
+    <VectorReturnType>[] outputVector = outputColVector.vector;
+    
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      outputVector[0] = <OperatorFunction>(<TypeConversionToMillis>(vector[0]), <OperatorSymbol> (int) value);
+      
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0]; 
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+    
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+  
+  @Override
+  public String getOutputType() {
+    return "<VectorReturnType>";
+  }
+  
+  public int getColNum() {
+    return colNum;
+  }
+  
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public <VectorOperandType2> getValue() {
+    return value;
+  }
+
+  public void setValue(<VectorOperandType2> value) {
+    this.value = value;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
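As a concrete reading of the <OperatorFunction>/<TypeConversionToMillis> placeholders in the loop above, one plausible substitution is date plus interval_year_month, with the day-encoded date scaled to milliseconds first. Both the DateTimeMath entry point and the use of DateWritable.daysToMillis below are assumptions, not substitutions taken from this commit.

    import org.apache.hadoop.hive.ql.util.DateTimeMath;
    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class ConvertSketch {
      private static final DateTimeMath dtm = new DateTimeMath();

      // Hypothetical expansion of one inner-loop statement, assuming
      //   <OperatorFunction>       = dtm.addMonthsToMillis   (assumed method)
      //   <TypeConversionToMillis> = DateWritable.daysToMillis
      //   <OperatorSymbol>         = +  (or - for subtraction)
      static long addMonthsToDate(long epochDays, int months) {
        return dtm.addMonthsToMillis(DateWritable.daysToMillis((int) epochDays), + months);
      }
    }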

Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+
+/*
+ * Because of the templatized nature of the code, either or both
+ * of these ColumnVector imports may be needed. Both are listed
+ * explicitly rather than relying on a wildcard import.
+ */
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template DateTimeScalarArithmeticIntervalColumnWithConvert.txt.
+ * Implements a vectorized arithmetic operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <VectorOperandType1> value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(<VectorOperandType1> value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = <TypeConversionToMillis>(value);
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  /**
+   * Evaluates the scalar-column operation in a vectorized fashion.
+   *
+   * @param batch a package of rows with each column stored in a vector
+   */
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    outputColVector.isRepeating = inputColVector.isRepeating;
+    int n = batch.size;
+    <VectorOperandType2>[] vector = inputColVector.vector;
+    <VectorReturnType>[] outputVector = outputColVector.vector;
+    
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      outputVector[0] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[0]);
+      
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0]; 
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+    
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+  
+  @Override
+  public String getOutputType() {
+    return "<VectorReturnType>";
+  }
+  
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public <VectorOperandType1> getValue() {
+    return value;
+  }
+
+  public void setValue(<VectorOperandType1> value) {
+    this.value = value;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
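One design point in this template: the scalar is converted once, in the constructor (this.value = <TypeConversionToMillis>(value)), so evaluate() never re-converts it per row. A hedged sketch, with the generated class name and encodings assumed:

    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    // Assumed generated class (hypothetical name):
    import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DateScalarAddIntervalYearMonthColumn;

    public class ScalarConvertOnceDemo {
      public static void main(String[] args) {
        VectorizedRowBatch batch = new VectorizedRowBatch(2, 2);
        batch.cols[0] = new LongColumnVector(2);  // interval months (assumed encoding)
        batch.cols[1] = new LongColumnVector(2);  // output in millis (assumed)
        batch.size = 2;

        // The constructor applies the millisecond conversion to the scalar
        // exactly once; each evaluate(batch) call then reuses the converted
        // value in its inner loops.
        DateScalarAddIntervalYearMonthColumn expr =
            new DateScalarAddIntervalYearMonthColumn(16800L /* epoch days, assumed */, 0, 1);
        expr.evaluate(batch);
      }
    }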

Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterDTIColumnCompareScalar.txt, which covers comparison
+ * expressions between a datetime/interval column and a scalar of the same type; however,
+ * the output is not produced in a separate column.
+ * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  public <ClassName>(int colNum, long value) { 
+    super(colNum, value);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
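A hedged sketch of the in-place filtering contract described above. FilterIntervalYearMonthColGreaterIntervalYearMonthScalar is an assumed generated name, and the long encoding of the interval is an assumption as well.

    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    // Assumed generated class (hypothetical name):
    import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterIntervalYearMonthColGreaterIntervalYearMonthScalar;

    public class FilterDemo {
      public static void main(String[] args) {
        VectorizedRowBatch batch = new VectorizedRowBatch(1, 4);
        LongColumnVector col = new LongColumnVector(4);
        col.vector[0] = 5;
        col.vector[1] = 20;
        col.vector[2] = 7;
        col.vector[3] = 30;
        batch.cols[0] = col;
        batch.size = 4;

        // Assumed semantics: keep rows where col > 10.
        FilterIntervalYearMonthColGreaterIntervalYearMonthScalar filter =
            new FilterIntervalYearMonthColGreaterIntervalYearMonthScalar(0, 10L);
        filter.evaluate(batch);

        // No output column is written. Instead, batch.selectedInUse is now true,
        // batch.size == 2, and batch.selected starts with {1, 3} -- the rows
        // that survived the filter.
      }
    }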

Added: hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIScalarCompareColumn.txt
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIScalarCompareColumn.txt?rev=1669718&view=auto
==============================================================================
--- hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIScalarCompareColumn.txt (added)
+++ hive/branches/llap/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIScalarCompareColumn.txt Sat Mar 28 00:22:15 2015
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterDTIScalarCompareColumn.txt, which covers comparison
+ * expressions between a datetime/interval scalar and a column of the same type; however,
+ * the output is not produced in a separate column.
+ * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  public <ClassName>(long value, int colNum) { 
+    super(value, colNum);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}


