hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hashut...@apache.org
Subject svn commit: r1526766 [2/2] - in /hive/branches/vectorization: ./ beeline/src/java/org/apache/hive/beeline/ bin/ common/src/java/org/apache/hadoop/hive/conf/ conf/ data/files/exported_table/ data/files/exported_table/data/ eclipse-templates/ hcatalog/co...
Date Fri, 27 Sep 2013 01:26:37 GMT
Modified: hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (original)
+++ hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java Fri Sep 27 01:26:36 2013
@@ -31,7 +31,6 @@ import org.apache.hadoop.hive.common.Jav
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
@@ -53,10 +52,10 @@ public class CreateTableDesc extends DDL
   String databaseName;
   String tableName;
   boolean isExternal;
-  ArrayList<FieldSchema> cols;
-  ArrayList<FieldSchema> partCols;
-  ArrayList<String> bucketCols;
-  ArrayList<Order> sortCols;
+  List<FieldSchema> cols;
+  List<FieldSchema> partCols;
+  List<String> bucketCols;
+  List<Order> sortCols;
   int numBuckets;
   String fieldDelim;
   String fieldEscape;
@@ -130,8 +129,12 @@ public class CreateTableDesc extends DDL
     this.serdeProps = serdeProps;
     this.tblProps = tblProps;
     this.ifNotExists = ifNotExists;
-    this.skewedColNames = new ArrayList<String>(skewedColNames);
-    this.skewedColValues = new ArrayList<List<String>>(skewedColValues);
+    this.skewedColNames = copyList(skewedColNames);
+    this.skewedColValues = copyList(skewedColValues);
+  }
+
+  private static <T> List<T> copyList(List<T> copy) {
+    return copy == null ? null : new ArrayList<T>(copy);
   }
 
   @Explain(displayName = "columns")
@@ -166,7 +169,7 @@ public class CreateTableDesc extends DDL
     this.tableName = tableName;
   }
 
-  public ArrayList<FieldSchema> getCols() {
+  public List<FieldSchema> getCols() {
     return cols;
   }
 
@@ -174,7 +177,7 @@ public class CreateTableDesc extends DDL
     this.cols = cols;
   }
 
-  public ArrayList<FieldSchema> getPartCols() {
+  public List<FieldSchema> getPartCols() {
     return partCols;
   }
 
@@ -183,7 +186,7 @@ public class CreateTableDesc extends DDL
   }
 
   @Explain(displayName = "bucket columns")
-  public ArrayList<String> getBucketCols() {
+  public List<String> getBucketCols() {
     return bucketCols;
   }
 
@@ -303,7 +306,7 @@ public class CreateTableDesc extends DDL
    * @return the sortCols
    */
   @Explain(displayName = "sort columns")
-  public ArrayList<Order> getSortCols() {
+  public List<Order> getSortCols() {
     return sortCols;
   }
 

Modified: hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
(original)
+++ hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
Fri Sep 27 01:26:36 2013
@@ -203,7 +203,7 @@ public class SessionState {
   }
 
   private static final SimpleDateFormat DATE_FORMAT =
-    new SimpleDateFormat("yyyyMMddHHmm");
+      new SimpleDateFormat("yyyyMMddHHmm");
 
   public void setCmd(String cmdString) {
     conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, cmdString);
@@ -448,7 +448,7 @@ public class SessionState {
     } catch (IOException e) {
       console.printError("Unable to validate " + newFile + "\nException: "
           + e.getMessage(), "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+              + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return null;
     }
   }
@@ -465,7 +465,7 @@ public class SessionState {
     } catch (Exception e) {
       console.printError("Unable to register " + newJar + "\nException: "
           + e.getMessage(), "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+              + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return false;
     }
   }
@@ -479,7 +479,7 @@ public class SessionState {
     } catch (Exception e) {
       console.printError("Unable to unregister " + jarsToUnregister
           + "\nException: " + e.getMessage(), "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+              + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return false;
     }
   }
@@ -565,7 +565,7 @@ public class SessionState {
   }
 
   private final HashMap<ResourceType, Set<String>> resource_map =
-    new HashMap<ResourceType, Set<String>>();
+      new HashMap<ResourceType, Set<String>>();
 
   public String add_resource(ResourceType t, String value) {
     // By default don't convert to unix
@@ -783,7 +783,7 @@ public class SessionState {
 
   public void close() throws IOException {
     File resourceDir =
-      new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
+        new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
     LOG.debug("Removing resource dir " + resourceDir);
     try {
       if (resourceDir.exists()) {

Modified: hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Fri Sep 27 01:26:36 2013
@@ -29,9 +29,11 @@ import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.FileWriter;
+import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.PrintStream;
 import java.io.Serializable;
+import java.io.StringWriter;
 import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -46,6 +48,7 @@ import java.util.TreeMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
@@ -94,6 +97,7 @@ import org.apache.zookeeper.ZooKeeper;
  */
 public class QTestUtil {
 
+  public static final String UTF_8 = "UTF-8";
   private static final Log LOG = LogFactory.getLog("QTestUtil");
 
   private String testWarehouse;
@@ -332,27 +336,26 @@ public class QTestUtil {
     }
   }
 
-  public void addFile(String qFile) throws Exception {
-
-    File qf = new File(qFile);
-    addFile(qf);
+  public String readEntireFileIntoString(File queryFile) throws IOException {
+    InputStreamReader isr = new InputStreamReader(
+        new BufferedInputStream(new FileInputStream(queryFile)), QTestUtil.UTF_8);
+    StringWriter sw = new StringWriter();
+    try {
+      IOUtils.copy(isr, sw);
+    } finally {
+      if (isr != null) {
+        isr.close();
+      }
+    }
+    return sw.toString();
   }
 
-  public void addFile(File qf) throws Exception {
-
-    FileInputStream fis = new FileInputStream(qf);
-    BufferedInputStream bis = new BufferedInputStream(fis);
-    BufferedReader br = new BufferedReader(new InputStreamReader(bis, "UTF8"));
-    StringBuilder qsb = new StringBuilder();
-
-    // Read the entire query
-    String line;
-    while ((line = br.readLine()) != null) {
-      qsb.append(line + "\n");
-    }
-    br.close();
+  public void addFile(String queryFile) throws IOException {
+    addFile(new File(queryFile));
+  }
 
-    String query = qsb.toString();
+  public void addFile(File qf) throws IOException  {
+    String query = readEntireFileIntoString(qf);
     qMap.put(qf.getName(), query);
 
     if(checkHadoopVersionExclude(qf.getName(), query)

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
(original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
Fri Sep 27 01:26:36 2013
@@ -22,11 +22,9 @@ import org.apache.hadoop.hive.common.typ
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyHiveVarchar;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
-import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 
 public class LazyHiveVarcharObjectInspector
     extends AbstractPrimitiveLazyObjectInspector<HiveVarcharWritable>
@@ -62,14 +60,16 @@ public class LazyHiveVarcharObjectInspec
     }
 
     HiveVarchar ret = ((LazyHiveVarchar) o).getWritableObject().getHiveVarchar();
+    VarcharTypeParams typeParams = (VarcharTypeParams)getTypeParams();
     if (!ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
-        ret, (VarcharTypeParams) typeParams)) {
-      HiveVarchar newValue = new HiveVarchar(ret, ((VarcharTypeParams) typeParams).length);
+        ret, typeParams)) {
+      HiveVarchar newValue = new HiveVarchar(ret, typeParams.length);
       return newValue;
     }
     return ret;
   }
 
+  @Override
   public String toString() {
     return getTypeName();
   }

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java
(original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java
Fri Sep 27 01:26:36 2013
@@ -29,7 +29,6 @@ public abstract class AbstractPrimitiveO
     PrimitiveObjectInspector {
 
   protected PrimitiveTypeEntry typeEntry;
-  protected BaseTypeParams typeParams;
 
   protected AbstractPrimitiveObjectInspector() {
     super();
@@ -85,14 +84,15 @@ public abstract class AbstractPrimitiveO
   }
 
   public BaseTypeParams getTypeParams() {
-    return typeParams;
+    return typeEntry.typeParams;
   }
 
   public void setTypeParams(BaseTypeParams newParams) {
+    BaseTypeParams typeParams = typeEntry.typeParams;
     if (typeParams != null && !typeEntry.isParameterized()) {
       throw new UnsupportedOperationException(
           "Attempting to add type parameters " + typeParams + " to type " + getTypeName());
     }
-    this.typeParams = newParams;
+    typeEntry.typeParams = newParams;
   }
 }

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
(original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
Fri Sep 27 01:26:36 2013
@@ -18,11 +18,8 @@
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
-import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 
@@ -42,13 +39,14 @@ public class JavaHiveVarcharObjectInspec
     }
   }
 
+  @Override
   public HiveVarchar getPrimitiveJavaObject(Object o) {
     if (o == null) {
       return null;
     }
     HiveVarchar value = (HiveVarchar)o;
     if (ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
-        value, (VarcharTypeParams) typeParams)) {
+        value, (VarcharTypeParams) getTypeParams())) {
       return value;
     }
     // value needs to be converted to match the type params (length, etc).
@@ -78,7 +76,7 @@ public class JavaHiveVarcharObjectInspec
   public Object set(Object o, HiveVarchar value) {
     HiveVarchar setValue = (HiveVarchar)o;
     if (ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
-        value, (VarcharTypeParams) typeParams)) {
+        value, (VarcharTypeParams) getTypeParams())) {
       setValue.setValue(value);
     } else {
       // Otherwise value may be too long, convert to appropriate value based on params
@@ -102,6 +100,7 @@ public class JavaHiveVarcharObjectInspec
   }
 
   public int getMaxLength() {
-    return typeParams != null ? ((VarcharTypeParams) typeParams).length : -1;
+    VarcharTypeParams typeParams = (VarcharTypeParams)getTypeParams();
+    return typeParams != null ? typeParams.length : -1;
   }
 }

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
(original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
Fri Sep 27 01:26:36 2013
@@ -21,11 +21,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
-import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 
 public class WritableHiveVarcharObjectInspector
     extends AbstractPrimitiveWritableObjectInspector
@@ -59,6 +57,7 @@ public class WritableHiveVarcharObjectIn
     return getPrimitiveWithParams(writable);
   }
 
+  @Override
   public HiveVarcharWritable getPrimitiveWritableObject(Object o) {
     // check input object's length, if it doesn't match
     // then output new writable with correct params.
@@ -87,12 +86,7 @@ public class WritableHiveVarcharObjectIn
 
   private boolean doesWritableMatchTypeParams(HiveVarcharWritable writable) {
     return ParameterizedPrimitiveTypeUtils.doesWritableMatchTypeParams(
-        writable, (VarcharTypeParams) typeParams);
-  }
-
-  private boolean doesPrimitiveMatchTypeParams(HiveVarchar value) {
-    return ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
-        value, (VarcharTypeParams) typeParams);
+        writable, (VarcharTypeParams)getTypeParams());
   }
 
   @Override
@@ -130,6 +124,8 @@ public class WritableHiveVarcharObjectIn
   }
 
   public int getMaxLength() {
-    return typeParams != null ? ((VarcharTypeParams) typeParams).length : -1;
+    VarcharTypeParams typeParams = (VarcharTypeParams)getTypeParams();
+    return typeParams != null ? typeParams.length : -1;
   }
+
 }

Modified: hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
(original)
+++ hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
Fri Sep 27 01:26:36 2013
@@ -42,45 +42,37 @@ import org.apache.hive.service.cli.RowSe
 import org.apache.hive.service.cli.SessionHandle;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.thrift.TException;
-import org.apache.thrift.TProcessorFactory;
-import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.server.TServer;
-import org.apache.thrift.server.TThreadPoolServer;
-import org.apache.thrift.transport.TServerSocket;
-import org.apache.thrift.transport.TTransportFactory;
-
 
 /**
- * CLIService.
+ * ThriftCLIService.
  *
  */
-public class ThriftCLIService extends AbstractService implements TCLIService.Iface, Runnable {
+public abstract class ThriftCLIService extends AbstractService implements TCLIService.Iface, Runnable {
 
   public static final Log LOG = LogFactory.getLog(ThriftCLIService.class.getName());
 
-
   protected CLIService cliService;
   private static final TStatus OK_STATUS = new TStatus(TStatusCode.SUCCESS_STATUS);
   private static final TStatus ERROR_STATUS = new TStatus(TStatusCode.ERROR_STATUS);
 
-  private static HiveAuthFactory hiveAuthFactory;
-
-  private int portNum;
-  private InetSocketAddress serverAddress;
-  private TServer server;
+  protected int portNum;
+  protected InetSocketAddress serverAddress;
+  protected TServer server;
+  protected org.mortbay.jetty.Server httpServer;
 
   private boolean isStarted = false;
   protected boolean isEmbedded = false;
 
-  private HiveConf hiveConf;
-
-  private int minWorkerThreads;
-  private int maxWorkerThreads;
+  protected HiveConf hiveConf;
 
+  protected int minWorkerThreads;
+  protected int maxWorkerThreads;
 
+  protected static HiveAuthFactory hiveAuthFactory;
 
-  public ThriftCLIService(CLIService cliService) {
-    super("ThriftCLIService");
+  public ThriftCLIService(CLIService cliService, String serviceName) {
+    super(serviceName);
     this.cliService = cliService;
   }
 
@@ -102,7 +94,18 @@ public class ThriftCLIService extends Ab
   @Override
   public synchronized void stop() {
     if (isStarted && !isEmbedded) {
-      server.stop();
+      if(server != null) {
+        server.stop();
+        LOG.info("Thrift server has stopped");
+      }
+      if((httpServer != null) && httpServer.isStarted()) {
+        try {
+          httpServer.stop();
+          LOG.info("Http server has stopped");
+        } catch (Exception e) {
+          LOG.error("Error stopping Http server: ", e);
+        }
+      }
       isStarted = false;
     }
     super.stop();
@@ -155,10 +158,10 @@ public class ThriftCLIService extends Ab
         // The delegation token is not applicable in the given deployment mode
       }
       sessionHandle = cliService.openSessionWithImpersonation(userName, req.getPassword(),
-            req.getConfiguration(), delegationTokenStr);
+          req.getConfiguration(), delegationTokenStr);
     } else {
       sessionHandle = cliService.openSession(userName, req.getPassword(),
-            req.getConfiguration());
+          req.getConfiguration());
     }
     return sessionHandle;
   }
@@ -203,9 +206,9 @@ public class ThriftCLIService extends Ab
       Boolean runAsync = req.isRunAsync();
       OperationHandle operationHandle = runAsync ?
           cliService.executeStatementAsync(sessionHandle, statement, confOverlay)
-              : cliService.executeStatement(sessionHandle, statement, confOverlay);
-      resp.setOperationHandle(operationHandle.toTOperationHandle());
-      resp.setStatus(OK_STATUS);
+          : cliService.executeStatement(sessionHandle, statement, confOverlay);
+          resp.setOperationHandle(operationHandle.toTOperationHandle());
+          resp.setStatus(OK_STATUS);
     } catch (Exception e) {
       e.printStackTrace();
       resp.setStatus(HiveSQLException.toTStatus(e));
@@ -394,52 +397,6 @@ public class ThriftCLIService extends Ab
     return resp;
   }
 
-
   @Override
-  public void run() {
-    try {
-      hiveAuthFactory = new HiveAuthFactory();
-      TTransportFactory  transportFactory = hiveAuthFactory.getAuthTransFactory();
-      TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
-
-      String portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
-      if (portString != null) {
-        portNum = Integer.valueOf(portString);
-      } else {
-        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
-      }
-
-      String hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
-      if (hiveHost == null) {
-        hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
-      }
-
-      if (hiveHost != null && !hiveHost.isEmpty()) {
-        serverAddress = new InetSocketAddress(hiveHost, portNum);
-      } else {
-        serverAddress = new  InetSocketAddress(portNum);
-      }
-
-
-      minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
-      maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
-
-
-      TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(new TServerSocket(serverAddress))
-      .processorFactory(processorFactory)
-      .transportFactory(transportFactory)
-      .protocolFactory(new TBinaryProtocol.Factory())
-      .minWorkerThreads(minWorkerThreads)
-      .maxWorkerThreads(maxWorkerThreads);
-
-      server = new TThreadPoolServer(sargs);
-
-      LOG.info("ThriftCLIService listening on " + serverAddress);
-
-      server.serve();
-    } catch (Throwable t) {
-      t.printStackTrace();
-    }
-  }
-
+  public abstract void run();
 }

Modified: hive/branches/vectorization/service/src/java/org/apache/hive/service/server/HiveServer2.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/java/org/apache/hive/service/server/HiveServer2.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/java/org/apache/hive/service/server/HiveServer2.java
(original)
+++ hive/branches/vectorization/service/src/java/org/apache/hive/service/server/HiveServer2.java
Fri Sep 27 01:26:36 2013
@@ -26,7 +26,9 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hive.service.CompositeService;
 import org.apache.hive.service.cli.CLIService;
+import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.hive.service.cli.thrift.ThriftHttpCLIService;
 
 /**
  * HiveServer2.
@@ -50,9 +52,19 @@ public class HiveServer2 extends Composi
     cliService = new CLIService();
     addService(cliService);
 
-    thriftCLIService = new ThriftCLIService(cliService);
-    addService(thriftCLIService);
+    String transportMode = System.getenv("HIVE_SERVER2_TRANSPORT_MODE");
+    if(transportMode == null) {
+      transportMode = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
+    }
+    if(transportMode != null && (transportMode.equalsIgnoreCase("http") ||
+        transportMode.equalsIgnoreCase("https"))) {
+      thriftCLIService = new ThriftHttpCLIService(cliService);
+    }
+    else {
+      thriftCLIService = new ThriftBinaryCLIService(cliService);
+    }
 
+    addService(thriftCLIService);
     super.init(hiveConf);
   }
 
@@ -70,7 +82,6 @@ public class HiveServer2 extends Composi
    * @param args
    */
   public static void main(String[] args) {
-
     //NOTE: It is critical to do this here so that log4j is reinitialized
     // before any of the other core hive classes are loaded
     try {
@@ -97,3 +108,4 @@ public class HiveServer2 extends Composi
   }
 
 }
+

Modified: hive/branches/vectorization/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
(original)
+++ hive/branches/vectorization/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
Fri Sep 27 01:26:36 2013
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.service.cli.CLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
 import org.apache.thrift.TProcessorFactory;
 
 public class TestPlainSaslHelper extends TestCase {
@@ -40,7 +41,7 @@ public class TestPlainSaslHelper extends
 
     CLIService cliService = new CLIService();
     cliService.init(hconf);
-    ThriftCLIService tcliService = new ThriftCLIService(cliService);
+    ThriftCLIService tcliService = new ThriftBinaryCLIService(cliService);
     tcliService.init(hconf);
     TProcessorFactory procFactory = PlainSaslHelper.getPlainProcessorFactory(tcliService);
     assertEquals("doAs enabled processor for unsecure mode",

Modified: hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
(original)
+++ hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
Fri Sep 27 01:26:36 2013
@@ -51,7 +51,7 @@ public abstract class CLIServiceTest {
   }
 
   @Test
-  public void createSessionTest() throws Exception {
+  public void openSessionTest() throws Exception {
     SessionHandle sessionHandle = client
         .openSession("tom", "password", Collections.<String, String>emptyMap());
     assertNotNull(sessionHandle);

Modified: hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
(original)
+++ hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
Fri Sep 27 01:26:36 2013
@@ -27,7 +27,7 @@ import junit.framework.TestCase;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.SessionHandle;
-import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
+import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
 import org.junit.Before;
 import org.junit.Test;
@@ -35,7 +35,7 @@ import org.junit.Test;
 public class TestSessionHooks extends TestCase {
 
   public static final String SESSION_USER_NAME = "user1";
-  private EmbeddedThriftCLIService service;
+  private EmbeddedThriftBinaryCLIService service;
   private ThriftCLIServiceClient client;
 
   public static class SessionHookTest implements HiveSessionHook {
@@ -58,7 +58,7 @@ public class TestSessionHooks extends Te
     super.setUp();
     System.setProperty(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
         TestSessionHooks.SessionHookTest.class.getName());
-    service = new EmbeddedThriftCLIService();
+    service = new EmbeddedThriftBinaryCLIService();
     client = new ThriftCLIServiceClient(service);
   }
 



Mime
View raw message