hive-commits mailing list archives

From prasan...@apache.org
Subject [1/2] hive git commit: HIVE-12302: Use KryoPool instead of thread-local caching (Prasanth Jayachandran reviewed by Ashutosh Chauhan)
Date Tue, 08 Dec 2015 19:08:11 GMT
Repository: hive
Updated Branches:
  refs/heads/master 560e4feba -> 2bb5e63c9
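
The hunks below swap Utilities' thread-local Kryo instances for borrow/release calls on a new SerializationUtilities class. That class itself is not included in this message, so the following is only a minimal sketch of how a pool-backed helper could look, assuming Kryo 3.x's KryoPool API; the class name, the softReferences() choice, and the registration comment are illustrative assumptions, not the committed implementation.

    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.pool.KryoFactory;
    import com.esotericsoftware.kryo.pool.KryoPool;

    // Sketch only: a pool handing out Kryo instances instead of thread-local caching.
    public final class KryoPoolSketch {

      private static final KryoFactory FACTORY = new KryoFactory() {
        @Override
        public Kryo create() {
          Kryo kryo = new Kryo();
          // The real helper would configure the instance and register Hive
          // plan/operator classes here (assumption; not shown in this message).
          return kryo;
        }
      };

      // softReferences() lets idle pooled instances be reclaimed under memory pressure.
      private static final KryoPool POOL = new KryoPool.Builder(FACTORY).softReferences().build();

      private KryoPoolSketch() {
      }

      public static Kryo borrowKryo() {
        return POOL.borrow();
      }

      public static void releaseKryo(Kryo kryo) {
        POOL.release(kryo);
      }
    }

Callers then bracket every use in try/finally, which is exactly the pattern the hunks below introduce:

    Kryo kryo = SerializationUtilities.borrowKryo();
    try {
      kryo.writeObject(output, value);   // or readObject/readClassAndObject
    } finally {
      SerializationUtilities.releaseKryo(kryo);
    }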


http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
index 058d63d..cb70ac8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
@@ -177,7 +178,7 @@ public class MapRedTask extends ExecDriver implements Serializable {
       OutputStream out = null;
       try {
         out = FileSystem.getLocal(conf).create(planPath);
-        Utilities.serializePlan(plan, out, conf);
+        SerializationUtilities.serializePlan(plan, out, conf);
         out.close();
         out = null;
       } finally {

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index bfe21db..cb7dfa1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -33,6 +33,7 @@ import java.util.Properties;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
@@ -159,7 +160,7 @@ public class MapredLocalTask extends Task<MapredLocalWork> implements Serializab
       OutputStream out = null;
       try {
         out = FileSystem.getLocal(conf).create(planPath);
-        Utilities.serializePlan(plan, out, conf);
+        SerializationUtilities.serializePlan(plan, out, conf);
         out.close();
         out = null;
       } finally {

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
index a0c9b98..f2f3c09 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
@@ -29,15 +29,12 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hive.common.util.HashCodeUtil;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.JoinUtil;
 import org.apache.hadoop.hive.ql.exec.JoinUtil.JoinResult;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer.KeyValueHelper;
 import org.apache.hadoop.hive.ql.exec.vector.VectorHashKeyWrapper;
 import org.apache.hadoop.hive.ql.exec.vector.VectorHashKeyWrapperBatch;
@@ -58,6 +55,9 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hive.common.util.BloomFilter;
+import org.apache.hive.common.util.HashCodeUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.esotericsoftware.kryo.Kryo;
 
@@ -159,8 +159,13 @@ public class HybridHashTableContainer
       } else {
         InputStream inputStream = Files.newInputStream(hashMapLocalPath);
         com.esotericsoftware.kryo.io.Input input = new com.esotericsoftware.kryo.io.Input(inputStream);
-        Kryo kryo = Utilities.runtimeSerializationKryo.get();
-        BytesBytesMultiHashMap restoredHashMap = kryo.readObject(input, BytesBytesMultiHashMap.class);
+        Kryo kryo = SerializationUtilities.borrowKryo();
+        BytesBytesMultiHashMap restoredHashMap = null;
+        try {
+          restoredHashMap = kryo.readObject(input, BytesBytesMultiHashMap.class);
+        } finally {
+          SerializationUtilities.releaseKryo(kryo);
+        }
 
         if (rowCount > 0) {
           restoredHashMap.expandAndRehashToTarget(rowCount);
@@ -551,10 +556,14 @@ public class HybridHashTableContainer
 
     com.esotericsoftware.kryo.io.Output output =
         new com.esotericsoftware.kryo.io.Output(outputStream);
-    Kryo kryo = Utilities.runtimeSerializationKryo.get();
-    kryo.writeObject(output, partition.hashMap);  // use Kryo to serialize hashmap
-    output.close();
-    outputStream.close();
+    Kryo kryo = SerializationUtilities.borrowKryo();
+    try {
+      kryo.writeObject(output, partition.hashMap);  // use Kryo to serialize hashmap
+      output.close();
+      outputStream.close();
+    } finally {
+      SerializationUtilities.releaseKryo(kryo);
+    }
 
     partition.hashMapLocalPath = path;
     partition.hashMapOnDisk = true;

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java
index 6d391a3..a976de0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java
@@ -17,21 +17,22 @@
  */
 package org.apache.hadoop.hive.ql.exec.persistence;
 
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.io.Input;
 import com.esotericsoftware.kryo.io.Output;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
 
 /**
  * An eager object container that puts every row directly to output stream.
@@ -58,14 +59,11 @@ public class ObjectContainer<ROW> {
   private Input input;
   private Output output;
 
-  private Kryo kryo;
-
   public ObjectContainer() {
     readBuffer = (ROW[]) new Object[IN_MEMORY_NUM_ROWS];
     for (int i = 0; i < IN_MEMORY_NUM_ROWS; i++) {
       readBuffer[i] = (ROW) new Object();
     }
-    kryo = Utilities.runtimeSerializationKryo.get();
     try {
       setupOutput();
     } catch (IOException | HiveException e) {
@@ -101,7 +99,12 @@ public class ObjectContainer<ROW> {
   }
 
   public void add(ROW row) {
-    kryo.writeClassAndObject(output, row);
+    Kryo kryo = SerializationUtilities.borrowKryo();
+    try {
+      kryo.writeClassAndObject(output, row);
+    } finally {
+      SerializationUtilities.releaseKryo(kryo);
+    }
     rowsOnDisk++;
   }
 
@@ -164,8 +167,13 @@ public class ObjectContainer<ROW> {
             rowsInReadBuffer = rowsOnDisk;
           }
 
-          for (int i = 0; i < rowsInReadBuffer; i++) {
-            readBuffer[i] = (ROW) kryo.readClassAndObject(input);
+          Kryo kryo = SerializationUtilities.borrowKryo();
+          try {
+            for (int i = 0; i < rowsInReadBuffer; i++) {
+              readBuffer[i] = (ROW) kryo.readClassAndObject(input);
+            }
+          } finally {
+            SerializationUtilities.releaseKryo(kryo);
           }
 
           if (input.eof()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
index fd7109a..d7c278a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
@@ -24,11 +24,12 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 
 import org.apache.commons.io.output.ByteArrayOutputStream;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.mapred.JobConf;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.mapred.JobConf;
 
+import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.io.Input;
 import com.esotericsoftware.kryo.io.Output;
 
@@ -39,15 +40,28 @@ public class KryoSerializer {
     ByteArrayOutputStream stream = new ByteArrayOutputStream();
     Output output = new Output(stream);
 
-    Utilities.sparkSerializationKryo.get().writeObject(output, object);
+    Kryo kryo = SerializationUtilities.borrowKryo();
+    kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
+    try {
+      kryo.writeObject(output, object);
+    } finally {
+      SerializationUtilities.releaseKryo(kryo);
+    }
 
     output.close(); // close() also calls flush()
     return stream.toByteArray();
   }
 
   public static <T> T deserialize(byte[] buffer, Class<T> clazz) {
-    return Utilities.sparkSerializationKryo.get().readObject(
-        new Input(new ByteArrayInputStream(buffer)), clazz);
+    Kryo kryo = SerializationUtilities.borrowKryo();
+    kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
+    T result = null;
+    try {
+      result = kryo.readObject(new Input(new ByteArrayInputStream(buffer)), clazz);
+    } finally {
+      SerializationUtilities.releaseKryo(kryo);
+    }
+    return result;
   }
 
   public static byte[] serializeJobConf(JobConf jobConf) {
@@ -80,8 +94,4 @@ public class KryoSerializer {
     return conf;
   }
 
-  public static void setClassLoader(ClassLoader classLoader) {
-    Utilities.sparkSerializationKryo.get().setClassLoader(classLoader);
-  }
-
 }
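
Because pooled Kryo instances are shared rather than owned by a thread, per-use state has to be applied at each borrow. That is presumably why the static setClassLoader() hook is deleted above and serialize()/deserialize() now set the current thread's context classloader on the borrowed instance themselves; RemoteHiveSparkClient (next hunk) correspondingly drops its setClassLoader() call. A hypothetical caller sketch follows, assuming serialize() keeps the byte[]-returning shape visible in the hunk body; the wrapper class and plan type are chosen only for illustration.

    import org.apache.hadoop.hive.ql.exec.spark.KryoSerializer;
    import org.apache.hadoop.hive.ql.plan.MapredWork;

    // Sketch only: once added jars are on the context classloader,
    // deserialize() applies that loader to the borrowed Kryo internally,
    // so no separate KryoSerializer.setClassLoader() call is needed.
    public class KryoSerializerUsageSketch {
      public static MapredWork roundTrip(MapredWork work) {
        byte[] bytes = KryoSerializer.serialize(work);
        return KryoSerializer.deserialize(bytes, MapredWork.class);
      }
    }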

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
index c4cb2ba..6380774 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
@@ -298,7 +298,6 @@ public class RemoteHiveSparkClient implements HiveSparkClient {
       Map<String, Long> addedJars = jc.getAddedJars();
       if (addedJars != null && !addedJars.isEmpty()) {
         SparkClientUtilities.addToClassPath(addedJars, localJobConf, jc.getLocalTmpDir());
-        KryoSerializer.setClassLoader(Thread.currentThread().getContextClassLoader());
         localJobConf.set(Utilities.HIVE_ADDED_JARS, StringUtils.join(addedJars.keySet(), ";"));
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index 3feab1a..b19c70a 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -30,6 +30,7 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configurable;
@@ -532,14 +533,14 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
     if (!hasObj) {
       Serializable filterObject = scanDesc.getFilterObject();
       if (filterObject != null) {
-        serializedFilterObj = Utilities.serializeObject(filterObject);
+        serializedFilterObj = SerializationUtilities.serializeObject(filterObject);
       }
     }
     if (serializedFilterObj != null) {
       jobConf.set(TableScanDesc.FILTER_OBJECT_CONF_STR, serializedFilterObj);
     }
     if (!hasExpr) {
-      serializedFilterExpr = Utilities.serializeExpression(filterExpr);
+      serializedFilterExpr = SerializationUtilities.serializeExpression(filterExpr);
     }
     String filterText = filterExpr.getExprString();
     if (LOG.isDebugEnabled()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
index 13390de..017676b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
@@ -22,6 +22,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
@@ -128,7 +129,7 @@ public class ProjectionPusher {
     }
 
     final String filterText = filterExpr.getExprString();
-    final String filterExprSerialized = Utilities.serializeExpression(filterExpr);
+    final String filterExprSerialized = SerializationUtilities.serializeExpression(filterExpr);
     jobConf.set(
         TableScanDesc.FILTER_TEXT_CONF_STR,
         filterText);

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
index 7e888bc..6d3a134 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
@@ -23,11 +23,9 @@ import java.sql.Timestamp;
 import java.util.List;
 
 import org.apache.commons.codec.binary.Base64;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -51,6 +49,8 @@ import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.io.Input;
@@ -433,7 +433,7 @@ public class ConvertAstToSearchArg {
   public static SearchArgument createFromConf(Configuration conf) {
     String sargString;
     if ((sargString = conf.get(TableScanDesc.FILTER_EXPR_CONF_STR)) != null) {
-      return create(Utilities.deserializeExpression(sargString));
+      return create(SerializationUtilities.deserializeExpression(sargString));
     } else if ((sargString = conf.get(SARG_PUSHDOWN)) != null) {
       return create(sargString);
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 44189ef..c682df2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -18,10 +18,32 @@
 
 package org.apache.hadoop.hive.ql.metadata;
 
-import com.google.common.collect.Sets;
+import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
+import static org.apache.hadoop.hive.serde.serdeConstants.COLLECTION_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.ESCAPE_CHAR;
+import static org.apache.hadoop.hive.serde.serdeConstants.FIELD_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.LINE_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.MAPKEY_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT;
+import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -81,6 +103,7 @@ import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.FunctionTask;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.tez.InPlaceUpdates;
 import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
@@ -100,32 +123,10 @@ import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
-import static org.apache.hadoop.hive.serde.serdeConstants.COLLECTION_DELIM;
-import static org.apache.hadoop.hive.serde.serdeConstants.ESCAPE_CHAR;
-import static org.apache.hadoop.hive.serde.serdeConstants.FIELD_DELIM;
-import static org.apache.hadoop.hive.serde.serdeConstants.LINE_DELIM;
-import static org.apache.hadoop.hive.serde.serdeConstants.MAPKEY_DELIM;
-import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT;
-import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME;
+import com.google.common.collect.Sets;
 
 /**
  * This class has functions that implement meta data/DDL operations using calls
@@ -2087,7 +2088,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
           new ArrayList<ObjectPair<Integer,byte[]>>(partSpecs.size());
       for (DropTableDesc.PartSpec partSpec : partSpecs) {
         partExprs.add(new ObjectPair<Integer, byte[]>(partSpec.getPrefixLength(),
-            Utilities.serializeExpressionToKryo(partSpec.getPartSpec())));
+            SerializationUtilities.serializeExpressionToKryo(partSpec.getPartSpec())));
       }
       List<org.apache.hadoop.hive.metastore.api.Partition> tParts = getMSC().dropPartitions(
           dbName, tblName, partExprs, dropOptions);
@@ -2362,7 +2363,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
   public boolean getPartitionsByExpr(Table tbl, ExprNodeGenericFuncDesc expr, HiveConf conf,
       List<Partition> result) throws HiveException, TException {
     assert result != null;
-    byte[] exprBytes = Utilities.serializeExpressionToKryo(expr);
+    byte[] exprBytes = SerializationUtilities.serializeExpressionToKryo(expr);
     String defaultPartitionName = HiveConf.getVar(conf, ConfVars.DEFAULTPARTITIONNAME);
     List<org.apache.hadoop.hive.metastore.api.Partition> msParts =
         new ArrayList<org.apache.hadoop.hive.metastore.api.Partition>();

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
index 1f6b5d7..e9ca5fa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -485,7 +486,7 @@ public class CommonJoinTaskDispatcher extends AbstractJoinTaskDispatcher impleme
         }
         // deep copy a new mapred work from xml
         // Once HIVE-4396 is in, it would be faster to use a cheaper method to clone the plan
-        MapredWork newWork = Utilities.clonePlan(currTask.getWork());
+        MapredWork newWork = SerializationUtilities.clonePlan(currTask.getWork());
 
         // create map join task and set big table as i
         MapRedTask newTask = convertTaskToMapJoinTask(newWork, pos);

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
index 15f0d70..a71c474 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
@@ -18,6 +18,13 @@
 
 package org.apache.hadoop.hive.ql.optimizer.physical;
 
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
@@ -27,6 +34,7 @@ import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -53,13 +61,6 @@ import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
 /**
  * GenMRSkewJoinProcessor.
  *
@@ -253,7 +254,7 @@ public final class GenMRSkewJoinProcessor {
           HiveConf.ConfVars.HIVE_MAPPER_CANNOT_SPAN_MULTIPLE_PARTITIONS);
       newPlan.setMapperCannotSpanPartns(mapperCannotSpanPartns);
 
-      MapredWork clonePlan = Utilities.clonePlan(currPlan);
+      MapredWork clonePlan = SerializationUtilities.clonePlan(currPlan);
 
       Operator<? extends OperatorDesc>[] parentOps = new TableScanOperator[tags.length];
       for (int k = 0; k < tags.length; k++) {

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
index 895e64e..41d3522 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
@@ -18,10 +18,12 @@
 
 package org.apache.hadoop.hive.ql.optimizer.physical;
 
-import com.google.common.base.Preconditions;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
@@ -32,6 +34,7 @@ import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.SparkHashTableSinkOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -62,12 +65,10 @@ import org.apache.hadoop.hive.ql.plan.SparkWork;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import com.google.common.base.Preconditions;
 
 /**
  * Copied from GenMRSkewJoinProcessor. It's used for spark task
@@ -254,7 +255,7 @@ public class GenSparkSkewJoinProcessor {
       // this makes sure MJ has the same downstream operator plan as the original join
       List<Operator<?>> reducerList = new ArrayList<Operator<?>>();
       reducerList.add(reduceWork.getReducer());
-      Operator<? extends OperatorDesc> reducer = Utilities.cloneOperatorTree(
+      Operator<? extends OperatorDesc> reducer = SerializationUtilities.cloneOperatorTree(
           parseCtx.getConf(), reducerList).get(0);
       Preconditions.checkArgument(reducer instanceof JoinOperator,
           "Reducer should be join operator, but actually is " + reducer.getName());

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java
index e94f6e7..dc433fe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java
@@ -19,29 +19,18 @@ package org.apache.hadoop.hive.ql.optimizer.physical;
 
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.Iterator;
 import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
-import java.util.List;
+import java.util.Map;
 import java.util.Set;
-import java.util.SortedSet;
 import java.util.Stack;
-import java.util.TreeSet;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.StatsTask;
-import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.exec.tez.DagUtils;
+import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.tez.TezTask;
-import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -52,13 +41,14 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.lib.Rule;
 import org.apache.hadoop.hive.ql.lib.RuleRegExp;
 import org.apache.hadoop.hive.ql.lib.TaskGraphWalker;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.BaseWork;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.MergeJoinWork;
 import org.apache.hadoop.hive.ql.plan.ReduceWork;
 import org.apache.hadoop.hive.ql.plan.TezWork;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * SerializeFilter is a simple physical optimizer that serializes all filter expressions in
@@ -151,7 +141,7 @@ public class SerializeFilter implements PhysicalPlanResolver {
             LOG.debug("Serializing: " + ts.getConf().getFilterExpr().getExprString());
           }
           ts.getConf().setSerializedFilterExpr(
-            Utilities.serializeExpression(ts.getConf().getFilterExpr()));
+              SerializationUtilities.serializeExpression(ts.getConf().getFilterExpr()));
         }
 
         if (ts.getConf() != null && ts.getConf().getFilterObject() != null) {
@@ -160,7 +150,7 @@ public class SerializeFilter implements PhysicalPlanResolver {
           }
 
           ts.getConf().setSerializedFilterObject(
-            Utilities.serializeObject(ts.getConf().getFilterObject()));
+              SerializationUtilities.serializeObject(ts.getConf().getFilterObject()));
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
index 3b09c2f..658717c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
 import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -147,7 +148,7 @@ public class SortMergeJoinTaskDispatcher extends AbstractJoinTaskDispatcher impl
       throws SemanticException {
     try {
       // deep copy a new mapred work
-      MapredWork currJoinWork = Utilities.clonePlan(currWork);
+      MapredWork currJoinWork = SerializationUtilities.clonePlan(currWork);
       SMBMapJoinOperator newSMBJoinOp = getSMBMapJoinOp(currJoinWork);
 
       // change the newly created map-red plan as if it was a join operator
@@ -165,7 +166,7 @@ public class SortMergeJoinTaskDispatcher extends AbstractJoinTaskDispatcher impl
       SMBMapJoinOperator smbJoinOp)
       throws UnsupportedEncodingException, SemanticException {
     // deep copy a new mapred work
-    MapredWork newWork = Utilities.clonePlan(origWork);
+    MapredWork newWork = SerializationUtilities.clonePlan(origWork);
     // create a mapred task for this work
     MapRedTask newTask = (MapRedTask) TaskFactory.get(newWork, physicalContext
         .getParseContext().getConf());

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java
index f9978b4..42ad04b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java
@@ -22,13 +22,11 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.List;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.metastore.Metastore.SplitInfo;
 import org.apache.hadoop.hive.metastore.Metastore.SplitInfos;
 import org.apache.hadoop.hive.metastore.PartitionExpressionProxy;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.io.orc.OrcProto;
 import org.apache.hadoop.hive.ql.io.orc.ReaderImpl;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The basic implementation of PartitionExpressionProxy that uses ql package classes.
@@ -71,7 +71,7 @@ public class PartitionExpressionForMetastore implements PartitionExpressionProxy
   private ExprNodeGenericFuncDesc deserializeExpr(byte[] exprBytes) throws MetaException {
     ExprNodeGenericFuncDesc expr = null;
     try {
-      expr = Utilities.deserializeExpressionFromKryo(exprBytes);
+      expr = SerializationUtilities.deserializeExpressionFromKryo(exprBytes);
     } catch (Exception ex) {
       LOG.error("Failed to deserialize the expression", ex);
       throw new MetaException(ex.getMessage());

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SplitSparkWorkResolver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SplitSparkWorkResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SplitSparkWorkResolver.java
index fb20080..6931ad9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SplitSparkWorkResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SplitSparkWorkResolver.java
@@ -30,8 +30,8 @@ import java.util.Set;
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
 import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalContext;
 import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalPlanResolver;
@@ -95,7 +95,7 @@ public class SplitSparkWorkResolver implements PhysicalPlanResolver {
     boolean isFirst = true;
 
     for (BaseWork childWork : childWorks) {
-      BaseWork clonedParentWork = Utilities.cloneBaseWork(parentWork);
+      BaseWork clonedParentWork = SerializationUtilities.cloneBaseWork(parentWork);
       // give the cloned work a different name
       clonedParentWork.setName(clonedParentWork.getName().replaceAll("^([a-zA-Z]+)(\\s+)(\\d+)",
           "$1$2" + GenSparkUtils.getUtils().getNextSeqNumber()));

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
index 27d7276..fe0e234 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import static org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.AUTOPARALLEL;
+
 import java.util.ArrayList;
 import java.util.Deque;
 import java.util.HashSet;
@@ -26,8 +28,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -55,12 +56,12 @@ import org.apache.hadoop.hive.ql.plan.TezEdgeProperty;
 import org.apache.hadoop.hive.ql.plan.TezEdgeProperty.EdgeType;
 import org.apache.hadoop.hive.ql.plan.TezWork;
 import org.apache.hadoop.hive.ql.plan.UnionWork;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.BiMap;
 import com.google.common.collect.HashBiMap;
 
-import static org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.AUTOPARALLEL;
-
 /**
  * GenTezUtils is a collection of shared helper methods to produce TezWork.
  * All the methods in this class should be static, but some aren't; this is to facilitate testing.
@@ -216,7 +217,7 @@ public class GenTezUtils {
     roots.addAll(context.eventOperatorSet);
 
     // need to clone the plan.
-    List<Operator<?>> newRoots = Utilities.cloneOperatorTree(conf, roots);
+    List<Operator<?>> newRoots = SerializationUtilities.cloneOperatorTree(conf, roots);
 
     // we're cloning the operator plan but we're retaining the original work. That means
     // that root operators have to be replaced with the cloned ops. The replacement map

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
index 40c23a5..8dc48cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
@@ -28,8 +28,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -43,9 +41,9 @@ import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
 import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
-import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.spark.SparkUtilities;
 import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
 import org.apache.hadoop.hive.ql.optimizer.spark.SparkPartitionPruningSinkDesc;
@@ -60,10 +58,12 @@ import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.ReduceWork;
 import org.apache.hadoop.hive.ql.plan.SparkEdgeProperty;
 import org.apache.hadoop.hive.ql.plan.SparkWork;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
-import org.apache.hadoop.hive.ql.plan.TableDesc;
 
 /**
  * GenSparkUtils is a collection of shared helper methods to produce SparkWork
@@ -207,7 +207,7 @@ public class GenSparkUtils {
     }
 
     // need to clone the plan.
-    List<Operator<?>> newRoots = Utilities.cloneOperatorTree(conf, roots);
+    List<Operator<?>> newRoots = SerializationUtilities.cloneOperatorTree(conf, roots);
 
     // Build a map to map the original FileSinkOperator and the cloned FileSinkOperators
     // This map is used for set the stats flag for the cloned FileSinkOperators in later process

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java
index c140f67..4bb661a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java
@@ -24,10 +24,10 @@ import java.util.List;
 import java.util.Set;
 import java.util.Stack;
 
-import com.google.common.base.Preconditions;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.spark.SparkUtilities;
@@ -36,6 +36,8 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
+import com.google.common.base.Preconditions;
+
 /**
  * This processor triggers on SparkPartitionPruningSinkOperator. For a operator tree like
  * this:
@@ -105,7 +107,8 @@ public class SplitOpTreeForDPP implements NodeProcessor {
     filterOp.setChildOperators(Utilities.makeList(selOp));
 
     // Now clone the tree above selOp
-    List<Operator<?>> newRoots = Utilities.cloneOperatorTree(context.parseContext.getConf(), roots);
+    List<Operator<?>> newRoots = SerializationUtilities.cloneOperatorTree(
+        context.parseContext.getConf(), roots);
     for (int i = 0; i < roots.size(); i++) {
       TableScanOperator newTs = (TableScanOperator) newRoots.get(i);
       TableScanOperator oldTs = (TableScanOperator) roots.get(i);

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java
index 5c5fafa..e2aaa70 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java
@@ -24,17 +24,18 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hive.common.StatsSetupConst;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.stats.StatsAggregator;
 import org.apache.hadoop.hive.ql.stats.StatsCollectionContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.io.Input;
 
 public class FSStatsAggregator implements StatsAggregator {
@@ -62,7 +63,12 @@ public class FSStatsAggregator implements StatsAggregator {
       });
       for (FileStatus file : status) {
         Input in = new Input(fs.open(file.getPath()));
-        statsMap = Utilities.runtimeSerializationKryo.get().readObject(in, statsMap.getClass());
+        Kryo kryo = SerializationUtilities.borrowKryo();
+        try {
+          statsMap = kryo.readObject(in, statsMap.getClass());
+        } finally {
+          SerializationUtilities.releaseKryo(kryo);
+        }
         LOG.info("Read stats : " +statsMap);
         statsList.add(statsMap);
         in.close();

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java
index 80f954b..e5d89e8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java
@@ -24,15 +24,16 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.StatsSetupConst;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.stats.StatsCollectionContext;
 import org.apache.hadoop.hive.ql.stats.StatsPublisher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.io.Output;
 
 public class FSStatsPublisher implements StatsPublisher {
@@ -100,7 +101,12 @@ public class FSStatsPublisher implements StatsPublisher {
       Output output = new Output(statsFile.getFileSystem(conf).create(statsFile,true));
       LOG.debug("Created file : " + statsFile);
       LOG.debug("Writing stats in it : " + statsMap);
-      Utilities.runtimeSerializationKryo.get().writeObject(output, statsMap);
+      Kryo kryo = SerializationUtilities.borrowKryo();
+      try {
+        kryo.writeObject(output, statsMap);
+      } finally {
+        SerializationUtilities.releaseKryo(kryo);
+      }
       output.close();
       return true;
     } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java b/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
index d6d513d..5e53604 100644
--- a/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
+++ b/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Stack;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -36,7 +34,7 @@ import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
@@ -52,6 +50,8 @@ import org.apache.thrift.TException;
 
 import com.google.common.collect.Lists;
 
+import junit.framework.TestCase;
+
 /**
  * Tests hive metastore expression support. This should be moved in metastore module
  * as soon as we are able to use ql from metastore server (requires splitting metastore
@@ -166,8 +166,8 @@ public class TestMetastoreExpr extends TestCase {
   public void checkExpr(int numParts,
       String dbName, String tblName, ExprNodeGenericFuncDesc expr) throws Exception {
     List<Partition> parts = new ArrayList<Partition>();
-    client.listPartitionsByExpr(
-        dbName, tblName, Utilities.serializeExpressionToKryo(expr), null, (short)-1, parts);
+    client.listPartitionsByExpr(dbName, tblName,
+        SerializationUtilities.serializeExpressionToKryo(expr), null, (short)-1, parts);
     assertEquals("Partition check failed: " + expr.getExprString(), numParts, parts.size());
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
index 1364888..c1667c2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
@@ -23,8 +23,6 @@ import java.io.File;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
@@ -37,6 +35,8 @@ import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.JobConf;
 
+import junit.framework.TestCase;
+
 /**
  * TestPlan.
  *
@@ -83,7 +83,7 @@ public class TestPlan extends TestCase {
       JobConf job = new JobConf(TestPlan.class);
       // serialize the configuration once ..
       ByteArrayOutputStream baos = new ByteArrayOutputStream();
-      Utilities.serializePlan(mrwork, baos, job);
+      SerializationUtilities.serializePlan(mrwork, baos, job);
       baos.close();
       String v1 = baos.toString();
 
@@ -101,7 +101,7 @@ public class TestPlan extends TestCase {
 
       // serialize again
       baos.reset();
-      Utilities.serializePlan(mrwork2, baos, job);
+      SerializationUtilities.serializePlan(mrwork2, baos, job);
       baos.close();
 
       // verify that the two are equal

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
index 028cdd1..bb6a4e1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
@@ -26,16 +26,8 @@ import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
-
-import com.google.common.collect.Sets;
-import com.google.common.io.Files;
-import junit.framework.Assert;
-import junit.framework.TestCase;
 
 import org.apache.commons.io.FileUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -46,6 +38,14 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.JobConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Sets;
+import com.google.common.io.Files;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
 
 public class TestUtilities extends TestCase {
   public static final Logger LOG = LoggerFactory.getLogger(TestUtilities.class);
@@ -85,8 +85,8 @@ public class TestUtilities extends TestCase {
     children.add(constant);
     ExprNodeGenericFuncDesc desc = new ExprNodeGenericFuncDesc(TypeInfoFactory.timestampTypeInfo,
         new GenericUDFFromUtcTimestamp(), children);
-    assertEquals(desc.getExprString(), Utilities.deserializeExpression(
-        Utilities.serializeExpression(desc)).getExprString());
+    assertEquals(desc.getExprString(), SerializationUtilities.deserializeExpression(
+        SerializationUtilities.serializeExpression(desc)).getExprString());
   }
 
   public void testgetDbTableName() throws HiveException{

http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index 9f616ab..1ff7eb5 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -17,7 +17,13 @@
  */
 package org.apache.hadoop.hive.ql.io.orc;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -51,6 +57,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -67,7 +74,6 @@ import org.apache.hadoop.hive.ql.io.HiveInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.InputFormatChecker;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.SplitStrategy;
-import org.apache.hadoop.hive.ql.io.orc.TestOrcRawRecordMerger.MyRow;
 import org.apache.hadoop.hive.ql.io.sarg.ConvertAstToSearchArg;
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
@@ -1609,7 +1615,7 @@ public class TestInputOutputFormat {
     Path mapXml = new Path(workDir, "map.xml");
     localFs.delete(mapXml, true);
     FSDataOutputStream planStream = localFs.create(mapXml);
-    Utilities.serializePlan(mapWork, planStream, conf);
+    SerializationUtilities.serializePlan(mapWork, planStream, conf);
     planStream.close();
     return conf;
   }

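The TestInputOutputFormat change above writes the MapWork plan to a local map.xml file through the renamed serializePlan call. A minimal sketch of that write path, assuming the plan object, work directory, and JobConf are supplied by the caller:

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.mapred.JobConf;

public class PlanFileWriter {
  // Write the map-side plan to <workDir>/map.xml on the local filesystem,
  // overwriting any stale copy, as the test setup above does.
  static Path writeMapPlan(MapWork mapWork, Path workDir, JobConf conf) throws Exception {
    FileSystem localFs = FileSystem.getLocal(conf);
    Path mapXml = new Path(workDir, "map.xml");
    localFs.delete(mapXml, true);

    FSDataOutputStream planStream = localFs.create(mapXml);
    try {
      SerializationUtilities.serializePlan(mapWork, planStream, conf);
    } finally {
      planStream.close();
    }
    return mapXml;
  }
}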
http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSplitElimination.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSplitElimination.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSplitElimination.java
index 3560c43..7a93b54 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSplitElimination.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSplitElimination.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -121,7 +121,7 @@ public class TestOrcSplitElimination {
     childExpr.add(col);
     childExpr.add(con);
     ExprNodeGenericFuncDesc en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    String sargStr = Utilities.serializeExpression(en);
+    String sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     InputSplit[] splits = in.getSplits(conf, 1);
     assertEquals(5, splits.length);
@@ -129,7 +129,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(1);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     assertEquals(0, splits.length);
@@ -137,7 +137,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(2);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     assertEquals(1, splits.length);
@@ -145,7 +145,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(5);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     assertEquals(2, splits.length);
@@ -153,7 +153,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(13);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     assertEquals(3, splits.length);
@@ -161,7 +161,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(29);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     assertEquals(4, splits.length);
@@ -169,7 +169,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(70);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     assertEquals(5, splits.length);
@@ -199,7 +199,7 @@ public class TestOrcSplitElimination {
     childExpr.add(col);
     childExpr.add(con);
     ExprNodeGenericFuncDesc en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    String sargStr = Utilities.serializeExpression(en);
+    String sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     InputSplit[] splits = in.getSplits(conf, 1);
     assertEquals(2, splits.length);
@@ -207,7 +207,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(0);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     // no stripes satisfies the condition
@@ -216,7 +216,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(2);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     // only first stripe will satisfy condition and hence single split
@@ -225,7 +225,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(5);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     // first stripe will satisfy the predicate and will be a single split, last stripe will be a
@@ -235,7 +235,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(13);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     // first 2 stripes will satisfy the predicate and merged to single split, last stripe will be a
@@ -245,7 +245,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(29);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     // first 3 stripes will satisfy the predicate and merged to single split, last stripe will be a
@@ -255,7 +255,7 @@ public class TestOrcSplitElimination {
     con = new ExprNodeConstantDesc(70);
     childExpr.set(1, con);
     en = new ExprNodeGenericFuncDesc(inspector, udf, childExpr);
-    sargStr = Utilities.serializeExpression(en);
+    sargStr = SerializationUtilities.serializeExpression(en);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     // first 2 stripes will satisfy the predicate and merged to single split, last two stripe will
@@ -304,7 +304,7 @@ public class TestOrcSplitElimination {
     childExpr2.add(en1);
     ExprNodeGenericFuncDesc en2 = new ExprNodeGenericFuncDesc(inspector, udf2, childExpr2);
 
-    String sargStr = Utilities.serializeExpression(en2);
+    String sargStr = SerializationUtilities.serializeExpression(en2);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     InputSplit[] splits = in.getSplits(conf, 1);
     assertEquals(2, splits.length);
@@ -321,7 +321,7 @@ public class TestOrcSplitElimination {
     childExpr2.set(1, en1);
     en2 = new ExprNodeGenericFuncDesc(inspector, udf2, childExpr2);
 
-    sargStr = Utilities.serializeExpression(en2);
+    sargStr = SerializationUtilities.serializeExpression(en2);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     // no stripe will satisfy the predicate
@@ -339,7 +339,7 @@ public class TestOrcSplitElimination {
     childExpr2.set(1, en1);
     en2 = new ExprNodeGenericFuncDesc(inspector, udf2, childExpr2);
 
-    sargStr = Utilities.serializeExpression(en2);
+    sargStr = SerializationUtilities.serializeExpression(en2);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     // only first stripe will satisfy condition and hence single split
@@ -358,7 +358,7 @@ public class TestOrcSplitElimination {
     childExpr2.set(1, en1);
     en2 = new ExprNodeGenericFuncDesc(inspector, udf2, childExpr2);
 
-    sargStr = Utilities.serializeExpression(en2);
+    sargStr = SerializationUtilities.serializeExpression(en2);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     // first two stripes will satisfy condition and hence single split
@@ -378,7 +378,7 @@ public class TestOrcSplitElimination {
     childExpr2.set(1, en1);
     en2 = new ExprNodeGenericFuncDesc(inspector, udf2, childExpr2);
 
-    sargStr = Utilities.serializeExpression(en2);
+    sargStr = SerializationUtilities.serializeExpression(en2);
     conf.set("hive.io.filter.expr.serialized", sargStr);
     splits = in.getSplits(conf, 1);
     // only second stripes will satisfy condition and hence single split

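Each case in TestOrcSplitElimination builds a comparison predicate, serializes it, and hands it to getSplits through the hive.io.filter.expr.serialized key. A minimal sketch of one such hand-off; the column name, table alias, GenericUDFOPGreaterThan comparison, and the ExprNodeColumnDesc constructor arguments are illustrative assumptions, and the JobConf is assumed to already point at the ORC input data:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;

public class SplitEliminationSketch {
  // Build an "id > threshold" predicate, serialize it, and let getSplits use it
  // to eliminate stripes; conf must already name the ORC input directory.
  static InputSplit[] splitsWithPredicate(JobConf conf, int threshold) throws Exception {
    List<ExprNodeDesc> childExpr = new ArrayList<>();
    childExpr.add(new ExprNodeColumnDesc(TypeInfoFactory.longTypeInfo, "id", "t", false));
    childExpr.add(new ExprNodeConstantDesc(threshold));

    ExprNodeGenericFuncDesc en = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.booleanTypeInfo, new GenericUDFOPGreaterThan(), childExpr);

    conf.set("hive.io.filter.expr.serialized", SerializationUtilities.serializeExpression(en));
    return new OrcInputFormat().getSplits(conf, 1);
  }
}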
http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
index 7204521..bf363f3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
@@ -18,12 +18,19 @@
 
 package org.apache.hadoop.hive.ql.io.parquet;
 
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.io.parquet.read.ParquetRecordReaderWrapper;
 import org.apache.hadoop.hive.ql.io.parquet.serde.ArrayWritableObjectInspector;
-import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -34,16 +41,14 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.JobConf;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
 import org.apache.parquet.io.api.RecordConsumer;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.MessageTypeParser;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
+import com.google.common.collect.Lists;
 
 public class TestParquetRowGroupFilter extends AbstractTestParquetDirect {
 
@@ -96,7 +101,7 @@ public class TestParquetRowGroupFilter extends AbstractTestParquetDirect {
     children.add(columnDesc);
     children.add(constantDesc);
     ExprNodeGenericFuncDesc genericFuncDesc = new ExprNodeGenericFuncDesc(inspector, udf, children);
-    String searchArgumentStr = Utilities.serializeExpression(genericFuncDesc);
+    String searchArgumentStr = SerializationUtilities.serializeExpression(genericFuncDesc);
     conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, searchArgumentStr);
 
     ParquetRecordReaderWrapper recordReader = (ParquetRecordReaderWrapper)
@@ -109,7 +114,7 @@ public class TestParquetRowGroupFilter extends AbstractTestParquetDirect {
     constantDesc = new ExprNodeConstantDesc(100);
     children.set(1, constantDesc);
     genericFuncDesc = new ExprNodeGenericFuncDesc(inspector, udf, children);
-    searchArgumentStr = Utilities.serializeExpression(genericFuncDesc);
+    searchArgumentStr = SerializationUtilities.serializeExpression(genericFuncDesc);
     conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, searchArgumentStr);
 
     recordReader = (ParquetRecordReaderWrapper)

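The Parquet test performs the same serialize-and-push step, but stores the result under TableScanDesc.FILTER_EXPR_CONF_STR rather than the ORC-specific key. A minimal sketch of that hand-off, assuming the predicate has already been built:

import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.mapred.JobConf;

public class ParquetFilterPushdown {
  // Serialize the predicate and store it under the generic table-scan filter key
  // so the Parquet record reader can pick it up.
  static void pushFilter(JobConf conf, ExprNodeGenericFuncDesc predicate) {
    String searchArgumentStr = SerializationUtilities.serializeExpression(predicate);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, searchArgumentStr);
  }
}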
http://git-wip-us.apache.org/repos/asf/hive/blob/2bb5e63c/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
index e72789d..a0fa700 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
@@ -22,23 +22,22 @@ import static junit.framework.Assert.assertEquals;
 import static junit.framework.Assert.assertNull;
 import static junit.framework.Assert.assertTrue;
 
-import com.google.common.collect.Sets;
+import java.beans.XMLDecoder;
+import java.io.ByteArrayInputStream;
+import java.io.UnsupportedEncodingException;
+import java.util.List;
+import java.util.Set;
 
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.io.parquet.read.ParquetFilterPredicateConverter;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument.TruthValue;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.parquet.filter2.predicate.FilterPredicate;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.MessageTypeParser;
 import org.junit.Test;
 
-import java.beans.XMLDecoder;
-import java.io.ByteArrayInputStream;
-import java.io.UnsupportedEncodingException;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.parquet.filter2.predicate.FilterPredicate;
+import com.google.common.collect.Sets;
 
 /**
  * These tests cover the conversion from Hive's AST to SearchArguments.
@@ -2713,7 +2712,7 @@ public class TestConvertAstToSearchArg {
           "AAABgj0BRVFVQcwBBW9yZy5hcGFjaGUuaGFkb29wLmlvLkJvb2xlYW5Xcml0YWJs5Q" +
           "EAAAECAQFib29sZWHu";
     SearchArgument sarg =
-        new ConvertAstToSearchArg(Utilities.deserializeExpression(serialAst))
+        new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst))
             .buildSearchArgument();
     assertEquals("leaf-0", sarg.getExpression().toString());
     assertEquals(1, sarg.getLeaves().size());
@@ -2732,7 +2731,7 @@ public class TestConvertAstToSearchArg {
             "Y2hlLmhhZG9vcC5oaXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUH" +
             "MAQVvcmcuYXBhY2hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABAgEBYm9vbGVh7g==";
     SearchArgument sarg =
-        new ConvertAstToSearchArg(Utilities.deserializeExpression(serialAst))
+        new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst))
             .buildSearchArgument();
     assertEquals("leaf-0", sarg.getExpression().toString());
     assertEquals(1, sarg.getLeaves().size());
@@ -2752,7 +2751,7 @@ public class TestConvertAstToSearchArg {
             "oaXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUHMAQZvcmcuYXBhY2" +
             "hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABBAEBYm9vbGVh7g==";
     SearchArgument sarg =
-        new ConvertAstToSearchArg(Utilities.deserializeExpression(serialAst))
+        new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst))
             .buildSearchArgument();
     assertEquals("leaf-0", sarg.getExpression().toString());
     assertEquals(1, sarg.getLeaves().size());
@@ -2772,7 +2771,7 @@ public class TestConvertAstToSearchArg {
             "vb3AuaGl2ZS5xbC51ZGYuZ2VuZXJpYy5HZW5lcmljVURGT1BFcXVh7AEAAAGCPQFFUVVBzAEGb3JnLm" +
             "FwYWNoZS5oYWRvb3AuaW8uQm9vbGVhbldyaXRhYmzlAQAAAQQBAWJvb2xlYe4=";
     SearchArgument sarg =
-        new ConvertAstToSearchArg(Utilities.deserializeExpression(serialAst))
+        new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst))
             .buildSearchArgument();
     assertEquals("leaf-0", sarg.getExpression().toString());
     assertEquals(1, sarg.getLeaves().size());
@@ -2792,7 +2791,7 @@ public class TestConvertAstToSearchArg {
             "lLmhhZG9vcC5oaXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUHMAQ" +
             "ZvcmcuYXBhY2hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABBAEBYm9vbGVh7g==";
     SearchArgument sarg =
-        new ConvertAstToSearchArg(Utilities.deserializeExpression(serialAst))
+        new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst))
             .buildSearchArgument();
     assertEquals("leaf-0", sarg.getExpression().toString());
     assertEquals(1, sarg.getLeaves().size());
@@ -2811,7 +2810,7 @@ public class TestConvertAstToSearchArg {
             "dmUucWwudWRmLmdlbmVyaWMuR2VuZXJpY1VERk9QRXF1YewBAAABgj0BRVFVQcwBBW9yZy5hcGFjaGU" +
             "uaGFkb29wLmlvLkJvb2xlYW5Xcml0YWJs5QEAAAECAQFib29sZWHu";
     SearchArgument sarg =
-        new ConvertAstToSearchArg(Utilities.deserializeExpression(serialAst))
+        new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst))
             .buildSearchArgument();
     assertEquals("leaf-0", sarg.getExpression().toString());
     assertEquals(1, sarg.getLeaves().size());
@@ -2832,7 +2831,7 @@ public class TestConvertAstToSearchArg {
             "hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABAwkBAgEBYrIAAAgBAwkBB29yZy5hcGFjaGUua" +
             "GFkb29wLmhpdmUucWwudWRmLmdlbmVyaWMuR2VuZXJpY1VERk9QQW7kAQEGAQAAAQMJ";
     SearchArgument sarg =
-        new ConvertAstToSearchArg(Utilities.deserializeExpression(serialAst))
+        new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst))
             .buildSearchArgument();
     assertEquals("(and leaf-0 leaf-1)", sarg.getExpression().toString());
     assertEquals(2, sarg.getLeaves().size());
@@ -2854,7 +2853,7 @@ public class TestConvertAstToSearchArg {
             "aXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUHMAQVvcmcuYXBhY2h" +
             "lLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABAgEBYm9vbGVh7g==";
     SearchArgument sarg =
-        new ConvertAstToSearchArg(Utilities.deserializeExpression(serialAst))
+        new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst))
             .buildSearchArgument();
     assertEquals("leaf-0", sarg.getExpression().toString());
     assertEquals(1, sarg.getLeaves().size());
@@ -2873,7 +2872,7 @@ public class TestConvertAstToSearchArg {
             "b29wLmhpdmUucWwudWRmLmdlbmVyaWMuR2VuZXJpY1VERk9QRXF1YewBAAABgj0BRVFVQcwBBW9yZy5" +
             "hcGFjaGUuaGFkb29wLmlvLkJvb2xlYW5Xcml0YWJs5QEAAAECAQFib29sZWHu";
     SearchArgument sarg =
-        new ConvertAstToSearchArg(Utilities.deserializeExpression(serialAst))
+        new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst))
             .buildSearchArgument();
     assertEquals("leaf-0", sarg.getExpression().toString());
     assertEquals(1, sarg.getLeaves().size());


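The TestConvertAstToSearchArg changes deserialize pre-serialized expression strings and convert them into SearchArguments. A minimal sketch of that path; the serialAst parameter stands in for one of the base64 strings in the test, none of which is reproduced here:

import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
import org.apache.hadoop.hive.ql.io.sarg.ConvertAstToSearchArg;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;

public class SearchArgFromSerializedAst {
  // Kryo-deserialize the expression tree, then translate it into a SearchArgument,
  // mirroring the pattern repeated throughout the test above.
  static SearchArgument toSearchArgument(String serialAst) {
    return new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst))
        .buildSearchArgument();
  }
}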