hive-commits mailing list archives

From ga...@apache.org
Subject [24/50] [abbrv] hive git commit: HIVE-10748 Replace StringBuffer with StringBuilder where possible (Alexander Pivovarov, reviewed by Ashutosh Chauhan, Sergio Peña and Swarnim Kulkarni)
Date Fri, 05 Jun 2015 23:16:37 GMT
HIVE-10748 Replace StringBuffer with StringBuilder where possible (Alexander Pivovarov, reviewed by Ashutosh Chauhan, Sergio Peña and Swarnim Kulkarni)
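
For context on the swap: the two classes expose the same append API, but StringBuffer synchronizes every call while StringBuilder does not, so StringBuilder is the cheaper choice wherever the buffer never leaves a single thread, which is the case at all of the call sites touched below. A minimal sketch of the pattern (the method below is illustrative, not taken from the patch):

    // StringBuilder is an unsynchronized drop-in for StringBuffer when the
    // buffer is confined to one thread, as in the call sites in this commit.
    static String joinNames(java.util.List<String> names) {
      StringBuilder sb = new StringBuilder();
      boolean first = true;
      for (String name : names) {
        if (!first) {
          sb.append(" <- ");
        }
        sb.append(name);
        first = false;
      }
      return sb.toString();
    }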


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8aaf3bd5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8aaf3bd5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8aaf3bd5

Branch: refs/heads/hbase-metastore
Commit: 8aaf3bd5b7702b4f92503dbf7ffcccad82633d6d
Parents: 715871a
Author: Alexander Pivovarov <apivovarov@gmail.com>
Authored: Wed May 20 19:05:42 2015 -0700
Committer: Alexander Pivovarov <apivovarov@gmail.com>
Committed: Tue Jun 2 11:35:51 2015 -0700

----------------------------------------------------------------------
 .../common/jsonexplain/tez/TezJsonParser.java   |  2 +-
 .../hive/common/type/TestHiveBaseChar.java      |  2 +-
 .../hive/common/type/TestHiveVarchar.java       | 22 ++++++++++----------
 .../hive/hcatalog/common/HCatException.java     |  2 +-
 .../hive/hcatalog/data/TestJsonSerDe.java       |  2 +-
 .../streaming/DelimitedInputWriter.java         |  2 +-
 .../hive/hcatalog/streaming/HiveEndPoint.java   |  2 +-
 .../hive/hcatalog/streaming/TestStreaming.java  |  2 +-
 .../apache/hive/jdbc/HivePreparedStatement.java |  2 +-
 .../metastore/txn/CompactionTxnHandler.java     |  9 ++++----
 .../apache/hadoop/hive/ql/exec/ExplainTask.java |  4 ++--
 .../apache/hadoop/hive/ql/exec/Utilities.java   |  9 ++++----
 .../ql/exec/spark/status/SparkJobMonitor.java   |  2 +-
 .../hadoop/hive/ql/exec/tez/TezJobMonitor.java  |  4 ++--
 .../ql/exec/vector/VectorizedBatchUtil.java     |  2 +-
 .../hive/ql/optimizer/GenMRTableScan1.java      |  2 +-
 .../hive/ql/optimizer/GenMapRedUtils.java       |  5 ++++-
 .../ql/optimizer/ReduceSinkMapJoinProc.java     |  2 +-
 .../ql/optimizer/calcite/RelOptHiveTable.java   |  2 +-
 .../spark/SparkReduceSinkMapJoinProc.java       |  2 +-
 .../hive/ql/parse/ProcessAnalyzeTable.java      |  4 ++--
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  2 +-
 .../parse/spark/SparkProcessAnalyzeTable.java   |  2 +-
 .../apache/hadoop/hive/ql/plan/FilterDesc.java  |  4 ++--
 .../apache/hadoop/hive/ql/plan/PlanUtils.java   | 19 ++++++++++-------
 .../hadoop/hive/ql/plan/TableScanDesc.java      |  2 +-
 .../hive/ql/txn/compactor/CompactorMR.java      |  9 ++++----
 .../hadoop/hive/ql/txn/compactor/Initiator.java |  2 +-
 .../hadoop/hive/ql/txn/compactor/Worker.java    |  3 +--
 .../objectinspector/ObjectInspectorUtils.java   |  2 +-
 .../hive/serde2/lazy/TestLazySimpleSerDe.java   |  9 +-------
 .../apache/hive/service/auth/HttpAuthUtils.java |  2 +-
 32 files changed, 72 insertions(+), 69 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java
index 6d6bbc2..43ddff3 100644
--- a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java
+++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java
@@ -125,7 +125,7 @@ public class TezJsonParser implements JsonParser {
      if (candidate.tezStageDependency != null && candidate.tezStageDependency.size() > 0) {
         outputStream.println("Vertex dependency in root stage");
        for (Entry<Vertex, List<Connection>> entry : candidate.tezStageDependency.entrySet()) {
-          StringBuffer sb = new StringBuffer();
+          StringBuilder sb = new StringBuilder();
           sb.append(entry.getKey().name);
           sb.append(" <- ");
           boolean printcomma = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java
index 012c28b..2848465 100644
--- a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java
@@ -55,7 +55,7 @@ public class TestHiveBaseChar extends TestCase {
   }
 
   public static String createRandomSupplementaryCharString(int len) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (int idx = 0; idx < len; ++idx) {
       sb.appendCodePoint(getRandomCodePoint(' '));
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
index 309d042..44e2338 100644
--- a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
@@ -18,19 +18,19 @@
 package org.apache.hadoop.hive.common.type;
 
 
-import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.LogUtils;
-import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
 import java.util.Random;
 
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
-import static org.junit.Assert.*;
+import org.junit.Rule;
+import org.junit.Test;
+
+import com.google.code.tempusfugit.concurrency.ConcurrentRule;
+import com.google.code.tempusfugit.concurrency.RepeatingRule;
+import com.google.code.tempusfugit.concurrency.annotations.Concurrent;
+import com.google.code.tempusfugit.concurrency.annotations.Repeating;
 
 public class TestHiveVarchar {
   @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
@@ -80,7 +80,7 @@ public class TestHiveVarchar {
     // Try with supplementary characters
     for (int idx1 = 0; idx1 < lengths.length; ++idx1) {
       // Create random test string
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       int curLen = lengths[idx1];
       for (int idx2 = 0; idx2 < curLen; ++idx2) {
         sb.appendCodePoint(getRandomCodePoint(' '));

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatException.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatException.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatException.java
index 265d08d..3749da6 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatException.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatException.java
@@ -104,7 +104,7 @@ public class HCatException extends IOException {
  public static String buildErrorMessage(ErrorType type, String extraMessage, Throwable cause) {
 
     //Initial message is just the error type message
-    StringBuffer message = new StringBuffer(HCatException.class.getName());
+    StringBuilder message = new StringBuilder(HCatException.class.getName());
     message.append(" : " + type.getErrorCode());
     message.append(" : " + type.getErrorMessage());
 

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
index 2947c43..618f39b 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
@@ -227,7 +227,7 @@ public class TestJsonSerDe extends TestCase {
       return "";
     }
 
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     int numStrings = columnNames.split(",").length;
     sb.append("_col0");
     for (int i = 1; i < numStrings; i++) {

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
index eae91cb..6dc69f0 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
@@ -186,7 +186,7 @@ public class DelimitedInputWriter extends AbstractRecordWriter {
   // handles nulls in items[]
   // TODO: perhaps can be made more efficient by creating a byte[] directly
   private static byte[] join(String[] items, char separator) {
-    StringBuffer buff = new StringBuffer(100);
+    StringBuilder buff = new StringBuilder(100);
     if(items.length == 0)
       return "".getBytes();
     int i=0;

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
index 3c25486..7e99008 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
@@ -418,7 +418,7 @@ public class HiveEndPoint {
         throw new IllegalArgumentException("Partition values:" + partVals +
                 ", does not match the partition Keys in table :" + partKeys );
       }
-      StringBuffer buff = new StringBuffer(partKeys.size()*20);
+      StringBuilder buff = new StringBuilder(partKeys.size()*20);
       buff.append(" ( ");
       int i=0;
       for (FieldSchema schema : partKeys) {

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
index 329e5da..c0af533 100644
--- a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
+++ b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
@@ -794,7 +794,7 @@ public class TestStreaming {
       throw new IllegalArgumentException("Partition values:" + partVals
               + ", does not match the partition Keys in table :" + partKeys );
     }
-    StringBuffer buff = new StringBuffer(partKeys.size()*20);
+    StringBuilder buff = new StringBuilder(partKeys.size()*20);
     buff.append(" ( ");
     int i=0;
     for (FieldSchema schema : partKeys) {

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java b/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
index 8a0671f..2625155 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
@@ -132,7 +132,7 @@ public class HivePreparedStatement extends HiveStatement implements PreparedStat
       return sql;
     }
 
-    StringBuffer newSql = new StringBuffer(sql);
+    StringBuilder newSql = new StringBuilder(sql);
 
     int paramLoc = 1;
     while (getCharIndexFromSqlByParamLocation(sql, '?', paramLoc) > 0) {

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
index 52147bc..26e72be 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
@@ -340,7 +340,7 @@ public class CompactionTxnHandler extends TxnHandler {
         if (txnids.size() > 0) {
 
           // Remove entries from txn_components, as there may be aborted txn components
-          StringBuffer buf = new StringBuffer();
+          StringBuilder buf = new StringBuilder();
           buf.append("delete from TXN_COMPONENTS where tc_txnid in (");
           boolean first = true;
           for (long id : txnids) {
@@ -404,7 +404,7 @@ public class CompactionTxnHandler extends TxnHandler {
         Set<Long> txnids = new HashSet<Long>();
         while (rs.next()) txnids.add(rs.getLong(1));
         if (txnids.size() > 0) {
-          StringBuffer buf = new StringBuffer("delete from TXNS where txn_id in (");
+          StringBuilder buf = new StringBuilder("delete from TXNS where txn_id in (");
           boolean first = true;
           for (long tid : txnids) {
             if (first) first = false;
@@ -412,8 +412,9 @@ public class CompactionTxnHandler extends TxnHandler {
             buf.append(tid);
           }
           buf.append(")");
-          LOG.debug("Going to execute update <" + buf.toString() + ">");
-          int rc = stmt.executeUpdate(buf.toString());
+          String bufStr = buf.toString();
+          LOG.debug("Going to execute update <" + bufStr + ">");
+          int rc = stmt.executeUpdate(bufStr);
           LOG.debug("Removed " + rc + " records from txns");
           LOG.debug("Going to commit");
           dbConn.commit();
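
The CompactionTxnHandler hunk above also hoists buf.toString() into a local so the SQL text is rendered once and shared by the debug log and executeUpdate. A minimal sketch of how the IN-list is assembled (the helper below is illustrative, not from the patch):

    // Render the delete statement once; the caller can reuse the same string
    // for LOG.debug and stmt.executeUpdate instead of calling toString() twice.
    static String buildDeleteTxns(java.util.Set<Long> txnids) {
      StringBuilder buf = new StringBuilder("delete from TXNS where txn_id in (");
      boolean first = true;
      for (long tid : txnids) {
        if (first) {
          first = false;
        } else {
          buf.append(", ");
        }
        buf.append(tid);
      }
      return buf.append(")").toString();
    }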

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index ada79bd..35c4cfc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -854,7 +854,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
       }
     }
     else {
-      StringBuffer s = new StringBuffer();
+      StringBuilder s = new StringBuilder();
       first = true;
       for (Task<?> parent : task.getParentTasks()) {
         if (!first) {
@@ -886,7 +886,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
 
     if (task instanceof ConditionalTask
         && ((ConditionalTask) task).getListTasks() != null) {
-      StringBuffer s = new StringBuffer();
+      StringBuilder s = new StringBuilder();
       first = true;
       for (Task<?> con : ((ConditionalTask) task).getListTasks()) {
         if (!first) {

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 5d58839..5f92d11 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -1811,11 +1811,12 @@ public final class Utilities {
   private static String adjustBucketNumLen(String bucketNum, String taskId) {
     int bucketNumLen = bucketNum.length();
     int taskIdLen = taskId.length();
-    StringBuffer s = new StringBuffer();
+    StringBuilder s = new StringBuilder();
     for (int i = 0; i < taskIdLen - bucketNumLen; i++) {
       s.append("0");
     }
-    return s.toString() + bucketNum;
+    s.append(bucketNum);
+    return s.toString();
   }
 
   /**
@@ -1836,7 +1837,7 @@ public final class Utilities {
       return filename.replaceAll(oldTaskId, newTaskId);
     }
 
-    StringBuffer snew = new StringBuffer();
+    StringBuilder snew = new StringBuilder();
     for (int idx = 0; idx < spl.length - 1; idx++) {
       if (idx > 0) {
         snew.append(oldTaskId);
@@ -3169,7 +3170,7 @@ public final class Utilities {
    * @return a string with escaped '_' and '%'.
    */
   public static String escapeSqlLike(String key) {
-    StringBuffer sb = new StringBuffer(key.length());
+    StringBuilder sb = new StringBuilder(key.length());
     for (char c: key.toCharArray()) {
       switch(c) {
       case '_':

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
index a9d2dbf..3fceeb0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java
@@ -62,7 +62,7 @@ abstract class SparkJobMonitor {
       return;
     }
 
-    StringBuffer reportBuffer = new StringBuffer();
+    StringBuilder reportBuffer = new StringBuilder();
     SimpleDateFormat dt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
     String currentDate = dt.format(new Date());
     reportBuffer.append(currentDate + "\t");

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
index 78caba8..aa84850 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
@@ -596,7 +596,7 @@ public class TezJobMonitor {
 
   private void printStatusInPlace(Map<String, Progress> progressMap, long startTime,
       boolean vextexStatusFromAM, DAGClient dagClient) {
-    StringBuffer reportBuffer = new StringBuffer();
+    StringBuilder reportBuffer = new StringBuilder();
     int sumComplete = 0;
     int sumTotal = 0;
 
@@ -781,7 +781,7 @@ public class TezJobMonitor {
   }
 
   private String getReport(Map<String, Progress> progressMap) {
-    StringBuffer reportBuffer = new StringBuffer();
+    StringBuilder reportBuffer = new StringBuilder();
 
     SortedSet<String> keys = new TreeSet<String>(progressMap.keySet());
     for (String s: keys) {

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index 4a16b4c..99cb620 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -643,7 +643,7 @@ public class VectorizedBatchUtil {
   }
 
  public static void debugDisplayOneRow(VectorizedRowBatch batch, int index, String prefix) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append(prefix + " row " + index + " ");
     for (int column = 0; column < batch.cols.length; column++) {
       ColumnVector colVector = batch.cols[column];

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
index c5f03d9..eed1d7c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
@@ -187,7 +187,7 @@ public class GenMRTableScan1 implements NodeProcessor {
       ParseContext parseCtx, Task<? extends Serializable> currTask,
       StatsWork statsWork, Task<StatsWork> statsTask) throws SemanticException {
     String aggregationKey = op.getConf().getStatsAggPrefix();
-    StringBuffer aggregationKeyBuffer = new StringBuffer(aggregationKey);
+    StringBuilder aggregationKeyBuffer = new StringBuilder(aggregationKey);
     List<Path> inputPaths = GenMapRedUtils.getInputPathsForPartialScan(op, aggregationKeyBuffer);
     aggregationKey = aggregationKeyBuffer.toString();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
index acd9bf5..29854d8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.optimizer;
 
+import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -1814,7 +1815,7 @@ public final class GenMapRedUtils {
   }
 
   public static List<Path> getInputPathsForPartialScan(TableScanOperator tableScanOp,
-          StringBuffer aggregationKey) throws SemanticException {
+          Appendable aggregationKey) throws SemanticException {
     List<Path> inputPaths = new ArrayList<Path>();
     switch (tableScanOp.getConf().getTableMetadata().getTableSpec().specType) {
       case TABLE_ONLY:
@@ -1829,6 +1830,8 @@ public final class GenMapRedUtils {
         } catch (MetaException e) {
           throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_AGGKEY.getMsg(
               part.getDataLocation().toString() + e.getMessage()));
+        } catch (IOException e) {
+          throw new RuntimeException(e);
         }
         inputPaths.add(part.getDataLocation());
         break;

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java
index f7e1dbc..bca91dd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java
@@ -283,7 +283,7 @@ public class ReduceSinkMapJoinProc implements NodeProcessor {
 
     Map<Byte, List<ExprNodeDesc>> keyExprMap = mapJoinOp.getConf().getKeys();
     List<ExprNodeDesc> keyCols = keyExprMap.get(Byte.valueOf((byte) 0));
-    StringBuffer keyOrder = new StringBuffer();
+    StringBuilder keyOrder = new StringBuilder();
     for (ExprNodeDesc k: keyCols) {
       keyOrder.append("+");
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
index 43882e7..6c0bd25 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
@@ -221,7 +221,7 @@ public class RelOptHiveTable extends RelOptAbstractTable {
   }
 
   private String getColNamesForLogging(Set<String> colLst) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     boolean firstEntry = true;
     for (String colName : colLst) {
       if (firstEntry) {

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java
index e477f04..fd42959 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java
@@ -207,7 +207,7 @@ public class SparkReduceSinkMapJoinProc implements NodeProcessor {
 
     Map<Byte, List<ExprNodeDesc>> keyExprMap = mapJoinOp.getConf().getKeys();
     List<ExprNodeDesc> keyCols = keyExprMap.get(Byte.valueOf((byte) 0));
-    StringBuffer keyOrder = new StringBuffer();
+    StringBuilder keyOrder = new StringBuilder();
     for (int i = 0; i < keyCols.size(); i++) {
       keyOrder.append("+");
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
index 7108a47..f8d6905 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
@@ -168,12 +168,12 @@ public class ProcessAnalyzeTable implements NodeProcessor {
    *
    * It is composed of PartialScanTask followed by StatsTask.
    */
-  private void handlePartialScanCommand(TableScanOperator tableScan, ParseContext parseContext,
+  private void handlePartialScanCommand(TableScanOperator tableScan, ParseContext parseContext,
       StatsWork statsWork, GenTezProcContext context, Task<StatsWork> statsTask)
               throws SemanticException {
 
     String aggregationKey = tableScan.getConf().getStatsAggPrefix();
-    StringBuffer aggregationKeyBuffer = new StringBuffer(aggregationKey);
+    StringBuilder aggregationKeyBuffer = new StringBuilder(aggregationKey);
     List<Path> inputPaths = GenMapRedUtils.getInputPathsForPartialScan(tableScan, aggregationKeyBuffer);
     aggregationKey = aggregationKeyBuffer.toString();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index d609732..428ba38 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -955,7 +955,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
    *
    */
   private ASTNode findCTEFromName(QB qb, String cteName) {
-    StringBuffer qId = new StringBuffer();
+    StringBuilder qId = new StringBuilder();
     if (qb.getId() != null) {
       qId.append(qb.getId());
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
index 9e19733..66e148f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
@@ -168,7 +168,7 @@ public class SparkProcessAnalyzeTable implements NodeProcessor {
       StatsWork statsWork, GenSparkProcContext context, Task<StatsWork> statsTask)
               throws SemanticException {
     String aggregationKey = tableScan.getConf().getStatsAggPrefix();
-    StringBuffer aggregationKeyBuffer = new StringBuffer(aggregationKey);
+    StringBuilder aggregationKeyBuffer = new StringBuilder(aggregationKey);
     List<Path> inputPaths = GenMapRedUtils.getInputPathsForPartialScan(tableScan, aggregationKeyBuffer);
     aggregationKey = aggregationKeyBuffer.toString();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
index 8dff2fc..3a1a4af 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
@@ -104,11 +104,11 @@ public class FilterDesc extends AbstractOperatorDesc {
 
  @Explain(displayName = "predicate", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getPredicateString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     PlanUtils.addExprToStringBuffer(predicate, sb);
     return sb.toString();
   }
-    
+
   public org.apache.hadoop.hive.ql.plan.ExprNodeDesc getPredicate() {
     return predicate;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index cb0b680..76926e7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -413,7 +414,7 @@ public final class PlanUtils {
       // We basically use ReduceSinkOperators and set the transfer to
       // be broadcast (instead of partitioned). As a consequence we use
       // a different SerDe than in the MR mapjoin case.
-      StringBuffer order = new StringBuffer();
+      StringBuilder order = new StringBuilder();
       for (FieldSchema f: fieldSchemas) {
         order.append("+");
       }
@@ -930,7 +931,7 @@ public final class PlanUtils {
   }
 
  public static String getExprListString(Collection<?  extends ExprNodeDesc> exprs) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     boolean first = true;
     for (ExprNodeDesc expr: exprs) {
       if (!first) {
@@ -944,11 +945,15 @@ public final class PlanUtils {
     return sb.length() == 0 ? null : sb.toString();
   }
 
-  public static void addExprToStringBuffer(ExprNodeDesc expr, StringBuffer sb) {
-    sb.append(expr.getExprString());
-    sb.append(" (type: ");
-    sb.append(expr.getTypeString());
-    sb.append(")");
+  public static void addExprToStringBuffer(ExprNodeDesc expr, Appendable sb) {
+    try {
+      sb.append(expr.getExprString());
+      sb.append(" (type: ");
+      sb.append(expr.getTypeString());
+      sb.append(")");
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
   }
 
   public static void addInputsForView(ParseContext parseCtx) throws HiveException {
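
Beyond the straight swaps, the GenMapRedUtils and PlanUtils hunks above widen the helper parameter from StringBuffer to Appendable, which both StringBuilder and java.io.Writer implement; because Appendable.append declares IOException, the body wraps it in a RuntimeException. A minimal sketch of that shape (the helper name and arguments are illustrative, not from the patch):

    // Appendable.append(CharSequence) declares IOException, so even callers
    // that pass a StringBuilder (whose append never throws) need the wrapper.
    static void appendTyped(String exprString, String typeString, Appendable out) {
      try {
        out.append(exprString).append(" (type: ").append(typeString).append(")");
      } catch (java.io.IOException e) {
        throw new RuntimeException(e);
      }
    }

The same helper then serves a StringBuilder or a Writer without a second overload.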

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
index aa291b9..6282380 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
@@ -134,7 +134,7 @@ public class TableScanDesc extends AbstractOperatorDesc {
 
   @Explain(displayName = "filterExpr")
   public String getFilterExprString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     PlanUtils.addExprToStringBuffer(filterExpr, sb);
     return sb.toString();
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
index b8e18ea..c5f2d4d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
@@ -585,7 +585,7 @@ public class CompactorMR {
 
     @Override
     public String toString() {
-      StringBuffer buf = new StringBuffer();
+      StringBuilder buf = new StringBuilder();
       buf.append(size());
       buf.append(':');
       if (size() > 0) {
@@ -631,14 +631,15 @@ public class CompactorMR {
 
     @Override
     public String toString() {
-      StringBuffer buf = new StringBuffer();
+      StringBuilder buf = new StringBuilder();
       buf.append(size());
       buf.append(':');
       if (size() > 0) {
         for (Path p : this) {
-          buf.append(p.toString().length());
+          String pStr = p.toString();
+          buf.append(pStr.length());
           buf.append(':');
-          buf.append(p.toString());
+          buf.append(pStr);
         }
       }
       return buf.toString();

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java
index 847d751..32a9ef8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java
@@ -259,7 +259,7 @@ public class Initiator extends CompactorThread {
           HiveConf.ConfVars.HIVE_COMPACTOR_DELTA_PCT_THRESHOLD);
       boolean bigEnough =   (float)deltaSize/(float)baseSize > deltaPctThreshold;
       if (LOG.isDebugEnabled()) {
-        StringBuffer msg = new StringBuffer("delta size: ");
+        StringBuilder msg = new StringBuilder("delta size: ");
         msg.append(deltaSize);
         msg.append(" base size: ");
         msg.append(baseSize);

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
index f26225a..e164661 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.txn.CompactionInfo;
 import org.apache.hadoop.hive.metastore.txn.CompactionTxnHandler;
-import org.apache.hadoop.hive.metastore.txn.TxnHandler;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -137,7 +136,7 @@ public class Worker extends CompactorThread {
         final ValidTxnList txns =
             CompactionTxnHandler.createValidCompactTxnList(txnHandler.getOpenTxnsInfo());
         LOG.debug("ValidCompactTxnList: " + txns.writeToString());
-        final StringBuffer jobName = new StringBuffer(name);
+        final StringBuilder jobName = new StringBuilder(name);
         jobName.append("-compactor-");
         jobName.append(ci.getFullPartitionName());
 

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index f3f7d95..041d218 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -475,7 +475,7 @@ public final class ObjectInspectorUtils {
       return result.toString();
     }
     case UNION: {
-      StringBuffer result = new StringBuffer();
+      StringBuilder result = new StringBuilder();
       result.append(oi.getClass().getSimpleName() + "<");
       UnionObjectInspector uoi = (UnionObjectInspector)oi;
       List<ObjectInspector> ois = uoi.getObjectInspectors();

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java
index 19fe952..b11ce32 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hive.serde2.lazy;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
 
@@ -33,14 +32,8 @@ import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.SimpleMapEqualComparer;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
-import org.apache.hadoop.hive.serde2.objectinspector.TestSimpleMapEqualComparer.TextStringMapHolder;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -72,7 +65,7 @@ public class TestLazySimpleSerDe extends TestCase {
       // Data
       Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\tNULL\t");
       t.append(new byte[]{(byte)Integer.parseInt("10111111", 2)}, 0, 1);
-      StringBuffer sb = new StringBuffer("123\t456\t789\t1000\t5.3\thive and hadoop\t1\tNULL\t");
+      StringBuilder sb = new StringBuilder("123\t456\t789\t1000\t5.3\thive and hadoop\t1\tNULL\t");
      String s = sb.append(new String(Base64.encodeBase64(new byte[]{(byte)Integer.parseInt("10111111", 2)}))).toString();
       Object[] expectedFieldsData = {new ByteWritable((byte) 123),
           new ShortWritable((short) 456), new IntWritable(789),

http://git-wip-us.apache.org/repos/asf/hive/blob/8aaf3bd5/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
index 3ef5577..a58db9c 100644
--- a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
+++ b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
@@ -92,7 +92,7 @@ public final class HttpAuthUtils {
    * cu=<username>&rn=<randomNumber>&s=<cookieSignature>
    */
   public static String createCookieToken(String clientUserName) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append(COOKIE_CLIENT_USER_NAME).append(COOKIE_KEY_VALUE_SEPARATOR).append(clientUserName).
     append(COOKIE_ATTR_SEPARATOR);
     sb.append(COOKIE_CLIENT_RAND_NUMBER).append(COOKIE_KEY_VALUE_SEPARATOR).

