carbondata-commits mailing list archives

From jack...@apache.org
Subject carbondata git commit: [CARBONDATA-1396]Fix findbugs issues in carbondata-hive
Date Sat, 19 Aug 2017 18:27:21 GMT
Repository: carbondata
Updated Branches:
  refs/heads/master 9e871d941 -> 2d24e18b8


[CARBONDATA-1396]Fix findbugs issues in carbondata-hive

Fix findbugs issues in carbondata-hive

This closes #1270


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/2d24e18b
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/2d24e18b
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/2d24e18b

Branch: refs/heads/master
Commit: 2d24e18b8d4c7c0b8f38b05befa822f8967a5b9d
Parents: 9e871d9
Author: Raghunandan S <carbondatacontributions@gmail.com>
Authored: Sat Aug 19 16:31:56 2017 +0530
Committer: Jacky Li <jacky.likun@qq.com>
Committed: Sun Aug 20 02:27:05 2017 +0800

----------------------------------------------------------------------
 .../hive/CarbonDictionaryDecodeReadSupport.java |  8 ++++--
 .../carbondata/hive/CarbonHiveInputSplit.java   | 26 ++++++++++++++++++--
 .../carbondata/hive/CarbonHiveRecordReader.java | 13 ----------
 .../carbondata/hive/CarbonObjectInspector.java  |  2 +-
 .../hive/MapredCarbonInputFormat.java           |  1 -
 .../hive/server/HiveEmbeddedServer2.java        | 17 ++++++++++---
 pom.xml                                         |  1 +
 7 files changed, 45 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/2d24e18b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
index bc66d49..f08b92b 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
@@ -231,7 +231,7 @@ public class CarbonDictionaryDecodeReadSupport<T> implements CarbonReadSupport<T
       case SHORT:
         return new ShortWritable((Short) obj);
       case DATE:
-        return new DateWritable(new Date((Integer) obj));
+        return new DateWritable(new Date((long) obj));
       case TIMESTAMP:
         return new TimestampWritable(new Timestamp((long) obj));
       case STRING:
@@ -239,8 +239,9 @@ public class CarbonDictionaryDecodeReadSupport<T> implements CarbonReadSupport<T
       case DECIMAL:
         return new HiveDecimalWritable(
             HiveDecimal.create(new java.math.BigDecimal(obj.toString())));
+      default:
+        throw new IOException("unsupported data type:" + dataType);
     }
-    throw new IOException("Unknown primitive : " + dataType.getName());
   }
 
   /**
@@ -282,6 +283,9 @@ public class CarbonDictionaryDecodeReadSupport<T> implements CarbonReadSupport<T
       case DECIMAL:
         ((HiveDecimalWritable) writable)
             .set(HiveDecimal.create(new java.math.BigDecimal(obj.toString())));
+        break;
+      default:
+        throw new IOException("unsupported data type:" + dataType);
     }
   }
 

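The hunks above add an exhaustive default branch so an unsupported type fails immediately inside the switch instead of reaching code after it, which is the FindBugs finding being addressed. A minimal sketch of that pattern follows; the SimpleType enum and describe() method are hypothetical stand-ins, not the actual Carbon/Hive types.

import java.io.IOException;

// Minimal sketch of the switch-with-default pattern applied in
// CarbonDictionaryDecodeReadSupport; SimpleType and describe() are
// hypothetical stand-ins, not Carbon classes.
public class SwitchDefaultSketch {

  enum SimpleType { SHORT, DATE, TIMESTAMP, STRING, DECIMAL }

  static String describe(SimpleType type) throws IOException {
    switch (type) {
      case SHORT:
        return "short";
      case DATE:
        return "date";
      case TIMESTAMP:
        return "timestamp";
      case STRING:
        return "string";
      case DECIMAL:
        return "decimal";
      default:
        // Unknown types fail fast, mirroring the default branch added above.
        throw new IOException("unsupported data type:" + type);
    }
  }

  public static void main(String[] args) throws IOException {
    System.out.println(describe(SimpleType.DATE));
  }
}
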
http://git-wip-us.apache.org/repos/asf/carbondata/blob/2d24e18b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
index b922295..9171470 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
@@ -223,10 +223,10 @@ public class CarbonHiveInputSplit extends FileSplit
 
     double seg1 = Double.parseDouble(segmentId);
     double seg2 = Double.parseDouble(other.getSegmentId());
-    if (seg1 - seg2 < 0) {
+    if (Double.compare(seg1, seg2) < 0) {
       return -1;
     }
-    if (seg1 - seg2 > 0) {
+    if (Double.compare(seg1, seg2) > 0) {
       return 1;
     }
 
@@ -262,6 +262,28 @@ public class CarbonHiveInputSplit extends FileSplit
     return 0;
   }
 
+  @Override public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+
+    if (!(obj instanceof CarbonHiveInputSplit)) {
+      return false;
+    }
+
+    CarbonHiveInputSplit other = (CarbonHiveInputSplit) obj;
+    return 0 == this.compareTo(other);
+  }
+
+  @Override public int hashCode() {
+    int result = taskId.hashCode();
+    result = 31 * result + segmentId.hashCode();
+    result = 31 * result + bucketId.hashCode();
+    result = 31 * result + invalidSegments.hashCode();
+    result = 31 * result + numberOfBlocklets;
+    return result;
+  }
+
   @Override public String getBlockPath() {
     return getPath().getName();
   }

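The two hunks above address separate FindBugs findings: Double.compare replaces subtraction-based comparison of the segment ids (subtraction misorders NaN and can overflow in the general case), and equals() is now paired with a hashCode() built from the same fields so equal splits always hash identically. A minimal sketch of that equals/hashCode pairing follows; SplitKeySketch is a hypothetical class, not the real CarbonHiveInputSplit.

import java.util.Objects;

// Minimal sketch of the equals()/hashCode() pairing added above;
// SplitKeySketch is a hypothetical class, not the real CarbonHiveInputSplit.
public class SplitKeySketch {

  private final String taskId;
  private final String segmentId;

  SplitKeySketch(String taskId, String segmentId) {
    this.taskId = taskId;
    this.segmentId = segmentId;
  }

  @Override public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof SplitKeySketch)) {
      return false;
    }
    SplitKeySketch other = (SplitKeySketch) obj;
    return taskId.equals(other.taskId) && segmentId.equals(other.segmentId);
  }

  @Override public int hashCode() {
    // Derived from the same fields as equals(); equivalent to the manual
    // 31 * result accumulation used in the diff.
    return Objects.hash(taskId, segmentId);
  }

  public static void main(String[] args) {
    // Equal objects must report equal hash codes.
    SplitKeySketch a = new SplitKeySketch("0", "1");
    SplitKeySketch b = new SplitKeySketch("0", "1");
    System.out.println(a.equals(b) && a.hashCode() == b.hashCode());
  }
}
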
http://git-wip-us.apache.org/repos/asf/carbondata/blob/2d24e18b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
index ae87d66..b8bd0e2 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
@@ -20,7 +20,6 @@ import java.io.IOException;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 
@@ -45,9 +44,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -61,7 +58,6 @@ class CarbonHiveRecordReader extends CarbonRecordReader<ArrayWritable>
     implements org.apache.hadoop.mapred.RecordReader<Void, ArrayWritable> {
 
   private ArrayWritable valueObj = null;
-  private CarbonObjectInspector objInspector;
   private long recordReaderCounter = 0;
   private int[] columnIds;
 
@@ -95,14 +91,8 @@ class CarbonHiveRecordReader extends CarbonRecordReader<ArrayWritable>
     List<TypeInfo> columnTypes;
     // Get column names and sort order
     final String colIds = conf.get("hive.io.file.readcolumn.ids");
-    final String columnNameProperty = conf.get(serdeConstants.LIST_COLUMNS);
     final String columnTypeProperty = conf.get(serdeConstants.LIST_COLUMN_TYPES);
 
-    if (columnNameProperty.length() == 0) {
-      columnNames = new ArrayList<String>();
-    } else {
-      columnNames = Arrays.asList(columnNameProperty.split(","));
-    }
     if (columnTypeProperty.length() == 0) {
       columnTypes = new ArrayList<TypeInfo>();
     } else {
@@ -115,7 +105,6 @@ class CarbonHiveRecordReader extends CarbonRecordReader<ArrayWritable>
 
     if (!colIds.equals("")) {
       String[] arraySelectedColId = colIds.split(",");
-      List<TypeInfo> reqColTypes = new ArrayList<TypeInfo>();
       columnIds = new int[arraySelectedColId.length];
       int columnId = 0;
       for (int j = 0; j < arraySelectedColId.length; j++) {
@@ -124,8 +113,6 @@ class CarbonHiveRecordReader extends CarbonRecordReader<ArrayWritable>
       }
     }
 
-    rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
-    this.objInspector = new CarbonObjectInspector((StructTypeInfo) rowTypeInfo);
   }
 
   @Override public boolean next(Void aVoid, ArrayWritable value) throws IOException {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2d24e18b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
index 4c7f1a6..6722dcf 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
@@ -166,7 +166,7 @@ class CarbonObjectInspector extends SettableStructObjectInspector {
     return hash;
   }
 
-  class StructFieldImpl implements StructField {
+  private static class StructFieldImpl implements StructField {
 
     private final String name;
     private final ObjectInspector inspector;

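Declaring StructFieldImpl as a private static class resolves the FindBugs "inner class should be static" warning: an inner class that never reads the enclosing instance still carries an implicit reference to it. A minimal sketch of the difference follows, using hypothetical OuterSketch/FieldSketch names rather than the Carbon classes.

// Minimal sketch of the static nested class pattern applied above;
// OuterSketch and FieldSketch are hypothetical names, not Carbon classes.
public class OuterSketch {

  // Declared static because it never touches OuterSketch's state, so
  // instances do not keep a hidden reference to an enclosing OuterSketch.
  private static class FieldSketch {
    private final String name;

    FieldSketch(String name) {
      this.name = name;
    }

    String getName() {
      return name;
    }
  }

  public static void main(String[] args) {
    // A static nested class can be created without an outer instance.
    FieldSketch field = new FieldSketch("col1");
    System.out.println(field.getName());
  }
}
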
http://git-wip-us.apache.org/repos/asf/carbondata/blob/2d24e18b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
index 273536a..86ebc0d 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
@@ -123,7 +123,6 @@ public class MapredCarbonInputFormat extends CarbonInputFormat<ArrayWritable>
     // getting the table absoluteTableIdentifier from the carbonTable
     // to avoid unnecessary deserialization
 
-    StringBuilder colNames = new StringBuilder();
     AbsoluteTableIdentifier identifier = carbonTable.getAbsoluteTableIdentifier();
 
     String projection = getProjection(configuration, carbonTable,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2d24e18b/integration/hive/src/main/java/org/apache/carbondata/hive/server/HiveEmbeddedServer2.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/server/HiveEmbeddedServer2.java
b/integration/hive/src/main/java/org/apache/carbondata/hive/server/HiveEmbeddedServer2.java
index ae931fb..5bc6461 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/server/HiveEmbeddedServer2.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/server/HiveEmbeddedServer2.java
@@ -19,6 +19,7 @@ package org.apache.carbondata.hive.server;
 
 import java.io.File;
 import java.lang.reflect.Field;
+import java.security.SecureRandom;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
@@ -51,6 +52,7 @@ public class HiveEmbeddedServer2 {
   private HiveServer2 hiveServer;
   private HiveConf config;
   private int port;
+  private static Random secureRandom = new SecureRandom();
 
   public void start() throws Exception {
     log.info("Starting Hive Local/Embedded Server...");
@@ -112,11 +114,18 @@ public class HiveEmbeddedServer2 {
     conf.addToRestrictList("columns.comments");
     conf.set("hive.scratch.dir.permission", "777");
     conf.setVar(ConfVars.SCRATCHDIRPERMISSION, "777");
-    scratchDirFile.mkdirs();
-    // also set the permissions manually since Hive doesn't do it...
-    scratchDirFile.setWritable(true, false);
+    if (!scratchDirFile.exists()) {
+      if (!scratchDirFile.mkdirs()) {
+        throw new IllegalArgumentException("could not create the directory:" + scratchDir);
+      }
+      // also set the permissions manually since Hive doesn't do it...
+      if (!scratchDirFile.setWritable(true, false)) {
+        throw new IllegalArgumentException("could not set write permissions for the directory:"
+
+            scratchDir);
+      }
+    }
 
-    int random = new Random().nextInt();
+    int random = secureRandom.nextInt();
 
     conf.set("hive.metastore.warehouse.dir", scratchDir + "/warehouse" + random);
     conf.set("hive.metastore.metadb.dir", scratchDir + "/metastore_db" + random);

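The server changes above follow two common FindBugs fixes: the boolean results of mkdirs() and setWritable() are checked instead of being silently ignored, and a shared SecureRandom replaces java.util.Random for the per-run directory suffix. A minimal, self-contained sketch of the same flow follows; the ScratchDirSketch class and the temp-dir path are hypothetical, not part of the commit.

import java.io.File;
import java.security.SecureRandom;
import java.util.Random;

// Minimal sketch of the directory-setup and SecureRandom patterns applied in
// HiveEmbeddedServer2; ScratchDirSketch and the path below are hypothetical.
public class ScratchDirSketch {

  private static final Random SECURE_RANDOM = new SecureRandom();

  static void prepareScratchDir(String scratchDir) {
    File scratchDirFile = new File(scratchDir);
    if (!scratchDirFile.exists()) {
      // Check the return values rather than ignoring them, as in the diff.
      if (!scratchDirFile.mkdirs()) {
        throw new IllegalArgumentException("could not create the directory:" + scratchDir);
      }
      if (!scratchDirFile.setWritable(true, false)) {
        throw new IllegalArgumentException(
            "could not set write permissions for the directory:" + scratchDir);
      }
    }
  }

  public static void main(String[] args) {
    String scratchDir = System.getProperty("java.io.tmpdir") + "/carbon_scratch_sketch";
    prepareScratchDir(scratchDir);
    // Unique per-run suffix, as in the diff.
    System.out.println(scratchDir + "/warehouse" + SECURE_RANDOM.nextInt());
  }
}
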
http://git-wip-us.apache.org/repos/asf/carbondata/blob/2d24e18b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index f722c66..df3c5bd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -101,6 +101,7 @@
     <module>hadoop</module>
     <module>integration/spark-common</module>
     <module>integration/spark-common-test</module>
+    <module>integration/hive</module>
     <module>assembly</module>
   </modules>
 

