carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From chenliang...@apache.org
Subject [1/5] carbondata git commit: resolved bug for unable to select out-of-order columns in hive
Date Thu, 25 May 2017 03:35:31 GMT
Repository: carbondata
Updated Branches:
  refs/heads/master 9669c0b29 -> 25a8ac616


resolved bug for unable to select out-of-order columns in hive

commit for starting build again


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/25a8ac61
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/25a8ac61
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/25a8ac61

Branch: refs/heads/master
Commit: 25a8ac616b5407e686f3a5e3106b1a5a34835b85
Parents: b4f65b2
Author: anubhav100 <anubhav.tarar@knoldus.in>
Authored: Mon Apr 17 15:56:55 2017 +0530
Committer: chenliang613 <chenliang613@apache.org>
Committed: Thu May 25 11:34:37 2017 +0800

----------------------------------------------------------------------
 .../carbondata/hive/CarbonHiveRecordReader.java | 17 +++---
 .../apache/carbondata/hive/CarbonHiveSerDe.java | 34 +++++++++---
 .../carbondata/hiveexample/HiveExample.scala    | 57 ++++++++++++++++++++
 3 files changed, 94 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/25a8ac61/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
index fc45d74..eb7faed 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
@@ -78,22 +78,21 @@ public class CarbonHiveRecordReader extends CarbonRecordReader<ArrayWritable>
     }
     List<TableBlockInfo> tableBlockInfoList = CarbonHiveInputSplit.createBlocks(splitList);
     queryModel.setTableBlockInfos(tableBlockInfoList);
-    readSupport.initialize(queryModel.getProjectionColumns(),
-        queryModel.getAbsoluteTableIdentifier());
+    readSupport.initialize(queryModel.getProjectionColumns(), queryModel.getAbsoluteTableIdentifier());
     try {
       carbonIterator = new ChunkRowIterator(queryExecutor.execute(queryModel));
     } catch (QueryExecutionException e) {
       throw new IOException(e.getMessage(), e.getCause());
     }
     if (valueObj == null) {
-      valueObj = new ArrayWritable(Writable.class,
-          new Writable[queryModel.getProjectionColumns().length]);
+      valueObj = new ArrayWritable(Writable.class, new Writable[queryModel.getProjectionColumns().length]);
     }
 
     final TypeInfo rowTypeInfo;
     final List<String> columnNames;
     List<TypeInfo> columnTypes;
     // Get column names and sort order
+    final String colIds = conf.get("hive.io.file.readcolumn.ids");
     final String columnNameProperty = conf.get("hive.io.file.readcolumn.names");
     final String columnTypeProperty = conf.get(serdeConstants.LIST_COLUMN_TYPES);
 
@@ -107,9 +106,15 @@ public class CarbonHiveRecordReader extends CarbonRecordReader<ArrayWritable>
     } else {
       columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
     }
-    columnTypes = columnTypes.subList(0, columnNames.size());
+
+    String[] arraySelectedColId = colIds.split(",");
+    List<TypeInfo> reqColTypes = new ArrayList<TypeInfo>();
+
+    for (String anArrayColId : arraySelectedColId) {
+      reqColTypes.add(columnTypes.get(Integer.parseInt(anArrayColId)));
+    }
     // Create row related objects
-    rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
+    rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, reqColTypes);
     this.objInspector = new CarbonObjectInspector((StructTypeInfo) rowTypeInfo);
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/25a8ac61/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
index cbc2514..3a5c50a 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
@@ -83,8 +83,12 @@ public class CarbonHiveSerDe extends AbstractSerDe {
 
     final TypeInfo rowTypeInfo;
     final List<String> columnNames;
+    final List<String> reqColNames;
     final List<TypeInfo> columnTypes;
     // Get column names and sort order
+    assert configuration != null;
+    final String colIds = configuration.get("hive.io.file.readcolumn.ids");
+
     final String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
     final String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
 
@@ -98,14 +102,28 @@ public class CarbonHiveSerDe extends AbstractSerDe {
     } else {
       columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
     }
-    // Create row related objects
-    rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
-    this.objInspector = new CarbonObjectInspector((StructTypeInfo) rowTypeInfo);
-
-    // Stats part
-    serializedSize = 0;
-    deserializedSize = 0;
-    status = LAST_OPERATION.UNKNOWN;
+    if (colIds != null) {
+      reqColNames = new ArrayList<String>();
+
+      String[] arraySelectedColId = colIds.split(",");
+      List<TypeInfo> reqColTypes = new ArrayList<TypeInfo>();
+      for (String anArrayColId : arraySelectedColId) {
+        reqColNames.add(columnNames.get(Integer.parseInt(anArrayColId)));
+        reqColTypes.add(columnTypes.get(Integer.parseInt(anArrayColId)));
+      }
+      // Create row related objects
+      rowTypeInfo = TypeInfoFactory.getStructTypeInfo(reqColNames, reqColTypes);
+      this.objInspector = new CarbonObjectInspector((StructTypeInfo) rowTypeInfo);
+    } else {
+      // Create row related objects
+      rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
+      this.objInspector = new CarbonObjectInspector((StructTypeInfo) rowTypeInfo);
+
+      // Stats part
+      serializedSize = 0;
+      deserializedSize = 0;
+      status = LAST_OPERATION.UNKNOWN;
+    }
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/carbondata/blob/25a8ac61/integration/hive/src/main/scala/org/apache/carbondata/hiveexample/HiveExample.scala
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/scala/org/apache/carbondata/hiveexample/HiveExample.scala b/integration/hive/src/main/scala/org/apache/carbondata/hiveexample/HiveExample.scala
index 158cfff..a80fb71 100644
--- a/integration/hive/src/main/scala/org/apache/carbondata/hiveexample/HiveExample.scala
+++ b/integration/hive/src/main/scala/org/apache/carbondata/hiveexample/HiveExample.scala
@@ -175,6 +175,63 @@ object HiveExample {
       rowsFetched = rowsFetched + 1
     }
     println(s"******Total Number Of Rows Fetched ****** $rowsFetched")
+
+    logger.info("Fetching the Individual Columns ")
+    //fetching the seperate columns
+    var individualColRowsFetched = 0
+
+    val resultIndividualCol = stmt.executeQuery("SELECT NAME FROM HIVE_CARBON_EXAMPLE")
+
+    while(resultIndividualCol.next){
+      if (individualColRowsFetched == 0) {
+        println("+--------------+")
+        println("| NAME         |")
+
+        println("+---++---------+")
+
+        val resultName = resultIndividualCol.getString("name")
+
+        println(s"| $resultName    |")
+        println("+---+" + "+---------+")
+      }
+      else {
+        val resultName = resultIndividualCol.getString("NAME")
+
+        println(s"| $resultName      |" )
+        println("+---+" + "+---------+" )
+      }
+      individualColRowsFetched =  individualColRowsFetched +1
+    }
+    println(s" ********** Total Rows Fetched When Quering The Individual Column ********** $individualColRowsFetched")
+
+    logger.info("Fetching the Out Of Order Columns ")
+
+    val resultOutOfOrderCol = stmt.executeQuery("SELECT SALARY,ID,NAME FROM HIVE_CARBON_EXAMPLE")
+    var outOfOrderColFetched = 0
+    while (resultOutOfOrderCol.next()){
+      if (outOfOrderColFetched == 0) {
+        println("+---+" + "+-------+" + "+--------------+")
+        println("| Salary|" + "| ID |" + "| NAME        |")
+
+        println("+---+" + "+-------+" + "+--------------+")
+
+        val resultId = resultOutOfOrderCol.getString("id")
+        val resultName = resultOutOfOrderCol.getString("name")
+        val resultSalary = resultOutOfOrderCol.getString("salary")
+
+        println(s"| $resultSalary |" + s"| $resultId |" + s"| $resultName  |")
+        println("+---+" + "+-------+" + "+--------------+")
+      }
+      else {
+        val resultId = resultOutOfOrderCol.getString("ID")
+        val resultName = resultOutOfOrderCol.getString("NAME")
+        val resultSalary = resultOutOfOrderCol.getString("SALARY")
+
+        println(s"| $resultSalary |" + s"| $resultId |" + s"| $resultName   |")
+        println("+---+" + "+-------+" + "+--------------+")
+      }
+      outOfOrderColFetched =  outOfOrderColFetched +1
+    }
     hiveEmbeddedServer2.stop()
     System.exit(0)
   }


Mime
View raw message