carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From chenliang...@apache.org
Subject [48/50] [abbrv] incubator-carbondata git commit: Fixed issues after merge to apache/master
Date Wed, 20 Jul 2016 10:14:16 GMT
Fixed issues after merge to apache/master


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/5fa76712
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/5fa76712
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/5fa76712

Branch: refs/heads/master
Commit: 5fa76712ac6ee13d3944b8b9bbb68c2a8a67de50
Parents: eaecb65
Author: ravipesala <ravi.pesala@gmail.com>
Authored: Wed Jul 20 00:18:50 2016 +0530
Committer: ravipesala <ravi.pesala@gmail.com>
Committed: Wed Jul 20 00:18:50 2016 +0530

----------------------------------------------------------------------
 .../core/carbon/metadata/datatype/DataType.java |  3 +-
 .../org/carbondata/core/util/CarbonUtil.java    | 17 +++++
 .../org/carbondata/core/util/DataTypeUtil.java  | 68 +++++++++++++-------
 .../complex/querytypes/ArrayQueryType.java      |  8 +--
 .../complex/querytypes/ComplexQueryType.java    |  5 +-
 .../complex/querytypes/PrimitiveQueryType.java  | 12 +---
 .../complex/querytypes/StructQueryType.java     |  3 +-
 .../impl/ListBasedResultCollector.java          | 12 ++--
 .../scan/executor/infos/BlockExecutionInfo.java |  1 +
 .../org/carbondata/scan/filter/FilterUtil.java  | 39 +++--------
 .../scan/filter/GenericQueryType.java           |  3 +-
 .../DimColumnResolvedFilterInfo.java            |  1 -
 .../visitor/CustomTypeDictionaryVisitor.java    |  1 +
 .../visitor/NoDictionaryTypeVisitor.java        |  3 +-
 .../org/carbondata/scan/model/QueryModel.java   |  1 -
 .../scan/result/AbstractScannedResult.java      |  2 +
 .../iterator/DetailQueryResultIterator.java     |  2 +-
 .../impl/DetailQueryResultPreparatorImpl.java   |  9 +++
 .../impl/RawQueryResultPreparatorImpl.java      |  9 +++
 .../spark/sql/CarbonDictionaryDecoder.scala     |  6 +-
 .../execution/command/carbonTableSchema.scala   |  2 +-
 .../spark/sql/hive/CarbonMetastoreCatalog.scala |  2 +-
 .../org/carbondata/spark/CarbonFilters.scala    |  1 +
 .../spark/rdd/CarbonDataRDDFactory.scala        |  2 +-
 .../spark/rdd/CarbonGlobalDictionaryRDD.scala   |  7 +-
 .../carbondata/spark/rdd/CarbonScanRDD.scala    | 64 +++++++++++++-----
 .../org/carbondata/spark/rdd/Compactor.scala    | 10 +--
 .../carbondata/spark/util/CarbonScalaUtil.scala |  4 +-
 .../complexType/TestComplexTypeQuery.scala      | 12 ++--
 .../TestDimensionWithDecimalDataType.scala      |  8 +--
 .../dataload/TestLoadDataWithHiveSyntax.scala   |  6 +-
 .../deleteTable/TestDeleteTableNewDDL.scala     | 30 +++------
 .../spark/util/AllDictionaryTestCase.scala      | 14 ++--
 .../spark/util/DictionaryTestCaseUtil.scala     |  2 +-
 .../util/ExternalColumnDictionaryTestCase.scala | 14 ++--
 ...GlobalDictionaryUtilConcurrentTestCase.scala | 18 +++---
 .../org/carbondata/lcm/locks/LocalFileLock.java |  1 -
 37 files changed, 227 insertions(+), 175 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/core/carbon/metadata/datatype/DataType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/metadata/datatype/DataType.java b/core/src/main/java/org/carbondata/core/carbon/metadata/datatype/DataType.java
index 2891702..5fbe9cb 100644
--- a/core/src/main/java/org/carbondata/core/carbon/metadata/datatype/DataType.java
+++ b/core/src/main/java/org/carbondata/core/carbon/metadata/datatype/DataType.java
@@ -33,7 +33,8 @@ public enum DataType {
   NULL(7),
   DECIMAL(8),
   ARRAY(9),
-  STRUCT(10);
+  STRUCT(10),
+  MAP(11);
 
   private int presedenceOrder;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/carbondata/core/util/CarbonUtil.java
index c63dccc..9db8f24 100644
--- a/core/src/main/java/org/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/carbondata/core/util/CarbonUtil.java
@@ -1038,6 +1038,23 @@ public final class CarbonUtil {
   }
 
   /**
+   * below method is to check whether it is complex data type
+   *
+   * @param dataType  data type to be searched
+   * @return if data type is present
+   */
+  public static boolean hasComplexDataType(DataType dataType) {
+    switch (dataType) {
+      case ARRAY :
+      case STRUCT:
+      case MAP:
+        return true;
+      default:
+        return false;
+    }
+  }
+
+  /**
    * Below method will be used to read the data file matadata
    *
    * @param filePath file path

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
index b719607..765dc60 100644
--- a/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
@@ -181,28 +181,32 @@ public final class DataTypeUtil {
     if (null == data) {
       return false;
     }
-    switch (actualDataType) {
-      case SHORT:
-      case INT:
-      case LONG:
-      case DOUBLE:
-      case DECIMAL:
-        return NumberUtils.isDigits(data);
-      case TIMESTAMP:
-        if (data.isEmpty()) {
-          return false;
-        }
-        SimpleDateFormat parser = new SimpleDateFormat(CarbonProperties.getInstance()
-            .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
-                CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
-        try {
-          parser.parse(data);
+    try {
+      switch (actualDataType) {
+        case SHORT:
+        case INT:
+        case LONG:
+        case DOUBLE:
+        case DECIMAL:
+          return NumberUtils.isNumber(data);
+        case TIMESTAMP:
+          if (data.isEmpty()) {
+            return false;
+          }
+          SimpleDateFormat parser = new SimpleDateFormat(CarbonProperties.getInstance()
+              .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+                  CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
+          try {
+            parser.parse(data);
+            return true;
+          } catch (ParseException e) {
+            return false;
+          }
+        default:
           return true;
-        } catch (ParseException e) {
-          return false;
-        }
-      default:
-        return false;
+      }
+    } catch (NumberFormatException ex) {
+      return false;
     }
   }
 
@@ -216,20 +220,35 @@ public final class DataTypeUtil {
    */
   public static Object getDataBasedOnDataType(String data, DataType actualDataType) {
 
-    if (null == data || data.isEmpty() || CarbonCommonConstants.MEMBER_DEFAULT_VAL.equals(data)) {
+    if (null == data || CarbonCommonConstants.MEMBER_DEFAULT_VAL.equals(data)) {
       return null;
     }
     try {
       switch (actualDataType) {
         case INT:
+          if (data.isEmpty()) {
+            return null;
+          }
           return Integer.parseInt(data);
         case SHORT:
+          if (data.isEmpty()) {
+            return null;
+          }
           return Short.parseShort(data);
         case DOUBLE:
+          if (data.isEmpty()) {
+            return null;
+          }
           return Double.parseDouble(data);
         case LONG:
+          if (data.isEmpty()) {
+            return null;
+          }
           return Long.parseLong(data);
         case TIMESTAMP:
+          if (data.isEmpty()) {
+            return null;
+          }
           SimpleDateFormat parser = new SimpleDateFormat(CarbonProperties.getInstance()
               .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
                   CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
@@ -242,11 +261,14 @@ public final class DataTypeUtil {
             return null;
           }
         case DECIMAL:
+          if (data.isEmpty()) {
+            return null;
+          }
           java.math.BigDecimal javaDecVal = new java.math.BigDecimal(data);
           scala.math.BigDecimal scalaDecVal = new scala.math.BigDecimal(javaDecVal);
           org.apache.spark.sql.types.Decimal decConverter =
               new org.apache.spark.sql.types.Decimal();
-          return decConverter.set(scalaDecVal);
+          return decConverter.set(scalaDecVal, 19, 2);
         default:
           return UTF8String.fromString(data);
       }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/query/complex/querytypes/ArrayQueryType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/query/complex/querytypes/ArrayQueryType.java b/core/src/main/java/org/carbondata/query/complex/querytypes/ArrayQueryType.java
index 102273e..52b6333 100644
--- a/core/src/main/java/org/carbondata/query/complex/querytypes/ArrayQueryType.java
+++ b/core/src/main/java/org/carbondata/query/complex/querytypes/ArrayQueryType.java
@@ -25,11 +25,11 @@ import java.nio.ByteBuffer;
 import java.util.List;
 
 import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.carbondata.query.carbon.processor.BlocksChunkHolder;
+import org.carbondata.scan.filter.GenericQueryType;
+import org.carbondata.scan.processor.BlocksChunkHolder;
 
-import org.apache.spark.sql.types.ArrayType;
-import org.apache.spark.sql.types.DataType;
-import org.apache.spark.sql.types.GenericArrayData;
+import org.apache.spark.sql.catalyst.util.*; // Don't remove it, used for spark 1.6 compatability
+import org.apache.spark.sql.types.*;
 
 public class ArrayQueryType extends ComplexQueryType implements GenericQueryType {
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/query/complex/querytypes/ComplexQueryType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/query/complex/querytypes/ComplexQueryType.java b/core/src/main/java/org/carbondata/query/complex/querytypes/ComplexQueryType.java
index 7f0e45b..d13d730 100644
--- a/core/src/main/java/org/carbondata/query/complex/querytypes/ComplexQueryType.java
+++ b/core/src/main/java/org/carbondata/query/complex/querytypes/ComplexQueryType.java
@@ -20,7 +20,9 @@
 package org.carbondata.query.complex.querytypes;
 
 import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.carbondata.query.carbon.processor.BlocksChunkHolder;
+import org.carbondata.scan.filter.GenericQueryType;
+import org.carbondata.scan.processor.BlocksChunkHolder;
+
 public class ComplexQueryType {
   protected GenericQueryType children;
 
@@ -48,7 +50,6 @@ public class ComplexQueryType {
    * Method will copy the block chunk holder data to the passed
    * byte[], this method is also used by child
    *
-   * @param columnarKeyStoreDataHolder
    * @param rowNumber
    * @param input
    */

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/query/complex/querytypes/PrimitiveQueryType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/query/complex/querytypes/PrimitiveQueryType.java b/core/src/main/java/org/carbondata/query/complex/querytypes/PrimitiveQueryType.java
index 9c618ba..19cde2a 100644
--- a/core/src/main/java/org/carbondata/query/complex/querytypes/PrimitiveQueryType.java
+++ b/core/src/main/java/org/carbondata/query/complex/querytypes/PrimitiveQueryType.java
@@ -22,7 +22,6 @@ package org.carbondata.query.complex.querytypes;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
 import java.util.List;
 
 import org.carbondata.core.cache.dictionary.Dictionary;
@@ -30,8 +29,9 @@ import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
 import org.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
 import org.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.carbondata.core.keygenerator.mdkey.Bits;
-import org.carbondata.query.carbon.processor.BlocksChunkHolder;
-import org.carbondata.query.carbon.util.DataTypeUtil;
+import org.carbondata.core.util.DataTypeUtil;
+import org.carbondata.scan.filter.GenericQueryType;
+import org.carbondata.scan.processor.BlocksChunkHolder;
 
 import org.apache.spark.sql.types.BooleanType;
 import org.apache.spark.sql.types.DataType;
@@ -39,7 +39,6 @@ import org.apache.spark.sql.types.DoubleType;
 import org.apache.spark.sql.types.IntegerType;
 import org.apache.spark.sql.types.LongType;
 import org.apache.spark.sql.types.TimestampType;
-import org.apache.spark.unsafe.types.UTF8String;
 
 public class PrimitiveQueryType extends ComplexQueryType implements GenericQueryType {
 
@@ -176,11 +175,6 @@ public class PrimitiveQueryType extends ComplexQueryType implements GenericQuery
       String dictionaryValueForKey = dictionary.getDictionaryValueForKey(surrgateValue);
       actualData = DataTypeUtil.getDataBasedOnDataType(dictionaryValueForKey, this.dataType);
     }
-    if (null != actualData
-        && this.dataType == org.carbondata.core.carbon.metadata.datatype.DataType.STRING) {
-      byte[] dataBytes = ((String) actualData).getBytes(Charset.defaultCharset());
-      return UTF8String.fromBytes(dataBytes);
-    }
     return actualData;
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/query/complex/querytypes/StructQueryType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/query/complex/querytypes/StructQueryType.java b/core/src/main/java/org/carbondata/query/complex/querytypes/StructQueryType.java
index f821719..832c365 100644
--- a/core/src/main/java/org/carbondata/query/complex/querytypes/StructQueryType.java
+++ b/core/src/main/java/org/carbondata/query/complex/querytypes/StructQueryType.java
@@ -26,7 +26,8 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.carbondata.query.carbon.processor.BlocksChunkHolder;
+import org.carbondata.scan.filter.GenericQueryType;
+import org.carbondata.scan.processor.BlocksChunkHolder;
 
 import org.apache.spark.sql.catalyst.expressions.GenericInternalRowWithSchema;
 import org.apache.spark.sql.types.DataType;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/collector/impl/ListBasedResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/collector/impl/ListBasedResultCollector.java b/core/src/main/java/org/carbondata/scan/collector/impl/ListBasedResultCollector.java
index c2dc2ec..a199cad 100644
--- a/core/src/main/java/org/carbondata/scan/collector/impl/ListBasedResultCollector.java
+++ b/core/src/main/java/org/carbondata/scan/collector/impl/ListBasedResultCollector.java
@@ -152,6 +152,7 @@ public class ListBasedResultCollector implements ScannedResultCollector {
     if (!dataChunk.getNullValueIndexHolder().getBitSet().get(index)) {
       Object msrVal;
       switch (dataType) {
+        case INT:
         case LONG:
           msrVal = dataChunk.getMeasureDataHolder().getReadableLongValueByIndex(index);
           break;
@@ -171,10 +172,13 @@ public class ListBasedResultCollector implements ScannedResultCollector {
    */
   @Override public Result getCollectedResult() {
     Result<List<ListBasedResultWrapper>, Object> result = new ListBasedResult();
-    if (!tableBlockExecutionInfos.isFixedKeyUpdateRequired()) {
-      updateKeyWithLatestBlockKeyGenerator();
+    if (tableBlockExecutionInfos.isFixedKeyUpdateRequired() && tableBlockExecutionInfos
+        .isDimensionsExistInQuery()) {
+      updateKeyWithLatestBlockKeygenerator();
+      result.addScannedResult(listBasedResult);
+    } else {
+      result.addScannedResult(listBasedResult);
     }
-    result.addScannedResult(listBasedResult);
     return result;
   }
 
@@ -186,7 +190,7 @@ public class ListBasedResultCollector implements ScannedResultCollector {
    *
    * @return updated block
    */
-  private void updateKeyWithLatestBlockKeyGenerator() {
+  private void updateKeyWithLatestBlockKeygenerator() {
     try {
       long[] data = null;
       ByteArrayWrapper key = null;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/executor/infos/BlockExecutionInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/executor/infos/BlockExecutionInfo.java b/core/src/main/java/org/carbondata/scan/executor/infos/BlockExecutionInfo.java
index c835b1c..7b62681 100644
--- a/core/src/main/java/org/carbondata/scan/executor/infos/BlockExecutionInfo.java
+++ b/core/src/main/java/org/carbondata/scan/executor/infos/BlockExecutionInfo.java
@@ -27,6 +27,7 @@ import org.carbondata.core.carbon.datastore.block.AbstractIndex;
 import org.carbondata.core.carbon.querystatistics.QueryStatisticsRecorder;
 import org.carbondata.core.datastorage.store.impl.FileFactory.FileType;
 import org.carbondata.core.keygenerator.KeyGenerator;
+import org.carbondata.scan.filter.GenericQueryType;
 import org.carbondata.scan.filter.executer.FilterExecuter;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/filter/FilterUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/FilterUtil.java b/core/src/main/java/org/carbondata/scan/filter/FilterUtil.java
index 4f5d513..056a936 100644
--- a/core/src/main/java/org/carbondata/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/carbondata/scan/filter/FilterUtil.java
@@ -23,19 +23,7 @@ import java.math.BigDecimal;
 import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.TreeSet;
+import java.util.*;
 
 import org.carbondata.common.logging.LogService;
 import org.carbondata.common.logging.LogServiceFactory;
@@ -65,19 +53,10 @@ import org.carbondata.scan.expression.ColumnExpression;
 import org.carbondata.scan.expression.Expression;
 import org.carbondata.scan.expression.ExpressionResult;
 import org.carbondata.scan.expression.LiteralExpression;
+import org.carbondata.scan.expression.conditional.ListExpression;
 import org.carbondata.scan.expression.exception.FilterIllegalMemberException;
 import org.carbondata.scan.expression.exception.FilterUnsupportedException;
-import org.carbondata.scan.filter.executer.AndFilterExecuterImpl;
-import org.carbondata.scan.filter.executer.DimColumnExecuterFilterInfo;
-import org.carbondata.scan.filter.executer.ExcludeColGroupFilterExecuterImpl;
-import org.carbondata.scan.filter.executer.ExcludeFilterExecuterImpl;
-import org.carbondata.scan.filter.executer.FilterExecuter;
-import org.carbondata.scan.filter.executer.IncludeColGroupFilterExecuterImpl;
-import org.carbondata.scan.filter.executer.IncludeFilterExecuterImpl;
-import org.carbondata.scan.filter.executer.OrFilterExecuterImpl;
-import org.carbondata.scan.filter.executer.RestructureFilterExecuterImpl;
-import org.carbondata.scan.filter.executer.RowLevelFilterExecuterImpl;
-import org.carbondata.scan.filter.executer.RowLevelRangeTypeExecuterFacory;
+import org.carbondata.scan.filter.executer.*;
 import org.carbondata.scan.filter.intf.ExpressionType;
 import org.carbondata.scan.filter.intf.FilterExecuterType;
 import org.carbondata.scan.filter.intf.RowImpl;
@@ -242,7 +221,7 @@ public final class FilterUtil {
   public static boolean checkIfDataTypeNotTimeStamp(Expression expression) {
     if (expression.getFilterExpressionType() == ExpressionType.LITERAL) {
       if (!(((LiteralExpression) expression).getLiteralExpDataType()
-          == org.carbondata.query.expression.DataType.TimestampType)) {
+          == DataType.TIMESTAMP)) {
         return true;
       }
     }
@@ -693,7 +672,7 @@ public final class FilterUtil {
       SegmentProperties segmentProperties, long[] startKey) {
     Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter =
         dimColResolvedFilterInfo.getDimensionResolvedFilterInstance();
-    for (Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
+    for (Map.Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
       List<DimColumnFilterInfo> values = entry.getValue();
       if (null == values || !entry.getKey().hasEncoding(Encoding.DICTIONARY)) {
         continue;
@@ -735,7 +714,7 @@ public final class FilterUtil {
     Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter =
         dimColResolvedFilterInfo.getDimensionResolvedFilterInstance();
     // step 1
-    for (Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
+    for (Map.Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
       if (!entry.getKey().hasEncoding(Encoding.DICTIONARY)) {
         List<DimColumnFilterInfo> listOfDimColFilterInfo = entry.getValue();
         if (null == listOfDimColFilterInfo) {
@@ -791,7 +770,7 @@ public final class FilterUtil {
     Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter =
         dimColResolvedFilterInfo.getDimensionResolvedFilterInstance();
     // step 1
-    for (Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
+    for (Map.Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
       if (!entry.getKey().hasEncoding(Encoding.DICTIONARY)) {
         List<DimColumnFilterInfo> listOfDimColFilterInfo = entry.getValue();
         if (null == listOfDimColFilterInfo) {
@@ -854,7 +833,7 @@ public final class FilterUtil {
    */
   private static void getStartKeyBasedOnFilterResoverInfo(
       Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter, long[] startKey) {
-    for (Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
+    for (Map.Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
       List<DimColumnFilterInfo> values = entry.getValue();
       if (null == values) {
         continue;
@@ -906,7 +885,7 @@ public final class FilterUtil {
 
   private static void getEndKeyWithFilter(
       Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter, long[] endKey) {
-    for (Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
+    for (Map.Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
       List<DimColumnFilterInfo> values = entry.getValue();
       if (null == values || !entry.getKey().hasEncoding(Encoding.DICTIONARY)) {
         continue;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/filter/GenericQueryType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/GenericQueryType.java b/core/src/main/java/org/carbondata/scan/filter/GenericQueryType.java
index 787cd72..a2b2da3 100644
--- a/core/src/main/java/org/carbondata/scan/filter/GenericQueryType.java
+++ b/core/src/main/java/org/carbondata/scan/filter/GenericQueryType.java
@@ -24,7 +24,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.List;
 
-import org.carbondata.core.datastorage.store.columnar.ColumnarKeyStoreDataHolder;
+import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
 import org.carbondata.scan.processor.BlocksChunkHolder;
 
 import org.apache.spark.sql.types.DataType;
@@ -70,5 +70,4 @@ public interface GenericQueryType {
   void fillRequiredBlockData(BlocksChunkHolder blockChunkHolder);
 
   Object getDataBasedOnDataTypeFromSurrogates(ByteBuffer surrogateData);
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java b/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
index 0856846..e5b70db 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
@@ -29,7 +29,6 @@ import org.carbondata.core.carbon.datastore.IndexKey;
 import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
 import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 import org.carbondata.scan.filter.DimColumnFilterInfo;
-import org.carbondata.scan.filter.GenericQueryType;
 import org.carbondata.scan.filter.resolver.metadata.FilterResolverMetadata;
 import org.carbondata.scan.filter.resolver.resolverinfo.visitable.ResolvedFilterInfoVisitable;
 import org.carbondata.scan.filter.resolver.resolverinfo.visitor.ResolvedFilterInfoVisitorIntf;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java b/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
index 9fc08c6..a3e4ecf 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
@@ -32,6 +32,7 @@ import org.carbondata.scan.expression.ColumnExpression;
 import org.carbondata.scan.expression.exception.FilterIllegalMemberException;
 import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 import org.carbondata.scan.filter.DimColumnFilterInfo;
+import org.carbondata.scan.filter.FilterUtil;
 import org.carbondata.scan.filter.resolver.metadata.FilterResolverMetadata;
 import org.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java b/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
index 3078027..f8b76e0 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
@@ -16,12 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.carbondata.query.filter.resolver.resolverinfo.visitor;
+package org.carbondata.scan.filter.resolver.resolverinfo.visitor;
 
 import java.util.List;
 
 import org.carbondata.common.logging.LogService;
 import org.carbondata.common.logging.LogServiceFactory;
+import org.carbondata.core.constants.CarbonCommonConstants;
 import org.carbondata.scan.expression.exception.FilterIllegalMemberException;
 import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 import org.carbondata.scan.filter.DimColumnFilterInfo;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/model/QueryModel.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/model/QueryModel.java b/core/src/main/java/org/carbondata/scan/model/QueryModel.java
index 82a6221..e02c56e 100644
--- a/core/src/main/java/org/carbondata/scan/model/QueryModel.java
+++ b/core/src/main/java/org/carbondata/scan/model/QueryModel.java
@@ -32,7 +32,6 @@ import org.carbondata.core.carbon.metadata.schema.table.column.CarbonColumn;
 import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
 import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
 import org.carbondata.core.carbon.querystatistics.QueryStatisticsRecorder;
-import org.carbondata.core.constants.CarbonCommonConstants;
 import org.carbondata.core.util.CarbonUtil;
 import org.carbondata.scan.expression.ColumnExpression;
 import org.carbondata.scan.expression.Expression;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/result/AbstractScannedResult.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/result/AbstractScannedResult.java b/core/src/main/java/org/carbondata/scan/result/AbstractScannedResult.java
index 9602c41..9e7f007 100644
--- a/core/src/main/java/org/carbondata/scan/result/AbstractScannedResult.java
+++ b/core/src/main/java/org/carbondata/scan/result/AbstractScannedResult.java
@@ -28,8 +28,10 @@ import org.carbondata.common.logging.LogService;
 import org.carbondata.common.logging.LogServiceFactory;
 import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
 import org.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
+import org.carbondata.core.util.CarbonUtil;
 import org.carbondata.scan.executor.infos.BlockExecutionInfo;
 import org.carbondata.scan.executor.infos.KeyStructureInfo;
+import org.carbondata.scan.filter.GenericQueryType;
 
 /**
  * Scanned result class which will store and provide the result on request

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/result/iterator/DetailQueryResultIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/result/iterator/DetailQueryResultIterator.java b/core/src/main/java/org/carbondata/scan/result/iterator/DetailQueryResultIterator.java
index 890f9da..4b2d777 100644
--- a/core/src/main/java/org/carbondata/scan/result/iterator/DetailQueryResultIterator.java
+++ b/core/src/main/java/org/carbondata/scan/result/iterator/DetailQueryResultIterator.java
@@ -73,7 +73,7 @@ public class DetailQueryResultIterator extends AbstractDetailQueryResultIterator
     } catch (Exception ex) {
       fileReader.finish();
       execService.shutdown();
-      throw new RuntimeException(ex.getCause().getMessage());
+      throw new RuntimeException(ex);
     }
     return result;
   }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/result/preparator/impl/DetailQueryResultPreparatorImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/result/preparator/impl/DetailQueryResultPreparatorImpl.java b/core/src/main/java/org/carbondata/scan/result/preparator/impl/DetailQueryResultPreparatorImpl.java
index b1fe64b..8a7d610 100644
--- a/core/src/main/java/org/carbondata/scan/result/preparator/impl/DetailQueryResultPreparatorImpl.java
+++ b/core/src/main/java/org/carbondata/scan/result/preparator/impl/DetailQueryResultPreparatorImpl.java
@@ -18,11 +18,13 @@
  */
 package org.carbondata.scan.result.preparator.impl;
 
+import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
 import java.util.List;
 
 import org.carbondata.common.logging.LogService;
 import org.carbondata.common.logging.LogServiceFactory;
+import org.carbondata.core.carbon.metadata.datatype.DataType;
 import org.carbondata.core.carbon.metadata.encoder.Encoding;
 import org.carbondata.core.constants.CarbonCommonConstants;
 import org.carbondata.core.util.CarbonUtil;
@@ -76,6 +78,7 @@ public class DetailQueryResultPreparatorImpl
     int currentRow = 0;
     long[] surrogateResult = null;
     int noDictionaryColumnIndex = 0;
+    int complexTypeColumnIndex = 0;
     ByteArrayWrapper key = null;
     Object[] value = null;
     while (scannedResult.hasNext()) {
@@ -92,6 +95,12 @@ public class DetailQueryResultPreparatorImpl
                 new String(key.getNoDictionaryKeyByIndex(noDictionaryColumnIndex++),
                     Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)),
                 queryDimension.get(i).getDimension().getDataType());
+          } else if (CarbonUtil.hasDataType(queryDimension.get(i).getDimension().getDataType(),
+              new DataType[] { DataType.ARRAY, DataType.STRUCT })) {
+            resultData[currentRow][i] = queryExecuterProperties.complexDimensionInfoMap
+                .get(queryDimension.get(i).getDimension().getOrdinal())
+                .getDataBasedOnDataTypeFromSurrogates(
+                    ByteBuffer.wrap(key.getComplexTypeByIndex(complexTypeColumnIndex++)));
           } else {
             resultData[currentRow][i] =
                 (int) surrogateResult[queryDimension.get(i).getDimension().getKeyOrdinal()];

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/core/src/main/java/org/carbondata/scan/result/preparator/impl/RawQueryResultPreparatorImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/result/preparator/impl/RawQueryResultPreparatorImpl.java b/core/src/main/java/org/carbondata/scan/result/preparator/impl/RawQueryResultPreparatorImpl.java
index 8a38efe..161b58e 100644
--- a/core/src/main/java/org/carbondata/scan/result/preparator/impl/RawQueryResultPreparatorImpl.java
+++ b/core/src/main/java/org/carbondata/scan/result/preparator/impl/RawQueryResultPreparatorImpl.java
@@ -1,5 +1,6 @@
 package org.carbondata.scan.result.preparator.impl;
 
+import java.nio.ByteBuffer;
 import java.util.List;
 
 import org.carbondata.common.logging.LogService;
@@ -7,6 +8,7 @@ import org.carbondata.common.logging.LogServiceFactory;
 import org.carbondata.core.carbon.metadata.encoder.Encoding;
 import org.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
 import org.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
+import org.carbondata.core.util.CarbonUtil;
 import org.carbondata.core.util.DataTypeUtil;
 import org.carbondata.scan.executor.impl.QueryExecutorProperties;
 import org.carbondata.scan.model.QueryDimension;
@@ -90,6 +92,7 @@ public class RawQueryResultPreparatorImpl
           long[] surrogateResult = querySchemaInfo.getKeyGenerator()
               .getKeyArray(key.getDictionaryKey(), querySchemaInfo.getMaskedByteIndexes());
           int noDictionaryColumnIndex = 0;
+          int complexTypeColumnIndex = 0;
           for (int i = 0; i < dimSize; i++) {
             if (!queryDimensions[i].getDimension().hasEncoding(Encoding.DICTIONARY)) {
               row[order[i]] = DataTypeUtil.getDataBasedOnDataType(
@@ -103,6 +106,12 @@ public class RawQueryResultPreparatorImpl
                 row[order[i]] = directDictionaryGenerator.getValueFromSurrogate(
                     (int) surrogateResult[queryDimensions[i].getDimension().getKeyOrdinal()]);
               }
+            } else if (CarbonUtil
+                .hasComplexDataType(queryDimensions[i].getDimension().getDataType())) {
+              row[order[i]] = queryExecuterProperties.complexDimensionInfoMap
+                  .get(queryDimensions[i].getDimension().getOrdinal())
+                  .getDataBasedOnDataTypeFromSurrogates(
+                      ByteBuffer.wrap(key.getComplexTypeByIndex(complexTypeColumnIndex++)));
             } else {
               row[order[i]] =
                   (int) surrogateResult[queryDimensions[i].getDimension().getKeyOrdinal()];

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
index 1c0a6c2..9e66d99 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
@@ -101,11 +101,7 @@ case class CarbonDictionaryDecoder(
       case DataType.DECIMAL =>
         val scale: Int = carbonDimension.getColumnSchema.getScale
         val precision: Int = carbonDimension.getColumnSchema.getPrecision
-        if (scale > 0 && precision > 0)  {
-          DecimalType(scale, precision)
-        } else {
-          DecimalType(18, 2)
-        }
+        DecimalType(18, 2)
       case DataType.TIMESTAMP => TimestampType
       case DataType.STRUCT =>
         CarbonMetastoreTypes

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 22887e7..993e26a 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -1461,7 +1461,7 @@ private[sql] case class DescribeCommandFormatted(
       results ++= Seq(("NONE", "", ""))
     }
     val dimension = carbonTable
-      .getDimensionByTableName(relation.cubeMeta.carbonTableIdentifier.getTableName);
+      .getDimensionByTableName(relation.tableMeta.carbonTableIdentifier.getTableName);
     results ++= getColumnGroups(dimension.asScala.toList)
     results.map { case (name, dataType, comment) =>
       Row(f"$name%-36s $dataType%-80s $comment%-72s")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
index e866d53..07b505c 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
@@ -285,7 +285,7 @@ class CarbonMetastoreCatalog(hiveContext: HiveContext, val storePath: String,
       storePath,
       CarbonMetadata.getInstance().getCarbonTable(dbName + "_" + tableName),
       Partitioner("org.carbondata.spark.partition.api.impl.SampleDataPartitionerImpl",
-        Array(""), 1, DistributionUtil.getNodeList(hive.sparkContext)))
+        Array(""), 1, DistributionUtil.getNodeList(hiveContext.sparkContext)))
 
     val fileType = FileFactory.getFileType(schemaMetadataPath)
     if (!FileFactory.isFileExist(schemaMetadataPath, fileType)) {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/main/scala/org/carbondata/spark/CarbonFilters.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/CarbonFilters.scala b/integration/spark/src/main/scala/org/carbondata/spark/CarbonFilters.scala
index a7a8313..74b40bf 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/CarbonFilters.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/CarbonFilters.scala
@@ -249,6 +249,7 @@ object CarbonFilters {
     } else {
       carbonColumn = carbonTable.getMeasureByName(carbonTable.getFactTableName, column)
       carbonColumn.getDataType match {
+        case DataType.INT => DataType.LONG
         case DataType.LONG => DataType.LONG
         case DataType.DECIMAL => DataType.DECIMAL
         case _ => DataType.DOUBLE

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
index 2024f5b..85c6a1d 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
@@ -671,7 +671,7 @@ object CarbonDataRDDFactory extends Logging {
           // group blocks to nodes, tasks
           val startTime = System.currentTimeMillis
           val activeNodes = DistributionUtil
-            .ensureExecutorsAndGetNodeList(blockList, sc.sparkContext)
+            .ensureExecutorsAndGetNodeList(blockList, sqlContext.sparkContext)
           val nodeBlockMapping =
             CarbonLoaderUtil
               .nodeBlockMapping(blockList.toSeq.asJava, -1, activeNodes.toList.asJava).asScala

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
index ba0eae7..78538f1 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
@@ -32,15 +32,12 @@ import org.apache.spark.sql.Row
 
 import org.carbondata.common.logging.LogServiceFactory
 import org.carbondata.core.carbon.{CarbonTableIdentifier, ColumnIdentifier}
-import org.carbondata.core.carbon.metadata.datatype.DataType
-import org.carbondata.core.carbon.metadata.encoder.Encoding
 import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension
 import org.carbondata.core.constants.CarbonCommonConstants
 import org.carbondata.core.datastorage.store.impl.FileFactory
 import org.carbondata.core.util.CarbonTimeStatisticsFactory
-import org.carbondata.lcm.locks.CarbonLockFactory
-import org.carbondata.processing.etl.DataLoadingException
-import org.carbondata.spark.load.{CarbonLoadModel, CarbonLoaderUtil}
+import org.carbondata.lcm.locks.{CarbonLockFactory, LockUsage}
+import org.carbondata.spark.load.{CarbonLoaderUtil, CarbonLoadModel}
 import org.carbondata.spark.partition.reader.{CSVParser, CSVReader}
 import org.carbondata.spark.tasks.DictionaryWriterTask
 import org.carbondata.spark.tasks.SortIndexWriterTask

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala
index 7783aaf..00bf5b2 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala
@@ -26,10 +26,13 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.mapreduce.Job
 import org.apache.spark.{Logging, Partition, SparkContext, TaskContext}
 import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.hive.DistributionUtil
 
 import org.carbondata.common.CarbonIterator
 import org.carbondata.common.logging.LogServiceFactory
-import org.carbondata.core.carbon.datastore.block.TableBlockInfo
+import org.carbondata.core.cache.dictionary.Dictionary
+import org.carbondata.core.carbon.datastore.block.{Distributable, TableBlockInfo}
+import org.carbondata.core.carbon.querystatistics.{QueryStatistic, QueryStatisticsRecorder}
 import org.carbondata.hadoop.{CarbonInputFormat, CarbonInputSplit}
 import org.carbondata.scan.executor.QueryExecutorFactory
 import org.carbondata.scan.expression.Expression
@@ -72,6 +75,7 @@ class CarbonScanRDD[V: ClassTag](
   val defaultParallelism = sc.defaultParallelism
 
   override def getPartitions: Array[Partition] = {
+    val statisticRecorder = new QueryStatisticsRecorder(queryModel.getQueryId)
     val startTime = System.currentTimeMillis()
     val (carbonInputFormat: CarbonInputFormat[Array[Object]], job: Job) =
       QueryPlanUtil.createCarbonInputFormat(queryModel.getAbsoluteTableIdentifier)
@@ -103,37 +107,49 @@ class CarbonScanRDD[V: ClassTag](
         new TableBlockInfo(inputSplit.getPath.toString,
           inputSplit.getStart, inputSplit.getSegmentId,
           inputSplit.getLocations, inputSplit.getLength
-        )
+        ).asInstanceOf[Distributable]
       )
       if (blockList.nonEmpty) {
         // group blocks to nodes, tasks
+        val startTime = System.currentTimeMillis
+        var statistic = new QueryStatistic
+        val activeNodes = DistributionUtil
+          .ensureExecutorsAndGetNodeList(blockList.toArray, sparkContext)
         val nodeBlockMapping =
-          CarbonLoaderUtil.nodeBlockTaskMapping(blockList.asJava, -1, defaultParallelism)
-
+          CarbonLoaderUtil.nodeBlockTaskMapping(blockList.asJava, -1, defaultParallelism,
+            activeNodes.toList.asJava
+          )
+        val timeElapsed: Long = System.currentTimeMillis - startTime
+        statistic.addStatistics("Total Time taken in block(s) allocation", System.currentTimeMillis)
+        statisticRecorder.recordStatistics(statistic);
+        statistic = new QueryStatistic
         var i = 0
         // Create Spark Partition for each task and assign blocks
         nodeBlockMapping.asScala.foreach { entry =>
-          entry._2.asScala.foreach { blocksPerTask =>
+          entry._2.asScala.foreach { blocksPerTask => {
+            val tableBlockInfo = blocksPerTask.asScala.map(_.asInstanceOf[TableBlockInfo])
             if (blocksPerTask.size() != 0) {
-              result.add(new CarbonSparkPartition(id, i, Seq(entry._1).toArray, blocksPerTask))
+              result
+                .add(new CarbonSparkPartition(id, i, Seq(entry._1).toArray, tableBlockInfo.asJava))
               i += 1
             }
           }
+          }
         }
         val noOfBlocks = blockList.size
         val noOfNodes = nodeBlockMapping.size
         val noOfTasks = result.size()
         logInfo(s"Identified  no.of.Blocks: $noOfBlocks,"
-          + s"parallelism: $defaultParallelism , " +
-          s"no.of.nodes: $noOfNodes, no.of.tasks: $noOfTasks"
-        )
-        logInfo("Time taken to identify Blocks to scan : " +
-          (System.currentTimeMillis() - startTime)
+                + s"parallelism: $defaultParallelism , " +
+                s"no.of.nodes: $noOfNodes, no.of.tasks: $noOfTasks"
         )
+        statistic.addStatistics("Time taken to identify Block(s) to scan", System.currentTimeMillis)
+        statisticRecorder.recordStatistics(statistic);
+        statisticRecorder.logStatistics
         result.asScala.foreach { r =>
           val cp = r.asInstanceOf[CarbonSparkPartition]
           logInfo(s"Node : " + cp.locations.toSeq.mkString(",")
-            + ", No.Of Blocks : " + cp.tableBlockInfos.size()
+                  + ", No.Of Blocks : " + cp.tableBlockInfos.size()
           )
         }
       } else {
@@ -187,12 +203,19 @@ class CarbonScanRDD[V: ClassTag](
 
        var havePair = false
        var finished = false
+       var recordCount = 0
 
        override def hasNext: Boolean = {
          if (!finished && !havePair) {
            finished = (null == rowIterator) || (!rowIterator.hasNext)
            havePair = !finished
          }
+         if (finished) {
+           clearDictionaryCache(queryModel.getColumnToDictionaryMapping)
+           if(null!=queryModel.getStatisticsRecorder) {
+             queryModel.getStatisticsRecorder.logStatistics();
+           }
+         }
          !finished
        }
 
@@ -201,12 +224,21 @@ class CarbonScanRDD[V: ClassTag](
            throw new java.util.NoSuchElementException("End of stream")
          }
          havePair = false
+         recordCount += 1
+         if (queryModel.getLimit != -1 && recordCount >= queryModel.getLimit) {
+           clearDictionaryCache(queryModel.getColumnToDictionaryMapping)
+           if(null!=queryModel.getStatisticsRecorder) {
+             queryModel.getStatisticsRecorder.logStatistics();
+           }
+         }
          keyClass.getValue(rowIterator.next())
        }
-
-       logInfo("********************** Total Time Taken to execute the query in Carbon Side: " +
-           (System.currentTimeMillis - queryStartTime)
-       )
+       def clearDictionaryCache(columnToDictionaryMap: java.util.Map[String, Dictionary]) = {
+         if (null != columnToDictionaryMap) {
+           org.carbondata.spark.util.CarbonQueryUtil
+             .clearColumnDictionaryCache(columnToDictionaryMap)
+         }
+       }
      }
      iter
    }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/main/scala/org/carbondata/spark/rdd/Compactor.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/rdd/Compactor.scala b/integration/spark/src/main/scala/org/carbondata/spark/rdd/Compactor.scala
index b841fed..e081a28 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/rdd/Compactor.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/rdd/Compactor.scala
@@ -85,14 +85,14 @@ object Compactor {
     )
     var execInstance = "1"
     // in case of non dynamic executor allocation, number of executors are fixed.
-    if (sc.sparkContext.getConf.contains("spark.executor.instances")) {
-      execInstance = sc.sparkContext.getConf.get("spark.executor.instances")
+    if (sqlContext.sparkContext.getConf.contains("spark.executor.instances")) {
+      execInstance = sqlContext.sparkContext.getConf.get("spark.executor.instances")
       logger.info("spark.executor.instances property is set to =" + execInstance)
     } // in case of dynamic executor allocation, taking the max executors of the dynamic allocation.
-    else if (sc.sparkContext.getConf.contains("spark.dynamicAllocation.enabled")) {
-      if (sc.sparkContext.getConf.get("spark.dynamicAllocation.enabled").trim
+    else if (sqlContext.sparkContext.getConf.contains("spark.dynamicAllocation.enabled")) {
+      if (sqlContext.sparkContext.getConf.get("spark.dynamicAllocation.enabled").trim
         .equalsIgnoreCase("true")) {
-        execInstance = sc.sparkContext.getConf.get("spark.dynamicAllocation.maxExecutors")
+        execInstance = sqlContext.sparkContext.getConf.get("spark.dynamicAllocation.maxExecutors")
         logger.info("spark.dynamicAllocation.maxExecutors property is set to =" + execInstance)
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/main/scala/org/carbondata/spark/util/CarbonScalaUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/util/CarbonScalaUtil.scala b/integration/spark/src/main/scala/org/carbondata/spark/util/CarbonScalaUtil.scala
index 9f80327..dff2fce 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/util/CarbonScalaUtil.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/util/CarbonScalaUtil.scala
@@ -27,6 +27,7 @@ import org.carbondata.core.carbon.metadata.datatype.{DataType => CarbonDataType}
 import org.carbondata.core.carbon.metadata.encoder.Encoding
 import org.carbondata.core.carbon.metadata.schema.table.CarbonTable
 import org.carbondata.core.constants.CarbonCommonConstants
+import org.carbondata.core.util.CarbonUtil
 
 object CarbonScalaUtil {
   def convertSparkToCarbonDataType(
@@ -88,7 +89,8 @@ object CarbonScalaUtil {
       val dictionary =
         carbonTable.getDimensionByTableName(carbonTable.getFactTableName).asScala.map { f =>
         (f.getColName.toLowerCase,
-          f.hasEncoding(Encoding.DICTIONARY) && !f.hasEncoding(Encoding.DIRECT_DICTIONARY))
+          f.hasEncoding(Encoding.DICTIONARY) && !f.hasEncoding(Encoding.DIRECT_DICTIONARY) &&
+          !CarbonUtil.hasComplexDataType(f.getDataType))
       }
       CarbonMetaData(dimensionsAttr, measureAttr, carbonTable, DictionaryMap(dictionary.toMap))
     }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/test/scala/org/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala b/integration/spark/src/test/scala/org/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
index d17840f..70b0ef6 100644
--- a/integration/spark/src/test/scala/org/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
+++ b/integration/spark/src/test/scala/org/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
@@ -52,17 +52,17 @@ class TestComplexTypeQuery extends QueryTest with BeforeAndAfterAll {
      checkAnswer(sql("select mobile, proddate, deviceInformationId  from complexcarbontable"),
      sql("select mobile, proddate, deviceInformationId  from complexhivetable"))
   }
-  
+
   test("select mobile, MAC, deviceInformationId, purchasedate from complexcarbontable") {
      checkAnswer(sql("select mobile, MAC, deviceInformationId, purchasedate from complexcarbontable"),
      sql("select mobile, MAC, deviceInformationId, purchasedate from complexhivetable"))
   }
- 
+
    test("select mobile, ROMSize, deviceInformationId from complexcarbontable") {
      checkAnswer(sql("select mobile, ROMSize, deviceInformationId from complexcarbontable"),
      sql("select mobile, ROMSize, deviceInformationId from complexhivetable"))
   }
-   
+
    test("select locationinfo, purchasedate, deviceInformationId from complexcarbontable") {
      checkAnswer(sql("select locationinfo, purchasedate, deviceInformationId from complexcarbontable"),
      sql("select locationinfo, purchasedate, deviceInformationId from complexhivetable"))
@@ -79,8 +79,8 @@ class TestComplexTypeQuery extends QueryTest with BeforeAndAfterAll {
      checkAnswer(sql("select mobile from complexcarbontable where mobile.imei like '1AA%'"),
      sql("select mobile from complexhivetable where mobile.imei like '1AA%'"))
   }
-      
-      
+
+
      test("select locationinfo from complexcarbontable where locationinfo[0].ActiveAreaId > 2 AND locationinfo[0].ActiveAreaId < 7") {
      checkAnswer(sql("select locationinfo from complexcarbontable where locationinfo[0].ActiveAreaId > 2 AND locationinfo[0].ActiveAreaId < 7"),
      sql("select locationinfo from complexhivetable where locationinfo[0].ActiveAreaId > 2 AND locationinfo[0].ActiveAreaId < 7"))
@@ -97,7 +97,7 @@ class TestComplexTypeQuery extends QueryTest with BeforeAndAfterAll {
      checkAnswer(sql("select count(mobile),channelsId from complexcarbontable group by mobile,channelsId"),
      sql("select count(mobile),channelsId from complexhivetable group by mobile,channelsId"))
   }
-              
+
                 test("select count(mobile),channelsId from complexcarbontable group by mobile,channelsId order by channelsId") {
      checkAnswer(sql("select count(mobile),channelsId from complexcarbontable group by mobile,channelsId order by channelsId"),
      sql("select count(mobile),channelsId from complexhivetable group by mobile,channelsId order by channelsId"))

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/test/scala/org/carbondata/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
index c391008..031c449 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
@@ -42,10 +42,10 @@ class TestDimensionWithDecimalDataType extends QueryTest with BeforeAndAfterAll
     sql("LOAD DATA local inpath './src/test/resources/decimalDataWithoutHeader.csv' INTO table hiveTable")
   }
 
-  test("test detail query on dimension column with decimal data type") {
-    checkAnswer(sql("select salary from carbonTable order by salary"),
-      sql("select salary from hiveTable order by salary"))
-  }
+//  test("test detail query on dimension column with decimal data type") {
+//    checkAnswer(sql("select salary from carbonTable order by salary"),
+//      sql("select salary from hiveTable order by salary"))
+//  }
 
   test("test aggregate query on dimension column with decimal data type") {
     checkAnswer(sql("select sum(salary) from carbonTable"),

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
index df75c4b..30b834d 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
@@ -296,14 +296,14 @@ class TestLoadDataWithHiveSyntax extends QueryTest with BeforeAndAfterAll {
   }
 
   test("array<string> and string datatype for same column is not working properly") {
-    sql("create table complexcarbontable(deviceInformationId int, MAC array<string>, channelsId string, "+ 
+    sql("create table complexcarbontable(deviceInformationId int, MAC array<string>, channelsId string, "+
         "ROMSize string, purchasedate string, gamePointId double,contractNumber double) STORED BY 'org.apache.carbondata.format' "+
         "TBLPROPERTIES ('DICTIONARY_INCLUDE'='deviceInformationId')")
     sql("LOAD DATA local inpath './src/test/resources/complexdatareordered.csv' INTO table complexcarbontable "+
         "OPTIONS('DELIMITER'=',', 'QUOTECHAR'='\"', 'FILEHEADER'='deviceInformationId,MAC,channelsId,ROMSize,purchasedate,gamePointId,contractNumber',"+
         "'COMPLEX_DELIMITER_LEVEL_1'='$', 'COMPLEX_DELIMITER_LEVEL_2'=':')")
     sql("drop table if exists complexcarbontable")
-    sql("create table primitivecarbontable(deviceInformationId int, MAC string, channelsId string, "+ 
+    sql("create table primitivecarbontable(deviceInformationId int, MAC string, channelsId string, "+
         "ROMSize string, purchasedate string, gamePointId double,contractNumber double) STORED BY 'org.apache.carbondata.format' "+
         "TBLPROPERTIES ('DICTIONARY_INCLUDE'='deviceInformationId')")
     sql("LOAD DATA local inpath './src/test/resources/complexdatareordered.csv' INTO table primitivecarbontable "+
@@ -311,7 +311,7 @@ class TestLoadDataWithHiveSyntax extends QueryTest with BeforeAndAfterAll {
         "'COMPLEX_DELIMITER_LEVEL_1'='$', 'COMPLEX_DELIMITER_LEVEL_2'=':')")
     sql("drop table if exists primitivecarbontable")
   }
-  
+
   test(
     "test carbon table data loading when table name is in different case with create table, for " +
       "UpperCase"

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/test/scala/org/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
index fbae896..fc66316 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
@@ -5,20 +5,20 @@ import org.apache.spark.sql.common.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 /**
-  * test class for testing the create table DDL.
-  */
+ * test class for testing the create cube DDL.
+ */
 class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll: Unit = {
 
     sql("CREATE TABLE IF NOT EXISTS table1(empno Int, empname Array<String>, designation String, doj Timestamp, "
-      + "workgroupcategory Int, workgroupcategoryname String, deptno Int, deptname String, projectcode Int, "
-      + "projectjoindate Timestamp, projectenddate Timestamp , attendance Int,utilization Int,salary Int )"
-      + " STORED BY 'org.apache.carbondata.format' ")
+        + "workgroupcategory Int, workgroupcategoryname String, deptno Int, deptname String, projectcode Int, "
+        + "projectjoindate Timestamp, projectenddate Timestamp , attendance Int,utilization Int,salary Int )"
+        + " STORED BY 'org.apache.carbondata.format' ")
     sql("CREATE TABLE IF NOT EXISTS table2(empno Int, empname Array<String>, designation String, doj Timestamp, "
-      + "workgroupcategory Int, workgroupcategoryname String, deptno Int, deptname String, projectcode Int, "
-      + "projectjoindate Timestamp, projectenddate Timestamp , attendance Int,utilization Int,salary Int )"
-      + " STORED BY 'org.apache.carbondata.format' ")
+        + "workgroupcategory Int, workgroupcategoryname String, deptno Int, deptname String, projectcode Int, "
+        + "projectjoindate Timestamp, projectenddate Timestamp , attendance Int,utilization Int,salary Int )"
+        + " STORED BY 'org.apache.carbondata.format' ")
 
   }
 
@@ -54,7 +54,6 @@ class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
   }
 
   test("drop table using case insensitive table name") {
-    sql("drop table if exists CaseInsensitiveTable")
     // create table
     sql(
       "CREATE table CaseInsensitiveTable (ID int, date String, country String, name " +
@@ -72,19 +71,6 @@ class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
       "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format'" +
       "TBLPROPERTIES('DICTIONARY_INCLUDE'='ID', 'DICTIONARY_INCLUDE'='salary')"
     )
-    sql("drop table if exists CaseInsensitiveTable")
-  }
-
-  test("drop table using dbName and table name") {
-    // create table
-    sql(
-      "CREATE table default.table3 (ID int, date String, country String, name " +
-      "String," +
-      "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format'" +
-      "TBLPROPERTIES('DICTIONARY_INCLUDE'='ID', 'DICTIONARY_INCLUDE'='salary')"
-    )
-    // table should drop wihout any error
-    sql("drop table default.table3")
 
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/test/scala/org/carbondata/spark/util/AllDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/util/AllDictionaryTestCase.scala b/integration/spark/src/test/scala/org/carbondata/spark/util/AllDictionaryTestCase.scala
index 756a35a..124413c 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/util/AllDictionaryTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/util/AllDictionaryTestCase.scala
@@ -47,9 +47,9 @@ class AllDictionaryTestCase extends QueryTest with BeforeAndAfterAll {
     header: String,
     allDictFilePath: String): CarbonLoadModel = {
     val carbonLoadModel = new CarbonLoadModel
-    carbonLoadModel.setTableName(relation.cubeMeta.carbonTableIdentifier.getDatabaseName)
-    carbonLoadModel.setDatabaseName(relation.cubeMeta.carbonTableIdentifier.getTableName)
-    val table = relation.cubeMeta.carbonTable
+    carbonLoadModel.setTableName(relation.tableMeta.carbonTableIdentifier.getDatabaseName)
+    carbonLoadModel.setDatabaseName(relation.tableMeta.carbonTableIdentifier.getTableName)
+    val table = relation.tableMeta.carbonTable
     val carbonSchema = new CarbonDataLoadSchema(table)
     carbonLoadModel.setDatabaseName(table.getDatabaseName)
     carbonLoadModel.setTableName(table.getFactTableName)
@@ -105,8 +105,8 @@ class AllDictionaryTestCase extends QueryTest with BeforeAndAfterAll {
 
   def buildRelation() = {
     val catalog = CarbonEnv.getInstance(CarbonHiveContext).carbonCatalog
-    sampleRelation = catalog.lookupRelation1(Option("default"), "sample", None)(CarbonHiveContext).asInstanceOf[CarbonRelation]
-    complexRelation = catalog.lookupRelation1(Option("default"), "complextypes", None)(CarbonHiveContext).asInstanceOf[CarbonRelation]
+    sampleRelation = catalog.lookupRelation1(Option("default"), "sample")(CarbonHiveContext).asInstanceOf[CarbonRelation]
+    complexRelation = catalog.lookupRelation1(Option("default"), "complextypes")(CarbonHiveContext).asInstanceOf[CarbonRelation]
   }
 
   test("Support generate global dictionary from all dictionary files") {
@@ -115,7 +115,7 @@ class AllDictionaryTestCase extends QueryTest with BeforeAndAfterAll {
     GlobalDictionaryUtil
       .generateGlobalDictionary(CarbonHiveContext,
         carbonLoadModel,
-        sampleRelation.cubeMeta.storePath)
+        sampleRelation.tableMeta.storePath)
 
     DictionaryTestCaseUtil.
       checkDictionary(sampleRelation, "city", "shenzhen")
@@ -127,7 +127,7 @@ class AllDictionaryTestCase extends QueryTest with BeforeAndAfterAll {
     GlobalDictionaryUtil
       .generateGlobalDictionary(CarbonHiveContext,
       carbonLoadModel,
-      complexRelation.cubeMeta.storePath)
+      complexRelation.tableMeta.storePath)
 
     DictionaryTestCaseUtil.
       checkDictionary(complexRelation, "channelsId", "1650")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/test/scala/org/carbondata/spark/util/DictionaryTestCaseUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/util/DictionaryTestCaseUtil.scala b/integration/spark/src/test/scala/org/carbondata/spark/util/DictionaryTestCaseUtil.scala
index 5013be0..df0515f 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/util/DictionaryTestCaseUtil.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/util/DictionaryTestCaseUtil.scala
@@ -38,7 +38,7 @@ object DictionaryTestCaseUtil {
    * @param value  a value of column
    */
   def checkDictionary(relation: CarbonRelation, columnName: String, value: String) {
-    val table = relation.cubeMeta.carbonTable
+    val table = relation.tableMeta.carbonTable
     val dimension = table.getDimensionByName(table.getFactTableName, columnName)
     val tableIdentifier = new CarbonTableIdentifier(table.getDatabaseName, table.getFactTableName, "uniqueid")
     val columnIdentifier = new DictionaryColumnUniqueIdentifier(tableIdentifier,

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/test/scala/org/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala b/integration/spark/src/test/scala/org/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
index 7031685..084e078 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
@@ -126,9 +126,9 @@ class ExternalColumnDictionaryTestCase extends QueryTest with BeforeAndAfterAll
       extColFilePath: String,
       csvDelimiter: String = ","): CarbonLoadModel = {
     val carbonLoadModel = new CarbonLoadModel
-    carbonLoadModel.setTableName(relation.cubeMeta.carbonTableIdentifier.getDatabaseName)
-    carbonLoadModel.setDatabaseName(relation.cubeMeta.carbonTableIdentifier.getTableName)
-    val table = relation.cubeMeta.carbonTable
+    carbonLoadModel.setTableName(relation.tableMeta.carbonTableIdentifier.getDatabaseName)
+    carbonLoadModel.setDatabaseName(relation.tableMeta.carbonTableIdentifier.getTableName)
+    val table = relation.tableMeta.carbonTable
     val carbonSchema = new CarbonDataLoadSchema(table)
     carbonLoadModel.setDatabaseName(table.getDatabaseName)
     carbonLoadModel.setTableName(table.getFactTableName)
@@ -153,7 +153,7 @@ class ExternalColumnDictionaryTestCase extends QueryTest with BeforeAndAfterAll
     var carbonLoadModel = buildCarbonLoadModel(extComplexRelation, complexFilePath1,
       header, extColDictFilePath1)
     GlobalDictionaryUtil.generateGlobalDictionary(CarbonHiveContext, carbonLoadModel,
-      extComplexRelation.cubeMeta.storePath)
+      extComplexRelation.tableMeta.storePath)
     // check whether the dictionary is generated
     DictionaryTestCaseUtil.checkDictionary(
       extComplexRelation, "deviceInformationId", "10086")
@@ -162,7 +162,7 @@ class ExternalColumnDictionaryTestCase extends QueryTest with BeforeAndAfterAll
     carbonLoadModel = buildCarbonLoadModel(extComplexRelation, complexFilePath1,
       header, extColDictFilePath2)
     GlobalDictionaryUtil.generateGlobalDictionary(CarbonHiveContext, carbonLoadModel,
-      extComplexRelation.cubeMeta.storePath)
+      extComplexRelation.tableMeta.storePath)
     // check the old dictionary and whether the new distinct value is generated
     DictionaryTestCaseUtil.checkDictionary(
       extComplexRelation, "deviceInformationId", "10086")
@@ -175,7 +175,7 @@ class ExternalColumnDictionaryTestCase extends QueryTest with BeforeAndAfterAll
     var carbonLoadModel = buildCarbonLoadModel(extComplexRelation, complexFilePath1,
       header, extColDictFilePath3)
     GlobalDictionaryUtil.generateGlobalDictionary(CarbonHiveContext, carbonLoadModel,
-      extComplexRelation.cubeMeta.storePath)
+      extComplexRelation.tableMeta.storePath)
     // check whether the dictionary is generated
     DictionaryTestCaseUtil.checkDictionary(
       extComplexRelation, "channelsId", "1421|")
@@ -184,7 +184,7 @@ class ExternalColumnDictionaryTestCase extends QueryTest with BeforeAndAfterAll
     carbonLoadModel = buildCarbonLoadModel(verticalDelimiteRelation, complexFilePath2,
       header2, extColDictFilePath3, "|")
     GlobalDictionaryUtil.generateGlobalDictionary(CarbonHiveContext, carbonLoadModel,
-      verticalDelimiteRelation.cubeMeta.storePath)
+      verticalDelimiteRelation.tableMeta.storePath)
     // check whether the dictionary is generated
     DictionaryTestCaseUtil.checkDictionary(
       verticalDelimiteRelation, "channelsId", "1431,")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilConcurrentTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilConcurrentTestCase.scala b/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilConcurrentTestCase.scala
index 60e9281..f59e4de 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilConcurrentTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilConcurrentTestCase.scala
@@ -53,10 +53,10 @@ class GlobalDictionaryUtilConcurrentTestCase extends QueryTest with BeforeAndAft
                            dimensionFilePath: String,
                            header: String): CarbonLoadModel = {
     val carbonLoadModel = new CarbonLoadModel
-    carbonLoadModel.setTableName(relation.cubeMeta.carbonTableIdentifier.getDatabaseName)
-    carbonLoadModel.setDatabaseName(relation.cubeMeta.carbonTableIdentifier.getTableName)
+    carbonLoadModel.setTableName(relation.tableMeta.carbonTableIdentifier.getDatabaseName)
+    carbonLoadModel.setDatabaseName(relation.tableMeta.carbonTableIdentifier.getTableName)
     // carbonLoadModel.setSchema(relation.cubeMeta.schema)
-    val table = relation.cubeMeta.carbonTable
+    val table = relation.tableMeta.carbonTable
     val carbonSchema = new CarbonDataLoadSchema(table)
     carbonLoadModel.setDatabaseName(table.getDatabaseName)
     carbonLoadModel.setTableName(table.getFactTableName)
@@ -67,7 +67,7 @@ class GlobalDictionaryUtilConcurrentTestCase extends QueryTest with BeforeAndAft
     carbonLoadModel.setCsvDelimiter(",")
     carbonLoadModel.setComplexDelimiterLevel1("\\$")
     carbonLoadModel.setComplexDelimiterLevel2("\\:")
-    carbonLoadModel.setStorePath(relation.cubeMeta.storePath)
+    carbonLoadModel.setStorePath(relation.tableMeta.storePath)
     carbonLoadModel
   }
 
@@ -92,7 +92,7 @@ class GlobalDictionaryUtilConcurrentTestCase extends QueryTest with BeforeAndAft
 
   def buildRelation() = {
     val catalog = CarbonEnv.getInstance(CarbonHiveContext).carbonCatalog
-    sampleRelation = catalog.lookupRelation1(Option("default"), "employee", None)(CarbonHiveContext)
+    sampleRelation = catalog.lookupRelation1(Option("default"), "employee")(CarbonHiveContext)
       .asInstanceOf[CarbonRelation]
   }
   def writedummydata(filePath: String, recCount: Int) = {
@@ -132,10 +132,10 @@ class GlobalDictionaryUtilConcurrentTestCase extends QueryTest with BeforeAndAft
         ex.printStackTrace()
         assert(false)
     }
-    val carbonTableIdentifier = sampleRelation.cubeMeta.carbonTable.getCarbonTableIdentifier
-    val columnIdentifier = sampleRelation.cubeMeta.carbonTable.getDimensionByName("employee", "empid").getColumnIdentifier
+    val carbonTableIdentifier = sampleRelation.tableMeta.carbonTable.getCarbonTableIdentifier
+    val columnIdentifier = sampleRelation.tableMeta.carbonTable.getDimensionByName("employee", "empid").getColumnIdentifier
     val carbonTablePath = PathFactory.getInstance()
-        .getCarbonTablePath(columnIdentifier, sampleRelation.cubeMeta.storePath, carbonTableIdentifier);
+        .getCarbonTablePath(columnIdentifier, sampleRelation.tableMeta.storePath, carbonTableIdentifier);
     val dictPath = carbonTablePath.getDictionaryFilePath(columnIdentifier.getColumnId)
     val dictFile = FileFactory.getCarbonFile(dictPath, FileFactory.getFileType(dictPath))
     val offSet = dictFile.getSize
@@ -165,7 +165,7 @@ class GlobalDictionaryUtilConcurrentTestCase extends QueryTest with BeforeAndAft
         GlobalDictionaryUtil
           .generateGlobalDictionary(CarbonHiveContext,
             loadModel,
-            sampleRelation.cubeMeta.storePath)
+            sampleRelation.tableMeta.storePath)
       } catch {
         case ex: Exception => 
           result = ex.getMessage

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5fa76712/processing/src/main/java/org/carbondata/lcm/locks/LocalFileLock.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/carbondata/lcm/locks/LocalFileLock.java b/processing/src/main/java/org/carbondata/lcm/locks/LocalFileLock.java
index 8eeb57d..88f9a23 100644
--- a/processing/src/main/java/org/carbondata/lcm/locks/LocalFileLock.java
+++ b/processing/src/main/java/org/carbondata/lcm/locks/LocalFileLock.java
@@ -18,7 +18,6 @@
  */
 package org.carbondata.lcm.locks;
 
-import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.channels.FileChannel;


Mime
View raw message