carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From gvram...@apache.org
Subject [07/14] incubator-carbondata git commit: support compaction for restructure
Date Thu, 16 Mar 2017 09:28:50 GMT
support compaction for restructure


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/fc1af963
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/fc1af963
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/fc1af963

Branch: refs/heads/master
Commit: fc1af96307fbd2344007740441951638e676f1f4
Parents: df00163
Author: manishgupta88 <tomanishgupta18@gmail.com>
Authored: Fri Mar 10 17:19:45 2017 +0530
Committer: Venkata Ramana G <ramana.gollamudi@huawei.com>
Committed: Thu Mar 16 14:50:43 2017 +0530

----------------------------------------------------------------------
 .../core/metadata/blocklet/DataFileFooter.java  |  13 +++
 .../impl/AbstractScannedResultCollector.java    |   7 +-
 .../executor/impl/AbstractQueryExecutor.java    |  31 ++----
 .../core/scan/executor/util/QueryUtil.java      | 102 +++----------------
 .../scan/executor/util/RestructureUtil.java     |  15 ++-
 .../carbondata/core/scan/filter/FilterUtil.java |  30 ++++++
 .../executer/RowLevelFilterExecuterImpl.java    |  10 +-
 .../core/util/CarbonMetadataUtil.java           |   4 +-
 .../apache/carbondata/core/util/CarbonUtil.java |  53 ----------
 .../core/util/DataFileFooterConverterV3.java    |   1 +
 .../scan/executor/util/RestructureUtilTest.java |  27 ++++-
 format/src/main/thrift/carbondata.thrift        |   3 +-
 .../spark/merger/CarbonCompactionExecutor.java  |  17 ++--
 .../spark/merger/CarbonCompactionUtil.java      |   9 ++
 .../spark/merger/RowResultMerger.java           |   5 +-
 .../execution/command/carbonTableSchema.scala   |  10 +-
 .../apache/spark/sql/hive/CarbonMetastore.scala |   2 +-
 .../newflow/CarbonDataLoadConfiguration.java    |  13 +++
 .../newflow/DataLoadProcessBuilder.java         |   1 +
 .../store/CarbonFactDataHandlerColumnar.java    |   4 +
 .../store/CarbonFactDataHandlerModel.java       |  14 +++
 .../store/writer/CarbonDataWriterVo.java        |  16 +++
 .../writer/v3/CarbonFactDataWriterImplV3.java   |   4 +-
 23 files changed, 194 insertions(+), 197 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/DataFileFooter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/DataFileFooter.java b/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/DataFileFooter.java
index b30dc27..1f45716 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/DataFileFooter.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/DataFileFooter.java
@@ -70,6 +70,11 @@ public class DataFileFooter implements Serializable {
   private BlockInfo blockInfo;
 
   /**
+   * schema updated time stamp to be used for restructure scenarios
+   */
+  private long schemaUpdatedTimeStamp;
+
+  /**
    * @return the versionId
    */
   public ColumnarFormatVersion getVersionId() {
@@ -166,4 +171,12 @@ public class DataFileFooter implements Serializable {
   public void setBlockInfo(BlockInfo tableBlockInfo) {
     this.blockInfo = tableBlockInfo;
   }
+
+  public long getSchemaUpdatedTimeStamp() {
+    return schemaUpdatedTimeStamp;
+  }
+
+  public void setSchemaUpdatedTimeStamp(long schemaUpdatedTimeStamp) {
+    this.schemaUpdatedTimeStamp = schemaUpdatedTimeStamp;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
index b36f9fd..3c487ae 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
@@ -70,14 +70,15 @@ public abstract class AbstractScannedResultCollector implements ScannedResultCol
   protected void fillMeasureData(Object[] msrValues, int offset,
       AbstractScannedResult scannedResult) {
     int measureExistIndex = 0;
-    for (short i = 0; i < tableBlockExecutionInfos.getActualQueryMeasures().length; i++) {
+    for (short i = 0; i < measureInfo.getMeasureDataTypes().length; i++) {
      // if measure exists in block then pass measure column
       // data chunk to the collector
       if (measureInfo.getMeasureExists()[i]) {
-        QueryMeasure queryMeasure = tableBlockExecutionInfos.getActualQueryMeasures()[i];
+        QueryMeasure queryMeasure = tableBlockExecutionInfos.getQueryMeasures()[measureExistIndex];
         msrValues[i + offset] = getMeasureData(
-            scannedResult.getMeasureChunk(measureInfo.getMeasureOrdinals()[measureExistIndex++]),
+            scannedResult.getMeasureChunk(measureInfo.getMeasureOrdinals()[measureExistIndex]),
             scannedResult.getCurrenrRowId(), queryMeasure.getMeasure());
+        measureExistIndex++;
       } else {
         // if not then get the default value and use that value in aggregation
         Object defaultValue = measureInfo.getDefaultValues()[i];

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
index 7e666a1..0bd0bf6 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
@@ -23,7 +23,6 @@ import java.util.Collections;
 import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
@@ -291,18 +290,12 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
     int numberOfElementToConsider = 0;
     // list of dimensions to be projected
     Set<Integer> allProjectionListDimensionIdexes = new LinkedHashSet<>();
-    // maintain a mapping of actual query column position in carbon data file with
-    // position of column in current block. It will used in case of restructure
-    Map<Integer, Integer> queryDimensionToCurrentBlockDimensionOrdinalMapping = QueryUtil
-        .getQueryDimensionToCurrentBlockDimensionOrdinalMapping(updatedQueryDimension,
-            segmentProperties.getDimensions());
     // create a list of filter dimensions present in the current block
     Set<CarbonDimension> updatedFilterDimensions = QueryUtil
         .getUpdatedFilterDimensions(queryProperties.complexFilterDimension,
             segmentProperties.getDimensions());
     int[] dimensionsBlockIndexes = QueryUtil.getDimensionsBlockIndexes(updatedQueryDimension,
-        segmentProperties.getDimensionOrdinalToBlockMapping(),
-        queryDimensionToCurrentBlockDimensionOrdinalMapping, expressionDimensions,
+        segmentProperties.getDimensionOrdinalToBlockMapping(), expressionDimensions,
         updatedFilterDimensions, allProjectionListDimensionIdexes);
     int numberOfColumnToBeReadInOneIO = Integer.parseInt(CarbonProperties.getInstance()
         .getProperty(CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO,
@@ -319,11 +312,6 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
     } else {
       blockExecutionInfo.setAllSelectedDimensionBlocksIndexes(new int[0][0]);
     }
-    // maintain a mapping of actual query column position in carbon data file with
-    // position of column in current block. It will used in case of restructure
-    Map<Integer, Integer> queryMeasuresToCurrentBlockMeasuresOrdinalMapping = QueryUtil
-        .getQueryMeasuresToCurrentBlockMeasuresOrdinalMapping(updatedQueryMeasures,
-            segmentProperties.getMeasures());
     // get the list of updated filter measures present in the current block
     Set<CarbonMeasure> updatedFilterMeasures = QueryUtil
         .getUpdatedFilterMeasures(queryProperties.filterMeasures, segmentProperties.getMeasures());
@@ -331,8 +319,7 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
     List<Integer> allProjectionListMeasureIndexes = new ArrayList<>();
     int[] measureBlockIndexes = QueryUtil
         .getMeasureBlockIndexes(updatedQueryMeasures, expressionMeasures,
-            segmentProperties.getMeasuresOrdinalToBlockMapping(),
-            queryMeasuresToCurrentBlockMeasuresOrdinalMapping, updatedFilterMeasures,
+            segmentProperties.getMeasuresOrdinalToBlockMapping(), updatedFilterMeasures,
             allProjectionListMeasureIndexes);
     if (measureBlockIndexes.length > 0) {
 
@@ -359,16 +346,13 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
     // to update the older block key with new key generator
     // blockExecutionInfo.setKeyStructureInfo(queryProperties.keyStructureInfo);
     // setting the size of fixed key column (dictionary column)
-    blockExecutionInfo.setFixedLengthKeySize(
-        getKeySize(updatedQueryDimension, queryDimensionToCurrentBlockDimensionOrdinalMapping,
-            segmentProperties));
+    blockExecutionInfo.setFixedLengthKeySize(getKeySize(updatedQueryDimension, segmentProperties));
     Set<Integer> dictionaryColumnBlockIndex = new HashSet<Integer>();
     List<Integer> noDictionaryColumnBlockIndex = new ArrayList<Integer>();
     // get the block index to be read from file for query dimension
     // for both dictionary columns and no dictionary columns
     QueryUtil.fillQueryDimensionsBlockIndexes(updatedQueryDimension,
-        segmentProperties.getDimensionOrdinalToBlockMapping(),
-        queryDimensionToCurrentBlockDimensionOrdinalMapping, dictionaryColumnBlockIndex,
+        segmentProperties.getDimensionOrdinalToBlockMapping(), dictionaryColumnBlockIndex,
         noDictionaryColumnBlockIndex);
     int[] queryDictionaryColumnBlockIndexes = ArrayUtils.toPrimitive(
         dictionaryColumnBlockIndex.toArray(new Integer[dictionaryColumnBlockIndex.size()]));
@@ -413,10 +397,9 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
    * @return key size
    */
   private int getKeySize(List<QueryDimension> queryDimension,
-      Map<Integer, Integer> queryDimensionToCurrentBlockDimensionOrdinalMapping,
       SegmentProperties blockMetadataInfo) {
-    List<Integer> fixedLengthDimensionOrdinal =
-        new ArrayList<Integer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
+    Set<Integer> fixedLengthDimensionOrdinal =
+        new HashSet<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
     int counter = 0;
     while (counter < queryDimension.size()) {
       if (queryDimension.get(counter).getDimension().numberOfChild() > 0) {
@@ -426,7 +409,7 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
           Encoding.DICTIONARY)) {
         counter++;
       } else {
-        fixedLengthDimensionOrdinal.add(queryDimensionToCurrentBlockDimensionOrdinalMapping
+        fixedLengthDimensionOrdinal.add(blockMetadataInfo.getDimensionOrdinalToBlockMapping()
             .get(queryDimension.get(counter).getDimension().getOrdinal()));
         counter++;
       }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
index 2f47b6a..afb6553 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
@@ -208,7 +208,6 @@ public class QueryUtil {
    */
   public static int[] getDimensionsBlockIndexes(List<QueryDimension> queryDimensions,
       Map<Integer, Integer> dimensionOrdinalToBlockMapping,
-      Map<Integer, Integer> queryDimensionToCurrentBlockDimensionOrdinalMapping,
       List<CarbonDimension> customAggregationDimension, Set<CarbonDimension> filterDimensions,
       Set<Integer> allProjectionListDimensionIndexes) {
     // using set as in row group columns will point to same block
@@ -220,17 +219,15 @@ public class QueryUtil {
         continue;
       }
 
-      Integer dimensionOrdinal = queryDimensionToCurrentBlockDimensionOrdinalMapping
-          .get(queryDimensions.get(i).getDimension().getOrdinal());
-      allProjectionListDimensionIndexes.add(dimensionOrdinal);
+      Integer dimensionOrdinal = queryDimensions.get(i).getDimension().getOrdinal();
+      allProjectionListDimensionIndexes.add(dimensionOrdinalToBlockMapping.get(dimensionOrdinal));
       if (queryDimensions.get(i).getDimension().numberOfChild() > 0) {
         addChildrenBlockIndex(allProjectionListDimensionIndexes,
             queryDimensions.get(i).getDimension());
       }
 
       if (!filterDimensionOrdinal.contains(dimensionOrdinal)) {
-        blockIndex =
-            dimensionOrdinalToBlockMapping.get(dimensionOrdinal);
+        blockIndex = dimensionOrdinalToBlockMapping.get(dimensionOrdinal);
         dimensionBlockIndex.add(blockIndex);
         if (queryDimensions.get(i).getDimension().numberOfChild() > 0) {
           addChildrenBlockIndex(dimensionBlockIndex, queryDimensions.get(i).getDimension());
@@ -251,30 +248,6 @@ public class QueryUtil {
   }
 
   /**
-   * This method will maintain a position mapping of query dimensions to current block dimensions
-   *
-   * @param queryDimensions
-   * @param currentBlockDimensions
-   * @return
-   */
-  public static Map<Integer, Integer> getQueryDimensionToCurrentBlockDimensionOrdinalMapping(
-      List<QueryDimension> queryDimensions, List<CarbonDimension> currentBlockDimensions) {
-    Map<Integer, Integer> queryToCurrentBlockDimensionOrdinals =
-        new HashMap<>(queryDimensions.size());
-    for (QueryDimension queryDimension : queryDimensions) {
-      if (queryDimension.getDimension().hasEncoding(Encoding.IMPLICIT)) {
-        continue;
-      }
-      int ordinalOfDimensionFromCurrentBlock =
-          getOrdinalOfDimensionFromCurrentBlock(currentBlockDimensions,
-              queryDimension.getDimension());
-      queryToCurrentBlockDimensionOrdinals
-          .put(queryDimension.getDimension().getOrdinal(), ordinalOfDimensionFromCurrentBlock);
-    }
-    return queryToCurrentBlockDimensionOrdinals;
-  }
-
-  /**
    * This method will return the key ordinal of the query dimension from the current block
    *
    * @param blockDimensions
@@ -294,48 +267,6 @@ public class QueryUtil {
   }
 
   /**
-   * This method will return the ordinal of the query dimension from the current block
-   *
-   * @param blockDimensions
-   * @param queryDimension
-   * @return
-   */
-  public static int getOrdinalOfDimensionFromCurrentBlock(List<CarbonDimension> blockDimensions,
-      CarbonDimension queryDimension) {
-    int keyOrdinalInCurrentDimensionBlock = -1;
-    for (CarbonDimension blockDimension : blockDimensions) {
-      if (queryDimension.getColumnId().equals(blockDimension.getColumnId())) {
-        keyOrdinalInCurrentDimensionBlock = blockDimension.getOrdinal();
-        break;
-      }
-    }
-    return keyOrdinalInCurrentDimensionBlock;
-  }
-
-  /**
-   * This method will maintain a position mapping of query measures to current block measures
-   *
-   * @param queryMeasures
-   * @param currentBlockMeasures
-   * @return
-   */
-  public static Map<Integer, Integer> getQueryMeasuresToCurrentBlockMeasuresOrdinalMapping(
-      List<QueryMeasure> queryMeasures, List<CarbonMeasure> currentBlockMeasures) {
-    Map<Integer, Integer> queryToCurrentBlockDimensionOrdinals =
-        new HashMap<>(queryMeasures.size());
-    for (QueryMeasure queryMeasure : queryMeasures) {
-      for (CarbonMeasure currentBlockMeasure : currentBlockMeasures) {
-        if (queryMeasure.getMeasure().getColumnId().equals(currentBlockMeasure.getColumnId())) {
-          queryToCurrentBlockDimensionOrdinals
-              .put(queryMeasure.getMeasure().getOrdinal(), currentBlockMeasure.getOrdinal());
-          break;
-        }
-      }
-    }
-    return queryToCurrentBlockDimensionOrdinals;
-  }
-
-  /**
    * Below method will be used to add the children block index
    * this will be basically for complex dimension which will have children
    *
@@ -487,21 +418,18 @@ public class QueryUtil {
    */
   public static int[] getMeasureBlockIndexes(List<QueryMeasure> queryMeasures,
       List<CarbonMeasure> expressionMeasure, Map<Integer, Integer> ordinalToBlockIndexMapping,
-      Map<Integer, Integer> queryMeasuresToCurrentBlockMeasuresOrdinalMapping,
       Set<CarbonMeasure> filterMeasures, List<Integer> allProjectionListMeasureIdexes) {
     Set<Integer> measureBlockIndex = new HashSet<Integer>();
     Set<Integer> filterMeasureOrdinal = getFilterMeasureOrdinal(filterMeasures);
     for (int i = 0; i < queryMeasures.size(); i++) {
-      Integer measureOrdinal = queryMeasuresToCurrentBlockMeasuresOrdinalMapping
-          .get(queryMeasures.get(i).getMeasure().getOrdinal());
-      allProjectionListMeasureIdexes.add(queryMeasures.get(i).getMeasure().getOrdinal());
+      Integer measureOrdinal = queryMeasures.get(i).getMeasure().getOrdinal();
+      allProjectionListMeasureIdexes.add(measureOrdinal);
       if (!filterMeasureOrdinal.contains(measureOrdinal)) {
         measureBlockIndex.add(ordinalToBlockIndexMapping.get(measureOrdinal));
       }
     }
     for (int i = 0; i < expressionMeasure.size(); i++) {
-      measureBlockIndex.add(queryMeasuresToCurrentBlockMeasuresOrdinalMapping
-          .get(ordinalToBlockIndexMapping.get(expressionMeasure.get(i).getOrdinal())));
+      measureBlockIndex.add(ordinalToBlockIndexMapping.get(expressionMeasure.get(i).getOrdinal()));
     }
     int[] measureIndexes =
         ArrayUtils.toPrimitive(measureBlockIndex.toArray(new Integer[measureBlockIndex.size()]));
@@ -746,20 +674,17 @@ public class QueryUtil {
    */
   public static void fillQueryDimensionsBlockIndexes(List<QueryDimension> queryDimensions,
       Map<Integer, Integer> columnOrdinalToBlockIndexMapping,
-      Map<Integer, Integer> queryDimensionToCurrentBlockDimensionOrdinalMapping,
       Set<Integer> dictionaryDimensionBlockIndex, List<Integer> noDictionaryDimensionBlockIndex) {
     for (QueryDimension queryDimension : queryDimensions) {
       if (CarbonUtil.hasEncoding(queryDimension.getDimension().getEncoder(), Encoding.DICTIONARY)
           && queryDimension.getDimension().numberOfChild() == 0) {
-        dictionaryDimensionBlockIndex.add(columnOrdinalToBlockIndexMapping.get(
-            queryDimensionToCurrentBlockDimensionOrdinalMapping
-                .get(queryDimension.getDimension().getOrdinal())));
+        dictionaryDimensionBlockIndex
+            .add(columnOrdinalToBlockIndexMapping.get(queryDimension.getDimension().getOrdinal()));
       } else if (
           !CarbonUtil.hasEncoding(queryDimension.getDimension().getEncoder(), Encoding.IMPLICIT)
               && queryDimension.getDimension().numberOfChild() == 0) {
-        noDictionaryDimensionBlockIndex.add(columnOrdinalToBlockIndexMapping.get(
-            queryDimensionToCurrentBlockDimensionOrdinalMapping
-                .get(queryDimension.getDimension().getOrdinal())));
+        noDictionaryDimensionBlockIndex
+            .add(columnOrdinalToBlockIndexMapping.get(queryDimension.getDimension().getOrdinal()));
       }
     }
   }
@@ -897,11 +822,12 @@ public class QueryUtil {
     if (null != filterDimensions) {
       for (CarbonDimension filterDimension : filterDimensions) {
        // do not fill any details for implicit dimension type
-        if (filterDimension.hasEncoding(Encoding.IMPLICIT)) {
+        if (filterDimension.hasEncoding(Encoding.IMPLICIT)
+            || filterDimension.getNumberOfChild() == 0) {
           continue;
         }
-        // fillParentDetails(dimensionToBlockIndexMap, filterDimension, complexTypeMap,
-        //     eachComplexColumnValueSize, columnIdToDictionaryMap);
+        fillParentDetails(dimensionToBlockIndexMap, filterDimension, complexTypeMap,
+            eachComplexColumnValueSize, columnIdToDictionaryMap);
       }
     }
     return complexTypeMap;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
index 847a795..59fd215 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
@@ -74,7 +74,10 @@ public class RestructureUtil {
       } else {
         for (CarbonDimension tableDimension : tableBlockDimensions) {
           if (tableDimension.getColumnId().equals(queryDimension.getDimension().getColumnId())) {
-            presentDimension.add(queryDimension);
+            QueryDimension currentBlockDimension = new QueryDimension(tableDimension.getColName());
+            currentBlockDimension.setDimension(tableDimension);
+            currentBlockDimension.setQueryOrder(queryDimension.getQueryOrder());
+            presentDimension.add(currentBlockDimension);
             isDimensionExists[dimIndex] = true;
             break;
           }
@@ -91,7 +94,10 @@ public class RestructureUtil {
     for (QueryDimension queryDimension : queryDimensions) {
       for (CarbonDimension tableDimension : tableComplexDimension) {
         if (tableDimension.getColumnId().equals(queryDimension.getDimension().getColumnId())) {
-          presentDimension.add(queryDimension);
+          QueryDimension currentBlockDimension = new QueryDimension(tableDimension.getColName());
+          currentBlockDimension.setDimension(tableDimension);
+          currentBlockDimension.setQueryOrder(queryDimension.getQueryOrder());
+          presentDimension.add(currentBlockDimension);
           isDimensionExists[dimIndex] = true;
           break;
         }
@@ -302,7 +308,10 @@ public class RestructureUtil {
       // otherwise adding a default value of a measure
       for (CarbonMeasure carbonMeasure : currentBlockMeasures) {
         if (carbonMeasure.getColumnId().equals(queryMeasure.getMeasure().getColumnId())) {
-          presentMeasure.add(queryMeasure);
+          QueryMeasure currentBlockMeasure = new QueryMeasure(carbonMeasure.getColName());
+          currentBlockMeasure.setMeasure(carbonMeasure);
+          currentBlockMeasure.setQueryOrder(queryMeasure.getQueryOrder());
+          presentMeasure.add(currentBlockMeasure);
           measureOrdinalList.add(carbonMeasure.getOrdinal());
           measureExistsInCurrentBlock[index] = true;
           break;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
index deb07c7..627a413 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
@@ -24,6 +24,7 @@ import java.nio.charset.Charset;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.BitSet;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Date;
@@ -45,6 +46,7 @@ import org.apache.carbondata.core.cache.dictionary.DictionaryChunksWrapper;
 import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
 import org.apache.carbondata.core.cache.dictionary.ForwardDictionary;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.constants.CarbonV3DataFormatConstants;
 import org.apache.carbondata.core.datastore.IndexKey;
 import org.apache.carbondata.core.datastore.block.SegmentProperties;
 import org.apache.carbondata.core.keygenerator.KeyGenException;
@@ -80,6 +82,7 @@ import org.apache.carbondata.core.scan.filter.intf.RowIntf;
 import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
 import org.apache.carbondata.core.scan.filter.resolver.RowLevelFilterResolverImpl;
 import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
+import org.apache.carbondata.core.util.BitSetGroup;
 import org.apache.carbondata.core.util.ByteUtil;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
@@ -1459,4 +1462,31 @@ public final class FilterUtil {
 
   }
 
+  /**
+   * This method will create default bitset group. Applicable for restructure scenarios.
+   *
+   * @param pageCount
+   * @param totalRowCount
+   * @param defaultValue
+   * @return
+   */
+  public static BitSetGroup createBitSetGroupWithDefaultValue(int pageCount, int totalRowCount,
+      boolean defaultValue) {
+    BitSetGroup bitSetGroup = new BitSetGroup(pageCount);
+    int numberOfRows = Integer
+        .parseInt(CarbonV3DataFormatConstants.NUMBER_OF_ROWS_PER_BLOCKLET_COLUMN_PAGE_DEFAULT);
+    int pagesTobeFullFilled = totalRowCount / numberOfRows;
+    int rowCountForLastPage = totalRowCount % numberOfRows;
+    for (int i = 0; i < pagesTobeFullFilled; i++) {
+      BitSet bitSet = new BitSet(numberOfRows);
+      bitSet.set(0, numberOfRows, defaultValue);
+      bitSetGroup.setBitSet(bitSet, i);
+    }
+    // create and fill bitset for the last page
+    BitSet bitSet = new BitSet(rowCountForLastPage);
+    bitSet.set(0, rowCountForLastPage, defaultValue);
+    bitSetGroup.setBitSet(bitSet, pageCount);
+    return bitSetGroup;
+  }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
index b48382e..4f28ef3 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
@@ -494,10 +494,12 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
     }
 
     if (null != msrColEvalutorInfoList) {
-      if (null == blockChunkHolder.getMeasureRawDataChunk()[measureBlocksIndex[0]]) {
-        blockChunkHolder.getMeasureRawDataChunk()[measureBlocksIndex[0]] =
-            blockChunkHolder.getDataBlock()
-                .getMeasureChunk(blockChunkHolder.getFileReader(), measureBlocksIndex[0]);
+      for (MeasureColumnResolvedFilterInfo msrColumnEvalutorInfo : msrColEvalutorInfoList) {
+        if (null == blockChunkHolder.getMeasureRawDataChunk()[measureBlocksIndex[0]]) {
+          blockChunkHolder.getMeasureRawDataChunk()[measureBlocksIndex[0]] =
+              blockChunkHolder.getDataBlock()
+                  .getMeasureChunk(blockChunkHolder.getFileReader(), measureBlocksIndex[0]);
+        }
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
index abffea4..03f43cd 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
@@ -890,15 +890,17 @@ public class CarbonMetadataUtil {
    *
    * @param isFooterPresent  is footer present in carbon data file
    * @param columnSchemaList list of column schema
+   * @param schemaUpdatedTimeStamp  schema updated time stamp to be used for restructure scenarios
    * @return file header thrift object
    */
   public static FileHeader getFileHeader(boolean isFooterPresent,
-      List<ColumnSchema> columnSchemaList) {
+      List<ColumnSchema> columnSchemaList, long schemaUpdatedTimeStamp) {
     FileHeader fileHeader = new FileHeader();
     ColumnarFormatVersion version = CarbonProperties.getInstance().getFormatVersion();
     fileHeader.setIs_footer_present(isFooterPresent);
     fileHeader.setColumn_schema(columnSchemaList);
     fileHeader.setVersion(version.number());
+    fileHeader.setTime_stamp(schemaUpdatedTimeStamp);
     return fileHeader;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 33d9aab..4e2d995 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -1729,59 +1729,6 @@ public final class CarbonUtil {
   }
 
   /**
-   * This method will prepare the cardinality of dictionary columns based on the latest schema
-   *
-   * @param lookUpDimensions         dimensions list where a given dimension will be searched to get
-   *                                 the index for getting the cardinality of that column
-   * @param masterSchemaDimensions   latest schema dimensions
-   * @param mappingColumnCardinality cardinality of columns in the given carbondata file
-   * @return
-   */
-  public static int[] getUpdatedColumnCardinalities(List<ColumnSchema> lookUpDimensions,
-      List<CarbonDimension> masterSchemaDimensions, int[] mappingColumnCardinality) {
-    List<Integer> updatedDictionaryColumnCardinalities =
-        new ArrayList<>(masterSchemaDimensions.size());
-    for (CarbonDimension masterDimension : masterSchemaDimensions) {
-      // dimension should be visible and should be a dictionary column
-      if (!masterDimension.isInvisible() && hasEncoding(masterDimension.getEncoder(),
-          Encoding.DICTIONARY)) {
-        int destinationDimensionIndex = 0;
-        boolean isDimensionFoundInDestinationSegment = false;
-        for (ColumnSchema destinationDimension : lookUpDimensions) {
-          if (masterDimension.getColumnId().equals(destinationDimension.getColumnUniqueId())) {
-            isDimensionFoundInDestinationSegment = true;
-            break;
-          }
-          destinationDimensionIndex++;
-        }
-        if (!isDimensionFoundInDestinationSegment) {
-          if (hasEncoding(masterDimension.getEncoder(), Encoding.DIRECT_DICTIONARY)) {
-            updatedDictionaryColumnCardinalities.add(Integer.MAX_VALUE);
-          } else {
-            if (null != masterDimension.getDefaultValue()) {
-              // added +1 because if default value is provided then the cardinality of
-              // column will be 2. 1 for member default value and 1 for the value
-              // provided by the user
-              updatedDictionaryColumnCardinalities
-                  .add(CarbonCommonConstants.DICTIONARY_DEFAULT_CARDINALITY + 1);
-            } else {
-              updatedDictionaryColumnCardinalities
-                  .add(CarbonCommonConstants.DICTIONARY_DEFAULT_CARDINALITY);
-            }
-          }
-        } else {
-          // add the cardinality of the existing column in the schema
-          updatedDictionaryColumnCardinalities
-              .add(mappingColumnCardinality[destinationDimensionIndex]);
-        }
-      }
-    }
-    int[] updatedCardinalities = ArrayUtils.toPrimitive(updatedDictionaryColumnCardinalities
-        .toArray(new Integer[updatedDictionaryColumnCardinalities.size()]));
-    return updatedCardinalities;
-  }
-
-  /**
    * Below method will be used to convert byte data to surrogate key based
    * column value size
    *

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
index 3fcf427..143c1b1 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
@@ -57,6 +57,7 @@ public class DataFileFooterConverterV3 extends AbstractDataFileFooterConverter {
     dataFileFooter.setVersionId(ColumnarFormatVersion.valueOf((short) fileHeader.getVersion()));
     dataFileFooter.setNumberOfRows(footer.getNum_rows());
     dataFileFooter.setSegmentInfo(getSegmentInfo(footer.getSegment_info()));
+    dataFileFooter.setSchemaUpdatedTimeStamp(fileHeader.getTime_stamp());
     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = fileHeader.getColumn_schema();
     for (int i = 0; i < table_columns.size(); i++) {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/core/src/test/java/org/apache/carbondata/core/scan/executor/util/RestructureUtilTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/executor/util/RestructureUtilTest.java b/core/src/test/java/org/apache/carbondata/core/scan/executor/util/RestructureUtilTest.java
index 634a301..09df23e 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/executor/util/RestructureUtilTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/executor/util/RestructureUtilTest.java
@@ -16,6 +16,7 @@
  */
 package org.apache.carbondata.core.scan.executor.util;
 
+import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
@@ -35,6 +36,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.UUID;
 
 public class RestructureUtilTest {
 
@@ -44,18 +46,23 @@ public class RestructureUtilTest {
     encodingList.add(Encoding.DICTIONARY);
     ColumnSchema columnSchema1 = new ColumnSchema();
     columnSchema1.setColumnName("Id");
+    columnSchema1.setColumnUniqueId(UUID.randomUUID().toString());
     columnSchema1.setEncodingList(encodingList);
     ColumnSchema columnSchema2 = new ColumnSchema();
     columnSchema2.setColumnName("Name");
+    columnSchema2.setColumnUniqueId(UUID.randomUUID().toString());
     columnSchema2.setEncodingList(encodingList);
     ColumnSchema columnSchema3 = new ColumnSchema();
     columnSchema3.setColumnName("Age");
+    columnSchema3.setColumnUniqueId(UUID.randomUUID().toString());
     columnSchema3.setEncodingList(encodingList);
     ColumnSchema columnSchema4 = new ColumnSchema();
     columnSchema4.setColumnName("Salary");
+    columnSchema4.setColumnUniqueId(UUID.randomUUID().toString());
     columnSchema4.setEncodingList(encodingList);
     ColumnSchema columnSchema5 = new ColumnSchema();
     columnSchema5.setColumnName("Address");
+    columnSchema5.setColumnUniqueId(UUID.randomUUID().toString());
     columnSchema5.setEncodingList(encodingList);
 
     CarbonDimension tableBlockDimension1 = new CarbonDimension(columnSchema1, 1, 1, 1, 1);
@@ -80,19 +87,29 @@ public class RestructureUtilTest {
 
     List<QueryDimension> result = null;
     result = RestructureUtil
-        .createDimensionInfoAndGetUpdatedQueryDimension(blockExecutionInfo, queryDimensions, tableBlockDimensions,
-            tableComplexDimensions);
-
-    assertThat(result, is(equalTo(Arrays.asList(queryDimension1, queryDimension2))));
+        .createDimensionInfoAndGetUpdatedQueryDimension(blockExecutionInfo, queryDimensions,
+            tableBlockDimensions, tableComplexDimensions);
+    List<CarbonDimension> resultDimension = new ArrayList<>(result.size());
+    for (QueryDimension queryDimension : result) {
+      resultDimension.add(queryDimension.getDimension());
+    }
+    assertThat(resultDimension,
+        is(equalTo(Arrays.asList(queryDimension1.getDimension(), queryDimension2.getDimension()))));
   }
 
   @Test public void testToGetAggregatorInfos() {
     ColumnSchema columnSchema1 = new ColumnSchema();
     columnSchema1.setColumnName("Id");
+    columnSchema1.setDataType(DataType.STRING);
+    columnSchema1.setColumnUniqueId(UUID.randomUUID().toString());
     ColumnSchema columnSchema2 = new ColumnSchema();
     columnSchema2.setColumnName("Name");
+    columnSchema2.setDataType(DataType.STRING);
+    columnSchema2.setColumnUniqueId(UUID.randomUUID().toString());
     ColumnSchema columnSchema3 = new ColumnSchema();
     columnSchema3.setColumnName("Age");
+    columnSchema3.setDataType(DataType.STRING);
+    columnSchema3.setColumnUniqueId(UUID.randomUUID().toString());
 
     CarbonMeasure carbonMeasure1 = new CarbonMeasure(columnSchema1, 1);
     CarbonMeasure carbonMeasure2 = new CarbonMeasure(columnSchema2, 2);
@@ -113,7 +130,7 @@ public class RestructureUtilTest {
     MeasureInfo measureInfo = blockExecutionInfo.getMeasureInfo();
     boolean[] measuresExist = { true, true, false };
     assertThat(measureInfo.getMeasureExists(), is(equalTo(measuresExist)));
-    Object[] defaultValues = { null, null, "3".getBytes() };
+    Object[] defaultValues = { null, null, "3" };
     assertThat(measureInfo.getDefaultValues(), is(equalTo(defaultValues)));
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/format/src/main/thrift/carbondata.thrift
----------------------------------------------------------------------
diff --git a/format/src/main/thrift/carbondata.thrift b/format/src/main/thrift/carbondata.thrift
index 198afde..21ff29d 100644
--- a/format/src/main/thrift/carbondata.thrift
+++ b/format/src/main/thrift/carbondata.thrift
@@ -199,7 +199,8 @@ struct FileFooter3{
 struct FileHeader{
 	1: required i32 version; // version used for data compatibility
 	2: required list<schema.ColumnSchema> column_schema;  // Description of columns in this file
-	3: optional bool is_footer_present; //  to check whether footer is present or not      
+	3: optional bool is_footer_present; //  to check whether footer is present or not
+	4: optional i64 time_stamp; // timestamp to compare column schema against master schema
 }
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/CarbonCompactionExecutor.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/CarbonCompactionExecutor.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/CarbonCompactionExecutor.java
index 4458457..f61f1c9 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/CarbonCompactionExecutor.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/CarbonCompactionExecutor.java
@@ -19,6 +19,7 @@ package org.apache.carbondata.spark.merger;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -93,13 +94,17 @@ public class CarbonCompactionExecutor {
       String segmentId = taskMap.getKey();
       List<DataFileFooter> listMetadata = dataFileMetadataSegMapping.get(segmentId);
 
-      List<ColumnSchema> updatedColumnSchemaList = CarbonUtil
-          .getColumnSchemaList(carbonTable.getDimensionByTableName(carbonTable.getFactTableName()),
-              carbonTable.getMeasureByTableName(carbonTable.getFactTableName()));
-      int[] updatedColumnCardinalities = CarbonUtil
-          .getUpdatedColumnCardinalities(listMetadata.get(0).getColumnInTable(),
-              carbonTable.getDimensionByTableName(carbonTable.getFactTableName()),
+      // update cardinality of source segment according to new schema
+      Map<String, Integer> columnToCardinalityMap =
+          new HashMap<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
+      CarbonCompactionUtil
+          .addColumnCardinalityToMap(columnToCardinalityMap, listMetadata.get(0).getColumnInTable(),
               listMetadata.get(0).getSegmentInfo().getColumnCardinality());
+      List<ColumnSchema> updatedColumnSchemaList =
+          new ArrayList<>(listMetadata.get(0).getColumnInTable().size());
+      int[] updatedColumnCardinalities = CarbonCompactionUtil
+          .updateColumnSchemaAndGetCardinality(columnToCardinalityMap, carbonTable,
+              updatedColumnSchemaList);
       SegmentProperties sourceSegProperties =
           new SegmentProperties(updatedColumnSchemaList, updatedColumnCardinalities);
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/CarbonCompactionUtil.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/CarbonCompactionUtil.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/CarbonCompactionUtil.java
index f63778d..5f5b149 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/CarbonCompactionUtil.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/CarbonCompactionUtil.java
@@ -33,6 +33,7 @@ import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
@@ -320,6 +321,14 @@ public class CarbonCompactionUtil {
         updatedColumnSchemaList.add(dimension.getColumnSchema());
       }
     }
+    // add measures to the column schema list
+    List<CarbonMeasure> masterSchemaMeasures =
+        carbonTable.getMeasureByTableName(carbonTable.getFactTableName());
+    for (CarbonMeasure measure : masterSchemaMeasures) {
+      if (!measure.isInvisible()) {
+        updatedColumnSchemaList.add(measure.getColumnSchema());
+      }
+    }
     int[] updatedCardinality = ArrayUtils
         .toPrimitive(updatedCardinalityList.toArray(new Integer[updatedCardinalityList.size()]));
     return updatedCardinality;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/RowResultMerger.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/RowResultMerger.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/RowResultMerger.java
index 91a5c03..dd41c59 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/RowResultMerger.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/RowResultMerger.java
@@ -131,7 +131,6 @@ public class RowResultMerger {
     } else {
       carbonFactDataHandlerModel.setMdKeyIndex(measureCount);
     }
-    carbonFactDataHandlerModel.setColCardinality(segProp.getDimColumnsCardinality());
     carbonFactDataHandlerModel.setBlockSizeInMB(carbonTable.getBlockSizeInMB());
     dataHandler = new CarbonFactDataHandlerColumnar(carbonFactDataHandlerModel);
 
@@ -261,6 +260,10 @@ public class RowResultMerger {
         .getColumnSchemaList(carbonTable.getDimensionByTableName(tableName),
             carbonTable.getMeasureByTableName(tableName));
     carbonFactDataHandlerModel.setWrapperColumnSchema(wrapperColumnSchema);
+    // get the cardinality for all all the columns including no dictionary columns
+    int[] formattedCardinality =
+        CarbonUtil.getFormattedCardinality(segprop.getDimColumnsCardinality(), wrapperColumnSchema);
+    carbonFactDataHandlerModel.setColCardinality(formattedCardinality);
     //TO-DO Need to handle complex types here .
     Map<Integer, GenericDataType> complexIndexMap =
         new HashMap<Integer, GenericDataType>(segprop.getComplexDimensions().size());

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 4642559..7f27c75 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.execution.command
 
 import java.io.File
-import java.util.concurrent.{Callable, ExecutorService, Executors, Future}
+import java.util.concurrent.{Callable, Executors, ExecutorService, Future}
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ListBuffer
@@ -41,14 +41,14 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.dictionary.server.DictionaryServer
 import org.apache.carbondata.core.locks.{CarbonLockFactory, LockUsage}
+import org.apache.carbondata.core.metadata.{CarbonMetadata, CarbonTableIdentifier}
 import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl
 import org.apache.carbondata.core.metadata.encoder.Encoding
-import org.apache.carbondata.core.metadata.schema.table.column.{CarbonColumn, CarbonDimension}
 import org.apache.carbondata.core.metadata.schema.table.{CarbonTable, TableInfo}
-import org.apache.carbondata.core.metadata.{CarbonMetadata, CarbonTableIdentifier}
+import org.apache.carbondata.core.metadata.schema.table.column.{CarbonColumn, CarbonDimension}
 import org.apache.carbondata.core.mutate.{CarbonUpdateUtil, TupleIdEnum}
-import org.apache.carbondata.core.util.path.CarbonStorePath
 import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
+import org.apache.carbondata.core.util.path.CarbonStorePath
 import org.apache.carbondata.format.SchemaEvolutionEntry
 import org.apache.carbondata.processing.constants.TableOptionConstant
 import org.apache.carbondata.processing.etl.DataLoadingException
@@ -1234,4 +1234,4 @@ private[sql] case class DescribeCommandFormatted(
     }
     results
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetastore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetastore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetastore.scala
index 61f6ff6..145a2b5 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetastore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetastore.scala
@@ -399,7 +399,7 @@ class CarbonMetastore(conf: RuntimeConfig, val storePath: String) {
    * @param dbName
    * @param tableName
    */
-  def removeTableFromMetadata(dbName: String, tableName: String) = {
+  def removeTableFromMetadata(dbName: String, tableName: String): Unit = {
     val metadataToBeRemoved: Option[TableMeta] = getTableFromMetadata(dbName, tableName)
     metadataToBeRemoved match {
       case Some(tableMeta) =>

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java
index cd58a00..926f4f6 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java
@@ -68,6 +68,11 @@ public class CarbonDataLoadConfiguration {
 
   private int measureCount;
 
+  /**
+   * schema updated time stamp to be used for restructure scenarios
+   */
+  private long schemaUpdatedTimeStamp;
+
   public CarbonDataLoadConfiguration() {
   }
 
@@ -232,4 +237,12 @@ public class CarbonDataLoadConfiguration {
   public DataField[] getMeasureFields() {
     return measureFields;
   }
+
+  public long getSchemaUpdatedTimeStamp() {
+    return schemaUpdatedTimeStamp;
+  }
+
+  public void setSchemaUpdatedTimeStamp(long schemaUpdatedTimeStamp) {
+    this.schemaUpdatedTimeStamp = schemaUpdatedTimeStamp;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java
index 5e01abe..6f6637a 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java
@@ -114,6 +114,7 @@ public final class DataLoadProcessBuilder {
     CarbonTable carbonTable = loadModel.getCarbonDataLoadSchema().getCarbonTable();
     AbsoluteTableIdentifier identifier = carbonTable.getAbsoluteTableIdentifier();
     configuration.setTableIdentifier(identifier);
+    configuration.setSchemaUpdatedTimeStamp(carbonTable.getTableLastUpdatedTime());
     configuration.setHeader(loadModel.getCsvHeaderColumns());
     configuration.setPartitionId(loadModel.getPartitionId());
     configuration.setSegmentId(loadModel.getSegmentId());

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
index abebf59..4192943 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
@@ -259,6 +259,8 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
 
   private int bucketNumber;
 
+  private long schemaUpdatedTimeStamp;
+
   /**
    * current data format version
    */
@@ -354,6 +356,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
     this.completeDimLens = carbonFactDataHandlerModel.getDimLens();
     this.dimLens = this.segmentProperties.getDimColumnsCardinality();
     this.carbonDataFileAttributes = carbonFactDataHandlerModel.getCarbonDataFileAttributes();
+    this.schemaUpdatedTimeStamp = carbonFactDataHandlerModel.getSchemaUpdatedTimeStamp();
     //TODO need to pass carbon table identifier to metadata
     CarbonTable carbonTable = CarbonMetadata.getInstance()
         .getCarbonTable(databaseName + CarbonCommonConstants.UNDERSCORE + tableName);
@@ -1420,6 +1423,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
     carbonDataWriterVo.setSegmentProperties(segmentProperties);
     carbonDataWriterVo.setTableBlocksize(tableBlockSize);
     carbonDataWriterVo.setBucketNumber(bucketNumber);
+    carbonDataWriterVo.setSchemaUpdatedTimeStamp(schemaUpdatedTimeStamp);
     return carbonDataWriterVo;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
index 594f0e2..92b0007 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
@@ -179,6 +179,11 @@ public class CarbonFactDataHandlerModel {
   private int bucketId = 0;
 
   /**
+   * schema updated time stamp to be used for restructure scenarios
+   */
+  private long schemaUpdatedTimeStamp;
+
+  /**
    * Create the model using @{@link CarbonDataLoadConfiguration}
    * @param configuration
    * @return CarbonFactDataHandlerModel
@@ -253,6 +258,7 @@ public class CarbonFactDataHandlerModel {
     String carbonDataDirectoryPath = getCarbonDataFolderLocation(configuration);
 
     CarbonFactDataHandlerModel carbonFactDataHandlerModel = new CarbonFactDataHandlerModel();
+    carbonFactDataHandlerModel.setSchemaUpdatedTimeStamp(configuration.getSchemaUpdatedTimeStamp());
     carbonFactDataHandlerModel.setDatabaseName(
         identifier.getDatabaseName());
     carbonFactDataHandlerModel
@@ -499,5 +505,13 @@ public class CarbonFactDataHandlerModel {
   public int getBucketId() {
     return bucketId;
   }
+
+  public long getSchemaUpdatedTimeStamp() {
+    return schemaUpdatedTimeStamp;
+  }
+
+  public void setSchemaUpdatedTimeStamp(long schemaUpdatedTimeStamp) {
+    this.schemaUpdatedTimeStamp = schemaUpdatedTimeStamp;
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonDataWriterVo.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonDataWriterVo.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonDataWriterVo.java
index f093035..7ba794a 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonDataWriterVo.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonDataWriterVo.java
@@ -66,6 +66,8 @@ public class CarbonDataWriterVo {
 
   private int bucketNumber;
 
+  private long schemaUpdatedTimeStamp;
+
   /**
    * @return the storeLocation
    */
@@ -304,4 +306,18 @@ public class CarbonDataWriterVo {
   public void setBucketNumber(int bucketNumber) {
     this.bucketNumber = bucketNumber;
   }
+
+  /**
+   * @return
+   */
+  public long getSchemaUpdatedTimeStamp() {
+    return schemaUpdatedTimeStamp;
+  }
+
+  /**
+   * @param schemaUpdatedTimeStamp
+   */
+  public void setSchemaUpdatedTimeStamp(long schemaUpdatedTimeStamp) {
+    this.schemaUpdatedTimeStamp = schemaUpdatedTimeStamp;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/fc1af963/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
index df75366..1bc6c9d 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
@@ -375,8 +375,8 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter<short[]>
     try {
       if (fileChannel.size() == 0) {
         // below code is to write the file header
-        byte[] fileHeader =
-            CarbonUtil.getByteArray(CarbonMetadataUtil.getFileHeader(true, thriftColumnSchemaList));
+        byte[] fileHeader = CarbonUtil.getByteArray(CarbonMetadataUtil
+            .getFileHeader(true, thriftColumnSchemaList, dataWriterVo.getSchemaUpdatedTimeStamp()));
         ByteBuffer buffer = ByteBuffer.allocate(fileHeader.length);
         buffer.put(fileHeader);
         buffer.flip();


Mime
View raw message