carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From chenliang...@apache.org
Subject [48/52] [partial] incubator-carbondata git commit: move core package
Date Mon, 16 Jan 2017 14:53:25 GMT
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
deleted file mode 100644
index 7d3f5cf..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.impl;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.store.DimensionChunkStoreFactory;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.DimensionChunkStoreFactory.DimensionStoreType;
-import org.apache.carbondata.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.scan.result.vector.ColumnVectorInfo;
-
-/**
- * This class is gives access to column group dimension data chunk store
- */
-public class ColumnGroupDimensionDataChunk extends AbstractDimensionDataChunk {
-
-  /**
-   * Constructor for this class
-   *
-   * @param dataChunk       data chunk
-   * @param chunkAttributes chunk attributes
-   */
-  public ColumnGroupDimensionDataChunk(byte[] dataChunk, int columnValueSize, int numberOfRows) {
-    this.dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(columnValueSize, false, numberOfRows, dataChunk.length,
-        DimensionStoreType.FIXEDLENGTH);
-    this.dataChunkStore.putArray(null, null, dataChunk);
-  }
-
-  /**
-   * Below method will be used to fill the data based on offset and row id
-   *
-   * @param data              data to filed
-   * @param offset            offset from which data need to be filed
-   * @param rowId             row id of the chunk
-   * @param restructuringInfo define the structure of the key
-   * @return how many bytes was copied
-   */
-  @Override public int fillChunkData(byte[] data, int offset, int rowId,
-      KeyStructureInfo restructuringInfo) {
-    byte[] row = dataChunkStore.getRow(rowId);
-    byte[] maskedKey = getMaskedKey(row, restructuringInfo);
-    System.arraycopy(maskedKey, 0, data, offset, maskedKey.length);
-    return maskedKey.length;
-  }
-
-  /**
-   * Converts to column dictionary integer value
-   *
-   * @param rowId
-   * @param columnIndex
-   * @param row
-   * @param restructuringInfo @return
-   */
-  @Override public int fillConvertedChunkData(int rowId, int columnIndex, int[] row,
-      KeyStructureInfo info) {
-    byte[] data = dataChunkStore.getRow(rowId);
-    long[] keyArray = info.getKeyGenerator().getKeyArray(data);
-    int[] ordinal = info.getMdkeyQueryDimensionOrdinal();
-    for (int i = 0; i < ordinal.length; i++) {
-      row[columnIndex++] = (int) keyArray[ordinal[i]];
-    }
-    return columnIndex;
-  }
-
-  /**
-   * Below method will be used to get the masked key
-   *
-   * @param data   data
-   * @param offset offset of
-   * @param info
-   * @return
-   */
-  private byte[] getMaskedKey(byte[] data, KeyStructureInfo info) {
-    byte[] maskedKey = new byte[info.getMaskByteRanges().length];
-    int counter = 0;
-    int byteRange = 0;
-    for (int i = 0; i < info.getMaskByteRanges().length; i++) {
-      byteRange = info.getMaskByteRanges()[i];
-      maskedKey[counter++] = (byte) (data[byteRange] & info.getMaxKey()[byteRange]);
-    }
-    return maskedKey;
-  }
-
-  /**
-   * @return inverted index
-   */
-  @Override public int getInvertedIndex(int index) {
-    throw new UnsupportedOperationException("Operation not supported in case of cloumn group");
-  }
-
-  /**
-   * @return whether columns where explictly sorted or not
-   */
-  @Override public boolean isExplicitSorted() {
-    return false;
-  }
-
-  /**
-   * to compare the data
-   *
-   * @param index        row index to be compared
-   * @param compareValue value to compare
-   * @return compare result
-   */
-  @Override public int compareTo(int index, byte[] compareValue) {
-    throw new UnsupportedOperationException("Operation not supported in case of cloumn group");
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param vectorInfo
-   * @param column
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillConvertedChunkData(ColumnVectorInfo[] vectorInfo, int column,
-      KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[column];
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = offset + columnVectorInfo.size;
-    int[] ordinal = restructuringInfo.getMdkeyQueryDimensionOrdinal();
-    for (int k = offset; k < len; k++) {
-      long[] keyArray = restructuringInfo.getKeyGenerator().getKeyArray(dataChunkStore.getRow(k));
-      int index = 0;
-      for (int i = column; i < column + ordinal.length; i++) {
-        if (vectorInfo[i].directDictionaryGenerator == null) {
-          vectorInfo[i].vector.putInt(vectorOffset, (int) keyArray[ordinal[index++]]);
-        } else {
-          vectorInfo[i].vector.putLong(vectorOffset, (long) vectorInfo[i].directDictionaryGenerator
-              .getValueFromSurrogate((int) keyArray[ordinal[index++]]));
-        }
-      }
-      vectorOffset++;
-    }
-    return column + ordinal.length;
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param rowMapping
-   * @param vectorInfo
-   * @param column
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillConvertedChunkData(int[] rowMapping, ColumnVectorInfo[] vectorInfo,
-      int column, KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[column];
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = offset + columnVectorInfo.size;
-    int[] ordinal = restructuringInfo.getMdkeyQueryDimensionOrdinal();
-    for (int k = offset; k < len; k++) {
-      long[] keyArray = restructuringInfo.getKeyGenerator().getKeyArray(dataChunkStore.getRow(k));
-      int index = 0;
-      for (int i = column; i < column + ordinal.length; i++) {
-        if (vectorInfo[i].directDictionaryGenerator == null) {
-          vectorInfo[i].vector.putInt(vectorOffset, (int) keyArray[ordinal[index++]]);
-        } else {
-          vectorInfo[i].vector.putLong(vectorOffset, (long) vectorInfo[i].directDictionaryGenerator
-              .getValueFromSurrogate((int) keyArray[ordinal[index++]]));
-        }
-      }
-      vectorOffset++;
-    }
-    return column + ordinal.length;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/FixedLengthDimensionDataChunk.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
deleted file mode 100644
index 328bb53..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.impl;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.store.DimensionChunkStoreFactory;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.DimensionChunkStoreFactory.DimensionStoreType;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.scan.result.vector.CarbonColumnVector;
-import org.apache.carbondata.scan.result.vector.ColumnVectorInfo;
-
-/**
- * This class is gives access to fixed length dimension data chunk store
- */
-public class FixedLengthDimensionDataChunk extends AbstractDimensionDataChunk {
-
-  /**
-   * Constructor
-   *
-   * @param dataChunk            data chunk
-   * @param invertedIndex        inverted index
-   * @param invertedIndexReverse reverse inverted index
-   * @param numberOfRows         number of rows
-   * @param columnValueSize      size of each column value
-   */
-  public FixedLengthDimensionDataChunk(byte[] dataChunk, int[] invertedIndex,
-      int[] invertedIndexReverse, int numberOfRows, int columnValueSize) {
-    long totalSize = null != invertedIndex ?
-        dataChunk.length + (2 * numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE) :
-        dataChunk.length;
-    dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(columnValueSize, null != invertedIndex, numberOfRows, totalSize,
-            DimensionStoreType.FIXEDLENGTH);
-    dataChunkStore.putArray(invertedIndex, invertedIndexReverse, dataChunk);
-  }
-
-  /**
-   * Below method will be used to fill the data based on offset and row id
-   *
-   * @param data             data to filed
-   * @param offset           offset from which data need to be filed
-   * @param index            row id of the chunk
-   * @param keyStructureInfo define the structure of the key
-   * @return how many bytes was copied
-   */
-  @Override public int fillChunkData(byte[] data, int offset, int index,
-      KeyStructureInfo keyStructureInfo) {
-    dataChunkStore.fillRow(index, data, offset);
-    return dataChunkStore.getColumnValueSize();
-  }
-
-  /**
-   * Converts to column dictionary integer value
-   *
-   * @param rowId
-   * @param columnIndex
-   * @param row
-   * @param restructuringInfo
-   * @return
-   */
-  @Override public int fillConvertedChunkData(int rowId, int columnIndex, int[] row,
-      KeyStructureInfo restructuringInfo) {
-    row[columnIndex] = dataChunkStore.getSurrogate(rowId);
-    return columnIndex + 1;
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param vectorInfo
-   * @param column
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillConvertedChunkData(ColumnVectorInfo[] vectorInfo, int column,
-      KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[column];
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = columnVectorInfo.size + offset;
-    CarbonColumnVector vector = columnVectorInfo.vector;
-    for (int j = offset; j < len; j++) {
-      int dict = dataChunkStore.getSurrogate(j);
-      if (columnVectorInfo.directDictionaryGenerator == null) {
-        vector.putInt(vectorOffset++, dict);
-      } else {
-        Object valueFromSurrogate =
-            columnVectorInfo.directDictionaryGenerator.getValueFromSurrogate(dict);
-        if (valueFromSurrogate == null) {
-          vector.putNull(vectorOffset++);
-        } else {
-          switch (columnVectorInfo.directDictionaryGenerator.getReturnType()) {
-            case INT:
-              vector.putInt(vectorOffset++, (int) valueFromSurrogate);
-              break;
-            case LONG:
-              vector.putLong(vectorOffset++, (long) valueFromSurrogate);
-              break;
-          }
-        }
-      }
-    }
-    return column + 1;
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param rowMapping
-   * @param vectorInfo
-   * @param column
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillConvertedChunkData(int[] rowMapping, ColumnVectorInfo[] vectorInfo,
-      int column, KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[column];
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = columnVectorInfo.size + offset;
-    CarbonColumnVector vector = columnVectorInfo.vector;
-    for (int j = offset; j < len; j++) {
-      int dict = dataChunkStore.getSurrogate(rowMapping[j]);
-      if (columnVectorInfo.directDictionaryGenerator == null) {
-        vector.putInt(vectorOffset++, dict);
-      } else {
-        Object valueFromSurrogate =
-            columnVectorInfo.directDictionaryGenerator.getValueFromSurrogate(dict);
-        if (valueFromSurrogate == null) {
-          vector.putNull(vectorOffset++);
-        } else {
-          switch (columnVectorInfo.directDictionaryGenerator.getReturnType()) {
-            case INT:
-              vector.putInt(vectorOffset++, (int) valueFromSurrogate);
-              break;
-            case LONG:
-              vector.putLong(vectorOffset++, (long) valueFromSurrogate);
-              break;
-          }
-        }
-      }
-    }
-    return column + 1;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/VariableLengthDimensionDataChunk.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
deleted file mode 100644
index e93a86c..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.impl;
-
-import java.util.Arrays;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.store.DimensionChunkStoreFactory;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.DimensionChunkStoreFactory.DimensionStoreType;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.scan.result.vector.CarbonColumnVector;
-import org.apache.carbondata.scan.result.vector.ColumnVectorInfo;
-
-/**
- * This class is gives access to variable length dimension data chunk store
- */
-public class VariableLengthDimensionDataChunk extends AbstractDimensionDataChunk {
-
-  /**
-   * Constructor for this class
-   *
-   * @param dataChunkStore  data chunk
-   * @param chunkAttributes chunk attributes
-   */
-  public VariableLengthDimensionDataChunk(byte[] dataChunks, int[] invertedIndex,
-      int[] invertedIndexReverse, int numberOfRows) {
-    long totalSize = null != invertedIndex ?
-        (dataChunks.length + (2 * numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE) + (
-            numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE)) :
-        (dataChunks.length + (numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE));
-    dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(0, null != invertedIndex, numberOfRows, totalSize,
-            DimensionStoreType.VARIABLELENGTH);
-    dataChunkStore.putArray(invertedIndex, invertedIndexReverse, dataChunks);
-  }
-
-  /**
-   * Below method will be used to fill the data based on offset and row id
-   *
-   * @param data              data to filed
-   * @param offset            offset from which data need to be filed
-   * @param index             row id of the chunk
-   * @param restructuringInfo define the structure of the key
-   * @return how many bytes was copied
-   */
-  @Override public int fillChunkData(byte[] data, int offset, int index,
-      KeyStructureInfo restructuringInfo) {
-    // no required in this case because this column chunk is not the part if
-    // mdkey
-    return 0;
-  }
-
-  /**
-   * Converts to column dictionary integer value
-   *
-   * @param rowId
-   * @param columnIndex
-   * @param row
-   * @param restructuringInfo
-   * @return
-   */
-  @Override public int fillConvertedChunkData(int rowId, int columnIndex, int[] row,
-      KeyStructureInfo restructuringInfo) {
-    return columnIndex + 1;
-  }
-
-  /**
-   * @return whether column is dictionary column or not
-   */
-  @Override public boolean isNoDicitionaryColumn() {
-    return true;
-  }
-
-  /**
-   * @return length of each column
-   */
-  @Override public int getColumnValueSize() {
-    return -1;
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param vectorInfo
-   * @param column
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillConvertedChunkData(ColumnVectorInfo[] vectorInfo, int column,
-      KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[column];
-    CarbonColumnVector vector = columnVectorInfo.vector;
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = offset + columnVectorInfo.size;
-    for (int i = offset; i < len; i++) {
-      byte[] value = dataChunkStore.getRow(i);
-      // Considering only String case now as we support only
-      // string in no dictionary case at present.
-      if (value == null || Arrays.equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, value)) {
-        vector.putNull(vectorOffset++);
-      } else {
-        vector.putBytes(vectorOffset++, value);
-      }
-    }
-    return column + 1;
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param rowMapping
-   * @param vectorInfo
-   * @param column
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillConvertedChunkData(int[] rowMapping, ColumnVectorInfo[] vectorInfo,
-      int column, KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[column];
-    CarbonColumnVector vector = columnVectorInfo.vector;
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = offset + columnVectorInfo.size;
-    for (int i = offset; i < len; i++) {
-      byte[] value = dataChunkStore.getRow(rowMapping[i]);
-      // Considering only String case now as we support only
-      // string in no dictionary case at present.
-      if (value == null || Arrays.equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, value)) {
-        vector.putNull(vectorOffset++);
-      } else {
-        vector.putBytes(vectorOffset++, value);
-      }
-    }
-    return column + 1;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/CarbonDataReaderFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/CarbonDataReaderFactory.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/CarbonDataReaderFactory.java
deleted file mode 100644
index 13fe3d2..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/CarbonDataReaderFactory.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.reader;
-
-import org.apache.carbondata.core.carbon.ColumnarFormatVersion;
-import org.apache.carbondata.core.carbon.datastore.chunk.reader.dimension.v1.CompressedDimensionChunkFileBasedReaderV1;
-import org.apache.carbondata.core.carbon.datastore.chunk.reader.dimension.v2.CompressedDimensionChunkFileBasedReaderV2;
-import org.apache.carbondata.core.carbon.datastore.chunk.reader.measure.v1.CompressedMeasureChunkFileBasedReaderV1;
-import org.apache.carbondata.core.carbon.datastore.chunk.reader.measure.v2.CompressedMeasureChunkFileBasedReaderV2;
-import org.apache.carbondata.core.carbon.metadata.blocklet.BlockletInfo;
-
-/**
- * Factory class to get the data reader instance based on version
- */
-public class CarbonDataReaderFactory {
-
-  /**
-   * static instance
-   */
-  private static final CarbonDataReaderFactory CARBON_DATA_READER_FACTORY =
-      new CarbonDataReaderFactory();
-
-  /**
-   * private constructor
-   */
-  private CarbonDataReaderFactory() {
-
-  }
-
-  /**
-   * To get the instance of the reader factor
-   *
-   * @return reader factory
-   */
-  public static CarbonDataReaderFactory getInstance() {
-    return CARBON_DATA_READER_FACTORY;
-  }
-
-  /**
-   * Below method will be used to get the dimension column chunk reader based on version number
-   *
-   * @param version             reader version
-   * @param blockletInfo        blocklet info
-   * @param eachColumnValueSize size of each dimension column
-   * @param filePath            carbon data file path
-   * @return dimension column data reader based on version number
-   */
-  public DimensionColumnChunkReader getDimensionColumnChunkReader(ColumnarFormatVersion version,
-      BlockletInfo blockletInfo, int[] eachColumnValueSize, String filePath) {
-    switch (version) {
-      case V1:
-        return new CompressedDimensionChunkFileBasedReaderV1(blockletInfo, eachColumnValueSize,
-            filePath);
-      default:
-        return new CompressedDimensionChunkFileBasedReaderV2(blockletInfo, eachColumnValueSize,
-            filePath);
-    }
-  }
-
-  /**
-   * Below method will be used to get the measure column chunk reader based version number
-   *
-   * @param version      reader version
-   * @param blockletInfo blocklet info
-   * @param filePath     carbon data file path
-   * @return measure column data reader based on version number
-   */
-  public MeasureColumnChunkReader getMeasureColumnChunkReader(ColumnarFormatVersion version,
-      BlockletInfo blockletInfo, String filePath) {
-    switch (version) {
-      case V1:
-        return new CompressedMeasureChunkFileBasedReaderV1(blockletInfo, filePath);
-      default:
-        return new CompressedMeasureChunkFileBasedReaderV2(blockletInfo, filePath);
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/DimensionColumnChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/DimensionColumnChunkReader.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/DimensionColumnChunkReader.java
deleted file mode 100644
index 4758da1..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/DimensionColumnChunkReader.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.reader;
-
-import java.io.IOException;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-
-/**
- * Interface for reading the data chunk
- * Its concrete implementation can be used to read the chunk.
- * compressed or uncompressed chunk
- */
-public interface DimensionColumnChunkReader {
-
-  /**
-   * Below method will be used to read the chunk based on block indexes
-   *
-   * @param fileReader   file reader to read the blocks from file
-   * @param blockIndexes blocks to be read
-   * @return dimension column chunks
-   */
-  DimensionColumnDataChunk[] readDimensionChunks(FileHolder fileReader, int[][] blockIndexes)
-      throws IOException;
-
-  /**
-   * Below method will be used to read the chunk based on block index
-   *
-   * @param fileReader file reader to read the blocks from file
-   * @param blockIndex block to be read
-   * @return dimension column chunk
-   */
-  DimensionColumnDataChunk readDimensionChunk(FileHolder fileReader, int blockIndex)
-      throws IOException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/MeasureColumnChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/MeasureColumnChunkReader.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/MeasureColumnChunkReader.java
deleted file mode 100644
index 9421ebf..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/MeasureColumnChunkReader.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.reader;
-
-import java.io.IOException;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-
-/**
- * Reader interface for reading the measure blocks from file
- */
-public interface MeasureColumnChunkReader {
-
-  /**
-   * Method to read the blocks data based on block indexes
-   *
-   * @param fileReader   file reader to read the blocks
-   * @param blockIndexes blocks to be read
-   * @return measure data chunks
-   */
-  MeasureColumnDataChunk[] readMeasureChunks(FileHolder fileReader, int[][] blockIndexes)
-      throws IOException;
-
-  /**
-   * Method to read the blocks data based on block index
-   *
-   * @param fileReader file reader to read the blocks
-   * @param blockIndex block to be read
-   * @return measure data chunk
-   */
-  MeasureColumnDataChunk readMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/AbstractChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/AbstractChunkReader.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/AbstractChunkReader.java
deleted file mode 100644
index df34f8a..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/AbstractChunkReader.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.reader.dimension;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.reader.DimensionColumnChunkReader;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.compression.Compressor;
-import org.apache.carbondata.core.datastorage.store.compression.CompressorFactory;
-import org.apache.carbondata.core.keygenerator.mdkey.NumberCompressor;
-import org.apache.carbondata.core.util.CarbonProperties;
-
-/**
- * Class which will have all the common properties and behavior among all type
- * of reader
- */
-public abstract class AbstractChunkReader implements DimensionColumnChunkReader {
-
-  /**
-   * compressor will be used to uncompress the data
-   */
-  protected static final Compressor COMPRESSOR = CompressorFactory.getInstance().getCompressor();
-
-  /**
-   * size of the each column value
-   * for no dictionary column it will be -1
-   */
-  protected int[] eachColumnValueSize;
-
-  /**
-   * full qualified path of the data file from
-   * which data will be read
-   */
-  protected String filePath;
-
-  /**
-   * this will be used to uncompress the
-   * row id and rle chunk
-   */
-  protected NumberCompressor numberComressor;
-
-  /**
-   * number of element in each chunk
-   */
-  protected int numberOfRows;
-
-  /**
-   * Constructor to get minimum parameter to create
-   * instance of this class
-   *
-   * @param eachColumnValueSize  size of the each column value
-   * @param filePath             file from which data will be read
-   */
-  public AbstractChunkReader(final int[] eachColumnValueSize, final String filePath,
-      int numberOfRows) {
-    this.eachColumnValueSize = eachColumnValueSize;
-    this.filePath = filePath;
-    int numberOfElement = 0;
-    try {
-      numberOfElement = Integer.parseInt(CarbonProperties.getInstance()
-          .getProperty(CarbonCommonConstants.BLOCKLET_SIZE,
-              CarbonCommonConstants.BLOCKLET_SIZE_DEFAULT_VAL));
-    } catch (NumberFormatException exception) {
-      numberOfElement = Integer.parseInt(CarbonCommonConstants.BLOCKLET_SIZE_DEFAULT_VAL);
-    }
-    this.numberComressor = new NumberCompressor(numberOfElement);
-    this.numberOfRows = numberOfRows;
-  }
-
-  /**
-   * Below method will be used to create the inverted index reverse
-   * this will be used to point to actual data in the chunk
-   *
-   * @param invertedIndex inverted index
-   * @return reverse inverted index
-   */
-  protected int[] getInvertedReverseIndex(int[] invertedIndex) {
-    int[] columnIndexTemp = new int[invertedIndex.length];
-
-    for (int i = 0; i < invertedIndex.length; i++) {
-      columnIndexTemp[invertedIndex[i]] = i;
-    }
-    return columnIndexTemp;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
deleted file mode 100644
index a521e1e..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.reader.dimension.v1;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.impl.ColumnGroupDimensionDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.impl.FixedLengthDimensionDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.impl.VariableLengthDimensionDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.reader.dimension.AbstractChunkReader;
-import org.apache.carbondata.core.carbon.metadata.blocklet.BlockletInfo;
-import org.apache.carbondata.core.carbon.metadata.blocklet.datachunk.DataChunk;
-import org.apache.carbondata.core.carbon.metadata.encoder.Encoding;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-import org.apache.carbondata.core.datastorage.store.columnar.UnBlockIndexer;
-import org.apache.carbondata.core.util.CarbonUtil;
-
-/**
- * Compressed dimension chunk reader class
- */
-public class CompressedDimensionChunkFileBasedReaderV1 extends AbstractChunkReader {
-
-  /**
-   * data chunk list which holds the information
-   * about the data block metadata
-   */
-  private final List<DataChunk> dimensionColumnChunk;
-
-  /**
-   * Constructor to get minimum parameter to create instance of this class
-   *
-   * @param blockletInfo        blocklet info
-   * @param eachColumnValueSize size of the each column value
-   * @param filePath            file from which data will be read
-   */
-  public CompressedDimensionChunkFileBasedReaderV1(final BlockletInfo blockletInfo,
-      final int[] eachColumnValueSize, final String filePath) {
-    super(eachColumnValueSize, filePath, blockletInfo.getNumberOfRows());
-    this.dimensionColumnChunk = blockletInfo.getDimensionColumnChunk();
-  }
-
-  /**
-   * Below method will be used to read the chunk based on block indexes
-   *
-   * @param fileReader   file reader to read the blocks from file
-   * @param blockIndexes blocks to be read
-   * @return dimension column chunks
-   */
-  @Override public DimensionColumnDataChunk[] readDimensionChunks(FileHolder fileReader,
-      int[][] blockIndexes) throws IOException {
-    // read the column chunk based on block index and add
-    DimensionColumnDataChunk[] dataChunks =
-        new DimensionColumnDataChunk[dimensionColumnChunk.size()];
-    for (int i = 0; i < blockIndexes.length; i++) {
-      for (int j = blockIndexes[i][0]; j <= blockIndexes[i][1]; j++) {
-        dataChunks[j] = readDimensionChunk(fileReader, j);
-      }
-    }
-    return dataChunks;
-  }
-
-  /**
-   * Below method will be used to read the chunk based on block index
-   *
-   * @param fileReader file reader to read the blocks from file
-   * @param blockIndex block to be read
-   * @return dimension column chunk
-   */
-  @Override public DimensionColumnDataChunk readDimensionChunk(FileHolder fileReader,
-      int blockIndex) throws IOException {
-    byte[] dataPage = null;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
-    int[] rlePage = null;
-
-    // first read the data and uncompressed it
-    dataPage = COMPRESSOR.unCompressByte(fileReader
-        .readByteArray(filePath, dimensionColumnChunk.get(blockIndex).getDataPageOffset(),
-            dimensionColumnChunk.get(blockIndex).getDataPageLength()));
-    // if row id block is present then read the row id chunk and uncompress it
-    if (CarbonUtil.hasEncoding(dimensionColumnChunk.get(blockIndex).getEncodingList(),
-        Encoding.INVERTED_INDEX)) {
-      invertedIndexes = CarbonUtil
-          .getUnCompressColumnIndex(dimensionColumnChunk.get(blockIndex).getRowIdPageLength(),
-              fileReader.readByteArray(filePath,
-                  dimensionColumnChunk.get(blockIndex).getRowIdPageOffset(),
-                  dimensionColumnChunk.get(blockIndex).getRowIdPageLength()), numberComressor, 0);
-      // get the reverse index
-      invertedIndexesReverse = getInvertedReverseIndex(invertedIndexes);
-    }
-    // if rle is applied then read the rle block chunk and then uncompress
-    //then actual data based on rle block
-    if (CarbonUtil
-        .hasEncoding(dimensionColumnChunk.get(blockIndex).getEncodingList(), Encoding.RLE)) {
-      // read and uncompress the rle block
-      rlePage = numberComressor.unCompress(fileReader
-              .readByteArray(filePath, dimensionColumnChunk.get(blockIndex).getRlePageOffset(),
-                  dimensionColumnChunk.get(blockIndex).getRlePageLength()), 0,
-          dimensionColumnChunk.get(blockIndex).getRlePageLength());
-      // uncompress the data with rle indexes
-      dataPage = UnBlockIndexer.uncompressData(dataPage, rlePage, eachColumnValueSize[blockIndex]);
-      rlePage = null;
-    }
-    // fill chunk attributes
-    DimensionColumnDataChunk columnDataChunk = null;
-    if (dimensionColumnChunk.get(blockIndex).isRowMajor()) {
-      // to store fixed length column chunk values
-      columnDataChunk = new ColumnGroupDimensionDataChunk(dataPage, eachColumnValueSize[blockIndex],
-          numberOfRows);
-    }
-    // if no dictionary column then first create a no dictionary column chunk
-    // and set to data chunk instance
-    else if (!CarbonUtil
-        .hasEncoding(dimensionColumnChunk.get(blockIndex).getEncodingList(), Encoding.DICTIONARY)) {
-      columnDataChunk =
-          new VariableLengthDimensionDataChunk(dataPage, invertedIndexes, invertedIndexesReverse,
-              numberOfRows);
-    } else {
-      // to store fixed length column chunk values
-      columnDataChunk =
-          new FixedLengthDimensionDataChunk(dataPage, invertedIndexes, invertedIndexesReverse,
-              numberOfRows, eachColumnValueSize[blockIndex]);
-    }
-    return columnDataChunk;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
deleted file mode 100644
index c4bc01b..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
+++ /dev/null
@@ -1,288 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.reader.dimension.v2;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.impl.ColumnGroupDimensionDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.impl.FixedLengthDimensionDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.impl.VariableLengthDimensionDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.reader.dimension.AbstractChunkReader;
-import org.apache.carbondata.core.carbon.metadata.blocklet.BlockletInfo;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-import org.apache.carbondata.core.datastorage.store.columnar.UnBlockIndexer;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.format.DataChunk2;
-import org.apache.carbondata.format.Encoding;
-
-/**
- * Compressed dimension chunk reader class for version 2
- */
-public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkReader {
-
-  /**
-   * dimension chunks offset
-   */
-  private List<Long> dimensionChunksOffset;
-
-  /**
-   * dimension chunks length
-   */
-  private List<Short> dimensionChunksLength;
-
-  /**
-   * Constructor to get minimum parameter to create instance of this class
-   *
-   * @param blockletInfo
-   * @param eachColumnValueSize
-   * @param filePath
-   */
-  public CompressedDimensionChunkFileBasedReaderV2(final BlockletInfo blockletInfo,
-      final int[] eachColumnValueSize, final String filePath) {
-    super(eachColumnValueSize, filePath, blockletInfo.getNumberOfRows());
-    this.dimensionChunksOffset = blockletInfo.getDimensionChunkOffsets();
-    this.dimensionChunksLength = blockletInfo.getDimensionChunksLength();
-
-  }
-
-  /**
-   * Below method will be used to read the chunk based on block indexes
-   * Reading logic of below method is:
-   * Except last column all the column chunk can be read in group
-   * if not last column then read data of all the column present in block index
-   * together then process it.
- * For the last column, read it separately and process it
-   *
-   * @param fileReader   file reader to read the blocks from file
-   * @param blockIndexes blocks range to be read
-   * @return dimension column chunks
-   */
-  @Override public DimensionColumnDataChunk[] readDimensionChunks(final FileHolder fileReader,
-      final int[][] blockIndexes) throws IOException {
-    // read the column chunk based on block index and add
-    DimensionColumnDataChunk[] dataChunks =
-        new DimensionColumnDataChunk[dimensionChunksOffset.size()];
-    // if blocklet index is empty then return empty data chunk
-    if (blockIndexes.length == 0) {
-      return dataChunks;
-    }
-    DimensionColumnDataChunk[] groupChunk = null;
-    int index = 0;
-    // iterate till block indexes -1 as block index will be in sorted order, so to avoid
-    // the last column reading in group
-    for (int i = 0; i < blockIndexes.length - 1; i++) {
-      index = 0;
-      groupChunk = readDimensionChunksInGroup(fileReader, blockIndexes[i][0], blockIndexes[i][1]);
-      for (int j = blockIndexes[i][0]; j <= blockIndexes[i][1]; j++) {
-        dataChunks[j] = groupChunk[index++];
-      }
-    }
-    // check last index is present in block index, if it is present then read separately
-    if (blockIndexes[blockIndexes.length - 1][0] == dimensionChunksOffset.size() - 1) {
-      dataChunks[blockIndexes[blockIndexes.length - 1][0]] =
-          readDimensionChunk(fileReader, blockIndexes[blockIndexes.length - 1][0]);
-    }
-    // otherwise read the data in group
-    else {
-      groupChunk = readDimensionChunksInGroup(fileReader, blockIndexes[blockIndexes.length - 1][0],
-          blockIndexes[blockIndexes.length - 1][1]);
-      index = 0;
-      for (int j = blockIndexes[blockIndexes.length - 1][0];
-           j <= blockIndexes[blockIndexes.length - 1][1]; j++) {
-        dataChunks[j] = groupChunk[index++];
-      }
-    }
-    return dataChunks;
-  }
-
-  /**
-   * Below method will be used to read the chunk based on block index
-   *
-   * @param fileReader file reader to read the blocks from file
-   * @param blockIndex block to be read
-   * @return dimension column chunk
-   */
-  @Override public DimensionColumnDataChunk readDimensionChunk(FileHolder fileReader,
-      int blockIndex) throws IOException {
-    byte[] dataPage = null;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
-    int[] rlePage = null;
-    DataChunk2 dimensionColumnChunk = null;
-    byte[] data = null;
-    int copySourcePoint = 0;
-    byte[] dimensionChunk = null;
-    if (dimensionChunksOffset.size() - 1 == blockIndex) {
-      dimensionChunk = fileReader.readByteArray(filePath, dimensionChunksOffset.get(blockIndex),
-          dimensionChunksLength.get(blockIndex));
-      dimensionColumnChunk = CarbonUtil
-          .readDataChunk(dimensionChunk, copySourcePoint, dimensionChunksLength.get(blockIndex));
-      int totalDimensionDataLength =
-          dimensionColumnChunk.data_page_length + dimensionColumnChunk.rle_page_length
-              + dimensionColumnChunk.rowid_page_length;
-      data = fileReader.readByteArray(filePath,
-          dimensionChunksOffset.get(blockIndex) + dimensionChunksLength.get(blockIndex),
-          totalDimensionDataLength);
-    } else {
-      long currentDimensionOffset = dimensionChunksOffset.get(blockIndex);
-      data = fileReader.readByteArray(filePath, currentDimensionOffset,
-          (int) (dimensionChunksOffset.get(blockIndex + 1) - currentDimensionOffset));
-      dimensionColumnChunk =
-          CarbonUtil.readDataChunk(data, copySourcePoint, dimensionChunksLength.get(blockIndex));
-      copySourcePoint += dimensionChunksLength.get(blockIndex);
-    }
-
-    // first read the data and uncompressed it
-    dataPage =
-        COMPRESSOR.unCompressByte(data, copySourcePoint, dimensionColumnChunk.data_page_length);
-    copySourcePoint += dimensionColumnChunk.data_page_length;
-    // if row id block is present then read the row id chunk and uncompress it
-    if (hasEncoding(dimensionColumnChunk.encoders, Encoding.INVERTED_INDEX)) {
-      invertedIndexes = CarbonUtil
-          .getUnCompressColumnIndex(dimensionColumnChunk.rowid_page_length, data, numberComressor,
-              copySourcePoint);
-      copySourcePoint += dimensionColumnChunk.rowid_page_length;
-      // get the reverse index
-      invertedIndexesReverse = getInvertedReverseIndex(invertedIndexes);
-    }
-    // if rle is applied then read the rle block chunk and then uncompress
-    //then actual data based on rle block
-    if (hasEncoding(dimensionColumnChunk.encoders, Encoding.RLE)) {
-      rlePage =
-          numberComressor.unCompress(data, copySourcePoint, dimensionColumnChunk.rle_page_length);
-      // uncompress the data with rle indexes
-      dataPage = UnBlockIndexer.uncompressData(dataPage, rlePage, eachColumnValueSize[blockIndex]);
-      rlePage = null;
-    }
-    // fill chunk attributes
-    DimensionColumnDataChunk columnDataChunk = null;
-
-    if (dimensionColumnChunk.isRowMajor()) {
-      // to store fixed length column chunk values
-      columnDataChunk = new ColumnGroupDimensionDataChunk(dataPage, eachColumnValueSize[blockIndex],
-          numberOfRows);
-    }
-    // if no dictionary column then first create a no dictionary column chunk
-    // and set to data chunk instance
-    else if (!hasEncoding(dimensionColumnChunk.encoders, Encoding.DICTIONARY)) {
-      columnDataChunk =
-          new VariableLengthDimensionDataChunk(dataPage, invertedIndexes, invertedIndexesReverse,
-              numberOfRows);
-    } else {
-      // to store fixed length column chunk values
-      columnDataChunk =
-          new FixedLengthDimensionDataChunk(dataPage, invertedIndexes, invertedIndexesReverse,
-              numberOfRows, eachColumnValueSize[blockIndex]);
-    }
-    return columnDataChunk;
-  }
-
-  /**
-   * Below method will be used to read the dimension chunks in group.
-   * This is to enhance the IO performance. Will read the data from start index
-   * to end index(including)
-   *
-   * @param fileReader      stream used for reading
-   * @param startBlockIndex start block index
-   * @param endBlockIndex   end block index
-   * @return dimension column chunk array
-   */
-  private DimensionColumnDataChunk[] readDimensionChunksInGroup(FileHolder fileReader,
-      int startBlockIndex, int endBlockIndex) throws IOException {
-    long currentDimensionOffset = dimensionChunksOffset.get(startBlockIndex);
-    byte[] data = fileReader.readByteArray(filePath, currentDimensionOffset,
-        (int) (dimensionChunksOffset.get(endBlockIndex + 1) - currentDimensionOffset));
-    int copySourcePoint = 0;
-    // read the column chunk based on block index and add
-    DimensionColumnDataChunk[] dataChunks =
-        new DimensionColumnDataChunk[endBlockIndex - startBlockIndex + 1];
-    byte[] dataPage = null;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
-    int[] rlePage = null;
-    DataChunk2 dimensionColumnChunk = null;
-    int index = 0;
-    for (int i = startBlockIndex; i <= endBlockIndex; i++) {
-      invertedIndexes = null;
-      invertedIndexesReverse = null;
-      dimensionColumnChunk =
-          CarbonUtil.readDataChunk(data, copySourcePoint, dimensionChunksLength.get(i));
-      copySourcePoint += dimensionChunksLength.get(i);
-      // first read the data and uncompressed it
-      dataPage =
-          COMPRESSOR.unCompressByte(data, copySourcePoint, dimensionColumnChunk.data_page_length);
-      copySourcePoint += dimensionColumnChunk.data_page_length;
-      // if row id block is present then read the row id chunk and uncompress it
-      if (hasEncoding(dimensionColumnChunk.encoders, Encoding.INVERTED_INDEX)) {
-        invertedIndexes = CarbonUtil
-            .getUnCompressColumnIndex(dimensionColumnChunk.rowid_page_length, data, numberComressor,
-                copySourcePoint);
-        copySourcePoint += dimensionColumnChunk.rowid_page_length;
-        // get the reverse index
-        invertedIndexesReverse = getInvertedReverseIndex(invertedIndexes);
-      }
-      // if rle is applied then read the rle block chunk and then uncompress
-      //then actual data based on rle block
-      if (hasEncoding(dimensionColumnChunk.encoders, Encoding.RLE)) {
-        // read and uncompress the rle block
-        rlePage =
-            numberComressor.unCompress(data, copySourcePoint, dimensionColumnChunk.rle_page_length);
-        copySourcePoint += dimensionColumnChunk.rle_page_length;
-        // uncompress the data with rle indexes
-        dataPage = UnBlockIndexer.uncompressData(dataPage, rlePage, eachColumnValueSize[i]);
-        rlePage = null;
-      }
-      // fill chunk attributes
-      DimensionColumnDataChunk columnDataChunk = null;
-      if (dimensionColumnChunk.isRowMajor()) {
-        // to store fixed length column chunk values
-        columnDataChunk =
-            new ColumnGroupDimensionDataChunk(dataPage, eachColumnValueSize[i], numberOfRows);
-      }
-      // if no dictionary column then first create a no dictionary column chunk
-      // and set to data chunk instance
-      else if (!hasEncoding(dimensionColumnChunk.encoders, Encoding.DICTIONARY)) {
-        columnDataChunk =
-            new VariableLengthDimensionDataChunk(dataPage, invertedIndexes, invertedIndexesReverse,
-                numberOfRows);
-      } else {
-        // to store fixed length column chunk values
-        columnDataChunk =
-            new FixedLengthDimensionDataChunk(dataPage, invertedIndexes, invertedIndexesReverse,
-                numberOfRows, eachColumnValueSize[i]);
-      }
-      dataChunks[index++] = columnDataChunk;
-    }
-    return dataChunks;
-  }
-
-  /**
-   * Below method will be used to check whether particular encoding is present
-   * in the dimension or not
-   *
-   * @param encoding encoding to search
-   * @return if encoding is present in dimension
-   */
-  private boolean hasEncoding(List<Encoding> encodings, Encoding encoding) {
-    return encodings.contains(encoding);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java
deleted file mode 100644
index 0a44336..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.reader.measure;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.reader.MeasureColumnChunkReader;
-
-/**
- * Measure block reader abstract class
- */
-public abstract class AbstractMeasureChunkReader implements MeasureColumnChunkReader {
-
-  /**
-   * file path from which blocks will be read
-   */
-  protected String filePath;
-
-  /**
-   * Constructor to get minimum parameter to create instance of this class
-   *
-   * @param filePath           file from which data will be read
-   */
-  public AbstractMeasureChunkReader(String filePath) {
-    this.filePath = filePath;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
deleted file mode 100644
index c7fbca8..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.reader.measure.v1;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.reader.measure.AbstractMeasureChunkReader;
-import org.apache.carbondata.core.carbon.metadata.blocklet.BlockletInfo;
-import org.apache.carbondata.core.carbon.metadata.blocklet.datachunk.DataChunk;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-import org.apache.carbondata.core.datastorage.store.compression.ReaderCompressModel;
-import org.apache.carbondata.core.datastorage.store.compression.ValueCompressionHolder;
-import org.apache.carbondata.core.datastorage.store.dataholder.CarbonReadDataHolder;
-import org.apache.carbondata.core.metadata.ValueEncoderMeta;
-import org.apache.carbondata.core.util.ValueCompressionUtil;
-
-/**
- * Compressed measure chunk reader
- */
-public class CompressedMeasureChunkFileBasedReaderV1 extends AbstractMeasureChunkReader {
-
-  /**
-   * measure chunk have the information about the metadata present in the file
-   */
-  private final List<DataChunk> measureColumnChunks;
-
-  /**
-   * Constructor to get minimum parameter to create instance of this class
-   *
-   * @param blockletInfo BlockletInfo
-   * @param filePath     file from which data will be read
-   */
-  public CompressedMeasureChunkFileBasedReaderV1(final BlockletInfo blockletInfo,
-      final String filePath) {
-    super(filePath);
-    this.measureColumnChunks = blockletInfo.getMeasureColumnChunk();
-  }
-
-  /**
-   * Method to read the blocks data based on block indexes
-   *
-   * @param fileReader   file reader to read the blocks
-   * @param blockIndexes blocks to be read
-   * @return measure data chunks
-   */
-  @Override public MeasureColumnDataChunk[] readMeasureChunks(final FileHolder fileReader,
-      final int[][] blockIndexes) throws IOException {
-    MeasureColumnDataChunk[] datChunk = new MeasureColumnDataChunk[measureColumnChunks.size()];
-    for (int i = 0; i < blockIndexes.length; i++) {
-      for (int j = blockIndexes[i][0]; j <= blockIndexes[i][1]; j++) {
-        datChunk[j] = readMeasureChunk(fileReader, j);
-      }
-    }
-    return datChunk;
-  }
-
-  /**
-   * Method to read the blocks data based on block index
-   *
-   * @param fileReader file reader to read the blocks
-   * @param blockIndex block to be read
-   * @return measure data chunk
-   */
-  @Override public MeasureColumnDataChunk readMeasureChunk(final FileHolder fileReader,
-      final int blockIndex) throws IOException {
-    ValueEncoderMeta meta = measureColumnChunks.get(blockIndex).getValueEncoderMeta().get(0);
-    ReaderCompressModel compressModel = ValueCompressionUtil.getReaderCompressModel(meta);
-
-    ValueCompressionHolder values = compressModel.getValueCompressionHolder();
-    byte[] dataPage = fileReader
-            .readByteArray(filePath, measureColumnChunks.get(blockIndex).getDataPageOffset(),
-                    measureColumnChunks.get(blockIndex).getDataPageLength());
-
-    // unCompress data
-    values.uncompress(compressModel.getConvertedDataType(), dataPage, 0,
-            measureColumnChunks.get(blockIndex).getDataPageLength(), compressModel.getMantissa(),
-            compressModel.getMaxValue());
-
-    CarbonReadDataHolder measureDataHolder = new CarbonReadDataHolder(values);
-
-    // create and set the data chunk
-    MeasureColumnDataChunk datChunk = new MeasureColumnDataChunk();
-    datChunk.setMeasureDataHolder(measureDataHolder);
-    // set the enum value indexes
-    datChunk
-        .setNullValueIndexHolder(measureColumnChunks.get(blockIndex).getNullValueIndexForColumn());
-    return datChunk;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
deleted file mode 100644
index 32cf784..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
+++ /dev/null
@@ -1,236 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.reader.measure.v2;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.BitSet;
-import java.util.List;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.reader.measure.AbstractMeasureChunkReader;
-import org.apache.carbondata.core.carbon.metadata.blocklet.BlockletInfo;
-import org.apache.carbondata.core.carbon.metadata.blocklet.datachunk.PresenceMeta;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-import org.apache.carbondata.core.datastorage.store.compression.CompressorFactory;
-import org.apache.carbondata.core.datastorage.store.compression.ValueCompressionHolder;
-import org.apache.carbondata.core.datastorage.store.compression.WriterCompressModel;
-import org.apache.carbondata.core.datastorage.store.dataholder.CarbonReadDataHolder;
-import org.apache.carbondata.core.metadata.ValueEncoderMeta;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.format.DataChunk2;
-
-/**
- * Class to read the measure column data for version 2
- */
-public class CompressedMeasureChunkFileBasedReaderV2 extends AbstractMeasureChunkReader {
-
-  /**
-   * measure column chunks offset
-   */
-  private List<Long> measureColumnChunkOffsets;
-
-  /**
-   * measure column chunks length
-   */
-  private List<Short> measureColumnChunkLength;
-
-  /**
-   * Constructor to get minimum parameter to create instance of this class
-   *
-   * @param blockletInfo BlockletInfo
-   * @param filePath     file from which data will be read
-   */
-  public CompressedMeasureChunkFileBasedReaderV2(final BlockletInfo blockletInfo,
-      final String filePath) {
-    super(filePath);
-    this.measureColumnChunkOffsets = blockletInfo.getMeasureChunkOffsets();
-    this.measureColumnChunkLength = blockletInfo.getMeasureChunksLength();
-  }
-
-  /**
-   * Below method will be used to convert the thrift presence meta to wrapper
-   * presence meta
-   *
-   * @param presentMetadataThrift
-   * @return wrapper presence meta
-   */
-  private static PresenceMeta getPresenceMeta(
-      org.apache.carbondata.format.PresenceMeta presentMetadataThrift) {
-    PresenceMeta presenceMeta = new PresenceMeta();
-    presenceMeta.setRepresentNullValues(presentMetadataThrift.isRepresents_presence());
-    presenceMeta.setBitSet(BitSet.valueOf(CompressorFactory.getInstance().getCompressor()
-        .unCompressByte(presentMetadataThrift.getPresent_bit_stream())));
-    return presenceMeta;
-  }
-
-  /**
-   * Below method will be used to read the chunk based on block indexes
-   * Reading logic of below method is: Except last column all the column chunk
-   * can be read in group if not last column then read data of all the column
-   * present in block index together then process it. For last column read is
-   * separately and process
-   *
-   * @param fileReader   file reader to read the blocks from file
-   * @param blockIndexes blocks range to be read
-   * @return measure column chunks
-   * @throws IOException
-   */
-  public MeasureColumnDataChunk[] readMeasureChunks(FileHolder fileReader, int[][] blockIndexes)
-      throws IOException {
-    // read the column chunk based on block index and add
-    MeasureColumnDataChunk[] dataChunks =
-        new MeasureColumnDataChunk[measureColumnChunkOffsets.size()];
-    if (blockIndexes.length == 0) {
-      return dataChunks;
-    }
-    MeasureColumnDataChunk[] groupChunk = null;
-    int index = 0;
-    for (int i = 0; i < blockIndexes.length - 1; i++) {
-      index = 0;
-      groupChunk = readMeasureChunksInGroup(fileReader, blockIndexes[i][0], blockIndexes[i][1]);
-      for (int j = blockIndexes[i][0]; j <= blockIndexes[i][1]; j++) {
-        dataChunks[j] = groupChunk[index++];
-      }
-    }
-    if (blockIndexes[blockIndexes.length - 1][0] == measureColumnChunkOffsets.size() - 1) {
-      dataChunks[blockIndexes[blockIndexes.length - 1][0]] =
-          readMeasureChunk(fileReader, blockIndexes[blockIndexes.length - 1][0]);
-    } else {
-      groupChunk = readMeasureChunksInGroup(fileReader, blockIndexes[blockIndexes.length - 1][0],
-          blockIndexes[blockIndexes.length - 1][1]);
-      index = 0;
-      for (int j = blockIndexes[blockIndexes.length - 1][0];
-           j <= blockIndexes[blockIndexes.length - 1][1]; j++) {
-        dataChunks[j] = groupChunk[index++];
-      }
-    }
-    return dataChunks;
-  }
-
-  /**
-   * Method to read the blocks data based on block index
-   *
-   * @param fileReader file reader to read the blocks
-   * @param blockIndex block to be read
-   * @return measure data chunk
-   * @throws IOException
-   */
-  @Override public MeasureColumnDataChunk readMeasureChunk(FileHolder fileReader, int blockIndex)
-      throws IOException {
-    MeasureColumnDataChunk datChunk = new MeasureColumnDataChunk();
-    DataChunk2 measureColumnChunk = null;
-    byte[] measureDataChunk = null;
-    byte[] data = null;
-    int copyPoint = 0;
-    if (measureColumnChunkOffsets.size() - 1 == blockIndex) {
-      measureDataChunk = fileReader
-          .readByteArray(filePath, measureColumnChunkOffsets.get(blockIndex),
-              measureColumnChunkLength.get(blockIndex));
-      measureColumnChunk = CarbonUtil
-          .readDataChunk(measureDataChunk, copyPoint, measureColumnChunkLength.get(blockIndex));
-      data = fileReader.readByteArray(filePath,
-          measureColumnChunkOffsets.get(blockIndex) + measureColumnChunkLength.get(blockIndex),
-          measureColumnChunk.data_page_length);
-    } else {
-      long currentMeasureOffset = measureColumnChunkOffsets.get(blockIndex);
-      data = fileReader.readByteArray(filePath, currentMeasureOffset,
-          (int) (measureColumnChunkOffsets.get(blockIndex + 1) - currentMeasureOffset));
-      measureColumnChunk =
-          CarbonUtil.readDataChunk(data, copyPoint, measureColumnChunkLength.get(blockIndex));
-      copyPoint += measureColumnChunkLength.get(blockIndex);
-    }
-    List<ValueEncoderMeta> valueEncodeMeta = new ArrayList<>();
-    for (int i = 0; i < measureColumnChunk.getEncoder_meta().size(); i++) {
-      valueEncodeMeta.add(
-          CarbonUtil.deserializeEncoderMeta(measureColumnChunk.getEncoder_meta().get(i).array()));
-    }
-    WriterCompressModel compressionModel = CarbonUtil.getValueCompressionModel(valueEncodeMeta);
-
-    ValueCompressionHolder values = compressionModel.getValueCompressionHolder()[0];
-
-    // uncompress
-    values.uncompress(compressionModel.getConvertedDataType()[0], data,
-        copyPoint, measureColumnChunk.data_page_length, compressionModel.getMantissa()[0],
-            compressionModel.getMaxValue()[0]);
-
-    CarbonReadDataHolder measureDataHolder = new CarbonReadDataHolder(values);
-
-    // set the data chunk
-    datChunk.setMeasureDataHolder(measureDataHolder);
-
-    // set the enun value indexes
-    datChunk.setNullValueIndexHolder(getPresenceMeta(measureColumnChunk.presence));
-    return datChunk;
-  }
-
-  /**
-   * Below method will be used to read the dimension chunks in group. This is
-   * to enhance the IO performance. Will read the data from start index to end
-   * index(including)
-   *
-   * @param fileReader      stream used for reading
-   * @param startBlockIndex start block index
-   * @param endBlockIndex   end block index
-   * @return measure column chunk array
-   * @throws IOException
-   */
-  private MeasureColumnDataChunk[] readMeasureChunksInGroup(FileHolder fileReader,
-      int startBlockIndex, int endBlockIndex) throws IOException {
-    long currentMeasureOffset = measureColumnChunkOffsets.get(startBlockIndex);
-    byte[] data = fileReader.readByteArray(filePath, currentMeasureOffset,
-        (int) (measureColumnChunkOffsets.get(endBlockIndex + 1) - currentMeasureOffset));
-    MeasureColumnDataChunk[] dataChunks =
-        new MeasureColumnDataChunk[endBlockIndex - startBlockIndex + 1];
-    MeasureColumnDataChunk dataChunk = null;
-    int index = 0;
-    int copyPoint = 0;
-    DataChunk2 measureColumnChunk = null;
-    for (int i = startBlockIndex; i <= endBlockIndex; i++) {
-      dataChunk = new MeasureColumnDataChunk();
-      measureColumnChunk =
-          CarbonUtil.readDataChunk(data, copyPoint, measureColumnChunkLength.get(i));
-      copyPoint += measureColumnChunkLength.get(i);
-      List<ValueEncoderMeta> valueEncodeMeta = new ArrayList<>();
-      for (int j = 0; j < measureColumnChunk.getEncoder_meta().size(); j++) {
-        valueEncodeMeta.add(
-            CarbonUtil.deserializeEncoderMeta(measureColumnChunk.getEncoder_meta().get(j).array()));
-      }
-      WriterCompressModel compressionModel = CarbonUtil.getValueCompressionModel(valueEncodeMeta);
-
-      ValueCompressionHolder values = compressionModel.getValueCompressionHolder()[0];
-
-      // uncompress
-      values.uncompress(compressionModel.getConvertedDataType()[0], data, copyPoint,
-              measureColumnChunk.data_page_length, compressionModel.getMantissa()[0],
-              compressionModel.getMaxValue()[0]);
-
-      CarbonReadDataHolder measureDataHolder = new CarbonReadDataHolder(values);
-
-      copyPoint += measureColumnChunk.data_page_length;
-      // set the data chunk
-      dataChunk.setMeasureDataHolder(measureDataHolder);
-
-      // set the enun value indexes
-      dataChunk.setNullValueIndexHolder(getPresenceMeta(measureColumnChunk.presence));
-      dataChunks[index++] = dataChunk;
-    }
-    return dataChunks;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/DimensionChunkStoreFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/DimensionChunkStoreFactory.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/DimensionChunkStoreFactory.java
deleted file mode 100644
index d1df23c..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/DimensionChunkStoreFactory.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.store;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.safe.SafeFixedLengthDimensionDataChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.safe.SafeVariableLengthDimensionDataChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.unsafe.UnsafeFixedLengthDimensionDataChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.unsafe.UnsafeVariableLengthDimesionDataChunkStore;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.util.CarbonProperties;
-
-/**
- * Below class will be used to get the dimension store type
- */
-public class DimensionChunkStoreFactory {
-
-  /**
-   * store factory instance
-   */
-  public static final DimensionChunkStoreFactory INSTANCE = new DimensionChunkStoreFactory();
-
-  /**
-   * is unsafe
-   */
-  private static final boolean isUnsafe;
-
-  static {
-    isUnsafe = Boolean.parseBoolean(CarbonProperties.getInstance()
-        .getProperty(CarbonCommonConstants.ENABLE_UNSAFE_IN_QUERY_EXECUTION,
-            CarbonCommonConstants.ENABLE_UNSAFE_IN_QUERY_EXECUTION_DEFAULTVALUE));
-  }
-
-  private DimensionChunkStoreFactory() {
-
-  }
-
-  /**
-   * Below method will be used to get the dimension store type
-   *
-   * @param columnValueSize column value size
-   * @param isInvertedIndex is inverted index
-   * @param numberOfRows    number of rows
-   * @param totalSize       total size of data
-   * @param storeType       store type
-   * @return dimension store type
-   */
-  public DimensionDataChunkStore getDimensionChunkStore(int columnValueSize,
-      boolean isInvertedIndex, int numberOfRows, long totalSize, DimensionStoreType storeType) {
-
-    if (isUnsafe) {
-      if (storeType == DimensionStoreType.FIXEDLENGTH) {
-        return new UnsafeFixedLengthDimensionDataChunkStore(totalSize, columnValueSize,
-            isInvertedIndex, numberOfRows);
-      } else {
-        return new UnsafeVariableLengthDimesionDataChunkStore(totalSize, isInvertedIndex,
-            numberOfRows);
-      }
-
-    } else {
-      if (storeType == DimensionStoreType.FIXEDLENGTH) {
-        return new SafeFixedLengthDimensionDataChunkStore(isInvertedIndex, columnValueSize);
-      } else {
-        return new SafeVariableLengthDimensionDataChunkStore(isInvertedIndex, numberOfRows);
-      }
-    }
-  }
-
-  /**
-   * dimension store type enum
-   */
-  public enum DimensionStoreType {
-    FIXEDLENGTH, VARIABLELENGTH;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/DimensionDataChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/DimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/DimensionDataChunkStore.java
deleted file mode 100644
index a2741bb..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/DimensionDataChunkStore.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.store;
-
/**
 * Interface responsibility is to store dimension data in memory.
 * Storage can be on heap or offheap.
 */
public interface DimensionDataChunkStore {

  /**
   * Below method will be used to put the rows and its metadata into the store
   *
   * @param invertedIndex        inverted index to be stored
   * @param invertedIndexReverse inverted index reverse to be stored
   * @param data                 data to be stored
   */
  void putArray(int[] invertedIndex, int[] invertedIndexReverse, byte[] data);

  /**
   * Below method will be used to get the row
   * based on the row id passed
   *
   * @param rowId row id of the row to fetch
   * @return row
   */
  byte[] getRow(int rowId);

  /**
   * Below method will be used to fill the row values to buffer array
   *
   * @param rowId  row id of the data to be filled
   * @param buffer buffer in which data will be filled
   * @param offset offset into the buffer at which filling starts
   */
  void fillRow(int rowId, byte[] buffer, int offset);

  /**
   * Below method will be used to get the inverted index
   *
   * @param rowId row id
   * @return inverted index based on row id passed
   */
  int getInvertedIndex(int rowId);

  /**
   * Below method will be used to get the surrogate key
   * based on the row id passed
   *
   * @param rowId row id
   * @return surrogate key
   */
  int getSurrogate(int rowId);

  /**
   * @return size of each column value
   */
  int getColumnValueSize();

  /**
   * @return whether column was explicitly sorted or not
   */
  boolean isExplicitSorted();

  /**
   * Below method will be used to free the memory occupied by
   * the column chunk
   */
  void freeMemory();

  /**
   * To compare a stored row with the given byte array
   *
   * @param index        index of the stored row to compare
   * @param compareValue value to be compared against
   * @return compare result
   */
  int compareTo(int index, byte[] compareValue);
}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/MeasureChunkStoreFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/MeasureChunkStoreFactory.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/MeasureChunkStoreFactory.java
deleted file mode 100644
index 87ef7b5..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/MeasureChunkStoreFactory.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.carbon.datastore.chunk.store;
-
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.safe.SafeByteMeasureChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.safe.SafeDoubleMeasureChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.safe.SafeIntMeasureChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.safe.SafeLongMeasureChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.safe.SafeShortMeasureChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.unsafe.UnsafeByteMeasureChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.unsafe.UnsafeDoubleMeasureChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.unsafe.UnsafeIntMeasureChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.unsafe.UnsafeLongMeasureChunkStore;
-import org.apache.carbondata.core.carbon.datastore.chunk.store.impl.unsafe.UnsafeShortMeasureChunkStore;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.ValueCompressionUtil.DataType;
-
-/**
- * Factory class for getting the measure store type
- */
-public class MeasureChunkStoreFactory {
-
-  /**
-   * instance type
-   */
-  public static final MeasureChunkStoreFactory INSTANCE = new MeasureChunkStoreFactory();
-
-  /**
-   * is unsafe
-   */
-  private static final boolean isUnsafe;
-
-  static {
-    isUnsafe = Boolean.parseBoolean(CarbonProperties.getInstance()
-        .getProperty(CarbonCommonConstants.ENABLE_UNSAFE_IN_QUERY_EXECUTION,
-            CarbonCommonConstants.ENABLE_UNSAFE_IN_QUERY_EXECUTION_DEFAULTVALUE));
-  }
-
-  private MeasureChunkStoreFactory() {
-  }
-
-  /**
-   * Below method will be used to get the measure data chunk store based on data type
-   *
-   * @param dataType     data type
-   * @param numberOfRows number of rows
-   * @return measure chunk store
-   */
-  public MeasureDataChunkStore getMeasureDataChunkStore(DataType dataType, int numberOfRows) {
-    if (!isUnsafe) {
-      switch (dataType) {
-        case DATA_BYTE:
-          return new SafeByteMeasureChunkStore(numberOfRows);
-        case DATA_SHORT:
-          return new SafeShortMeasureChunkStore(numberOfRows);
-        case DATA_INT:
-          return new SafeIntMeasureChunkStore(numberOfRows);
-        case DATA_LONG:
-          return new SafeLongMeasureChunkStore(numberOfRows);
-        case DATA_DOUBLE:
-          return new SafeDoubleMeasureChunkStore(numberOfRows);
-        default:
-          return new SafeDoubleMeasureChunkStore(numberOfRows);
-      }
-    } else {
-      switch (dataType) {
-        case DATA_BYTE:
-          return new UnsafeByteMeasureChunkStore(numberOfRows);
-        case DATA_SHORT:
-          return new UnsafeShortMeasureChunkStore(numberOfRows);
-        case DATA_INT:
-          return new UnsafeIntMeasureChunkStore(numberOfRows);
-        case DATA_LONG:
-          return new UnsafeLongMeasureChunkStore(numberOfRows);
-        case DATA_DOUBLE:
-          return new UnsafeDoubleMeasureChunkStore(numberOfRows);
-        default:
-          return new UnsafeDoubleMeasureChunkStore(numberOfRows);
-      }
-    }
-  }
-}


Mime
View raw message