carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ravipes...@apache.org
Subject [1/2] carbondata git commit: Query statistics issue in case of multiple blocklet and block
Date Wed, 17 May 2017 17:45:13 GMT
Repository: carbondata
Updated Branches:
  refs/heads/master 781d6fa73 -> 636f2730b


Query statistics issue in case of multiple blocklet and block


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/ded55071
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/ded55071
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/ded55071

Branch: refs/heads/master
Commit: ded550717e455f7ae2f179cef659fa4bbffc8788
Parents: 781d6fa
Author: akashrn5 <akashnilugal@gmail.com>
Authored: Wed May 17 15:06:52 2017 +0530
Committer: ravipesala <ravi.pesala@gmail.com>
Committed: Wed May 17 23:14:11 2017 +0530

----------------------------------------------------------------------
 .../carbondata/core/datastore/FileHolder.java   |  8 +++
 .../core/datastore/impl/DFSFileHolderImpl.java  | 21 +++++++-
 .../core/datastore/impl/FileHolderImpl.java     | 17 +++++++
 .../AbstractDetailQueryResultIterator.java      | 19 ++++++-
 .../scan/scanner/AbstractBlockletScanner.java   |  7 ---
 .../core/scan/scanner/impl/FilterScanner.java   | 53 +++++++++++++-------
 .../core/stats/QueryStatisticsConstants.java    |  5 +-
 .../core/stats/QueryStatisticsRecorderImpl.java |  8 ++-
 .../carbondata/core/util/BitSetGroup.java       | 13 ++++-
 9 files changed, 122 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/ded55071/core/src/main/java/org/apache/carbondata/core/datastore/FileHolder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/FileHolder.java b/core/src/main/java/org/apache/carbondata/core/datastore/FileHolder.java
index 1b972bc..712e116 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/FileHolder.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/FileHolder.java
@@ -17,6 +17,7 @@
 
 package org.apache.carbondata.core.datastore;
 
+import java.io.DataInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
@@ -97,4 +98,11 @@ public interface FileHolder {
    * This method will be used to close all the streams currently present in the cache
    */
   void finish() throws IOException;
+
+  void setQueryId(String queryId);
+
+  String getQueryId();
+
+  DataInputStream getDataInputStream(String filePath, long offset) throws IOException;
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ded55071/core/src/main/java/org/apache/carbondata/core/datastore/impl/DFSFileHolderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/DFSFileHolderImpl.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/DFSFileHolderImpl.java
index d14cff7..6e7a55b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/DFSFileHolderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/DFSFileHolderImpl.java
@@ -16,6 +16,8 @@
  */
 package org.apache.carbondata.core.datastore.impl;
 
+import java.io.BufferedInputStream;
+import java.io.DataInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.HashMap;
@@ -29,13 +31,15 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-
 public class DFSFileHolderImpl implements FileHolder {
   /**
    * cache to hold filename and its stream
    */
   private Map<String, FSDataInputStream> fileNameAndStreamCache;
 
+  private String queryId;
+
+
   public DFSFileHolderImpl() {
     this.fileNameAndStreamCache =
         new HashMap<String, FSDataInputStream>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
@@ -138,4 +142,19 @@ public class DFSFileHolderImpl implements FileHolder {
     byteBuffer.rewind();
     return byteBuffer;
   }
+
+  @Override public void setQueryId(String queryId) {
+    this.queryId = queryId;
+  }
+
+  @Override public String getQueryId() {
+    return queryId;
+  }
+
+  @Override public DataInputStream getDataInputStream(String filePath, long offset)
+      throws IOException {
+    FSDataInputStream fsDataInputStream = updateCache(filePath);
+    fsDataInputStream.seek(offset);
+    return new DataInputStream(new BufferedInputStream(fsDataInputStream, 1 * 1024 * 1024));
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ded55071/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileHolderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileHolderImpl.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileHolderImpl.java
index 36b48f5..4471013 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileHolderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileHolderImpl.java
@@ -17,6 +17,8 @@
 
 package org.apache.carbondata.core.datastore.impl;
 
+import java.io.BufferedInputStream;
+import java.io.DataInputStream;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -34,6 +36,7 @@ public class FileHolderImpl implements FileHolder {
    * cache to hold filename and its stream
    */
   private Map<String, FileChannel> fileNameAndStreamCache;
+  private String queryId;
 
   /**
    * FileHolderImpl Constructor
@@ -203,4 +206,18 @@ public class FileHolderImpl implements FileHolder {
     return byteBuffer;
   }
 
+  @Override public void setQueryId(String queryId) {
+    this.queryId = queryId;
+  }
+
+  @Override public String getQueryId() {
+    return queryId;
+  }
+
+  @Override public DataInputStream getDataInputStream(String filePath, long offset)
+      throws IOException {
+    FileInputStream stream = new FileInputStream(filePath);
+    stream.skip(offset);
+    return new DataInputStream(new BufferedInputStream(stream));
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ded55071/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
index cdab4e4..ceef258 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
@@ -97,6 +97,7 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterator<E>
     this.blockExecutionInfos = infos;
     this.fileReader = FileFactory.getFileHolder(
         FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getStorePath()));
+    this.fileReader.setQueryId(queryModel.getQueryId());
     this.execService = execService;
     intialiseInfos();
     initQueryStatiticsModel();
@@ -148,7 +149,6 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterator<E>
     if (blockExecutionInfos.size() > 0) {
       BlockExecutionInfo executionInfo = blockExecutionInfos.get(0);
       blockExecutionInfos.remove(executionInfo);
-      queryStatisticsModel.setRecorder(recorder);
       return new DataBlockIteratorImpl(executionInfo, fileReader, batchSize, queryStatisticsModel,
           execService);
     }
@@ -157,24 +157,41 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterator<E>
 
   protected void initQueryStatiticsModel() {
     this.queryStatisticsModel = new QueryStatisticsModel();
+    this.queryStatisticsModel.setRecorder(recorder);
     QueryStatistic queryStatisticTotalBlocklet = new QueryStatistic();
     queryStatisticsModel.getStatisticsTypeAndObjMap()
         .put(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM, queryStatisticTotalBlocklet);
+    queryStatisticsModel.getRecorder().recordStatistics(queryStatisticTotalBlocklet);
+
     QueryStatistic queryStatisticValidScanBlocklet = new QueryStatistic();
     queryStatisticsModel.getStatisticsTypeAndObjMap()
         .put(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM, queryStatisticValidScanBlocklet);
+    queryStatisticsModel.getRecorder().recordStatistics(queryStatisticValidScanBlocklet);
+
     QueryStatistic totalNumberOfPages = new QueryStatistic();
     queryStatisticsModel.getStatisticsTypeAndObjMap()
         .put(QueryStatisticsConstants.TOTAL_PAGE_SCANNED, totalNumberOfPages);
+    queryStatisticsModel.getRecorder().recordStatistics(totalNumberOfPages);
+
     QueryStatistic validPages = new QueryStatistic();
     queryStatisticsModel.getStatisticsTypeAndObjMap()
         .put(QueryStatisticsConstants.VALID_PAGE_SCANNED, validPages);
+    queryStatisticsModel.getRecorder().recordStatistics(validPages);
+
+    QueryStatistic scannedPages = new QueryStatistic();
+    queryStatisticsModel.getStatisticsTypeAndObjMap()
+        .put(QueryStatisticsConstants.PAGE_SCANNED, scannedPages);
+    queryStatisticsModel.getRecorder().recordStatistics(scannedPages);
+
     QueryStatistic scanTime = new QueryStatistic();
     queryStatisticsModel.getStatisticsTypeAndObjMap()
         .put(QueryStatisticsConstants.SCAN_BLOCKlET_TIME, scanTime);
+    queryStatisticsModel.getRecorder().recordStatistics(scanTime);
+
     QueryStatistic readTime = new QueryStatistic();
     queryStatisticsModel.getStatisticsTypeAndObjMap()
         .put(QueryStatisticsConstants.READ_BLOCKlET_TIME, readTime);
+    queryStatisticsModel.getRecorder().recordStatistics(readTime);
   }
 
   public void processNextBatch(CarbonColumnarBatch columnarBatch) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ded55071/core/src/main/java/org/apache/carbondata/core/scan/scanner/AbstractBlockletScanner.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/scanner/AbstractBlockletScanner.java b/core/src/main/java/org/apache/carbondata/core/scan/scanner/AbstractBlockletScanner.java
index e8bfc74..0fb9782 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/scanner/AbstractBlockletScanner.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/scanner/AbstractBlockletScanner.java
@@ -60,26 +60,21 @@ public abstract class AbstractBlockletScanner implements BlockletScanner {
         .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
     totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM,
         totalBlockletStatistic.getCount() + 1);
-    queryStatisticsModel.getRecorder().recordStatistics(totalBlockletStatistic);
     QueryStatistic validScannedBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
         .get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM);
     validScannedBlockletStatistic
         .addCountStatistic(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM,
             validScannedBlockletStatistic.getCount() + 1);
-    queryStatisticsModel.getRecorder().recordStatistics(validScannedBlockletStatistic);
     // adding statistics for valid number of pages
     QueryStatistic validPages = queryStatisticsModel.getStatisticsTypeAndObjMap()
         .get(QueryStatisticsConstants.VALID_PAGE_SCANNED);
     validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED,
         validPages.getCount() + blocksChunkHolder.getDataBlock().numberOfPages());
-    queryStatisticsModel.getRecorder().recordStatistics(validPages);
     // adding statistics for number of pages
     QueryStatistic totalPagesScanned = queryStatisticsModel.getStatisticsTypeAndObjMap()
         .get(QueryStatisticsConstants.TOTAL_PAGE_SCANNED);
     totalPagesScanned.addCountStatistic(QueryStatisticsConstants.TOTAL_PAGE_SCANNED,
         totalPagesScanned.getCount() + blocksChunkHolder.getDataBlock().numberOfPages());
-    queryStatisticsModel.getRecorder().recordStatistics(totalPagesScanned);
-
     scannedResult.setBlockletId(
         blockExecutionInfo.getBlockId() + CarbonCommonConstants.FILE_SEPARATOR + blocksChunkHolder
             .getDataBlock().nodeNumber());
@@ -132,7 +127,6 @@ public abstract class AbstractBlockletScanner implements BlockletScanner {
         .get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME);
     scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
         scanTime.getCount() + (System.currentTimeMillis() - startTime));
-    queryStatisticsModel.getRecorder().recordStatistics(scanTime);
     return scannedResult;
   }
 
@@ -151,7 +145,6 @@ public abstract class AbstractBlockletScanner implements BlockletScanner {
         .get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
     readTime.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME,
         readTime.getCount() + (System.currentTimeMillis() - startTime));
-    queryStatisticsModel.getRecorder().recordStatistics(readTime);
   }
 
   @Override public AbstractScannedResult createEmptyResult() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ded55071/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/FilterScanner.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/FilterScanner.java b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/FilterScanner.java
index 86a2e8b..a224687 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/FilterScanner.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/FilterScanner.java
@@ -98,7 +98,6 @@ public class FilterScanner extends AbstractBlockletScanner {
         .get(QueryStatisticsConstants.TOTAL_PAGE_SCANNED);
     totalPagesScanned.addCountStatistic(QueryStatisticsConstants.TOTAL_PAGE_SCANNED,
         totalPagesScanned.getCount() + blocksChunkHolder.getDataBlock().numberOfPages());
-    queryStatisticsModel.getRecorder().recordStatistics(totalPagesScanned);
     // apply min max
     if (isMinMaxEnabled) {
       BitSet bitSet = this.filterExecuter
@@ -121,7 +120,6 @@ public class FilterScanner extends AbstractBlockletScanner {
         .get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
     readTime.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME,
         readTime.getCount() + (System.currentTimeMillis() - startTime));
-    queryStatisticsModel.getRecorder().recordStatistics(readTime);
   }
 
   /**
@@ -143,12 +141,26 @@ public class FilterScanner extends AbstractBlockletScanner {
   private AbstractScannedResult fillScannedResult(BlocksChunkHolder blocksChunkHolder)
       throws FilterUnsupportedException, IOException {
     long startTime = System.currentTimeMillis();
+    QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
+        .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
+    totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM,
+        totalBlockletStatistic.getCount() + 1);
     // apply filter on actual data
     BitSetGroup bitSetGroup = this.filterExecuter.applyFilter(blocksChunkHolder);
     // if indexes is empty then return with empty result
     if (bitSetGroup.isEmpty()) {
       CarbonUtil.freeMemory(blocksChunkHolder.getDimensionRawDataChunk(),
           blocksChunkHolder.getMeasureRawDataChunk());
+
+      QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
+          .get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME);
+      scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
+          scanTime.getCount() + (System.currentTimeMillis() - startTime));
+
+      QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap()
+          .get(QueryStatisticsConstants.PAGE_SCANNED);
+      scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED,
+          scannedPages.getCount() + bitSetGroup.getScannedPages());
       return createEmptyResult();
     }
 
@@ -162,18 +174,15 @@ public class FilterScanner extends AbstractBlockletScanner {
     validScannedBlockletStatistic
         .addCountStatistic(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM,
             validScannedBlockletStatistic.getCount() + 1);
-    queryStatisticsModel.getRecorder().recordStatistics(validScannedBlockletStatistic);
     // adding statistics for valid number of pages
     QueryStatistic validPages = queryStatisticsModel.getStatisticsTypeAndObjMap()
         .get(QueryStatisticsConstants.VALID_PAGE_SCANNED);
     validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED,
         validPages.getCount() + bitSetGroup.getValidPages());
-    queryStatisticsModel.getRecorder().recordStatistics(validPages);
-    QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
-        .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
-    totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM,
-        totalBlockletStatistic.getCount() + 1);
-    queryStatisticsModel.getRecorder().recordStatistics(totalBlockletStatistic);
+    QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap()
+        .get(QueryStatisticsConstants.PAGE_SCANNED);
+    scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED,
+        scannedPages.getCount() + bitSetGroup.getScannedPages());
     int[] rowCount = new int[bitSetGroup.getNumberOfPages()];
     // get the row indexes from bot set
     int[][] indexesGroup = new int[bitSetGroup.getNumberOfPages()][];
@@ -199,8 +208,11 @@ public class FilterScanner extends AbstractBlockletScanner {
     FileHolder fileReader = blocksChunkHolder.getFileReader();
     int[][] allSelectedDimensionBlocksIndexes =
         blockExecutionInfo.getAllSelectedDimensionBlocksIndexes();
+
+    long dimensionReadTime = System.currentTimeMillis();
     DimensionRawColumnChunk[] projectionListDimensionChunk = blocksChunkHolder.getDataBlock()
         .getDimensionChunks(fileReader, allSelectedDimensionBlocksIndexes);
+    dimensionReadTime = System.currentTimeMillis() - dimensionReadTime;
 
     DimensionRawColumnChunk[] dimensionRawColumnChunks =
         new DimensionRawColumnChunk[blockExecutionInfo.getTotalNumberDimensionBlock()];
@@ -216,8 +228,8 @@ public class FilterScanner extends AbstractBlockletScanner {
         dimensionRawColumnChunks[j] = projectionListDimensionChunk[j];
       }
     }
-
-    /*
+    long dimensionReadTime1 = System.currentTimeMillis();
+    /**
      * in case projection if the projected dimension are not loaded in the dimensionColumnDataChunk
      * then loading them
      */
@@ -230,12 +242,15 @@ public class FilterScanner extends AbstractBlockletScanner {
                 .getDimensionChunk(fileReader, projectionListDimensionIndexes[i]);
       }
     }
+    dimensionReadTime += (System.currentTimeMillis() - dimensionReadTime1);
+    dimensionReadTime1 = System.currentTimeMillis();
     MeasureRawColumnChunk[] measureRawColumnChunks =
         new MeasureRawColumnChunk[blockExecutionInfo.getTotalNumberOfMeasureBlock()];
     int[][] allSelectedMeasureBlocksIndexes =
         blockExecutionInfo.getAllSelectedMeasureBlocksIndexes();
     MeasureRawColumnChunk[] projectionListMeasureChunk = blocksChunkHolder.getDataBlock()
         .getMeasureChunks(fileReader, allSelectedMeasureBlocksIndexes);
+    dimensionReadTime += System.currentTimeMillis() - dimensionReadTime1;
     // read the measure chunk blocks which is not present
     for (int i = 0; i < measureRawColumnChunks.length; i++) {
       if (null != blocksChunkHolder.getMeasureRawDataChunk()[i]) {
@@ -248,9 +263,10 @@ public class FilterScanner extends AbstractBlockletScanner {
         measureRawColumnChunks[j] = projectionListMeasureChunk[j];
       }
     }
-    /*
-      in case projection if the projected measure are not loaded in the measureColumnDataChunk
-      then loading them
+    dimensionReadTime1 = System.currentTimeMillis();
+    /**
+     * in case projection if the projected measure are not loaded in the measureColumnDataChunk
+     * then loading them
      */
     int[] projectionListMeasureIndexes = blockExecutionInfo.getProjectionListMeasureIndexes();
     int projectionListMeasureIndexesLength = projectionListMeasureIndexes.length;
@@ -260,6 +276,7 @@ public class FilterScanner extends AbstractBlockletScanner {
             .getMeasureChunk(fileReader, projectionListMeasureIndexes[i]);
       }
     }
+    dimensionReadTime += System.currentTimeMillis() - dimensionReadTime1;
     DimensionColumnDataChunk[][] dimensionColumnDataChunks =
         new DimensionColumnDataChunk[dimensionRawColumnChunks.length][indexesGroup.length];
     MeasureColumnDataChunk[][] measureColumnDataChunks =
@@ -287,9 +304,11 @@ public class FilterScanner extends AbstractBlockletScanner {
     QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
         .get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME);
     scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
-        scanTime.getCount() + (System.currentTimeMillis() - startTime));
-    queryStatisticsModel.getRecorder().recordStatistics(scanTime);
-
+        scanTime.getCount() + (System.currentTimeMillis() - startTime - dimensionReadTime));
+    QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
+        .get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
+    readTime.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME,
+        readTime.getCount() + dimensionReadTime);
     return scannedResult;
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ded55071/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsConstants.java b/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsConstants.java
index c8fa4a1..c2cda7c 100644
--- a/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsConstants.java
@@ -24,7 +24,8 @@ public interface QueryStatisticsConstants {
 
   String LOAD_META = "Time taken to load meta data In Driver Side";
 
-  String LOAD_BLOCKS_DRIVER = "Time taken to load the Block(s) In Driver Side";
+  String LOAD_BLOCKS_DRIVER = "Time taken to load the Block(s) In Driver Side "
+      + "with Block count ";
 
   String BLOCK_ALLOCATION = "Total Time taken in block(s) allocation";
 
@@ -55,6 +56,8 @@ public interface QueryStatisticsConstants {
 
   String TOTAL_PAGE_SCANNED = "The number of total page scanned";
 
+  String PAGE_SCANNED = "The number of page scanned";
+
   // clear no-use statistics timeout
   long CLEAR_STATISTICS_TIMEOUT = 60 * 1000 * 1000000L;
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ded55071/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsRecorderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsRecorderImpl.java b/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsRecorderImpl.java
index 16abaa4..f84a674 100644
--- a/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsRecorderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsRecorderImpl.java
@@ -98,6 +98,7 @@ public class QueryStatisticsRecorderImpl implements QueryStatisticsRecorder, Serializable
     long valid_pages_blocklet = 0;
     long total_pages = 0;
     long readTime = 0;
+    long scannedPages = 0;
     try {
       for (QueryStatistic statistic : queryStatistics) {
         switch (statistic.getMessage()) {
@@ -134,6 +135,9 @@ public class QueryStatisticsRecorderImpl implements QueryStatisticsRecorder, Serializable
           case QueryStatisticsConstants.READ_BLOCKlET_TIME:
             readTime = statistic.getCount();
             break;
+          case QueryStatisticsConstants.PAGE_SCANNED:
+            scannedPages = statistic.getCount();
+            break;
           default:
             break;
         }
@@ -141,7 +145,7 @@ public class QueryStatisticsRecorderImpl implements QueryStatisticsRecorder, Serializable
       String headers =
           "task_id,load_blocks_time,load_dictionary_time,carbon_scan_time,carbon_IO_time,"
               + "total_executor_time,scan_blocks_num,total_blocklets,"
-              + "valid_blocklets,total_pages,valid_pages,result_size";
+              + "valid_blocklets,total_pages,scanned_pages,valid_pages,result_size";
       List<String> values = new ArrayList<String>();
       values.add(queryIWthTask);
       values.add(load_blocks_time + "ms");
@@ -153,6 +157,7 @@ public class QueryStatisticsRecorderImpl implements QueryStatisticsRecorder, Serializable
       values.add(String.valueOf(total_blocklet));
       values.add(String.valueOf(valid_scan_blocklet));
       values.add(String.valueOf(total_pages));
+      values.add(String.valueOf(scannedPages));
       values.add(String.valueOf(valid_pages_blocklet));
       values.add(String.valueOf(result_size));
       StringBuilder tableInfo = new StringBuilder();
@@ -174,6 +179,7 @@ public class QueryStatisticsRecorderImpl implements QueryStatisticsRecorder, Serializable
       tableInfo.append(line).append("+").append("\n");
       return "Print query statistic for each task id:" + "\n" + tableInfo.toString();
     } catch (Exception ex) {
+      LOGGER.error(ex);
       return "Put statistics into table failed, catch exception: " + ex.getMessage();
     }
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ded55071/core/src/main/java/org/apache/carbondata/core/util/BitSetGroup.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/BitSetGroup.java b/core/src/main/java/org/apache/carbondata/core/util/BitSetGroup.java
index 87cbe77..df2d788 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/BitSetGroup.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/BitSetGroup.java
@@ -87,7 +87,18 @@ public class BitSetGroup {
   public int getValidPages() {
     int numberOfPages = 0;
     for (int i = 0; i < bitSets.length; i++) {
-      numberOfPages += (bitSets[i] == null || bitSets[i].isEmpty()) ? 0 : 1;
+      numberOfPages += (bitSets[i] != null && !bitSets[i].isEmpty()) ? 1 : 0;
+    }
+    return numberOfPages;
+  }
+
+  /**
+   * @return the number of scanned pages (pages with a non-null bitset)
+   */
+  public int getScannedPages() {
+    int numberOfPages = 0;
+    for (int i = 0; i < bitSets.length; i++) {
+      numberOfPages += bitSets[i] == null ? 0 : 1;
     }
     return numberOfPages;
   }


Mime
View raw message