carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From kumarvisha...@apache.org
Subject carbondata git commit: [CARBONDATA-2958] Compaction with CarbonProperty 'carbon.enable.page.level.reader.in.compaction' enabled fails as Compressor is null
Date Mon, 24 Sep 2018 06:54:58 GMT
Repository: carbondata
Updated Branches:
  refs/heads/master 8320918e5 -> ed8564421


[CARBONDATA-2958] Compaction with CarbonProperty 'carbon.enable.page.level.reader.in.compaction'
enabled fails as Compressor is null

Problem:
When CarbonProperty 'carbon.enable.page.level.reader.in.compaction' is enabled, compaction
fails throwing Null Pointer Exception as compressor is Null
Solution:
Set compressor from pageMetaData

This closes #2745


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/ed856442
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/ed856442
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/ed856442

Branch: refs/heads/master
Commit: ed856442166a96d1b414336945fb1dbc1d514c4a
Parents: 8320918
Author: Indhumathi27 <indhumathim27@gmail.com>
Authored: Fri Sep 21 15:24:39 2018 +0530
Committer: kumarvishal09 <kumarvishal1802@gmail.com>
Committed: Mon Sep 24 12:24:28 2018 +0530

----------------------------------------------------------------------
 ...essedDimChunkFileBasedPageLevelReaderV3.java |  7 +++++++
 ...andardPartitionTableCompactionTestCase.scala | 22 ++++++++++++++++++++
 2 files changed, 29 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/ed856442/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimChunkFileBasedPageLevelReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimChunkFileBasedPageLevelReaderV3.java
b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimChunkFileBasedPageLevelReaderV3.java
index e69984b..6efaf8a 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimChunkFileBasedPageLevelReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimChunkFileBasedPageLevelReaderV3.java
@@ -23,8 +23,10 @@ import java.nio.ByteBuffer;
 import org.apache.carbondata.core.datastore.FileReader;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
+import org.apache.carbondata.core.datastore.compression.CompressorFactory;
 import org.apache.carbondata.core.memory.MemoryException;
 import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
+import org.apache.carbondata.core.util.CarbonMetadataUtil;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.format.DataChunk2;
 import org.apache.carbondata.format.DataChunk3;
@@ -146,6 +148,11 @@ public class CompressedDimChunkFileBasedPageLevelReaderV3
     DataChunk3 dataChunk3 = dimensionRawColumnChunk.getDataChunkV3();
 
     pageMetadata = dataChunk3.getData_chunk_list().get(pageNumber);
+
+    if (compressor == null) {
+      this.compressor = CompressorFactory.getInstance().getCompressor(
+          CarbonMetadataUtil.getCompressorNameFromChunkMeta(pageMetadata.getChunk_meta()));
+    }
     // calculating the start point of data
     // as buffer can contain multiple column data, start point will be datachunkoffset +
     // data chunk length + page offset

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ed856442/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableCompactionTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableCompactionTestCase.scala
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableCompactionTestCase.scala
index 33e761f..23c2aa0 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableCompactionTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableCompactionTestCase.scala
@@ -16,6 +16,7 @@
  */
 package org.apache.carbondata.spark.testsuite.standardpartition
 
+import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
@@ -183,6 +184,27 @@ class StandardPartitionTableCompactionTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""alter table compactionupdatepartition compact 'major'""").collect
   }
 
+  test("test compaction when 'carbon.enable.page.level.reader.in.compaction' is set to true") {
+    sql("DROP TABLE IF EXISTS originTable")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_ENABLE_PAGE_LEVEL_READER_IN_COMPACTION, "true")
+    sql("create table originTable(a int, b string) stored by 'carbondata'")
+    sql("insert into originTable values(1,'abc')")
+    sql("insert into originTable values(1,'abc')")
+    sql("insert into originTable values(1,'abc')")
+    sql("insert into originTable values(1,'abc')")
+    sql("alter table originTable compact 'minor'")
+    checkAnswer(sql("select count(*) from originTable"), Seq(Row(4)))
+    sql("insert into originTable values(1,'abc')")
+    sql("insert into originTable values(1,'abc')")
+    sql("insert into originTable values(1,'abc')")
+    sql("insert into originTable values(1,'abc')")
+    sql("alter table originTable compact 'major'")
+    checkAnswer(sql("select count(*) from originTable"), Seq(Row(8)))
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_ENABLE_PAGE_LEVEL_READER_IN_COMPACTION, "false")
+  }
+
     override def afterAll = {
     dropTable
   }


Mime
View raw message