carbondata-commits mailing list archives

From jack...@apache.org
Subject [10/15] carbondata git commit: [CARBONDATA-1311] Added carbon storelocation to spark warehouse. And extracted storelocation out of metastore
Date Tue, 25 Jul 2017 17:37:47 GMT
http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetaStore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetaStore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetaStore.scala
index ab27b4f..bfe1276 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetaStore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetaStore.scala
@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, CarbonTableIdentifier}
-import org.apache.carbondata.core.metadata.schema.table
+import org.apache.carbondata.core.metadata.schema
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.core.util.path.CarbonTablePath
@@ -50,17 +50,6 @@ trait CarbonMetaStore {
       absIdentifier: AbsoluteTableIdentifier,
       sparkSession: SparkSession): CarbonRelation
 
-  /**
-   * Get table meta
-   * TODO remove it if possible
-   * @param database
-   * @param tableName
-   * @param readStore
-   * @return
-   */
-  def getTableFromMetadata(database: String,
-      tableName: String,
-      readStore: Boolean = false): Option[TableMeta]
 
   def tableExists(
     table: String,
@@ -68,8 +57,6 @@ trait CarbonMetaStore {
 
   def tableExists(tableIdentifier: TableIdentifier)(sparkSession: SparkSession): Boolean
 
-  def loadMetadata(metadataPath: String, queryId: String): MetaData
-
   /**
    * This method will overwrite the existing schema and update it with the given details
    *
@@ -90,23 +77,26 @@ trait CarbonMetaStore {
    *
    * @param carbonTableIdentifier
    * @param thriftTableInfo
-   * @param carbonStorePath
+   * @param tablePath
    * @param sparkSession
    */
   def revertTableSchema(carbonTableIdentifier: CarbonTableIdentifier,
       thriftTableInfo: org.apache.carbondata.format.TableInfo,
-      carbonStorePath: String)
+      tablePath: String)
     (sparkSession: SparkSession): String
 
   /**
-   *
-   * Prepare Thrift Schema from wrapper TableInfo and write to Schema file.
-   * Load CarbonTable from wrapper tableInfo
-   *
+   * Prepare Thrift Schema from wrapper TableInfo and write to disk
    */
-  def createTableFromThrift(tableInfo: table.TableInfo,
-      dbName: String,
-      tableName: String)(sparkSession: SparkSession): (String, String)
+  def saveToDisk(tableInfo: schema.table.TableInfo, tablePath: String)
+
+  /**
+   * Generates schema string to save it in hive metastore
+   * @param tableInfo
+   * @return
+   */
+  def generateTableSchemaString(tableInfo: schema.table.TableInfo,
+      tablePath: String): String
 
   /**
    * This method will remove the table meta from catalog metadata array
@@ -117,25 +107,25 @@ trait CarbonMetaStore {
   def removeTableFromMetadata(dbName: String, tableName: String): Unit
 
   def updateMetadataByThriftTable(schemaFilePath: String,
-      tableInfo: TableInfo, dbName: String, tableName: String, storePath: String): Unit
+      tableInfo: TableInfo, dbName: String, tableName: String, tablePath: String): Unit
 
   def isTablePathExists(tableIdentifier: TableIdentifier)(sparkSession: SparkSession): Boolean
 
-  def dropTable(tableStorePath: String, tableIdentifier: TableIdentifier)
+  def dropTable(tablePath: String, tableIdentifier: TableIdentifier)
     (sparkSession: SparkSession)
 
-  def updateAndTouchSchemasUpdatedTime(databaseName: String, tableName: String)
+  def updateAndTouchSchemasUpdatedTime(basePath: String)
 
-  def checkSchemasModifiedTimeAndReloadTables()
+  def checkSchemasModifiedTimeAndReloadTables(storePath: String)
 
   def isReadFromHiveMetaStore : Boolean
 
   def listAllTables(sparkSession: SparkSession): Seq[CarbonTable]
 
-  def storePath: String
-
   def getThriftTableInfo(tablePath: CarbonTablePath)(sparkSession: SparkSession): TableInfo
 
+  def getTableFromMetadataCache(database: String, tableName: String): Option[TableMeta]
+
 }
 
 /**
@@ -143,12 +133,12 @@ trait CarbonMetaStore {
  */
 object CarbonMetaStoreFactory {
 
-  def createCarbonMetaStore(conf: RuntimeConfig, storePath: String): CarbonMetaStore = {
+  def createCarbonMetaStore(conf: RuntimeConfig): CarbonMetaStore = {
     val readSchemaFromHiveMetaStore = readSchemaFromHive(conf)
     if (readSchemaFromHiveMetaStore) {
-      new CarbonHiveMetaStore(conf, storePath)
+      new CarbonHiveMetaStore(conf)
     } else {
-      new CarbonFileMetastore(conf, storePath)
+      new CarbonFileMetastore(conf)
     }
   }
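
[Editor's note] A minimal, hypothetical caller sketch of the new factory signature, assuming an existing SparkSession; the store location is no longer passed in and is resolved later from the Spark warehouse configuration.

// Hypothetical usage sketch (not part of the patch): the factory now only needs the
// session's RuntimeConfig; whether a Hive- or file-backed metastore is built is still
// decided by readSchemaFromHive(conf).
val metastore: CarbonMetaStore =
  CarbonMetaStoreFactory.createCarbonMetaStore(sparkSession.conf)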
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionState.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionState.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionState.scala
index 4aef118..99a935b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionState.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionState.scala
@@ -94,16 +94,17 @@ class CarbonSessionCatalog(
   private def refreshRelationFromCache(name: TableIdentifier,
       alias: Option[String],
       carbonDatasourceHadoopRelation: CarbonDatasourceHadoopRelation): LogicalPlan = {
-    carbonEnv.carbonMetastore.checkSchemasModifiedTimeAndReloadTables
+    carbonEnv.carbonMetastore.
+      checkSchemasModifiedTimeAndReloadTables(CarbonEnv.getInstance(sparkSession).storePath)
     carbonEnv.carbonMetastore
-      .getTableFromMetadata(carbonDatasourceHadoopRelation.carbonTable.getDatabaseName,
+      .getTableFromMetadataCache(carbonDatasourceHadoopRelation.carbonTable.getDatabaseName,
         carbonDatasourceHadoopRelation.carbonTable.getFactTableName) match {
       case tableMeta: TableMeta =>
         if (tableMeta.carbonTable.getTableLastUpdatedTime !=
             carbonDatasourceHadoopRelation.carbonTable.getTableLastUpdatedTime) {
           refreshTable(name)
         }
-      case _ => refreshTable(name)
+      case _ =>
     }
     super.lookupRelation(name, alias)
   }
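
[Editor's note] A hedged sketch of the cache-lookup pattern this hunk switches to; method names are taken from the diff, database and table names are placeholders. Note the behavioural change: a cache miss no longer forces a refreshTable call.

// Sketch only: getTableFromMetadataCache returns Option[TableMeta], so the natural match
// is on Some/None rather than refreshing unconditionally on a miss.
val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
metastore.checkSchemasModifiedTimeAndReloadTables(CarbonEnv.getInstance(sparkSession).storePath)
metastore.getTableFromMetadataCache("default", "carbon_table") match {
  case Some(meta) =>
    // refresh only when the cached schema timestamp is stale
  case None =>
    // miss: with this patch applied, nothing is refreshed
}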

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
index e94b6ed..f764882 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
@@ -52,7 +52,7 @@ object Spark2TestQueryExecutor {
     .appName("Spark2TestQueryExecutor")
     .enableHiveSupport()
     .config("spark.sql.warehouse.dir", TestQueryExecutor.warehouse)
-    .getOrCreateCarbonSession(TestQueryExecutor.storeLocation, TestQueryExecutor.metastoredb)
+    .getOrCreateCarbonSession(null, TestQueryExecutor.metastoredb)
   spark.sparkContext.setLogLevel("ERROR")
 
 }
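
[Editor's note] To illustrate the intent of this test change: with this patch, passing null as the store location means the carbon store defaults to the Spark warehouse directory. A hedged, self-contained sketch; paths are placeholders and the builder extension is assumed to come from CarbonSession.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.CarbonSession._

// Placeholder paths; with a null store location the store ends up under spark.sql.warehouse.dir.
val spark = SparkSession
  .builder()
  .appName("CarbonWarehouseExample")
  .config("spark.sql.warehouse.dir", "/tmp/spark-warehouse")
  .getOrCreateCarbonSession(null, "/tmp/carbon-metastore_db")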

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
index 01bdc4f..74f4dd0 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
@@ -136,8 +136,9 @@ object AlterTableUtil {
         carbonTable.getCarbonTableIdentifier,
         thriftTable,
         schemaEvolutionEntry,
-        carbonTable.getStorePath)(sparkSession)
+        carbonTable.getAbsoluteTableIdentifier.getTablePath)(sparkSession)
     val tableIdentifier = TableIdentifier(tableName, Some(dbName))
+    sparkSession.catalog.refreshTable(tableIdentifier.quotedString)
     val schema = CarbonEnv.getInstance(sparkSession).carbonMetastore
       .lookupRelation(tableIdentifier)(sparkSession).schema.json
     val schemaParts = prepareSchemaJsonForAlterTable(sparkSession.sparkContext.getConf, schema)
@@ -206,7 +207,8 @@ object AlterTableUtil {
           .renameForce(carbonTablePath.getParent.toString + CarbonCommonConstants.FILE_SEPARATOR +
                        oldTableIdentifier.table)
         val tableIdentifier = new CarbonTableIdentifier(database, oldTableIdentifier.table, tableId)
-        metastore.revertTableSchema(tableIdentifier, tableInfo, storePath)(sparkSession)
+        metastore.revertTableSchema(tableIdentifier,
+          tableInfo, carbonTablePath.getPath)(sparkSession)
         metastore.removeTableFromMetadata(database, newTableName)
       }
     }
@@ -238,7 +240,7 @@ object AlterTableUtil {
       thriftTable.fact_table.table_columns.removeAll(addedSchemas)
       metastore
         .revertTableSchema(carbonTable.getCarbonTableIdentifier,
-          thriftTable, carbonTable.getStorePath)(sparkSession)
+          thriftTable, carbonTable.getAbsoluteTableIdentifier.getTablePath)(sparkSession)
     }
   }
 
@@ -273,7 +275,7 @@ object AlterTableUtil {
       }
       metastore
         .revertTableSchema(carbonTable.getCarbonTableIdentifier,
-          thriftTable, carbonTable.getStorePath)(sparkSession)
+          thriftTable, carbonTable.getAbsoluteTableIdentifier.getTablePath)(sparkSession)
     }
   }
 
@@ -311,7 +313,7 @@ object AlterTableUtil {
       }
       metastore
         .revertTableSchema(carbonTable.getCarbonTableIdentifier,
-          thriftTable, carbonTable.getStorePath)(sparkSession)
+          thriftTable, carbonTable.getAbsoluteTableIdentifier.getTablePath)(sparkSession)
     }
   }
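
[Editor's note] The hunks in this file replace carbonTable.getStorePath with the table path from the AbsoluteTableIdentifier. As a hedged orientation, under the store layout exercised by the tests later in this message the table path is simply the store location plus database and table name; values below are placeholders.

import java.io.File

// Hedged sketch: table path derived as <storeLocation>/<database>/<table>.
val storeLocation = "/tmp/carbonstore"
val tablePath = s"$storeLocation${File.separator}default${File.separator}carbon_table"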
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala b/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
index 645081f..df73641 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
@@ -47,7 +47,8 @@ object CleanFiles {
     val storePath = TableAPIUtil.escape(args(0))
     val (dbName, tableName) = TableAPIUtil.parseSchemaName(TableAPIUtil.escape(args(1)))
     val spark = TableAPIUtil.spark(storePath, s"CleanFiles: $dbName.$tableName")
-    CarbonEnv.getInstance(spark).carbonMetastore.checkSchemasModifiedTimeAndReloadTables()
+    CarbonEnv.getInstance(spark).carbonMetastore.
+      checkSchemasModifiedTimeAndReloadTables(CarbonEnv.getInstance(spark).storePath)
     cleanFiles(spark, dbName, tableName, storePath)
   }
 }
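
[Editor's note] The same reload-with-explicit-store-path pattern is applied to the Compaction, DeleteSegmentByDate, DeleteSegmentById, ShowSegments and TableLoader tools below. A hedged example of invoking one of these CLIs, with a placeholder store path and table name:

// Placeholder arguments: <store path> <db.table>; the tool reloads schemas from that
// store location before running.
CleanFiles.main(Array("/tmp/carbonstore", "default.carbon_table"))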

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala b/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
index d78fd5f..d00cb84 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
@@ -56,7 +56,8 @@ object Compaction {
     val (dbName, tableName) = TableAPIUtil.parseSchemaName(TableAPIUtil.escape(args(1)))
     val compactionType = TableAPIUtil.escape(args(2))
     val spark = TableAPIUtil.spark(storePath, s"Compaction: $dbName.$tableName")
-    CarbonEnv.getInstance(spark).carbonMetastore.checkSchemasModifiedTimeAndReloadTables()
+    CarbonEnv.getInstance(spark).carbonMetastore.
+      checkSchemasModifiedTimeAndReloadTables(CarbonEnv.getInstance(spark).storePath)
     compaction(spark, dbName, tableName, compactionType)
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
index f67a5ce..4aaec8f 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
@@ -47,7 +47,8 @@ object DeleteSegmentByDate {
     val (dbName, tableName) = TableAPIUtil.parseSchemaName(TableAPIUtil.escape(args(1)))
     val dateValue = TableAPIUtil.escape(args(2))
     val spark = TableAPIUtil.spark(storePath, s"DeleteSegmentByDate: $dbName.$tableName")
-    CarbonEnv.getInstance(spark).carbonMetastore.checkSchemasModifiedTimeAndReloadTables()
+    CarbonEnv.getInstance(spark).carbonMetastore.
+      checkSchemasModifiedTimeAndReloadTables(CarbonEnv.getInstance(spark).storePath)
     deleteSegmentByDate(spark, dbName, tableName, dateValue)
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
index bbf386e..c86c7f5 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
@@ -52,7 +52,8 @@ object DeleteSegmentById {
     val (dbName, tableName) = TableAPIUtil.parseSchemaName(TableAPIUtil.escape(args(1)))
     val segmentIds = extractSegmentIds(TableAPIUtil.escape(args(2)))
     val spark = TableAPIUtil.spark(storePath, s"DeleteSegmentById: $dbName.$tableName")
-    CarbonEnv.getInstance(spark).carbonMetastore.checkSchemasModifiedTimeAndReloadTables()
+    CarbonEnv.getInstance(spark).carbonMetastore.
+      checkSchemasModifiedTimeAndReloadTables(CarbonEnv.getInstance(spark).storePath)
     deleteSegmentById(spark, dbName, tableName, segmentIds)
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/main/scala/org/apache/spark/util/ShowSegments.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/ShowSegments.scala b/integration/spark2/src/main/scala/org/apache/spark/util/ShowSegments.scala
index d5788ba..19d7dce 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/ShowSegments.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/ShowSegments.scala
@@ -78,7 +78,8 @@ object ShowSegments {
       None
     }
     val spark = TableAPIUtil.spark(storePath, s"ShowSegments: $dbName.$tableName")
-    CarbonEnv.getInstance(spark).carbonMetastore.checkSchemasModifiedTimeAndReloadTables()
+    CarbonEnv.getInstance(spark).carbonMetastore.
+      checkSchemasModifiedTimeAndReloadTables(CarbonEnv.getInstance(spark).storePath)
     val rows = showSegments(spark, dbName, tableName, limit)
     System.out.println(showString(rows))
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala b/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
index 82d8da2..8adaf00 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
@@ -80,7 +80,8 @@ object TableLoader {
 
     val spark = TableAPIUtil.spark(storePath, s"TableLoader: $dbName.$tableName")
 
-    CarbonEnv.getInstance(spark).carbonMetastore.checkSchemasModifiedTimeAndReloadTables()
+    CarbonEnv.getInstance(spark).carbonMetastore.
+      checkSchemasModifiedTimeAndReloadTables(CarbonEnv.getInstance(spark).storePath)
     loadTable(spark, Option(dbName), tableName, inputPaths, map)
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala b/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
index a456b6d..233c03f 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
@@ -26,7 +26,8 @@ import org.apache.spark.sql.test.TestQueryExecutor
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.api.CarbonStore
-import org.apache.carbondata.core.util.CarbonUtil
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
 
 class CarbonCommandSuite extends Spark2QueryTest with BeforeAndAfterAll {
 
@@ -93,32 +94,34 @@ class CarbonCommandSuite extends Spark2QueryTest with BeforeAndAfterAll {
     dropTable("carbon_table")
   }
 
+  private lazy val location =
+    CarbonProperties.getInstance().getProperty(CarbonCommonConstants.STORE_LOCATION)
   test("show segment") {
-    ShowSegments.main(Array(s"${CarbonUtil.getCarbonStorePath}", "carbon_table"))
+    ShowSegments.main(Array(s"${location}", "carbon_table"))
   }
 
   test("delete segment by id") {
-    DeleteSegmentById.main(Array(s"${CarbonUtil.getCarbonStorePath}", "carbon_table", "0"))
-    assert(!CarbonStore.isSegmentValid("default", "carbon_table", "0"))
+    DeleteSegmentById.main(Array(s"${location}", "carbon_table", "0"))
+    assert(!CarbonStore.isSegmentValid("default", "carbon_table",location,  "0"))
   }
 
   test("delete segment by date") {
     createAndLoadTestTable("carbon_table2", "csv_table")
     val time = new Timestamp(new Date().getTime)
-    DeleteSegmentByDate.main(Array(s"${CarbonUtil.getCarbonStorePath}", "carbon_table2", time.toString))
-    assert(!CarbonStore.isSegmentValid("default", "carbon_table2", "0"))
+    DeleteSegmentByDate.main(Array(s"${location}", "carbon_table2", time.toString))
+    assert(!CarbonStore.isSegmentValid("default", "carbon_table2", location, "0"))
     dropTable("carbon_table2")
   }
 
   test("clean files") {
     val table = "carbon_table3"
     createAndLoadTestTable(table, "csv_table")
-    ShowSegments.main(Array(s"${CarbonUtil.getCarbonStorePath}", table))
-    DeleteSegmentById.main(Array(s"${CarbonUtil.getCarbonStorePath}", table, "0"))
-    ShowSegments.main(Array(s"${CarbonUtil.getCarbonStorePath}", table))
-    CleanFiles.main(Array(s"${CarbonUtil.getCarbonStorePath}", table))
-    ShowSegments.main(Array(s"${CarbonUtil.getCarbonStorePath}", table))
-    val tablePath = s"${CarbonUtil.getCarbonStorePath}${File.separator}default${File.separator}$table"
+    ShowSegments.main(Array(s"${location}", table))
+    DeleteSegmentById.main(Array(s"${location}", table, "0"))
+    ShowSegments.main(Array(s"${location}", table))
+    CleanFiles.main(Array(s"${location}", table))
+    ShowSegments.main(Array(s"${location}", table))
+    val tablePath = s"${location}${File.separator}default${File.separator}$table"
     val f = new File(s"$tablePath/Fact/Part0")
     assert(f.isDirectory)
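
[Editor's note] A hedged note on the test changes above: the store location is now read from CarbonProperties rather than from CarbonUtil.getCarbonStorePath. A minimal sketch of that lookup, assuming the property was populated when the CarbonSession was created:

import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties

// Assumes CarbonCommonConstants.STORE_LOCATION was set during session creation.
val location = CarbonProperties.getInstance().getProperty(CarbonCommonConstants.STORE_LOCATION)
ShowSegments.main(Array(location, "carbon_table"))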
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/processing/src/main/java/org/apache/carbondata/processing/merger/TableMeta.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/TableMeta.java b/processing/src/main/java/org/apache/carbondata/processing/merger/TableMeta.java
index b84d695..09dbfff 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/TableMeta.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/TableMeta.java
@@ -29,11 +29,13 @@ public class TableMeta implements Serializable {
   public CarbonTableIdentifier carbonTableIdentifier;
   public String storePath;
   public CarbonTable carbonTable;
+  public String tablePath;
 
-  public TableMeta(CarbonTableIdentifier carbonTableIdentifier, String storePath,
+  public TableMeta(CarbonTableIdentifier carbonTableIdentifier, String storePath, String tablePath,
       CarbonTable carbonTable) {
     this.carbonTableIdentifier = carbonTableIdentifier;
     this.storePath = storePath;
+    this.tablePath = tablePath;
     this.carbonTable = carbonTable;
   }
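
[Editor's note] A hedged sketch of constructing the extended TableMeta from Scala, since the new tablePath field now travels alongside storePath. All values are placeholders and carbonTable is assumed to be an existing CarbonTable.

// Placeholder identifiers and paths only.
val meta = new TableMeta(
  new CarbonTableIdentifier("default", "carbon_table", "0"),   // db, table, tableId
  "/tmp/carbonstore",                                          // storePath
  "/tmp/carbonstore/default/carbon_table",                     // new tablePath field
  carbonTable)                                                 // assumed CarbonTable instance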
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c3b26663/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
index 62f13db..17f4df6 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
@@ -173,8 +173,10 @@ public final class CarbonDataProcessorUtil {
       String taskId, String partitionId, String segmentId, boolean isCompactionFlow) {
     String tempLocationKey =
         getTempStoreLocationKey(databaseName, tableName, taskId, isCompactionFlow);
-    String baseStorePath = CarbonProperties.getInstance()
-        .getProperty(tempLocationKey, CarbonCommonConstants.STORE_LOCATION_DEFAULT_VAL);
+    String baseStorePath = CarbonProperties.getInstance().getProperty(tempLocationKey);
+    if (baseStorePath == null) {
+      LOGGER.warn("Location not set for the key " + tempLocationKey);
+    }
     CarbonTable carbonTable = CarbonMetadata.getInstance()
         .getCarbonTable(databaseName + CarbonCommonConstants.UNDERSCORE + tableName);
     CarbonTablePath carbonTablePath =
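
[Editor's note] The hunk above drops the STORE_LOCATION_DEFAULT_VAL fallback, so the temp store location must now be registered explicitly before this lookup runs, otherwise only a warning is logged. A hedged sketch of what a load flow would do; the key is produced by getTempStoreLocationKey as in the diff, and the path and identifiers are placeholders.

// Register the temp location up front; without it the lookup above only logs a warning.
val tempLocationKey = CarbonDataProcessorUtil
  .getTempStoreLocationKey("default", "carbon_table", "0", false)
CarbonProperties.getInstance().addProperty(tempLocationKey, "/tmp/carbon-tmp/default/carbon_table")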

