carbondata-issues mailing list archives

From jackylk <...@git.apache.org>
Subject [GitHub] carbondata pull request #1657: [CARBONDATA-1895] Fix issue of create table i...
Date Fri, 15 Dec 2017 02:22:52 GMT
Github user jackylk commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/1657#discussion_r157111874
  
    --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala ---
    @@ -62,66 +62,66 @@ case class CarbonCreateTableCommand(
               s"Table [$tableName] already exists under database [$dbName]")
             throw new TableAlreadyExistsException(dbName, tableName)
           }
    -    }
    -
    -    val tablePath = tableLocation.getOrElse(
    -      CarbonEnv.getTablePath(Some(dbName), tableName)(sparkSession))
    -    tableInfo.setTablePath(tablePath)
    -    val tableIdentifier = AbsoluteTableIdentifier.from(tablePath, dbName, tableName)
    +    } else {
    +      val tablePath = tableLocation.getOrElse(
    +        CarbonEnv.getTablePath(Some(dbName), tableName)(sparkSession))
    +      tableInfo.setTablePath(tablePath)
    +      val tableIdentifier = AbsoluteTableIdentifier.from(tablePath, dbName, tableName)
     
    -    // Add validation for sort scope when create table
    -    val sortScope = tableInfo.getFactTable.getTableProperties.asScala
    -      .getOrElse("sort_scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
    -    if (!CarbonUtil.isValidSortOption(sortScope)) {
    -      throw new InvalidConfigurationException(
    -        s"Passing invalid SORT_SCOPE '$sortScope', valid SORT_SCOPE are 'NO_SORT', 'BATCH_SORT',"
+
    -        s" 'LOCAL_SORT' and 'GLOBAL_SORT' ")
    -    }
    +      // Add validation for sort scope when create table
    +      val sortScope = tableInfo.getFactTable.getTableProperties.asScala
    +        .getOrElse("sort_scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
    +      if (!CarbonUtil.isValidSortOption(sortScope)) {
    +        throw new InvalidConfigurationException(
    +          s"Passing invalid SORT_SCOPE '$sortScope', valid SORT_SCOPE are 'NO_SORT',"
+
    +          s" 'BATCH_SORT', 'LOCAL_SORT' and 'GLOBAL_SORT' ")
    +      }
     
    -    if (tableInfo.getFactTable.getListOfColumns.size <= 0) {
    -      CarbonException.analysisException("Table should have at least one column.")
    -    }
    +      if (tableInfo.getFactTable.getListOfColumns.size <= 0) {
    +        CarbonException.analysisException("Table should have at least one column.")
    +      }
     
    -    val operationContext = new OperationContext
    -    val createTablePreExecutionEvent: CreateTablePreExecutionEvent =
    -      CreateTablePreExecutionEvent(sparkSession, tableIdentifier, Some(tableInfo))
    -    OperationListenerBus.getInstance.fireEvent(createTablePreExecutionEvent, operationContext)
    -    val catalog = CarbonEnv.getInstance(sparkSession).carbonMetastore
    -    val carbonSchemaString = catalog.generateTableSchemaString(tableInfo, tableIdentifier)
    -    if (createDSTable) {
    -      try {
    -        val tablePath = tableIdentifier.getTablePath
    -        val carbonRelation = CarbonSparkUtil.createCarbonRelation(tableInfo, tablePath)
    -        val rawSchema = CarbonSparkUtil.getRawSchema(carbonRelation)
    -        sparkSession.sparkContext.setLocalProperty(EXECUTION_ID_KEY, null)
    -        sparkSession.sql(
    -          s"""CREATE TABLE $dbName.$tableName
    -             |(${ rawSchema })
    -             |USING org.apache.spark.sql.CarbonSource
    -             |OPTIONS (
    -             |  tableName "$tableName",
    -             |  dbName "$dbName",
    -             |  tablePath "$tablePath",
    -             |  path "$tablePath"
    -             |  $carbonSchemaString)
    -             """.stripMargin)
    -      } catch {
    -        case e: AnalysisException => throw e
    -        case e: Exception =>
    -          // call the drop table to delete the created table.
    -          CarbonEnv.getInstance(sparkSession).carbonMetastore
    -            .dropTable(tableIdentifier)(sparkSession)
    +      val operationContext = new OperationContext
    +      val createTablePreExecutionEvent: CreateTablePreExecutionEvent =
    +        CreateTablePreExecutionEvent(sparkSession, tableIdentifier, Some(tableInfo))
    +      OperationListenerBus.getInstance.fireEvent(createTablePreExecutionEvent, operationContext)
    +      val catalog = CarbonEnv.getInstance(sparkSession).carbonMetastore
    +      val carbonSchemaString = catalog.generateTableSchemaString(tableInfo, tableIdentifier)
    +      if (createDSTable) {
    +        try {
    +          val tablePath = tableIdentifier.getTablePath
    +          val carbonRelation = CarbonSparkUtil.createCarbonRelation(tableInfo, tablePath)
    +          val rawSchema = CarbonSparkUtil.getRawSchema(carbonRelation)
    +          sparkSession.sparkContext.setLocalProperty(EXECUTION_ID_KEY, null)
    +          sparkSession.sql(
    +            s"""CREATE TABLE $dbName.$tableName
    +               |(${ rawSchema })
    +               |USING org.apache.spark.sql.CarbonSource
    +               |OPTIONS (
    +               |  tableName "$tableName",
    +               |  dbName "$dbName",
    +               |  tablePath "$tablePath",
    +               |  path "$tablePath"
    +               |  $carbonSchemaString)
    +             """.stripMargin).collect()
    --- End diff --
    
    `collect()` is not required here: Spark executes DDL statements eagerly, so the CREATE TABLE has already taken effect by the time `sparkSession.sql` returns.
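
    For illustration, a minimal sketch of that behavior (a plain local SparkSession and a hypothetical table name, not CarbonData code): the table exists as soon as `sql()` returns, without any action being called on the returned DataFrame.

    import org.apache.spark.sql.SparkSession

    object EagerDdlSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[1]")
          .appName("eager-ddl-sketch")
          .getOrCreate()

        // DDL commands run eagerly: the CREATE TABLE has already executed
        // by the time sql() returns, even though no action is invoked on
        // the returned DataFrame.
        spark.sql("CREATE TABLE demo_tbl (id INT) USING parquet")

        // No collect() was needed above; the catalog already sees the table.
        assert(spark.catalog.tableExists("demo_tbl"))

        spark.stop()
      }
    }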


---
