trafodion-commits mailing list archives

From ansha...@apache.org
Subject [1/2] trafodion git commit: TRAFODION-3086 DDL on Hive objs: use database and msck support
Date Tue, 03 Jul 2018 22:22:11 GMT
Repository: trafodion
Updated Branches:
  refs/heads/master be43908cb -> 7ee5b250b


TRAFODION-3086 DDL on Hive objs: use database and msck support

-- traf "set schema hive.<sch>" now behaves the same as Hive "use <db>".
   1) <sch> is validated to exist at set schema time.
   2) <sch> is set as the default schema (database) in the Hive environment
      for that session; any unqualified object reference in DDL resolves
      against it.

-- added support for the msck (metastore check) command.
   This command repairs a table by registering untracked files/partitions
   in the Hive metastore (see the usage sketch below).
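
A minimal sqlci sketch of the new behavior (schema/table names are the ones
used in the hive regression test; cqd traf_ddl_on_hive_objects 'ON' is
assumed to be in effect, as in regress/hive/TEST008):

    set schema hive.sch008;                    -- fails with ERROR[1003] if the Hive database does not exist
    create database hive.sch008;
    set schema hive.sch008;                    -- succeeds; sch008 becomes the Hive default database for this session
    create table t00807 (a int);               -- unqualified name resolves to hive.sch008.t00807

    msck repair table t00807;                  -- metastore check; either syntax may be used,
    alter table t00807 recover partitions;     -- both are sent to Hive as MSCK REPAIR TABLE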


Project: http://git-wip-us.apache.org/repos/asf/trafodion/repo
Commit: http://git-wip-us.apache.org/repos/asf/trafodion/commit/f5885e4f
Tree: http://git-wip-us.apache.org/repos/asf/trafodion/tree/f5885e4f
Diff: http://git-wip-us.apache.org/repos/asf/trafodion/diff/f5885e4f

Branch: refs/heads/master
Commit: f5885e4fb34a90e99adcf7ec841e70f85e70191a
Parents: 0ebd76e
Author: Anoop Sharma <anoop.sharma@esgyn.com>
Authored: Tue Jul 3 17:54:33 2018 +0000
Committer: Anoop Sharma <anoop.sharma@esgyn.com>
Committed: Tue Jul 3 17:54:33 2018 +0000

----------------------------------------------------------------------
 core/sql/comexe/ComTdbControl.h                 | 14 +++-
 core/sql/executor/ex_control.cpp                | 48 ++++++++++++
 core/sql/generator/GenExplain.cpp               |  6 +-
 core/sql/generator/GenRelDCL.cpp                | 39 ++++++++--
 core/sql/optimizer/BindRelExpr.cpp              | 29 +++----
 core/sql/parser/ParKeyWords.cpp                 |  2 +
 core/sql/parser/StmtDDLonHiveObjects.h          | 20 +++--
 core/sql/parser/sqlparser.y                     | 39 ++++++++++
 core/sql/regress/hive/DIFF008.KNOWN             |  2 +-
 core/sql/regress/hive/EXPECTED008               | 79 +++++++++++++++++++-
 core/sql/regress/hive/TEST008                   | 25 ++++++-
 core/sql/sqlci/sqlci_lex.ll                     |  1 +
 core/sql/sqlci/sqlci_yacc.y                     |  2 +
 core/sql/sqlcomp/CmpSeabaseDDLtable.cpp         | 69 ++++++++++++++++-
 .../main/java/org/trafodion/sql/HiveClient.java | 38 ++++++----
 15 files changed, 366 insertions(+), 47 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/comexe/ComTdbControl.h
----------------------------------------------------------------------
diff --git a/core/sql/comexe/ComTdbControl.h b/core/sql/comexe/ComTdbControl.h
index 9fe6029..474ade0 100644
--- a/core/sql/comexe/ComTdbControl.h
+++ b/core/sql/comexe/ComTdbControl.h
@@ -121,8 +121,19 @@ public:
   void setNonResettable(NABoolean nonResettable) 
                  { nonResettable_ = nonResettable; }
   NABoolean isNonResettable() { return (nonResettable_ != FALSE); } 
+
+  void setIsSetStmt(short v) {(v ? flags_ |= IS_SET_STMT : flags_ &= ~IS_SET_STMT); };
+  NABoolean isSetStmt() { return (flags_ & IS_SET_STMT) != 0; };
+
+  void setIsHiveSetSchema(short v) {(v ? flags_ |= IS_HIVE_SET_SCHEMA : flags_ &= ~IS_HIVE_SET_SCHEMA); };
+  NABoolean isHiveSetSchema() { return (flags_ & IS_HIVE_SET_SCHEMA) != 0; };
  
 private:
+  enum
+    {
+      IS_SET_STMT        = 0x0001,
+      IS_HIVE_SET_SCHEMA = 0x0002
+    };
 
   // Remember, when putting tablename into one of these 3 char*'s,
   // to save its ANSI name; e.g.
@@ -141,7 +152,8 @@ private:
   Int16 sqlTextCharSet_;           // 36-37
   Int16 actionType_;               // 38-39
   Int16 nonResettable_;            // 40-41
-  char fillersComTdbControl_[30];  // 42-71
+  UInt16 flags_;                   // 42-43
+  char fillersComTdbControl_[28];  // 44-71
 
 };
 #endif

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/executor/ex_control.cpp
----------------------------------------------------------------------
diff --git a/core/sql/executor/ex_control.cpp b/core/sql/executor/ex_control.cpp
index d2792f7..f33d241 100644
--- a/core/sql/executor/ex_control.cpp
+++ b/core/sql/executor/ex_control.cpp
@@ -178,6 +178,54 @@ short ExControlTcb::work()
   char *dummyReply = NULL;
   ULng32 dummyLen;
   
+  // if this is SET SCHEMA stmt of a HIVE schema, check that the schema
+  // exists. This is to be consistent with USE <database> functionality
+  // of Hive where a schema must exist before USE stmt can be issued.
+  // An error is returned if it does not exist.
+  if ((controlTdb().isSetStmt()) &&
+      (controlTdb().isHiveSetSchema()))
+    {
+      // set schema hive.<sch> stmt
+      // Check that it exists in Hive.
+      ComSchemaName csn(value[2]);
+      NAString hiveDB = ComConvertTrafHiveNameToNativeHiveName
+        (csn.getCatalogNamePart().getInternalName(),
+         csn.getSchemaNamePart().getInternalName(),
+         NAString(""));
+
+      NAString useDB("use " + hiveDB);
+      if (HiveClient_JNI::executeHiveSQL(useDB.data()) != HVC_OK)
+        {
+          ComDiagsArea *da = 
+            ComDiagsArea::allocate(getGlobals()->getDefaultHeap());
+          if (NAString(getSqlJniErrorStr()).contains("Database does not exist:"))
+            *da << DgSqlCode(-1003)
+                << DgString0(HIVE_SYSTEM_CATALOG)
+                << DgString1(hiveDB);
+          else
+            *da << DgSqlCode(-1214)
+                << DgString0(getSqlJniErrorStr())
+                << DgString1(useDB.data());
+          
+          ExHandleArkcmpErrors(qparent_, pentry_down, 0,
+                               getGlobals(), da);
+
+          ex_queue_entry * up_entry = qparent_.up->getTailEntry();
+          
+          up_entry->upState.parentIndex = 
+            pentry_down->downState.parentIndex;
+          
+          up_entry->upState.setMatchNo(0);
+          up_entry->upState.status = ex_queue::Q_NO_DATA;
+          
+          // insert into parent
+          qparent_.up->insert();
+          
+          qparent_.down->removeHead();
+          
+          return WORK_OK;
+        }      
+    }
 
   // Only a STATIC compile will actually affect Arkcmp's context.
   CmpCompileInfo c(buf, usedlen, sqlTextCharSet, NULL, 0, 0, 0);

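At run time the executor issues a Hive "use <db>" for the target schema: a
missing database is reported as SQL error 1003, and any other Hive failure is
surfaced as error 1214 together with the Hive error text. What the user sees
(as in EXPECTED008):

    >>set schema hive.sch008;

    *** ERROR[1003] Schema HIVE.SCH008 does not exist.

    --- SQL operation failed with errors.
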
http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/generator/GenExplain.cpp
----------------------------------------------------------------------
diff --git a/core/sql/generator/GenExplain.cpp b/core/sql/generator/GenExplain.cpp
index 951cfee..f766873 100644
--- a/core/sql/generator/GenExplain.cpp
+++ b/core/sql/generator/GenExplain.cpp
@@ -1139,7 +1139,11 @@ DDLExpr::addSpecificExplainInfo(ExplainTupleMaster *explainTuple,
             buffer += "object_name: unknown ";
           buffer += NAString("object_type: ") + hddl->getTypeStr() + " ";
           if (NOT hddl->getHiveDDL().isNull())
-            buffer += NAString("hive_ddl: ") + hddl->getHiveDDL() + " ";
+            {
+              if (NOT hddl->getHiveDefaultDB().isNull())
+                buffer += NAString("hive_default_db: ") + hddl->getHiveDefaultDB() + " ";
+              buffer += NAString("hive_ddl: ") + hddl->getHiveDDL() + " ";
+            }
           else
             buffer += "hive_ddl: unknown ";
         }

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/generator/GenRelDCL.cpp
----------------------------------------------------------------------
diff --git a/core/sql/generator/GenRelDCL.cpp b/core/sql/generator/GenRelDCL.cpp
index 821d416..1337aea 100644
--- a/core/sql/generator/GenRelDCL.cpp
+++ b/core/sql/generator/GenRelDCL.cpp
@@ -108,6 +108,33 @@ short ControlAbstractClass::codeGen(Generator * generator)
 	value_ = "OFF";
     }
 
+  Int16 reset;
+  if (cqt == DEFAULT_)
+     reset = ((token_ == "") ? -reset_ : reset_);
+  else
+     reset = reset_;
+
+  // if this is a SET SCHEMA stmt for a Hive schema, construct the value
+  // as fully qualified schema name (cat.sch).
+  // This string will be used at runtime to set this schema in Hive
+  // if the schema exists.
+  // See ExControlTcb::work for details.
+  NABoolean isHiveSetSchema = FALSE;
+  if ((cqt == DEFAULT_) && (dynamic()) && (token_ == "SCHEMA") && (reset == 0))
+    {
+      ComSchemaName csn(value_);
+      NAString catName(csn.getCatalogNamePart().getInternalName());
+      if (catName.isNull())
+        catName = CmpCommon::getDefaultString(CATALOG);
+      if (catName == HIVE_SYSTEM_CATALOG)
+        {
+          value_ = HIVE_SYSTEM_CATALOG;
+          value_ += ".";
+          value_ += csn.getSchemaNamePart().getInternalName();
+          isHiveSetSchema = TRUE;
+        }
+    }
+
   // We need txt/tok/val stuff if in [1] a dynamic compile (EXEC SQL PREPARE),
   // OR [2] a dynamic statement even in a static compile.
   //
@@ -125,12 +152,6 @@ short ControlAbstractClass::codeGen(Generator * generator)
     v[i++] = convertNAString(value_, space);
   }
 
-  Int16 reset;
-  if (cqt == DEFAULT_)
-     reset = ((token_ == "") ? -reset_ : reset_);
-  else
-     reset = reset_;
-
   ComTdbControl * control_tdb = new(space) 
     ComTdbControl(cqt,
 		  reset,
@@ -150,6 +171,12 @@ short ControlAbstractClass::codeGen(Generator * generator)
         ActiveSchemaDB()->getDefaults().isNonResetableAttribute(v[0]);
      control_tdb->setNonResettable(nonResettable);
      control_tdb->setControlActionType(((ControlQueryDefault *)this)->getHoldOrRestoreCQD());
+
+     if (dynamic()) // dynamic() is true for SET stmts
+       {
+         control_tdb->setIsSetStmt(TRUE);
+         control_tdb->setIsHiveSetSchema(isHiveSetSchema);
+       }
   }
   // no tupps are returned 
   generator->setCriDesc((ex_cri_desc *)(generator->getCriDesc(Generator::DOWN)),

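Note that this code path is taken not only for an explicit "set schema
hive.<sch>", but also when the catalog part is omitted and the session's
default catalog is HIVE (the catalog name is defaulted from the CATALOG
setting before the comparison). A hedged sketch, assuming the default catalog
has been switched to HIVE beforehand:

    cqd catalog 'HIVE';
    set schema sch008;        -- treated like "set schema hive.sch008" and validated against Hive
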
http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/optimizer/BindRelExpr.cpp
----------------------------------------------------------------------
diff --git a/core/sql/optimizer/BindRelExpr.cpp b/core/sql/optimizer/BindRelExpr.cpp
index 6d890f9..716a9c6 100644
--- a/core/sql/optimizer/BindRelExpr.cpp
+++ b/core/sql/optimizer/BindRelExpr.cpp
@@ -14403,20 +14403,23 @@ RelExpr * ControlQueryDefault::bindNode(BindWA *bindWA)
          }
        }
     }
-  
+
+
   if (holdOrRestoreCQD_ == 0)
     {
-  attrEnum_ = affectYourself ? defs.validateAndInsert(token_, value_, reset_)
-                             : defs.validate         (token_, value_, reset_);
-  if (attrEnum_ < 0)
-    {
-      if (bindWA) bindWA->setErrStatus();
-      return NULL;
-    }
-
-  // remember this control in the control table
-  if (affectYourself)
-    ActiveControlDB()->setControlDefault(this);
+      if (affectYourself)
+        attrEnum_ =  defs.validateAndInsert(token_, value_, reset_);
+      else
+        attrEnum_ = defs.validate(token_, value_, reset_);
+      if (attrEnum_ < 0)
+        {
+          if (bindWA) bindWA->setErrStatus();
+          return NULL;
+        }
+      
+      // remember this control in the control table
+      if (affectYourself)
+        ActiveControlDB()->setControlDefault(this);
     }
   else if ((holdOrRestoreCQD_ > 0) && (affectYourself))
     {
@@ -14427,7 +14430,7 @@ RelExpr * ControlQueryDefault::bindNode(BindWA *bindWA)
           return NULL;
         }
     }
-
+  
   return ControlAbstractClass::bindNode(bindWA);
 } // ControlQueryDefault::bindNode()
 

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/parser/ParKeyWords.cpp
----------------------------------------------------------------------
diff --git a/core/sql/parser/ParKeyWords.cpp b/core/sql/parser/ParKeyWords.cpp
index 0a152ce..1e8b936 100644
--- a/core/sql/parser/ParKeyWords.cpp
+++ b/core/sql/parser/ParKeyWords.cpp
@@ -675,6 +675,7 @@ ParKeyWord ParKeyWords::keyWords_[] = {
   ParKeyWord("MOVINGMAX",          TOK_MMAX,        NONRESTOKEN_),
   ParKeyWord("MOVINGMIN",          TOK_MMIN,        NONRESTOKEN_),
   ParKeyWord("MOVINGRANK",         TOK_MRANK,       NONRESTOKEN_),
+  ParKeyWord("MSCK",               TOK_MSCK,        NONRESTOKEN_),
   ParKeyWord("MOVINGSTDDEV",       TOK_MSTDDEV,     NONRESTOKEN_),
   ParKeyWord("MOVINGSUM",          TOK_MSUM,        NONRESTOKEN_),
   ParKeyWord("MOVINGVARIANCE",     TOK_MVARIANCE,   NONRESTOKEN_),
@@ -881,6 +882,7 @@ ParKeyWord ParKeyWords::keyWords_[] = {
   ParKeyWord("REMOTE",             TOK_REMOTE,      NONRESTOKEN_),
   ParKeyWord("REMOVE",             TOK_REMOVE,      FLAGSNONE_),
   ParKeyWord("RENAME",             TOK_RENAME,      NONRESTOKEN_),
+  ParKeyWord("REPAIR",             TOK_REPAIR,      NONRESTOKEN_),
   ParKeyWord("REPEAT",             TOK_REPEAT,      NONRESTOKEN_),
   ParKeyWord("REPEATABLE",         TOK_REPEATABLE,  FIRST_|SECOND_|NONRESTOKEN_),
   ParKeyWord("REPLACE",            TOK_REPLACE,     POTANS_|RESWORD_),

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/parser/StmtDDLonHiveObjects.h
----------------------------------------------------------------------
diff --git a/core/sql/parser/StmtDDLonHiveObjects.h b/core/sql/parser/StmtDDLonHiveObjects.h
index 7ec1e5b..1f07f3a 100644
--- a/core/sql/parser/StmtDDLonHiveObjects.h
+++ b/core/sql/parser/StmtDDLonHiveObjects.h
@@ -65,6 +65,7 @@ public:
       DROP_,
       ALTER_,
       TRUNCATE_,
+      MSCK_,  // MetaStore Check
       PASSTHRU_DDL_,
       UNKNOWN_OPER_
     };
@@ -79,17 +80,19 @@ public:
       
   // initialize constructor
   StmtDDLonHiveObjects(Operation oper,
-                     ObjectType type,
-                     NABoolean ifExistsOrNotExists,
-                     const NAString &name,
-                     NAString &hiveDDL,
-                     CollHeap * heap)
+                       ObjectType type,
+                       NABoolean ifExistsOrNotExists,
+                       const NAString &name,
+                       NAString &hiveDDL,
+                       NAString &hiveDefaultDB,
+                       CollHeap * heap)
        : StmtDDLNode(DDL_ON_HIVE_OBJECTS),
          oper_(oper),
          type_(type),
          ifExistsOrNotExists_(ifExistsOrNotExists),
          name_(name),
          hiveDDL_(hiveDDL),
+         hiveDefaultDB_(hiveDefaultDB),
          childNode_(NULL)
   {}
   
@@ -123,6 +126,7 @@ public:
       case DROP_         : return "drop";
       case ALTER_        : return "alter";
       case TRUNCATE_     : return "truncate";
+      case MSCK_         : return "msck";
       case PASSTHRU_DDL_ : return "passthru";
       case UNKNOWN_OPER_ : return "unknown";
       default            : return "unknown";
@@ -149,6 +153,9 @@ public:
   NAString &getHiveDDL() {return hiveDDL_;}
   void setHiveDDL(NAString &hiveDDL) {hiveDDL_ = hiveDDL;}
 
+  const NAString &getHiveDefaultDB() const {return hiveDefaultDB_;}
+  NAString &getHiveDefaultDB() {return hiveDefaultDB_;}
+
   // ExprNode * bindNode(BindWA * pBindWA);
 
   //
@@ -172,6 +179,9 @@ private:
   NAString name_;
   NAString hiveDDL_;
 
+  // default hive database/schema set for the current session
+  NAString hiveDefaultDB_;
+
   //
   // please do not use the following methods
   //

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/parser/sqlparser.y
----------------------------------------------------------------------
diff --git a/core/sql/parser/sqlparser.y b/core/sql/parser/sqlparser.y
index e7d4176..1290747 100755
--- a/core/sql/parser/sqlparser.y
+++ b/core/sql/parser/sqlparser.y
@@ -849,6 +849,7 @@ static void enableMakeQuotedStringISO88591Mechanism()
 %token <tokval> TOK_MONTHNAME
 %token <tokval> TOK_MORE                /* ANSI SQL non-reserved word */
 %token <tokval> TOK_MRANK               /* Tandem extension non-reserved word */
+%token <tokval> TOK_MSCK
 %token <tokval> TOK_MSTDDEV             /* Tandem extension non-reserved word */
 %token <tokval> TOK_MSUM                /* Tandem extension non-reserved word */
 %token <tokval> TOK_MV                  
@@ -976,6 +977,7 @@ static void enableMakeQuotedStringISO88591Mechanism()
 %token <tokval> TOK_REMOTE
 %token <tokval> TOK_TEMP_TABLE         
 %token <tokval> TOK_TEMPORARY
+%token <tokval> TOK_REPAIR
 %token <tokval> TOK_REPEAT
 %token <tokval> TOK_REPEATABLE          /* ANSI SQL non-reserved word */ 
 %token <tokval> TOK_REPEATABLE_ACCESS   /* Tandem extension */
@@ -31801,6 +31803,41 @@ alter_table_statement :  alter_table_start_tokens
                                   delete $4 /*ddl_qualified_name*/;
                                 }
 
+                     | TOK_MSCK
+                       { 
+                         // this is a Hive only syntax
+                         SqlParser_CurrentParser->hiveDDLInfo_->
+                           setValues(TRUE, StmtDDLonHiveObjects::MSCK_, 
+                                     StmtDDLonHiveObjects::TABLE_);
+                       }
+                       TOK_REPAIR TOK_TABLE ddl_qualified_name
+                       {
+                         if (NOT SqlParser_CurrentParser->hiveDDLInfo_->foundDDL_)
+                           {
+                             *SqlParser_Diags << DgSqlCode(-3242)
+                                              << DgString0("Specified object must be a Hive object.");
+                           }
+
+                         $$ = NULL;
+                         YYERROR;
+                       }
+                     | alter_table_start_tokens ddl_qualified_name TOK_RECOVER TOK_PARTITIONS
+                       {
+                         // this is a Hive only syntax
+                         SqlParser_CurrentParser->hiveDDLInfo_->
+                           setValues(TRUE, StmtDDLonHiveObjects::MSCK_, 
+                                     StmtDDLonHiveObjects::TABLE_);
+
+                         if (NOT SqlParser_CurrentParser->hiveDDLInfo_->foundDDL_)
+                           {
+                             *SqlParser_Diags << DgSqlCode(-3242)
+                                              << DgString0("Specified object must be a Hive object.");
+                           }
+ 
+                         $$ = NULL;
+                         YYERROR;
+                       }
+
 ghost : TOK_GHOST
                  {
                    // GHOST is allowed only if the flag ALLOW_SPECIALTABLETYPE is set,
@@ -33957,6 +33994,7 @@ nonreserved_word :      TOK_ABORT
                       | TOK_MV  
                       | TOK_MULTI            /* Long Running */
 		      | TOK_MULTIDELTA // MV
+                      | TOK_MSCK
 		      | TOK_MVATTRIBUTE  // MV
 		      | TOK_MVATTRIBUTES // MV
                       | TOK_MV_TABLE  
@@ -34062,6 +34100,7 @@ nonreserved_word :      TOK_ABORT
                       | TOK_RELOAD
                       | TOK_REMOTE
                       | TOK_RENAME
+                      | TOK_REPAIR
                       | TOK_REPOSITORY
                       | TOK_REQUEST // MV
                       | TOK_REQUIRED

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/regress/hive/DIFF008.KNOWN
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/DIFF008.KNOWN b/core/sql/regress/hive/DIFF008.KNOWN
index d28926f..2d99f1c 100644
--- a/core/sql/regress/hive/DIFF008.KNOWN
+++ b/core/sql/regress/hive/DIFF008.KNOWN
@@ -1,4 +1,4 @@
-321c321,324
+327c327,330
 < --- SQL command prepared.
 ---
 > *** ERROR[4002] Column T00804.A is not found. Table T00804 not exposed. Tables in scope: HIVE.HIVE.T00804. Default schema: HIVE.SCH008.

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/regress/hive/EXPECTED008
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/EXPECTED008 b/core/sql/regress/hive/EXPECTED008
index 8bb8f03..a792f4b 100644
--- a/core/sql/regress/hive/EXPECTED008
+++ b/core/sql/regress/hive/EXPECTED008
@@ -2,6 +2,12 @@
 
 --- SQL operation complete.
 >>
+>>set schema hive.sch008;
+
+*** ERROR[1003] Schema HIVE.SCH008 does not exist.
+
+--- SQL operation failed with errors.
+>>
 >>showddl schema hive.sch008;
 
 *** ERROR[1003] Schema HIVE.SCH008 does not exist.
@@ -1017,6 +1023,77 @@ LC   RC   OP   OPERATOR              OPT       DESCRIPTION           CARD
 
 --- SQL operation complete.
 >>
+>>-- alter rename. Should unregister the source table.
+>>create table t00807 (a int);
+
+--- SQL operation complete.
+>>alter table t00807 rename to `default`.t00807ren;
+
+--- SQL operation complete.
+>>select * from trafodion."_MD_".objects where schema_name = 'HIVE' and object_name = 'T00807';
+
+--- 0 row(s) selected.
+>>-- should show
+>>showddl hive.t00807ren;
+
+/* Hive DDL */
+CREATE TABLE HIVE.HIVE.T00807REN
+  (
+    A                                int
+  )
+  stored as textfile
+;
+
+/* Trafodion DDL */
+
+--- SQL operation complete.
+>>-- should not show
+>>showddl t00807ren;
+
+*** ERROR[1388] Object HIVE.SCH008.T00807REN does not exist in Hive Metadata.
+
+*** ERROR[8822] The statement was not prepared.
+
+>>drop table hive.t00807ren;
+
+--- SQL operation complete.
+>>
+>>-- alter rename to the hive schema set in session (hive.sch008)
+>>create table t00807 (a int);
+
+--- SQL operation complete.
+>>alter table t00807 rename to t00807ren;
+
+--- SQL operation complete.
+>>-- should show
+>>showddl t00807ren;
+
+/* Hive DDL */
+CREATE TABLE HIVE.SCH008.T00807REN
+  (
+    A                                int
+  )
+  stored as textfile
+;
+
+/* Trafodion DDL */
+
+--- SQL operation complete.
+>>-- should not show
+>>showddl hive.t00807ren;
+
+*** ERROR[1388] Object HIVE.HIVE.T00807REN does not exist in Hive Metadata.
+
+*** ERROR[8822] The statement was not prepared.
+
+>>
+>>-- msck (meta store check) command support
+>>msck repair table t00807ren;
+
+--- SQL operation complete.
+>>alter table t00807ren recover partitions;
+
+--- SQL operation complete.
 >>
 >>-- explain on hive DDL
 >>explain options 'f' drop table hive.hive.tnone;
@@ -1072,7 +1149,7 @@ CREATE ROLE "DB__HIVEROLE";
 
 --- SQL operation complete.
 >>
->>-- should not return unquthorized error.
+>>-- should not return unauthorized error.
 >>log;
 >>process hive ddl 'drop table tnotexists';
 

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/regress/hive/TEST008
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/TEST008 b/core/sql/regress/hive/TEST008
index 4f5a593..6ff6c3f 100644
--- a/core/sql/regress/hive/TEST008
+++ b/core/sql/regress/hive/TEST008
@@ -27,6 +27,8 @@ revoke role DB__HIVEROLE from SQL_USER4;
 log LOG008 clear;
 cqd traf_ddl_on_hive_objects 'ON';
 
+set schema hive.sch008;
+
 showddl schema hive.sch008;
 create database hive.sch008;
 create schema if not exists hive.sch008;
@@ -242,6 +244,27 @@ invoke hive.sch008.t008072;
 drop table t00807;
 drop table t008072;
 
+-- alter rename. Should unregister the source table.
+create table t00807 (a int);
+alter table t00807 rename to `default`.t00807ren;
+select * from trafodion."_MD_".objects where schema_name = 'HIVE' and object_name = 'T00807';
+-- should show
+showddl hive.t00807ren;
+-- should not show
+showddl t00807ren;
+drop table hive.t00807ren;
+
+-- alter rename to the hive schema set in session (hive.sch008)
+create table t00807 (a int);
+alter table t00807 rename to t00807ren;
+-- should show
+showddl t00807ren;
+-- should not show
+showddl hive.t00807ren;
+
+-- msck (meta store check) command support
+msck repair table t00807ren;
+alter table t00807ren recover partitions;
 
 -- explain on hive DDL
 explain options 'f' drop table hive.hive.tnone;
@@ -258,7 +281,7 @@ showddl role DB__HIVEROLE;
 grant role DB__HIVEROLE to SQL_USER4;
 showddl role DB__HIVEROLE;
 
--- should not return unquthorized error.
+-- should not return unauthorized error.
 log;
 sh sqlci -i"TEST008(hive_ddl_as_user4)" -u"SQL_User4";
 log LOG008;

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/sqlci/sqlci_lex.ll
----------------------------------------------------------------------
diff --git a/core/sql/sqlci/sqlci_lex.ll b/core/sql/sqlci/sqlci_lex.ll
index ac4376d..c795608 100755
--- a/core/sql/sqlci/sqlci_lex.ll
+++ b/core/sql/sqlci/sqlci_lex.ll
@@ -206,6 +206,7 @@ B			[ \t\n]+
 [Mm][Ee][Tt][Aa][Dd][Aa][Tt][Aa]                   return_IDENT_or_TOKEN(METADATAtoken, 0);
 [Mm][Aa][Pp]                           return_IDENT_or_TOKEN(MAPtoken, 0);
 [Mm][Oo][Dd][Ii][Ff][Yy]               return_IDENT_or_TOKEN(MODIFY, 0);
+[Mm][Ss][Cc][Kk] 		       return_IDENT_or_TOKEN(MSCKtoken, 0);
 [Oo][Bb][Ee][Yy]  		       { 
                      BEGIN FNAME; 
                                          return_IDENT_or_TOKEN (OBEY, 0); 

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/sqlci/sqlci_yacc.y
----------------------------------------------------------------------
diff --git a/core/sql/sqlci/sqlci_yacc.y b/core/sql/sqlci/sqlci_yacc.y
index dc97892..b3ab7d2 100644
--- a/core/sql/sqlci/sqlci_yacc.y
+++ b/core/sql/sqlci/sqlci_yacc.y
@@ -429,6 +429,7 @@ static char * FCString (const char *idString, int isFC)
 %token MODE
 %token MODIFY
 %token MODIFYV
+%token MSCKtoken
 %token NEXT
 %token NOEtoken
 %token OBEY
@@ -1934,6 +1935,7 @@ dml_type :
 	|	TRANSFORM		{$$ = DML_DDL_TYPE;}
 	|	CALLToken		{$$ = DML_DDL_TYPE;}
 	|	LOADtoken		{$$ = DML_DDL_TYPE;}
+	|	MSCKtoken		{$$ = DML_DDL_TYPE;}
 	|	EXTRACTtoken		{$$ = DML_DESCRIBE_TYPE;}
 	|	REPLICATEtoken		{$$ = DML_DESCRIBE_TYPE;}
 	|	GENERATEtoken		{$$ = DML_DESCRIBE_TYPE;}

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/sqlcomp/CmpSeabaseDDLtable.cpp
----------------------------------------------------------------------
diff --git a/core/sql/sqlcomp/CmpSeabaseDDLtable.cpp b/core/sql/sqlcomp/CmpSeabaseDDLtable.cpp
index 4fafe44..a620624 100644
--- a/core/sql/sqlcomp/CmpSeabaseDDLtable.cpp
+++ b/core/sql/sqlcomp/CmpSeabaseDDLtable.cpp
@@ -11362,7 +11362,8 @@ static void processPassthruHiveDDL(StmtDDLonHiveObjects * hddl)
            (hiveQuery.index("TRUNCATE ", 0, NAString::ignoreCase) == 0) ||
            (hiveQuery.index("GRANT ", 0, NAString::ignoreCase) == 0) ||
            (hiveQuery.index("REVOKE ", 0, NAString::ignoreCase) == 0) ||
-           (hiveQuery.index("RELOAD ", 0, NAString::ignoreCase) == 0)))
+           (hiveQuery.index("RELOAD ", 0, NAString::ignoreCase) == 0) ||
+           (hiveQuery.index("MSCK ", 0, NAString::ignoreCase) == 0)))
     {
       // error case
      *CmpCommon::diags() << DgSqlCode(-3242) << DgString0("Specified DDL operation cannot be executed directly by hive.");
@@ -11414,7 +11415,8 @@ void CmpSeabaseDDL::processDDLonHiveObjects(StmtDDLonHiveObjects * hddl,
   if (NOT ((hddl->getOper() == StmtDDLonHiveObjects::CREATE_) ||
            (hddl->getOper() == StmtDDLonHiveObjects::DROP_) ||
            (hddl->getOper() == StmtDDLonHiveObjects::ALTER_) ||
-           (hddl->getOper() == StmtDDLonHiveObjects::TRUNCATE_)))
+           (hddl->getOper() == StmtDDLonHiveObjects::TRUNCATE_) ||
+           (hddl->getOper() == StmtDDLonHiveObjects::MSCK_)))
     {
       // error case
      *CmpCommon::diags() << DgSqlCode(-3242) << DgString0("Only CREATE, DROP, ALTER or TRUNCATE DDL commands can be specified on hive objects. Use \"PROCESS HIVE DDL '<ddl-stmt>' \" to directly execute other statements through hive.");
@@ -11671,6 +11673,45 @@ void CmpSeabaseDDL::processDDLonHiveObjects(StmtDDLonHiveObjects * hddl,
 
   endXnIfStartedHere(&cliInterface, xnWasStartedHere, 0);
 
+  // this ALTER may be a RENAME command. 
+  // If the table being renamed is registered in Traf MD, unregister it.
+  if (hddl->getOper() == StmtDDLonHiveObjects::ALTER_)
+    {
+      // set cqd so NATable is recreated instead of returning cached one.
+      NABoolean cqdChanged = FALSE;
+      if (CmpCommon::getDefault(TRAF_RELOAD_NATABLE_CACHE) == DF_OFF)
+        {
+          NAString value("ON");
+          ActiveSchemaDB()->getDefaults().validateAndInsert(
+               "traf_reload_natable_cache", value, FALSE);
+          cqdChanged = TRUE;
+        }
+
+      NATable *naTable = bindWA.getNATable(cnTgt);
+      
+      if (cqdChanged)
+        {
+          NAString value("OFF");
+          ActiveSchemaDB()->getDefaults().validateAndInsert(
+               "traf_reload_natable_cache", value, FALSE);
+        }
+
+      NABoolean objExists = FALSE;
+      if (naTable == NULL || bindWA.errStatus())
+        objExists = FALSE;
+      else
+        objExists = TRUE;
+      
+      if (NOT objExists)
+        {
+          CmpCommon::diags()->clear();
+          cliRC = unregisterNativeTable(
+               catName, schName, objName,
+               cliInterface, 
+               objType);
+        }
+    }
+
   ActiveSchemaDB()->getNATableDB()->removeNATable
     (cnTgt,
      ComQiScope::REMOVE_FROM_ALL_USERS,
@@ -11773,6 +11814,19 @@ NABoolean CmpSeabaseDDL::setupQueryTreeForHiveDDL(
   
   CmpCommon::diags()->clear();
   
+  if (oper == StmtDDLonHiveObjects::MSCK_)
+    {
+      // this may be set through 'msck repair table <tablename>' or 
+      // 'alter table <tablename> repair partitions'.
+      // Underlying Hive may not support the 'alter repair' syntax on
+      // all platforms.
+      // Create 'msck' version which is supported.
+      NAString newHiveDDL("MSCK REPAIR TABLE ");
+      newHiveDDL += newHiveName;
+      hiveDDL.clear();
+      hiveDDL = newHiveDDL;
+    }
+
   DDLExpr * ddlExpr = NULL;
   RelExpr * ddlExprRoot = NULL;
   if (oper == StmtDDLonHiveObjects::TRUNCATE_)
@@ -11815,11 +11869,20 @@ NABoolean CmpSeabaseDDL::setupQueryTreeForHiveDDL(
   else if (NOT ((hiveDDLInfo->essd_ == Parser::HiveDDLInfo::SHOWPLAN_) ||
                 (hiveDDLInfo->essd_ == Parser::HiveDDLInfo::SHOWSHAPE_)))
     {
+      // get the hive schema name if set in the session.
+      BindWA bindWA(ActiveSchemaDB(), CmpCommon::context(), FALSE/*inDDL*/);
+      NAString hiveDB = ComConvertTrafHiveNameToNativeHiveName
+        (bindWA.getDefaultSchema().getCatalogName(),
+         bindWA.getDefaultSchema().getSchemaName(),
+         NAString(""));
+      hiveDB.toLower();
+      
       StmtDDLonHiveObjects * sdho = 
         new (PARSERHEAP()) StmtDDLonHiveObjects(oper, type, 
                                                 ifExistsOrNotExists,
                                                 con.getExternalName(),
-                                                hiveDDL, PARSERHEAP());
+                                                hiveDDL, hiveDB,
+                                                PARSERHEAP());
       
       DDLExpr * ddlExpr = new(CmpCommon::statementHeap()) 
         DDLExpr(sdho, inputStr, inputStrCharSet,

http://git-wip-us.apache.org/repos/asf/trafodion/blob/f5885e4f/core/sql/src/main/java/org/trafodion/sql/HiveClient.java
----------------------------------------------------------------------
diff --git a/core/sql/src/main/java/org/trafodion/sql/HiveClient.java b/core/sql/src/main/java/org/trafodion/sql/HiveClient.java
index 586f10e..e5268f8 100644
--- a/core/sql/src/main/java/org/trafodion/sql/HiveClient.java
+++ b/core/sql/src/main/java/org/trafodion/sql/HiveClient.java
@@ -63,6 +63,9 @@ public class HiveClient {
     private static HiveConf hiveConf = null;
     private static HiveMetaStoreClient hmsClient  ;
     private static String ddlTimeConst = null;
+
+    private static Statement stmt = null;
+
     static {
          String confFile = System.getProperty("trafodion.log4j.configFile");
          System.setProperty("trafodion.root", System.getenv("TRAF_HOME"));
@@ -241,24 +244,29 @@ public class HiveClient {
         return fieldVal.toString();
   }
 
-  public static void executeHiveSQL(String ddl) throws ClassNotFoundException, SQLException
+    public static void executeHiveSQL(String ddl) 
+        throws ClassNotFoundException, SQLException
   {
-      Class.forName("org.apache.hive.jdbc.HiveDriver");
-      Connection con = null;
-      String isSecureHadoop = System.getenv("SECURE_HADOOP");
-      //If Kerberos is enabled, then we need to connect to remote hiveserver2 using hive principal
-      if(isSecureHadoop != null && isSecureHadoop.equalsIgnoreCase("Y")){
-         String hiveServer2Url = System.getenv("HIVESERVER2_URL");
-         if(hiveServer2Url == null || hiveServer2Url.isEmpty()){
-            hiveServer2Url = "localhost:10000";
-         }
-         String hivePrincipal = System.getenv("HIVE_PRINCIPAL");
-         con = DriverManager.getConnection("jdbc:hive2://" + hiveServer2Url+"/;principal=" + hivePrincipal, "hive", "");
-      }else{
-         con = DriverManager.getConnection("jdbc:hive2://", "hive", "");
+      if (stmt == null) {
+          Class.forName("org.apache.hive.jdbc.HiveDriver");
+          Connection con = null;
+          String isSecureHadoop = System.getenv("SECURE_HADOOP");
+          //If Kerberos is enabled, then we need to connect to remote hiveserver2 using hive principal
+          if(isSecureHadoop != null && isSecureHadoop.equalsIgnoreCase("Y")){
+              String hiveServer2Url = System.getenv("HIVESERVER2_URL");
+              if(hiveServer2Url == null || hiveServer2Url.isEmpty()){
+                  hiveServer2Url = "localhost:10000";
+              }
+              String hivePrincipal = System.getenv("HIVE_PRINCIPAL");
+              con = DriverManager.getConnection("jdbc:hive2://" + hiveServer2Url+"/;principal=" + hivePrincipal, "hive", "");
+          }else{
+              con = DriverManager.getConnection("jdbc:hive2://", "hive", "");
+          }
+          stmt = con.createStatement();
       }
-      Statement stmt = con.createStatement();
+
       try {
+
           stmt.execute(ddl);
       } catch (SQLException s) {
           throw s;            

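Because the JDBC Statement (and its Connection) is now created once and
reused, statements issued through executeHiveSQL run in the same Hive
session, which allows a "use <db>" issued at set-schema time to remain in
effect for subsequent statements sent over that connection. An illustrative
sketch of the Hive-side statement sequence for one session (not taken from
the commit itself):

    use sch008;                    -- issued when "set schema hive.sch008" succeeds
    create table t00807 (a int);   -- later unqualified DDL runs in database sch008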
