hive-commits mailing list archives

From ser...@apache.org
Subject [13/13] hive git commit: HIVE-17954 : Implement pool, user, group and trigger to pool management API's (Harish Jaiprakash, reviewed by Sergey Shelukhin)
Date Mon, 27 Nov 2017 22:28:07 GMT
HIVE-17954 : Implement pool, user, group and trigger to pool management API's (Harish Jaiprakash, reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/44ef5991
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/44ef5991
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/44ef5991

Branch: refs/heads/master
Commit: 44ef599155efa998b59b0723b2bb705bf60a1f21
Parents: be1f847
Author: sergey <sershe@apache.org>
Authored: Mon Nov 27 14:27:21 2017 -0800
Committer: sergey <sershe@apache.org>
Committed: Mon Nov 27 14:27:48 2017 -0800

----------------------------------------------------------------------
 .../listener/DummyRawStoreFailEvent.java        |    48 +-
 .../hive/jdbc/TestTriggersWorkloadManager.java  |     2 +-
 .../upgrade/derby/046-HIVE-17566.derby.sql      |     3 +-
 .../upgrade/derby/hive-schema-3.0.0.derby.sql   |     4 +-
 .../upgrade/hive/hive-schema-3.0.0.hive.sql     |    69 +
 .../upgrade/mssql/031-HIVE-17566.mssql.sql      |     4 +-
 .../upgrade/mssql/hive-schema-3.0.0.mssql.sql   |     3 -
 .../upgrade/mysql/046-HIVE-17566.mysql.sql      |     4 +-
 .../upgrade/mysql/hive-schema-3.0.0.mysql.sql   |     4 +-
 .../upgrade/oracle/046-HIVE-17566.oracle.sql    |     2 -
 .../upgrade/oracle/hive-schema-3.0.0.oracle.sql |     3 -
 .../postgres/045-HIVE-17566.postgres.sql        |     3 -
 .../postgres/hive-schema-3.0.0.postgres.sql     |     1 -
 .../hive/metastore/HiveMetaStoreClient.java     |    55 +
 .../hadoop/hive/metastore/IMetaStoreClient.java |    21 +
 .../DummyRawStoreControlledCommit.java          |    48 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   117 +-
 .../hive/ql/exec/tez/UserPoolMapping.java       |     4 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java    |    53 +
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |   246 +-
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |     4 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g    |   153 +-
 .../hadoop/hive/ql/parse/IdentifiersParser.g    |     4 +-
 .../hadoop/hive/ql/parse/ParseDriver.java       |     3 +
 .../hadoop/hive/ql/parse/ResourcePlanParser.g   |   230 +
 .../hive/ql/parse/SemanticAnalyzerFactory.java  |    20 +-
 .../hive/ql/plan/AlterResourcePlanDesc.java     |    89 +-
 .../hadoop/hive/ql/plan/AlterWMTriggerDesc.java |    54 +-
 .../ql/plan/CreateOrAlterWMMappingDesc.java     |    41 +
 .../hive/ql/plan/CreateOrAlterWMPoolDesc.java   |    50 +
 .../CreateOrDropTriggerToPoolMappingDesc.java   |    66 +
 .../hive/ql/plan/CreateResourcePlanDesc.java    |    24 +-
 .../hive/ql/plan/CreateWMTriggerDesc.java       |    54 +-
 .../org/apache/hadoop/hive/ql/plan/DDLWork.java |    88 +-
 .../hadoop/hive/ql/plan/DropWMMappingDesc.java  |    29 +
 .../hadoop/hive/ql/plan/DropWMPoolDesc.java     |    33 +
 .../hadoop/hive/ql/plan/HiveOperation.java      |     9 +-
 .../authorization/plugin/HiveOperationType.java |     6 +
 .../plugin/sqlstd/Operation2Privilege.java      |    12 +
 .../hive/ql/exec/tez/TestWorkloadManager.java   |     2 +-
 .../test/queries/clientpositive/resourceplan.q  |   154 +-
 .../clientpositive/llap/resourceplan.q.out      |   648 +-
 .../results/clientpositive/llap/sysdb.q.out     |   161 +-
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  | 13702 ++++++++++-------
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h    |  1398 +-
 .../ThriftHiveMetastore_server.skeleton.cpp     |    30 +
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |  1636 +-
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |   584 +-
 .../hive/metastore/api/ThriftHiveMetastore.java | 10718 +++++++++++--
 .../hive/metastore/api/WMAlterPoolRequest.java  |   504 +
 .../hive/metastore/api/WMAlterPoolResponse.java |   283 +
 ...CreateOrDropTriggerToPoolMappingRequest.java |   708 +
 ...reateOrDropTriggerToPoolMappingResponse.java |   283 +
 .../api/WMCreateOrUpdateMappingRequest.java     |   501 +
 .../api/WMCreateOrUpdateMappingResponse.java    |   283 +
 .../hive/metastore/api/WMCreatePoolRequest.java |   398 +
 .../metastore/api/WMCreatePoolResponse.java     |   283 +
 .../metastore/api/WMDropMappingRequest.java     |   398 +
 .../metastore/api/WMDropMappingResponse.java    |   283 +
 .../hive/metastore/api/WMDropPoolRequest.java   |   499 +
 .../hive/metastore/api/WMDropPoolResponse.java  |   283 +
 .../hadoop/hive/metastore/api/WMMapping.java    |   112 +-
 .../gen-php/metastore/ThriftHiveMetastore.php   |  2930 +++-
 .../src/gen/thrift/gen-php/metastore/Types.php  |   924 +-
 .../hive_metastore/ThriftHiveMetastore-remote   |    42 +
 .../hive_metastore/ThriftHiveMetastore.py       |  5440 ++++---
 .../gen/thrift/gen-py/hive_metastore/ttypes.py  |   766 +-
 .../gen/thrift/gen-rb/hive_metastore_types.rb   |   202 +-
 .../gen/thrift/gen-rb/thrift_hive_metastore.rb  |   446 +
 .../hadoop/hive/metastore/HiveMetaStore.java    |    84 +-
 .../hadoop/hive/metastore/ObjectStore.java      |   372 +-
 .../apache/hadoop/hive/metastore/RawStore.java  |    27 +-
 .../hive/metastore/cache/CachedStore.java       |    48 +-
 .../hadoop/hive/metastore/model/MWMPool.java    |    23 +-
 .../src/main/resources/package.jdo              |    10 +-
 .../src/main/thrift/hive_metastore.thrift       |    67 +-
 .../DummyRawStoreForJdoConnection.java          |    40 +
 77 files changed, 36467 insertions(+), 10468 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
----------------------------------------------------------------------
diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
index 7196756..7965ca3 100644
--- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
+++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
@@ -74,6 +74,8 @@ import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.metastore.api.WMMapping;
+import org.apache.hadoop.hive.metastore.api.WMPool;
 import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
 import org.apache.thrift.TException;
 
@@ -982,7 +984,7 @@ public class DummyRawStoreFailEvent implements RawStore, Configurable {
 
   @Override
   public void createResourcePlan(WMResourcePlan resourcePlan, int defaultPoolSize)
-      throws AlreadyExistsException, MetaException {
+      throws AlreadyExistsException, InvalidObjectException, MetaException {
     objectStore.createResourcePlan(resourcePlan, defaultPoolSize);
   }
 
@@ -1043,4 +1045,48 @@ public class DummyRawStoreFailEvent implements RawStore, Configurable {
       throws NoSuchObjectException, MetaException {
     return objectStore.getTriggersForResourcePlan(resourcePlanName);
   }
+
+  @Override
+  public void createPool(WMPool pool) throws AlreadyExistsException, NoSuchObjectException,
+      InvalidOperationException, MetaException {
+    objectStore.createPool(pool);
+  }
+
+  @Override
+  public void alterPool(WMPool pool, String poolPath) throws AlreadyExistsException,
+      NoSuchObjectException, InvalidOperationException, MetaException {
+    objectStore.alterPool(pool, poolPath);
+  }
+
+  @Override
+  public void dropWMPool(String resourcePlanName, String poolPath)
+      throws NoSuchObjectException, InvalidOperationException, MetaException {
+    objectStore.dropWMPool(resourcePlanName, poolPath);
+  }
+
+  @Override
+  public void createOrUpdateWMMapping(WMMapping mapping, boolean update)
+      throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException,
+      MetaException {
+    objectStore.createOrUpdateWMMapping(mapping, update);
+  }
+
+  @Override
+  public void dropWMMapping(WMMapping mapping)
+      throws NoSuchObjectException, InvalidOperationException, MetaException {
+    objectStore.dropWMMapping(mapping);
+  }
+
+  @Override
+  public void createWMTriggerToPoolMapping(String resourcePlanName, String triggerName,
+      String poolPath) throws AlreadyExistsException, NoSuchObjectException,
+      InvalidOperationException, MetaException {
+    objectStore.createWMTriggerToPoolMapping(resourcePlanName, triggerName, poolPath);
+  }
+
+  @Override
+  public void dropWMTriggerToPoolMapping(String resourcePlanName, String triggerName,
+      String poolPath) throws NoSuchObjectException, InvalidOperationException, MetaException {
+    objectStore.dropWMTriggerToPoolMapping(resourcePlanName, triggerName, poolPath);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java
index 0506f67..285e533 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java
@@ -86,4 +86,4 @@ public class TestTriggersWorkloadManager extends TestTriggersTezSessionPoolManag
     }
     wm.updateResourcePlanAsync(rp).get(10, TimeUnit.SECONDS);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/derby/046-HIVE-17566.derby.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/derby/046-HIVE-17566.derby.sql b/metastore/scripts/upgrade/derby/046-HIVE-17566.derby.sql
index 9f12153..8eb197c 100644
--- a/metastore/scripts/upgrade/derby/046-HIVE-17566.derby.sql
+++ b/metastore/scripts/upgrade/derby/046-HIVE-17566.derby.sql
@@ -2,11 +2,10 @@ CREATE TABLE "APP"."WM_RESOURCEPLAN" (RP_ID BIGINT NOT NULL, NAME VARCHAR(128) N
 CREATE UNIQUE INDEX "APP"."UNIQUE_WM_RESOURCEPLAN" ON "APP"."WM_RESOURCEPLAN" ("NAME");
 ALTER TABLE "APP"."WM_RESOURCEPLAN" ADD CONSTRAINT "WM_RESOURCEPLAN_PK" PRIMARY KEY ("RP_ID");
 
-CREATE TABLE "APP"."WM_POOL" (POOL_ID BIGINT NOT NULL, RP_ID BIGINT NOT NULL, PATH VARCHAR(1024) NOT NULL, PARENT_POOL_ID BIGINT, ALLOC_FRACTION DOUBLE, QUERY_PARALLELISM INTEGER, SCHEDULING_POLICY VARCHAR(1024));
+CREATE TABLE "APP"."WM_POOL" (POOL_ID BIGINT NOT NULL, RP_ID BIGINT NOT NULL, PATH VARCHAR(1024) NOT NULL, ALLOC_FRACTION DOUBLE, QUERY_PARALLELISM INTEGER, SCHEDULING_POLICY VARCHAR(1024));
 CREATE UNIQUE INDEX "APP"."UNIQUE_WM_POOL" ON "APP"."WM_POOL" ("RP_ID", "PATH");
 ALTER TABLE "APP"."WM_POOL" ADD CONSTRAINT "WM_POOL_PK" PRIMARY KEY ("POOL_ID");
 ALTER TABLE "APP"."WM_POOL" ADD CONSTRAINT "WM_POOL_FK1" FOREIGN KEY ("RP_ID") REFERENCES "APP"."WM_RESOURCEPLAN" ("RP_ID") ON DELETE NO ACTION ON UPDATE NO ACTION;
-ALTER TABLE "APP"."WM_POOL" ADD CONSTRAINT "WM_POOL_FK2" FOREIGN KEY ("PARENT_POOL_ID") REFERENCES "APP"."WM_POOL" ("POOL_ID") ON DELETE NO ACTION ON UPDATE NO ACTION;
 ALTER TABLE "APP"."WM_RESOURCEPLAN" ADD CONSTRAINT "WM_RESOURCEPLAN_FK1" FOREIGN KEY ("DEFAULT_POOL_ID") REFERENCES "APP"."WM_POOL" ("POOL_ID") ON DELETE NO ACTION ON UPDATE NO ACTION;
 
 CREATE TABLE "APP"."WM_TRIGGER" (TRIGGER_ID BIGINT NOT NULL, RP_ID BIGINT NOT NULL, NAME VARCHAR(128) NOT NULL, TRIGGER_EXPRESSION VARCHAR(1024), ACTION_EXPRESSION VARCHAR(1024));

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/derby/hive-schema-3.0.0.derby.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/derby/hive-schema-3.0.0.derby.sql b/metastore/scripts/upgrade/derby/hive-schema-3.0.0.derby.sql
index 16aae7a..f93d0d1 100644
--- a/metastore/scripts/upgrade/derby/hive-schema-3.0.0.derby.sql
+++ b/metastore/scripts/upgrade/derby/hive-schema-3.0.0.derby.sql
@@ -112,7 +112,7 @@ CREATE TABLE "APP"."METASTORE_DB_PROPERTIES" ("PROPERTY_KEY" VARCHAR(255) NOT NU
 
 CREATE TABLE "APP"."WM_RESOURCEPLAN" (RP_ID BIGINT NOT NULL, NAME VARCHAR(128) NOT NULL, QUERY_PARALLELISM INTEGER, STATUS VARCHAR(20) NOT NULL, DEFAULT_POOL_ID BIGINT);
 
-CREATE TABLE "APP"."WM_POOL" (POOL_ID BIGINT NOT NULL, RP_ID BIGINT NOT NULL, PATH VARCHAR(1024) NOT NULL, PARENT_POOL_ID BIGINT, ALLOC_FRACTION DOUBLE, QUERY_PARALLELISM INTEGER, SCHEDULING_POLICY VARCHAR(1024));
+CREATE TABLE "APP"."WM_POOL" (POOL_ID BIGINT NOT NULL, RP_ID BIGINT NOT NULL, PATH VARCHAR(1024) NOT NULL, ALLOC_FRACTION DOUBLE, QUERY_PARALLELISM INTEGER, SCHEDULING_POLICY VARCHAR(1024));
 
 CREATE TABLE "APP"."WM_TRIGGER" (TRIGGER_ID BIGINT NOT NULL, RP_ID BIGINT NOT NULL, NAME VARCHAR(128) NOT NULL, TRIGGER_EXPRESSION VARCHAR(1024), ACTION_EXPRESSION VARCHAR(1024));
 
@@ -356,8 +356,6 @@ ALTER TABLE "APP"."WM_POOL" ADD CONSTRAINT "WM_POOL_PK" PRIMARY KEY ("POOL_ID");
 
 ALTER TABLE "APP"."WM_POOL" ADD CONSTRAINT "WM_POOL_FK1" FOREIGN KEY ("RP_ID") REFERENCES "APP"."WM_RESOURCEPLAN" ("RP_ID") ON DELETE NO ACTION ON UPDATE NO ACTION;
 
-ALTER TABLE "APP"."WM_POOL" ADD CONSTRAINT "WM_POOL_FK2" FOREIGN KEY ("PARENT_POOL_ID") REFERENCES "APP"."WM_POOL" ("POOL_ID") ON DELETE NO ACTION ON UPDATE NO ACTION;
-
 ALTER TABLE "APP"."WM_RESOURCEPLAN" ADD CONSTRAINT "WM_RESOURCEPLAN_FK1" FOREIGN KEY ("DEFAULT_POOL_ID") REFERENCES "APP"."WM_POOL" ("POOL_ID") ON DELETE NO ACTION ON UPDATE NO ACTION;
 
 ALTER TABLE "APP"."WM_TRIGGER" ADD CONSTRAINT "WM_TRIGGER_PK" PRIMARY KEY ("TRIGGER_ID");

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql b/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
index 68d8d37..7589101 100644
--- a/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
+++ b/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
@@ -989,6 +989,75 @@ ON
   t.RP_ID = r.RP_ID"
 );
 
+CREATE TABLE IF NOT EXISTS `WM_POOLS` (
+  `RP_NAME` string,
+  `PATH` string,
+  `ALLOC_FRACTION` double,
+  `QUERY_PARALLELISM` int,
+  `SCHEDULING_POLICY` string
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME,
+  WM_POOL.PATH,
+  WM_POOL.ALLOC_FRACTION,
+  WM_POOL.QUERY_PARALLELISM,
+  WM_POOL.SCHEDULING_POLICY
+FROM
+  WM_POOL
+JOIN
+  WM_RESOURCEPLAN
+ON
+  WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+);
+
+CREATE TABLE IF NOT EXISTS `WM_POOLS_TO_TRIGGERS` (
+  `RP_NAME` string,
+  `POOL_PATH` string,
+  `TRIGGER_NAME` string
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME RP_NAME,
+  WM_POOL.PATH POOL_PATH,
+  WM_TRIGGER.NAME TRIGGER_NAME
+FROM
+  WM_POOL_TO_TRIGGER
+JOIN WM_POOL ON WM_POOL_TO_TRIGGER.POOL_ID = WM_POOL.POOL_ID
+JOIN WM_TRIGGER ON WM_POOL_TO_TRIGGER.TRIGGER_ID = WM_TRIGGER.TRIGGER_ID
+JOIN WM_RESOURCEPLAN ON WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+);
+
+CREATE TABLE IF NOT EXISTS `WM_MAPPINGS` (
+  `RP_NAME` string,
+  `ENTITY_TYPE` string,
+  `ENTITY_NAME` string,
+  `POOL_PATH` string,
+  `ORDERING` int
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME,
+  ENTITY_TYPE,
+  ENTITY_NAME,
+  WM_POOL.PATH,
+  ORDERING
+FROM
+  WM_MAPPING
+JOIN WM_RESOURCEPLAN ON WM_MAPPING.RP_ID = WM_RESOURCEPLAN.RP_ID
+LEFT OUTER JOIN WM_POOL ON WM_POOL.POOL_ID = WM_MAPPING.POOL_ID"
+);
+
+
 DROP DATABASE IF EXISTS INFORMATION_SCHEMA;
 CREATE DATABASE INFORMATION_SCHEMA;
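
[Editor's note, not part of the patch] The three tables added above (WM_POOLS, WM_POOLS_TO_TRIGGERS, WM_MAPPINGS) surface workload-management metadata through the JdbcStorageHandler-backed system catalog. A sketch of reading WM_POOLS over JDBC, assuming a running HiveServer2 and the sys database this script populates; the connection URL is a placeholder:

  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.ResultSet;
  import java.sql.Statement;

  public class ListWMPools {
    public static void main(String[] args) throws Exception {
      // Placeholder URL; point this at a real HiveServer2 instance.
      try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/sys");
           Statement stmt = conn.createStatement();
           ResultSet rs = stmt.executeQuery(
               "SELECT RP_NAME, PATH, ALLOC_FRACTION, QUERY_PARALLELISM FROM WM_POOLS")) {
        while (rs.next()) {
          System.out.printf("%s %s %f %d%n", rs.getString(1), rs.getString(2),
              rs.getDouble(3), rs.getInt(4));
        }
      }
    }
  }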
 

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/mssql/031-HIVE-17566.mssql.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/mssql/031-HIVE-17566.mssql.sql b/metastore/scripts/upgrade/mssql/031-HIVE-17566.mssql.sql
index 06d82e0..945bda4 100644
--- a/metastore/scripts/upgrade/mssql/031-HIVE-17566.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/031-HIVE-17566.mssql.sql
@@ -17,7 +17,6 @@ CREATE TABLE WM_POOL
     POOL_ID bigint NOT NULL,
     RP_ID bigint NOT NULL,
     PATH nvarchar(1024) NOT NULL,
-    PARENT_POOL_ID bigint,
     ALLOC_FRACTION DOUBLE,
     QUERY_PARALLELISM int,
     SCHEDULING_POLICY nvarchar(1024)
@@ -26,8 +25,9 @@ CREATE TABLE WM_POOL
 ALTER TABLE WM_POOL ADD CONSTRAINT WM_POOL_PK PRIMARY KEY (POOL_ID);
 
 CREATE UNIQUE INDEX UNIQUE_WM_POOL ON WM_POOL (RP_ID, "NAME");
+
 ALTER TABLE WM_POOL ADD CONSTRAINT WM_POOL_FK1 FOREIGN KEY (RP_ID) REFERENCES WM_RESOURCEPLAN (RP_ID);
-ALTER TABLE WM_POOL ADD CONSTRAINT WM_POOL_FK2 FOREIGN KEY (PARENT_POOL_ID) REFERENCES WM_POOL (POOL_ID);
+
 ALTER TABLE WM_RESOURCEPLAN ADD CONSTRAINT WM_RESOURCEPLAN_FK1 FOREIGN KEY (DEFAULT_POOL_ID) REFERENCES WM_POOL (POOL_ID);
 
 CREATE TABLE WM_TRIGGER

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql b/metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql
index 70e1267..26c82af 100644
--- a/metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql
@@ -613,7 +613,6 @@ CREATE TABLE WM_POOL
     POOL_ID bigint NOT NULL,
     RP_ID bigint NOT NULL,
     PATH nvarchar(1024) NOT NULL,
-    PARENT_POOL_ID bigint,
     ALLOC_FRACTION DOUBLE,
     QUERY_PARALLELISM int,
     SCHEDULING_POLICY nvarchar(1024)
@@ -935,8 +934,6 @@ CREATE UNIQUE INDEX UNIQUE_WM_MAPPING ON WM_MAPPING (RP_ID, ENTITY_TYPE, ENTITY_
 
 ALTER TABLE WM_MAPPING ADD CONSTRAINT WM_MAPPING_FK1 FOREIGN KEY (RP_ID) REFERENCES WM_RESOURCEPLAN (RP_ID);
 
-ALTER TABLE WM_MAPPING ADD CONSTRAINT WM_MAPPING_FK2 FOREIGN KEY (POOL_ID) REFERENCES WM_POOL (POOL_ID);
-
 
 -- -----------------------------------------------------------------------------------------------------------------------------------------------
 -- Transaction and Lock Tables

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql b/metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql
index cff0b85..34fcfe6 100644
--- a/metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql
+++ b/metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql
@@ -13,14 +13,12 @@ CREATE TABLE IF NOT EXISTS WM_POOL
     `POOL_ID` bigint(20) NOT NULL,
     `RP_ID` bigint(20) NOT NULL,
     `PATH` varchar(767) NOT NULL,
-    `PARENT_POOL_ID` bigint(20),
     `ALLOC_FRACTION` DOUBLE,
     `QUERY_PARALLELISM` int(11),
     `SCHEDULING_POLICY` varchar(767),
     PRIMARY KEY (`POOL_ID`),
     KEY `UNIQUE_WM_POOL` (`RP_ID`, `PATH`),
-    CONSTRAINT `WM_POOL_FK1` FOREIGN KEY (`RP_ID`) REFERENCES `WM_RESOURCEPLAN` (`RP_ID`),
-    CONSTRAINT `WM_POOL_FK2` FOREIGN KEY (`PARENT_POOL_ID`) REFERENCES `WM_POOL` (`POOL_ID`)
+    CONSTRAINT `WM_POOL_FK1` FOREIGN KEY (`RP_ID`) REFERENCES `WM_RESOURCEPLAN` (`RP_ID`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
 
 ALTER TABLE `WM_RESOURCEPLAN` ADD CONSTRAINT `WM_RESOURCEPLAN_FK1` FOREIGN KEY (`DEFAULT_POOL_ID`) REFERENCES `WM_POOL`(`POOL_ID`);

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql b/metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql
index 9b66e83..ec95c17 100644
--- a/metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql
+++ b/metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql
@@ -864,14 +864,12 @@ CREATE TABLE IF NOT EXISTS WM_POOL
     `POOL_ID` bigint(20) NOT NULL,
     `RP_ID` bigint(20) NOT NULL,
     `PATH` varchar(767) NOT NULL,
-    `PARENT_POOL_ID` bigint(20),
     `ALLOC_FRACTION` DOUBLE,
     `QUERY_PARALLELISM` int(11),
     `SCHEDULING_POLICY` varchar(767),
     PRIMARY KEY (`POOL_ID`),
     KEY `UNIQUE_WM_POOL` (`RP_ID`, `PATH`),
-    CONSTRAINT `WM_POOL_FK1` FOREIGN KEY (`RP_ID`) REFERENCES `WM_RESOURCEPLAN` (`RP_ID`),
-    CONSTRAINT `WM_POOL_FK2` FOREIGN KEY (`PARENT_POOL_ID`) REFERENCES `WM_POOL` (`POOL_ID`)
+    CONSTRAINT `WM_POOL_FK1` FOREIGN KEY (`RP_ID`) REFERENCES `WM_RESOURCEPLAN` (`RP_ID`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
 
 ALTER TABLE `WM_RESOURCEPLAN` ADD CONSTRAINT `WM_RESOURCEPLAN_FK1` FOREIGN KEY (`DEFAULT_POOL_ID`) REFERENCES `WM_POOL`(`POOL_ID`);

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/oracle/046-HIVE-17566.oracle.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/oracle/046-HIVE-17566.oracle.sql b/metastore/scripts/upgrade/oracle/046-HIVE-17566.oracle.sql
index ceab459..596bb60 100644
--- a/metastore/scripts/upgrade/oracle/046-HIVE-17566.oracle.sql
+++ b/metastore/scripts/upgrade/oracle/046-HIVE-17566.oracle.sql
@@ -17,7 +17,6 @@ CREATE TABLE WM_POOL
     POOL_ID bigint NOT NULL,
     RP_ID bigint NOT NULL,
     PATH nvarchar(1024) NOT NULL,
-    PARENT_POOL_ID bigint,
     ALLOC_FRACTION DOUBLE,
     QUERY_PARALLELISM int,
     SCHEDULING_POLICY nvarchar(1024)
@@ -27,7 +26,6 @@ ALTER TABLE WM_POOL ADD CONSTRAINT WM_POOL_PK PRIMARY KEY (POOL_ID);
 
 CREATE UNIQUE INDEX UNIQUE_WM_POOL ON WM_POOL (RP_ID, "NAME");
 ALTER TABLE WM_POOL ADD CONSTRAINT WM_POOL_FK1 FOREIGN KEY (RP_ID) REFERENCES WM_RESOURCEPLAN (RP_ID);
-ALTER TABLE WM_POOL ADD CONSTRAINT WM_POOL_FK2 FOREIGN KEY (PARENT_POOL_ID) REFERENCES WM_POOL (POOL_ID);
 
 ALTER TABLE WM_RESOURCEPLAN ADD CONSTRAINT WM_RESOURCEPLAN_FK1 FOREIGN KEY (DEFAULT_POOL_ID) REFERENCES WM_POOL (POOL_ID);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/oracle/hive-schema-3.0.0.oracle.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/oracle/hive-schema-3.0.0.oracle.sql b/metastore/scripts/upgrade/oracle/hive-schema-3.0.0.oracle.sql
index de55e70..65c72af 100644
--- a/metastore/scripts/upgrade/oracle/hive-schema-3.0.0.oracle.sql
+++ b/metastore/scripts/upgrade/oracle/hive-schema-3.0.0.oracle.sql
@@ -594,7 +594,6 @@ CREATE TABLE WM_POOL
     POOL_ID bigint NOT NULL,
     RP_ID bigint NOT NULL,
     PATH nvarchar(1024) NOT NULL,
-    PARENT_POOL_ID bigint,
     ALLOC_FRACTION DOUBLE,
     QUERY_PARALLELISM int,
     SCHEDULING_POLICY nvarchar(1024)
@@ -878,8 +877,6 @@ ALTER TABLE WM_RESOURCEPLAN ADD CONSTRAINT WM_RESOURCEPLAN_FK1 FOREIGN KEY (DEFA
 
 ALTER TABLE WM_POOL ADD CONSTRAINT WM_POOL_FK1 FOREIGN KEY (RP_ID) REFERENCES WM_RESOURCEPLAN (RP_ID);
 
-ALTER TABLE WM_POOL ADD CONSTRAINT WM_POOL_FK2 FOREIGN KEY (PARENT_POOL_ID) REFERENCES WM_POOL (POOL_ID);
-
 CREATE UNIQUE INDEX UNIQUE_WM_TRIGGER ON WM_TRIGGER (RP_ID, "NAME");
 
 ALTER TABLE WM_TRIGGER ADD CONSTRAINT WM_TRIGGER_FK1 FOREIGN KEY (RP_ID) REFERENCES WM_RESOURCEPLAN (RP_ID);

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/postgres/045-HIVE-17566.postgres.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/postgres/045-HIVE-17566.postgres.sql b/metastore/scripts/upgrade/postgres/045-HIVE-17566.postgres.sql
index 07fb6b7..bd588c4 100644
--- a/metastore/scripts/upgrade/postgres/045-HIVE-17566.postgres.sql
+++ b/metastore/scripts/upgrade/postgres/045-HIVE-17566.postgres.sql
@@ -17,7 +17,6 @@ CREATE TABLE "WM_POOL" (
     "POOL_ID" bigint NOT NULL,
     "RP_ID" bigint NOT NULL,
     "PATH" character varying(1024) NOT NULL,
-    "PARENT_POOL_ID" bigint,
     "ALLOC_FRACTION" DOUBLE,
     "QUERY_PARALLELISM" integer,
     "SCHEDULING_POLICY" character varying(1024)
@@ -31,8 +30,6 @@ ALTER TABLE ONLY "WM_POOL"
 
 ALTER TABLE ONLY "WM_POOL"
     ADD CONSTRAINT "WM_POOL_FK1" FOREIGN KEY ("RP_ID") REFERENCES "WM_RESOURCEPLAN" ("RP_ID") DEFERRABLE;
-ALTER TABLE ONLY "WM_POOL"
-    ADD CONSTRAINT "WM_POOL_FK2" FOREIGN KEY ("PARENT_POOL_ID") REFERENCES "WM_POOL" ("POOL_ID") DEFERRABLE;
 
 ALTER TABLE ONLY "WM_RESOURCEPLAN"
     ADD CONSTRAINT "WM_RESOURCEPLAN_FK1" FOREIGN KEY ("DEFAULT_POOL_ID") REFERENCES "WM_POOL" ("POOL_ID") DEFERRABLE;

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/scripts/upgrade/postgres/hive-schema-3.0.0.postgres.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/postgres/hive-schema-3.0.0.postgres.sql b/metastore/scripts/upgrade/postgres/hive-schema-3.0.0.postgres.sql
index 23626c0..931d3e6 100644
--- a/metastore/scripts/upgrade/postgres/hive-schema-3.0.0.postgres.sql
+++ b/metastore/scripts/upgrade/postgres/hive-schema-3.0.0.postgres.sql
@@ -631,7 +631,6 @@ CREATE TABLE "WM_POOL" (
     "POOL_ID" bigint NOT NULL,
     "RP_ID" bigint NOT NULL,
     "PATH" character varying(1024) NOT NULL,
-    "PARENT_POOL_ID" bigint,
     "ALLOC_FRACTION" DOUBLE,
     "QUERY_PARALLELISM" integer,
     "SCHEDULING_POLICY" character varying(1024)

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index b5a9b79..4a32704 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -2724,4 +2724,59 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
     request.setResourcePlanName(resourcePlan);
     return client.get_triggers_for_resourceplan(request).getTriggers();
   }
+
+  @Override
+  public void createWMPool(WMPool pool)
+      throws NoSuchObjectException, InvalidObjectException, MetaException, TException {
+    WMCreatePoolRequest request = new WMCreatePoolRequest();
+    request.setPool(pool);
+    client.create_wm_pool(request);
+  }
+
+  @Override
+  public void alterWMPool(WMPool pool, String poolPath)
+      throws NoSuchObjectException, InvalidObjectException, MetaException, TException {
+    WMAlterPoolRequest request = new WMAlterPoolRequest();
+    request.setPool(pool);
+    request.setPoolPath(poolPath);
+    client.alter_wm_pool(request);
+  }
+
+  @Override
+  public void dropWMPool(String resourcePlanName, String poolPath)
+      throws NoSuchObjectException, MetaException, TException {
+    WMDropPoolRequest request = new WMDropPoolRequest();
+    request.setResourcePlanName(resourcePlanName);
+    request.setPoolPath(poolPath);
+    client.drop_wm_pool(request);
+  }
+
+  @Override
+  public void createOrUpdateWMMapping(WMMapping mapping, boolean isUpdate)
+      throws NoSuchObjectException, InvalidObjectException, MetaException, TException {
+    WMCreateOrUpdateMappingRequest request = new WMCreateOrUpdateMappingRequest();
+    request.setMapping(mapping);
+    request.setUpdate(isUpdate);
+    client.create_or_update_wm_mapping(request);
+  }
+
+  @Override
+  public void dropWMMapping(WMMapping mapping)
+      throws NoSuchObjectException, MetaException, TException {
+    WMDropMappingRequest request = new WMDropMappingRequest();
+    request.setMapping(mapping);
+    client.drop_wm_mapping(request);
+  }
+
+  @Override
+  public void createOrDropTriggerToPoolMapping(String resourcePlanName, String triggerName,
+      String poolPath, boolean shouldDrop) throws AlreadyExistsException, NoSuchObjectException,
+      InvalidObjectException, MetaException, TException {
+    WMCreateOrDropTriggerToPoolMappingRequest request = new WMCreateOrDropTriggerToPoolMappingRequest();
+    request.setResourcePlanName(resourcePlanName);
+    request.setTriggerName(triggerName);
+    request.setPoolPath(poolPath);
+    request.setDrop(shouldDrop);
+    client.create_or_drop_wm_trigger_to_pool_mapping(request);
+  }
 }
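
[Editor's note, not part of the patch] Each new client method above wraps one of the Thrift request types added in this commit. A usage sketch against these signatures; the plan, pool, and trigger names are illustrative, and the WMPool setters are assumed to follow the generated Thrift accessor naming (exception handling omitted; these methods throw TException and friends):

  // Sketch: create a pool, then attach an existing trigger to it.
  // "plan1", "etl.daily" and "trigger1" are made-up values.
  WMPool pool = new WMPool();
  pool.setResourcePlanName("plan1");
  pool.setPoolPath("etl.daily");
  pool.setAllocFraction(0.5);
  pool.setQueryParallelism(4);
  msClient.createWMPool(pool);  // msClient is an IMetaStoreClient

  // One entry point for both directions: shouldDrop=false attaches the
  // trigger to the pool, shouldDrop=true removes the association.
  msClient.createOrDropTriggerToPoolMapping("plan1", "trigger1", "etl.daily", false);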

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 2cb255e..0020136 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -107,6 +107,8 @@ import org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.metastore.api.WMMapping;
+import org.apache.hadoop.hive.metastore.api.WMPool;
 import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
 import org.apache.thrift.TException;
 
@@ -1802,4 +1804,23 @@ public interface IMetaStoreClient {
 
   List<WMTrigger> getTriggersForResourcePlan(String resourcePlan)
       throws NoSuchObjectException, MetaException, TException;
+
+  void createWMPool(WMPool pool)
+      throws NoSuchObjectException, InvalidObjectException, MetaException, TException;
+
+  void alterWMPool(WMPool pool, String poolPath)
+      throws NoSuchObjectException, InvalidObjectException, MetaException, TException;
+
+  void dropWMPool(String resourcePlanName, String poolPath)
+      throws NoSuchObjectException, MetaException, TException;
+
+  void createOrUpdateWMMapping(WMMapping mapping, boolean isUpdate)
+      throws NoSuchObjectException, InvalidObjectException, MetaException, TException;
+
+  void dropWMMapping(WMMapping mapping)
+      throws NoSuchObjectException, MetaException, TException;
+
+  void createOrDropTriggerToPoolMapping(String resourcePlanName, String triggerName,
+      String poolPath, boolean shouldDrop) throws AlreadyExistsException, NoSuchObjectException,
+      InvalidObjectException, MetaException, TException;
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index 95aeb25..a0a6e181 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -70,6 +70,8 @@ import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.metastore.api.WMMapping;
+import org.apache.hadoop.hive.metastore.api.WMPool;
 import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
 import org.apache.thrift.TException;
 
@@ -942,7 +944,7 @@ public class DummyRawStoreControlledCommit implements RawStore, Configurable {
 
   @Override
   public void createResourcePlan(WMResourcePlan resourcePlan, int defaultPoolSize)
-      throws AlreadyExistsException, MetaException {
+      throws AlreadyExistsException, InvalidObjectException, MetaException {
     objectStore.createResourcePlan(resourcePlan, defaultPoolSize);
   }
 
@@ -1004,4 +1006,48 @@ public class DummyRawStoreControlledCommit implements RawStore, Configurable {
       throws NoSuchObjectException, MetaException {
     return objectStore.getTriggersForResourcePlan(resourcePlanName);
   }
+
+  @Override
+  public void createPool(WMPool pool) throws AlreadyExistsException, NoSuchObjectException,
+      InvalidOperationException, MetaException {
+    objectStore.createPool(pool);
+  }
+
+  @Override
+  public void alterPool(WMPool pool, String poolPath) throws AlreadyExistsException,
+      NoSuchObjectException, InvalidOperationException, MetaException {
+    objectStore.alterPool(pool, poolPath);
+  }
+
+  @Override
+  public void dropWMPool(String resourcePlanName, String poolPath)
+      throws NoSuchObjectException, InvalidOperationException, MetaException {
+    objectStore.dropWMPool(resourcePlanName, poolPath);
+  }
+
+  @Override
+  public void createOrUpdateWMMapping(WMMapping mapping, boolean update)
+      throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException,
+      MetaException {
+    objectStore.createOrUpdateWMMapping(mapping, update);
+  }
+
+  @Override
+  public void dropWMMapping(WMMapping mapping)
+      throws NoSuchObjectException, InvalidOperationException, MetaException {
+    objectStore.dropWMMapping(mapping);
+  }
+
+  @Override
+  public void createWMTriggerToPoolMapping(String resourcePlanName, String triggerName,
+      String poolPath) throws AlreadyExistsException, NoSuchObjectException,
+      InvalidOperationException, MetaException {
+    objectStore.createWMTriggerToPoolMapping(resourcePlanName, triggerName, poolPath);
+  }
+
+  @Override
+  public void dropWMTriggerToPoolMapping(String resourcePlanName, String triggerName,
+      String poolPath) throws NoSuchObjectException, InvalidOperationException, MetaException {
+    objectStore.dropWMTriggerToPoolMapping(resourcePlanName, triggerName, poolPath);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 17640f3..4076a9f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -21,6 +21,13 @@ package org.apache.hadoop.hive.ql.exec;
 import static org.apache.commons.lang.StringUtils.join;
 import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
 
+import java.util.concurrent.ExecutionException;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.util.concurrent.ListenableFuture;
+
 import java.io.BufferedWriter;
 import java.io.DataOutputStream;
 import java.io.FileNotFoundException;
@@ -48,7 +55,6 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeMap;
 import java.util.TreeSet;
-import java.util.concurrent.ExecutionException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -104,7 +110,6 @@ import org.apache.hadoop.hive.metastore.api.TxnInfo;
 import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
 import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
 import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
 import org.apache.hadoop.hive.metastore.txn.TxnStore;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
@@ -177,6 +182,7 @@ import org.apache.hadoop.hive.ql.plan.CacheMetadataDesc;
 import org.apache.hadoop.hive.ql.plan.ColStatistics;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
+import org.apache.hadoop.hive.ql.plan.CreateOrAlterWMMappingDesc;
 import org.apache.hadoop.hive.ql.plan.CreateResourcePlanDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc;
@@ -190,6 +196,8 @@ import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
 import org.apache.hadoop.hive.ql.plan.DropResourcePlanDesc;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
+import org.apache.hadoop.hive.ql.plan.DropWMMappingDesc;
+import org.apache.hadoop.hive.ql.plan.DropWMPoolDesc;
 import org.apache.hadoop.hive.ql.plan.DropWMTriggerDesc;
 import org.apache.hadoop.hive.ql.plan.FileMergeDesc;
 import org.apache.hadoop.hive.ql.plan.GrantDesc;
@@ -232,6 +240,8 @@ import org.apache.hadoop.hive.ql.plan.TezWork;
 import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
+import org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc;
+import org.apache.hadoop.hive.ql.plan.CreateOrDropTriggerToPoolMappingDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
 import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator;
@@ -275,11 +285,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.stringtemplate.v4.ST;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.ListenableFuture;
-
 /**
  * DDLTask implementation.
  *
@@ -648,10 +653,29 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
         return dropWMTrigger(db, work.getDropWMTriggerDesc());
       }
 
+      if (work.getWmPoolDesc() != null) {
+        return createOrAlterWMPool(db, work.getWmPoolDesc());
+      }
+
+      if (work.getDropWMPoolDesc() != null) {
+        return dropWMPool(db, work.getDropWMPoolDesc());
+      }
+
+      if (work.getWmMappingDesc() != null) {
+        return createOrAlterWMMapping(db, work.getWmMappingDesc());
+      }
+
+      if (work.getDropWMMappingDesc() != null) {
+        return dropWMMapping(db, work.getDropWMMappingDesc());
+      }
+
+      if (work.getTriggerToPoolMappingDesc() != null) {
+        return createOrDropTriggerToPoolMapping(db, work.getTriggerToPoolMappingDesc());
+      }
+
       if (work.getAlterMaterializedViewDesc() != null) {
         return alterMaterializedView(db, work.getAlterMaterializedViewDesc());
       }
-
     } catch (Throwable e) {
       failed(e);
       return 1;
@@ -662,12 +686,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
 
   private int createResourcePlan(Hive db, CreateResourcePlanDesc createResourcePlanDesc)
       throws HiveException {
-    WMResourcePlan resourcePlan = new WMResourcePlan();
-    resourcePlan.setName(createResourcePlanDesc.getName());
-    if (createResourcePlanDesc.getQueryParallelism() != null) {
-      resourcePlan.setQueryParallelism(createResourcePlanDesc.getQueryParallelism());
-    }
-    db.createResourcePlan(resourcePlan);
+    db.createResourcePlan(createResourcePlanDesc.getResourcePlan());
     return 0;
   }
 
@@ -694,42 +713,29 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
 
   private int alterResourcePlan(Hive db, AlterResourcePlanDesc desc) throws HiveException {
     if (desc.shouldValidate()) {
-      return db.validateResourcePlan(desc.getRpName()) ? 0 : 1;
-    }
-
-    WMResourcePlan resourcePlan = new WMResourcePlan();
-    if (desc.getNewName() != null) {
-      resourcePlan.setName(desc.getNewName());
-    } else {
-      resourcePlan.setName(desc.getRpName());
-    }
-
-    if (desc.getQueryParallelism() != null) {
-      resourcePlan.setQueryParallelism(desc.getQueryParallelism());
-    }
-
-    if (desc.getDefaultPoolPath() != null) {
-      resourcePlan.setDefaultPoolPath(desc.getDefaultPoolPath());
+      return db.validateResourcePlan(desc.getResourcePlanName()) ? 0 : 1;
     }
 
+    WMResourcePlan resourcePlan = desc.getResourcePlan();
     final WorkloadManager wm = WorkloadManager.getInstance();
     final TezSessionPoolManager pm = TezSessionPoolManager.getInstance();
     boolean isActivate = false, isInTest = HiveConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST);
-    if (desc.getStatus() != null) {
-      resourcePlan.setStatus(desc.getStatus());
-      isActivate = desc.getStatus() == WMResourcePlanStatus.ACTIVE;
+    if (resourcePlan.getStatus() != null) {
+      resourcePlan.setStatus(resourcePlan.getStatus());
+      isActivate = resourcePlan.getStatus() == WMResourcePlanStatus.ACTIVE;
     }
 
     WMFullResourcePlan appliedRp = db.alterResourcePlan(
-      desc.getRpName(), resourcePlan, desc.isEnableActivate());
+      desc.getResourcePlanName(), resourcePlan, desc.isEnableActivate());
     if (!isActivate || (wm == null && isInTest) || (pm == null && isInTest)) {
       return 0;
     }
+
     if (appliedRp == null) {
       throw new HiveException("Cannot get a resource plan to apply");
       // TODO: shut down HS2?
     }
-    final String name = (desc.getNewName() != null) ? desc.getNewName() : desc.getRpName();
+    final String name = resourcePlan.getName();
     LOG.info("Activating a new resource plan " + name + ": " + appliedRp);
     if (wm != null) {
       // Note: as per our current constraints, the behavior of two parallel activates is
@@ -765,18 +771,12 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
   }
 
   private int createWMTrigger(Hive db, CreateWMTriggerDesc desc) throws HiveException {
-    WMTrigger trigger = new WMTrigger(desc.getRpName(), desc.getTriggerName());
-    trigger.setTriggerExpression(desc.getTriggerExpression());
-    trigger.setActionExpression(desc.getActionExpression());
-    db.createWMTrigger(trigger);
+    db.createWMTrigger(desc.getTrigger());
     return 0;
   }
 
   private int alterWMTrigger(Hive db, AlterWMTriggerDesc desc) throws HiveException {
-    WMTrigger trigger = new WMTrigger(desc.getRpName(), desc.getTriggerName());
-    trigger.setTriggerExpression(desc.getTriggerExpression());
-    trigger.setActionExpression(desc.getActionExpression());
-    db.alterWMTrigger(trigger);
+    db.alterWMTrigger(desc.getTrigger());
     return 0;
   }
 
@@ -785,6 +785,37 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     return 0;
   }
 
+  private int createOrAlterWMPool(Hive db, CreateOrAlterWMPoolDesc desc) throws HiveException {
+    if (desc.isUpdate()) {
+      db.alterWMPool(desc.getPool(), desc.getPoolPath());
+    } else {
+      db.createWMPool(desc.getPool());
+    }
+    return 0;
+  }
+
+  private int dropWMPool(Hive db, DropWMPoolDesc desc) throws HiveException {
+    db.dropWMPool(desc.getResourcePlanName(), desc.getPoolPath());
+    return 0;
+  }
+
+  private int createOrAlterWMMapping(Hive db, CreateOrAlterWMMappingDesc desc) throws HiveException {
+    db.createOrUpdateWMMapping(desc.getMapping(), desc.isUpdate());
+    return 0;
+  }
+
+  private int dropWMMapping(Hive db, DropWMMappingDesc desc) throws HiveException {
+    db.dropWMMapping(desc.getMapping());
+    return 0;
+  }
+
+  private int createOrDropTriggerToPoolMapping(Hive db, CreateOrDropTriggerToPoolMappingDesc desc)
+      throws HiveException {
+    db.createOrDropTriggerToPoolMapping(desc.getResourcePlanName(), desc.getTriggerName(),
+        desc.getPoolPath(), desc.shouldDrop());
+    return 0;
+  }
+
   private int preInsertWork(Hive db, PreInsertTableDesc preInsertTableDesc) throws HiveException {
     try{
       HiveMetaHook hook = preInsertTableDesc.getTable().getStorageHandler().getMetaHook();

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/UserPoolMapping.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/UserPoolMapping.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/UserPoolMapping.java
index cd232a0..33ee8f7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/UserPoolMapping.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/UserPoolMapping.java
@@ -99,7 +99,7 @@ class UserPoolMapping {
   }
 
   private static void addMapping(WMMapping mapping, Map<String, Mapping> map, String text) {
-    Mapping val = new Mapping(mapping.getPoolName(), mapping.getOrdering());
+    Mapping val = new Mapping(mapping.getPoolPath(), mapping.getOrdering());
     Mapping oldValue = map.put(mapping.getEntityName(), val);
     if (oldValue != null) {
       throw new AssertionError("Duplicate mapping for " + text + " " + mapping.getEntityName()
@@ -120,4 +120,4 @@ class UserPoolMapping {
     if (mapping != null) return mapping.fullPoolName;
     return defaultPoolPath;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 3e9fff1..1a37bf7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -134,6 +134,8 @@ import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest;
+import org.apache.hadoop.hive.metastore.api.WMMapping;
+import org.apache.hadoop.hive.metastore.api.WMPool;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator;
@@ -4812,4 +4814,55 @@ private void constructOneLBLocationMap(FileStatus fSta,
       throw new HiveException(e);
     }
   }
+
+  public void createWMPool(WMPool pool) throws HiveException {
+    try {
+      getMSC().createWMPool(pool);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
+  public void alterWMPool(WMPool pool, String poolPath) throws HiveException {
+    try {
+      getMSC().alterWMPool(pool, poolPath);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
+  public void dropWMPool(String resourcePlanName, String poolPath) throws HiveException {
+    try {
+      getMSC().dropWMPool(resourcePlanName, poolPath);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
+  public void createOrUpdateWMMapping(WMMapping mapping, boolean isUpdate)
+      throws HiveException {
+    try {
+      getMSC().createOrUpdateWMMapping(mapping, isUpdate);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
+  public void dropWMMapping(WMMapping mapping) throws HiveException {
+    try {
+      getMSC().dropWMMapping(mapping);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
+
+  public void createOrDropTriggerToPoolMapping(String resourcePlanName, String triggerName,
+      String poolPath, boolean shouldDrop) throws HiveException {
+    try {
+      getMSC().createOrDropTriggerToPoolMapping(resourcePlanName, triggerName, poolPath, shouldDrop);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
 };
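
[Editor's note, not part of the patch] These Hive wrappers translate any metastore failure into HiveException, which is what DDLTask expects. A sketch of the mapping calls through this facade; the entity values are illustrative, and WMMapping is assumed to expose the Thrift-generated required-field constructor and setters:

  // Sketch: route queries from user "bob" to pool "etl.daily" in "plan1".
  WMMapping mapping = new WMMapping("plan1", "USER", "bob");
  mapping.setPoolPath("etl.daily");
  mapping.setOrdering(1);
  db.createOrUpdateWMMapping(mapping, /* isUpdate */ false);  // db is a Hive instance

  // Dropping the mapping later reuses the same key fields.
  db.dropWMMapping(mapping);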

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 1fd634c..e5e1b53 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -48,7 +48,11 @@ import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
+import org.apache.hadoop.hive.metastore.api.WMMapping;
+import org.apache.hadoop.hive.metastore.api.WMPool;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
 import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
+import org.apache.hadoop.hive.metastore.api.WMTrigger;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
@@ -101,6 +105,7 @@ import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
 import org.apache.hadoop.hive.ql.plan.StatsWork;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
+import org.apache.hadoop.hive.ql.plan.CreateOrAlterWMMappingDesc;
 import org.apache.hadoop.hive.ql.plan.CreateResourcePlanDesc;
 import org.apache.hadoop.hive.ql.plan.CreateWMTriggerDesc;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
@@ -111,6 +116,8 @@ import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
 import org.apache.hadoop.hive.ql.plan.DropResourcePlanDesc;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
+import org.apache.hadoop.hive.ql.plan.DropWMMappingDesc;
+import org.apache.hadoop.hive.ql.plan.DropWMPoolDesc;
 import org.apache.hadoop.hive.ql.plan.DropWMTriggerDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
@@ -149,6 +156,8 @@ import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
+import org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc;
+import org.apache.hadoop.hive.ql.plan.CreateOrDropTriggerToPoolMappingDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -565,10 +574,10 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
    case HiveParser.TOK_CACHE_METADATA:
      analyzeCacheMetadata(ast);
      break;
-   case HiveParser.TOK_CREATERESOURCEPLAN:
+   case HiveParser.TOK_CREATE_RP:
      analyzeCreateResourcePlan(ast);
      break;
-   case HiveParser.TOK_SHOWRESOURCEPLAN:
+   case HiveParser.TOK_SHOW_RP:
      ctx.setResFile(ctx.getLocalTmpPath());
      analyzeShowResourcePlan(ast);
      break;
@@ -587,6 +596,24 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
    case HiveParser.TOK_DROP_TRIGGER:
      analyzeDropTrigger(ast);
      break;
+   case HiveParser.TOK_CREATE_POOL:
+     analyzeCreatePool(ast);
+     break;
+   case HiveParser.TOK_ALTER_POOL:
+     analyzeAlterPool(ast);
+     break;
+   case HiveParser.TOK_DROP_POOL:
+     analyzeDropPool(ast);
+     break;
+   case HiveParser.TOK_CREATE_MAPPING:
+     analyzeCreateOrAlterMapping(ast, false);
+     break;
+   case HiveParser.TOK_ALTER_MAPPING:
+     analyzeCreateOrAlterMapping(ast, true);
+     break;
+   case HiveParser.TOK_DROP_MAPPING:
+     analyzeDropMapping(ast);
+     break;
    default:
       throw new SemanticException("Unsupported command: " + ast);
     }
@@ -872,11 +899,17 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     }
     String resourcePlanName = unescapeIdentifier(ast.getChild(0).getText());
     Integer queryParallelism = null;
-    if (ast.getChildCount() > 1) {
-      queryParallelism = Integer.parseInt(ast.getChild(1).getText());
-    }
-    if (ast.getChildCount() > 2) {
-      throw new SemanticException("Invalid token in CREATE RESOURCE PLAN statement");
+    for (int i = 1; i < ast.getChildCount(); ++i) {
+      Tree child = ast.getChild(i);
+      if (child.getType() == HiveParser.TOK_QUERY_PARALLELISM) {
+        if (queryParallelism == null) {
+          queryParallelism = Integer.parseInt(child.getChild(0).getText());
+        } else {
+          throw new SemanticException("QUERY_PARALLELISM should be set only once.");
+        }
+      } else {
+        throw new SemanticException("Invalid set in create resource plan: " + child.getText());
+      }
     }
     CreateResourcePlanDesc desc = new CreateResourcePlanDesc(resourcePlanName, queryParallelism);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
@@ -901,71 +934,56 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       throw new SemanticException("Invalid syntax for ALTER RESOURCE PLAN statement");
     }
     String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    AlterResourcePlanDesc desc = null;
+    WMResourcePlan resourcePlan = new WMResourcePlan(rpName);
+    boolean isEnableActive = false;
+    boolean validate = false;
     for (int i = 1; i < ast.getChildCount(); ++i) {
       Tree child = ast.getChild(i);
       switch (child.getType()) {
       case HiveParser.TOK_VALIDATE:
-        if (desc != null) throw new SemanticException("Invalid ALTER VALIDATE command");
-        desc = AlterResourcePlanDesc.createValidatePlan(rpName);
+        validate = true;
         break;
       case HiveParser.TOK_ACTIVATE:
-        if (desc == null) {
-          desc = AlterResourcePlanDesc.createChangeStatus(rpName, WMResourcePlanStatus.ACTIVE);
-        } else if (desc.getStatus() == WMResourcePlanStatus.ENABLED) {
-          desc.setIsEnableActivate(true);
-          desc.setStatus(WMResourcePlanStatus.ACTIVE);
-        } else {
-          throw new SemanticException("Invalid ALTER ACTIVATE command");
+        if (resourcePlan.getStatus() == WMResourcePlanStatus.ENABLED) {
+          isEnableActive = true;
         }
+        resourcePlan.setStatus(WMResourcePlanStatus.ACTIVE);
         break;
       case HiveParser.TOK_ENABLE:
-        if (desc == null) {
-          desc = AlterResourcePlanDesc.createChangeStatus(rpName, WMResourcePlanStatus.ENABLED);
-        } else if (desc.getStatus() == WMResourcePlanStatus.ACTIVE) {
-          desc.setIsEnableActivate(true);
+        if (resourcePlan.getStatus() == WMResourcePlanStatus.ACTIVE) {
+          isEnableActive = true;
         } else {
-          throw new SemanticException("Invalid ALTER ENABLE command");
+          resourcePlan.setStatus(WMResourcePlanStatus.ENABLED);
         }
         break;
       case HiveParser.TOK_DISABLE:
-        if (desc != null) throw new SemanticException("Invalid ALTER DISABLE command");
-        desc = AlterResourcePlanDesc.createChangeStatus(rpName, WMResourcePlanStatus.DISABLED);
+        resourcePlan.setStatus(WMResourcePlanStatus.DISABLED);
         break;
       case HiveParser.TOK_QUERY_PARALLELISM:
         if (child.getChildCount() != 1) {
           throw new SemanticException("Expected one argument");
         }
-        if (desc == null) {
-          desc = AlterResourcePlanDesc.createSet(rpName);
-        }
-        desc.setQueryParallelism(Integer.parseInt(child.getChild(0).getText()));
+        resourcePlan.setQueryParallelism(Integer.parseInt(child.getChild(0).getText()));
         break;
       case HiveParser.TOK_DEFAULT_POOL:
         if (child.getChildCount() != 1) {
           throw new SemanticException("Expected one argument");
         }
-        if (desc == null) {
-          desc = AlterResourcePlanDesc.createSet(rpName);
-        }
-        desc.setDefaultPoolPath(child.getChild(0).getText());
+        resourcePlan.setDefaultPoolPath(poolPath(child.getChild(0)));
         break;
       case HiveParser.TOK_RENAME:
-        if (desc != null) throw new SemanticException("Invalid ALTER RENAME command");
         if (ast.getChildCount() == (i + 1)) {
           throw new SemanticException("Expected an argument");
         }
-        if (desc == null) {
-          desc = AlterResourcePlanDesc.createSet(rpName);
-        }
-        desc.setNewName(ast.getChild(++i).getText());
+        resourcePlan.setName(unescapeIdentifier(ast.getChild(++i).getText()));
         break;
       default:
         throw new SemanticException(
           "Unexpected token in alter resource plan statement: " + child.getType());
       }
     }
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+        new AlterResourcePlanDesc(resourcePlan, rpName, validate, isEnableActive)), conf));
   }
 
   private void analyzeDropResourcePlan(ASTNode ast) throws SemanticException {
@@ -987,10 +1005,12 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     String triggerExpression = buildTriggerExpression((ASTNode)ast.getChild(2));
     String actionExpression = buildTriggerActionExpression((ASTNode)ast.getChild(3));
 
-    CreateWMTriggerDesc desc =
-        new CreateWMTriggerDesc(rpName, triggerName, triggerExpression, actionExpression);
-    rootTasks.add(TaskFactory.get(
-        new DDLWork(getInputs(), getOutputs(), desc), conf));
+    WMTrigger trigger = new WMTrigger(rpName, triggerName);
+    trigger.setTriggerExpression(triggerExpression);
+    trigger.setActionExpression(actionExpression);
+
+    CreateWMTriggerDesc desc = new CreateWMTriggerDesc(trigger);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
   }
 
   private String buildTriggerExpression(ASTNode ast) throws SemanticException {
@@ -1006,11 +1026,12 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     return builder.toString();
   }
 
-  private String poolPath(ASTNode ast) {
+  private String poolPath(Tree ast) {
     StringBuilder builder = new StringBuilder();
-    builder.append(ast.getText());
+    builder.append(unescapeIdentifier(ast.getText()));
     for (int i = 0; i < ast.getChildCount(); ++i) {
-      builder.append(ast.getChild(i).getText());
+      // unescapeIdentifier leaves the DOT separators unchanged.
+      builder.append(unescapeIdentifier(ast.getChild(i).getText()));
     }
     return builder.toString();
   }
@@ -1023,7 +1044,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       if (ast.getChildCount() != 1) {
         throw new SemanticException("Invalid move to clause in trigger action.");
       }
-      String poolPath = poolPath((ASTNode)ast.getChild(0));
+      String poolPath = poolPath(ast.getChild(0));
       return "MOVE TO " + poolPath;
     default:
       throw new SemanticException("Unknown token in action clause: " + ast.getType());
@@ -1039,24 +1060,145 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     String triggerExpression = buildTriggerExpression((ASTNode)ast.getChild(2));
     String actionExpression = buildTriggerActionExpression((ASTNode)ast.getChild(3));
 
-    AlterWMTriggerDesc desc =
-        new AlterWMTriggerDesc(rpName, triggerName, triggerExpression, actionExpression);
-    rootTasks.add(TaskFactory.get(
-        new DDLWork(getInputs(), getOutputs(), desc), conf));
+    WMTrigger trigger = new WMTrigger(rpName, triggerName);
+    trigger.setTriggerExpression(triggerExpression);
+    trigger.setActionExpression(actionExpression);
+
+    AlterWMTriggerDesc desc = new AlterWMTriggerDesc(trigger);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
   }
 
   private void analyzeDropTrigger(ASTNode ast) throws SemanticException {
     if (ast.getChildCount() != 2) {
       throw new SemanticException("Invalid syntax for drop trigger.");
     }
-    String rpName = ast.getChild(0).getText();
-    String triggerName = ast.getChild(1).getText();
+    String rpName = unescapeIdentifier(ast.getChild(0).getText());
+    String triggerName = unescapeIdentifier(ast.getChild(1).getText());
 
     DropWMTriggerDesc desc = new DropWMTriggerDesc(rpName, triggerName);
     rootTasks.add(TaskFactory.get(
         new DDLWork(getInputs(), getOutputs(), desc), conf));
   }
 
+  private void analyzeCreatePool(ASTNode ast) throws SemanticException {
+    if (ast.getChildCount() != 5) {
+      throw new SemanticException("Invalid syntax for create pool.");
+    }
+    String rpName = unescapeIdentifier(ast.getChild(0).getText());
+    String poolPath = poolPath(ast.getChild(1));
+    WMPool pool = new WMPool(rpName, poolPath);
+    for (int i = 2; i < ast.getChildCount(); ++i) {
+      Tree child = ast.getChild(i);
+      if (child.getChildCount() != 1) {
+        throw new SemanticException("Expected 1 paramter for: " + child.getText());
+      }
+      String param = child.getChild(0).getText();
+      switch (child.getType()) {
+      case HiveParser.TOK_ALLOC_FRACTION:
+        pool.setAllocFraction(Double.parseDouble(param));
+        break;
+      case HiveParser.TOK_QUERY_PARALLELISM:
+        pool.setQueryParallelism(Integer.parseInt(param));
+        break;
+      case HiveParser.TOK_SCHEDULING_POLICY:
+        pool.setSchedulingPolicy(PlanUtils.stripQuotes(param));
+        break;
+      case HiveParser.TOK_PATH:
+        throw new SemanticException("Invalid parameter path in create pool");
+      }
+    }
+    if (!pool.isSetAllocFraction()) {
+      throw new SemanticException("alloc_fraction should be specified for a pool");
+    }
+    CreateOrAlterWMPoolDesc desc = new CreateOrAlterWMPoolDesc(pool, poolPath, false);
+    rootTasks.add(TaskFactory.get(
+        new DDLWork(getInputs(), getOutputs(), desc), conf));
+  }
+
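+  // Illustrative usage:
+  //   ALTER POOL rp1.pool1 SET ALLOC_FRACTION=0.7, PATH=pool2;
+  //   ALTER POOL rp1.pool1 ADD TRIGGER trig1;
+  //   ALTER POOL rp1.pool1 DROP TRIGGER trig1;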
+  private void analyzeAlterPool(ASTNode ast) throws SemanticException {
+    if (ast.getChildCount() < 3) {
+      throw new SemanticException("Invalid syntax for alter pool.");
+    }
+    String rpName = unescapeIdentifier(ast.getChild(0).getText());
+    String poolPath = poolPath(ast.getChild(1));
+    WMPool pool = new WMPool(rpName, poolPath);
+
+    for (int i = 2; i < ast.getChildCount(); ++i) {
+      Tree child = ast.getChild(i);
+      if (child.getChildCount() != 1) {
+        throw new SemanticException("Invalid syntax in alter pool expected parameter.");
+      }
+      Tree param = child.getChild(0);
+      switch (child.getType()) {
+        case HiveParser.TOK_ALLOC_FRACTION:
+          pool.setAllocFraction(Double.parseDouble(param.getText()));
+          break;
+        case HiveParser.TOK_QUERY_PARALLELISM:
+          pool.setQueryParallelism(Integer.parseInt(param.getText()));
+          break;
+        case HiveParser.TOK_SCHEDULING_POLICY:
+          pool.setSchedulingPolicy(PlanUtils.stripQuotes(param.getText()));
+          break;
+        case HiveParser.TOK_PATH:
+          pool.setPoolPath(poolPath(param));
+          break;
+        case HiveParser.TOK_ADD_TRIGGER:
+        case HiveParser.TOK_DROP_TRIGGER:
+          boolean drop = child.getType() == HiveParser.TOK_DROP_TRIGGER;
+          String triggerName = unescapeIdentifier(param.getText());
+          rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+              new CreateOrDropTriggerToPoolMappingDesc(rpName, triggerName, poolPath, drop)),
+              conf));
+          break;
+      }
+    }
+
+    CreateOrAlterWMPoolDesc desc = new CreateOrAlterWMPoolDesc(pool, poolPath, true);
+    rootTasks.add(TaskFactory.get(
+        new DDLWork(getInputs(), getOutputs(), desc), conf));
+  }
+
+  private void analyzeDropPool(ASTNode ast) throws SemanticException {
+    if (ast.getChildCount() != 2) {
+      throw new SemanticException("Invalid syntax for drop pool.");
+    }
+    String rpName = unescapeIdentifier(ast.getChild(0).getText());
+    String poolPath = poolPath(ast.getChild(1));
+
+    DropWMPoolDesc desc = new DropWMPoolDesc(rpName, poolPath);
+    rootTasks.add(TaskFactory.get(
+        new DDLWork(getInputs(), getOutputs(), desc), conf));
+  }
+
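+  // Illustrative usage; entity names are quoted strings:
+  //   CREATE USER MAPPING 'bob' IN rp1 TO pool1 WITH ORDER 1;
+  //   ALTER GROUP MAPPING 'admins' IN rp1 TO pool1;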
+  private void analyzeCreateOrAlterMapping(ASTNode ast, boolean update) throws SemanticException {
+    if (ast.getChildCount() < 4) {
+      throw new SemanticException("Invalid syntax for create or alter mapping.");
+    }
+    String rpName = unescapeIdentifier(ast.getChild(0).getText());
+    String entityType = ast.getChild(1).getText();
+    String entityName = PlanUtils.stripQuotes(ast.getChild(2).getText());
+    WMMapping mapping = new WMMapping(rpName, entityType, entityName);
+    mapping.setPoolPath(poolPath(ast.getChild(3)));
+    if (ast.getChildCount() == 5) {
+      mapping.setOrdering(Integer.valueOf(ast.getChild(4).getText()));
+    }
+
+    CreateOrAlterWMMappingDesc desc = new CreateOrAlterWMMappingDesc(mapping, update);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
+  }
+
+  private void analyzeDropMapping(ASTNode ast) throws SemanticException {
+    if (ast.getChildCount() != 3) {
+      throw new SemanticException("Invalid syntax for drop mapping.");
+    }
+    String rpName = unescapeIdentifier(ast.getChild(0).getText());
+    String entityType = ast.getChild(1).getText();
+    String entityName = PlanUtils.stripQuotes(ast.getChild(2).getText());
+
+    DropWMMappingDesc desc = new DropWMMappingDesc(new WMMapping(rpName, entityType, entityName));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
+  }
+
   private void analyzeCreateDatabase(ASTNode ast) throws SemanticException {
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
     boolean ifNotExists = false;

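For reference, the rewritten analyzer methods above accept statements of the
following shapes (names illustrative; the grammar is defined in
ResourcePlanParser.g below):

  ALTER RESOURCE PLAN rp1 VALIDATE;
  ALTER RESOURCE PLAN rp1 DISABLE;
  ALTER RESOURCE PLAN rp1 ENABLE ACTIVATE;
  ALTER RESOURCE PLAN rp1 SET QUERY_PARALLELISM=4, DEFAULT POOL = pool1;
  ALTER RESOURCE PLAN rp1 RENAME TO rp2;
  DROP POOL rp1.pool1;
  DROP USER MAPPING 'bob' IN rp1;
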
http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index d61fce9..1dcfe9d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -362,6 +362,10 @@ KW_DEFAULT: 'DEFAULT';
 KW_POOL: 'POOL';
 KW_MOVE: 'MOVE';
 KW_DO: 'DO';
+KW_ALLOC_FRACTION: 'ALLOC_FRACTION';
+KW_SCHEDULING_POLICY: 'SCHEDULING_POLICY';
+KW_PATH: 'PATH';
+KW_MAPPING: 'MAPPING';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index e99d5fb..1378950 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -24,7 +24,7 @@ ASTLabelType=ASTNode;
 backtrack=false;
 k=3;
 }
-import SelectClauseParser, FromClauseParser, IdentifiersParser;
+import SelectClauseParser, FromClauseParser, IdentifiersParser, ResourcePlanParser;
 
 tokens {
 TOK_INSERT;
@@ -409,8 +409,8 @@ TOK_EXPRESSION;
 TOK_DETAIL;
 TOK_BLOCKING;
 TOK_KILL_QUERY;
-TOK_CREATERESOURCEPLAN;
-TOK_SHOWRESOURCEPLAN;
+TOK_CREATE_RP;
+TOK_SHOW_RP;
 TOK_ALTER_RP;
 TOK_DROP_RP;
 TOK_VALIDATE;
@@ -422,6 +422,16 @@ TOK_CREATE_TRIGGER;
 TOK_ALTER_TRIGGER;
 TOK_DROP_TRIGGER;
 TOK_TRIGGER_EXPRESSION;
+TOK_CREATE_POOL;
+TOK_ALTER_POOL;
+TOK_DROP_POOL;
+TOK_ALLOC_FRACTION;
+TOK_SCHEDULING_POLICY;
+TOK_PATH;
+TOK_CREATE_MAPPING;
+TOK_ALTER_MAPPING;
+TOK_DROP_MAPPING;
+TOK_ADD_TRIGGER;
 }
 
 
@@ -603,6 +613,9 @@ import org.apache.hadoop.hive.conf.HiveConf;
     xlateMap.put("KW_POOL", "POOL");
     xlateMap.put("KW_MOVE", "MOVE");
     xlateMap.put("KW_DO", "DO");
+    xlateMap.put("KW_ALLOC_FRACTION", "ALLOC_FRACTION");
+    xlateMap.put("KW_SCHEDULING_POLICY", "SCHEDULING_POLICY");
+    xlateMap.put("KW_PATH", "PATH");
 
     // Operators
     xlateMap.put("DOT", ".");
@@ -941,12 +954,7 @@ ddlStatement
     | showCurrentRole
     | abortTransactionStatement
     | killQueryStatement
-    | createResourcePlanStatement
-    | alterResourcePlanStatement
-    | dropResourcePlanStatement
-    | createTriggerStatement
-    | alterTriggerStatement
-    | dropTriggerStatement
+    | resourcePlanDdlStatements
     ;
 
 ifExists
@@ -1000,129 +1008,6 @@ orReplace
     -> ^(TOK_ORREPLACE)
     ;
 
-createResourcePlanStatement
-@init { pushMsg("create resource plan statement", state); }
-@after { popMsg(state); }
-    : KW_CREATE KW_RESOURCE KW_PLAN
-        name=identifier
-        (KW_WITH KW_QUERY_PARALLELISM parallelism=Number)?
-    -> ^(TOK_CREATERESOURCEPLAN $name $parallelism?)
-    ;
-
-alterRpSet
-@init { pushMsg("alterRpSet", state); }
-@after { popMsg(state); }
-  : (
-     (KW_QUERY_PARALLELISM EQUAL parallelism=Number -> ^(TOK_QUERY_PARALLELISM $parallelism))
-   | (KW_DEFAULT KW_POOL EQUAL poolName=StringLiteral -> ^(TOK_DEFAULT_POOL $poolName))
-    )
-  ;
-
-alterRpSetList
-@init { pushMsg("alterRpSetList", state); }
-@after { popMsg(state); }
-  :
-  alterRpSet (COMMA alterRpSet)* -> alterRpSet+
-  ;
-
-activate : KW_ACTIVATE -> ^(TOK_ACTIVATE);
-enable : KW_ENABLE -> ^(TOK_ENABLE);
-
-alterResourcePlanStatement
-@init { pushMsg("alter resource plan statement", state); }
-@after { popMsg(state); }
-    : KW_ALTER KW_RESOURCE KW_PLAN name=identifier (
-          (KW_VALIDATE -> ^(TOK_ALTER_RP $name TOK_VALIDATE))
-        | (KW_DISABLE -> ^(TOK_ALTER_RP $name TOK_DISABLE))
-        | (KW_SET setList=alterRpSetList -> ^(TOK_ALTER_RP $name $setList))
-        | (KW_RENAME KW_TO newName=identifier
-           -> ^(TOK_ALTER_RP $name TOK_RENAME $newName))
-        | ((activate+ enable? | enable+ activate?) -> ^(TOK_ALTER_RP $name activate? enable?))
-      )
-    ;
-
-dropResourcePlanStatement
-@init { pushMsg("drop resource plan statement", state); }
-@after { popMsg(state); }
-    : KW_DROP KW_RESOURCE KW_PLAN name=identifier
-    -> ^(TOK_DROP_RP $name)
-    ;
-
-poolPath
-@init { pushMsg("poolPath", state); }
-@after { popMsg(state); }
-    : identifier^ (DOT identifier)*
-    ;
-
-triggerExpression
-@init { pushMsg("triggerExpression", state); }
-@after { popMsg(state); }
-    : triggerOrExpression -> ^(TOK_TRIGGER_EXPRESSION triggerOrExpression)
-    ;
-
-triggerOrExpression
-@init { pushMsg("triggerOrExpression", state); }
-@after { popMsg(state); }
-    : triggerAndExpression (KW_OR triggerAndExpression)*
-    ;
-
-triggerAndExpression
-@init { pushMsg("triggerAndExpression", state); }
-@after { popMsg(state); }
-    : triggerAtomExpression (KW_AND triggerAtomExpression)*
-    ;
-
-triggerAtomExpression
-@init { pushMsg("triggerAtomExpression", state); }
-@after { popMsg(state); }
-    : (identifier comparisionOperator triggerLiteral)
-    | (LPAREN triggerOrExpression RPAREN)
-    ;
-
-triggerLiteral
-@init { pushMsg("triggerLiteral", state); }
-@after { popMsg(state); }
-    : (Number (KW_HOUR|KW_MINUTE|KW_SECOND)?)
-    | ByteLengthLiteral
-    | StringLiteral
-    ;
-
-comparisionOperator
-@init { pushMsg("comparisionOperator", state); }
-@after { popMsg(state); }
-    : EQUAL | LESSTHAN | LESSTHANOREQUALTO | GREATERTHAN | GREATERTHANOREQUALTO
-    ;
-
-triggerActionExpression
-@init { pushMsg("triggerActionExpression", state); }
-@after { popMsg(state); }
-    : KW_KILL
-    | (KW_MOVE^ KW_TO! poolPath)
-    ;
-
-createTriggerStatement
-@init { pushMsg("create trigger statement", state); }
-@after { popMsg(state); }
-    : KW_CREATE KW_TRIGGER rpName=identifier DOT triggerName=identifier
-      KW_WHEN triggerExpression KW_DO triggerActionExpression
-    -> ^(TOK_CREATE_TRIGGER $rpName $triggerName triggerExpression triggerActionExpression)
-    ;
-
-alterTriggerStatement
-@init { pushMsg("alter trigger statement", state); }
-@after { popMsg(state); }
-    : KW_ALTER KW_TRIGGER rpName=identifier DOT triggerName=identifier
-      KW_WHEN triggerExpression KW_DO triggerActionExpression
-    -> ^(TOK_ALTER_TRIGGER $rpName $triggerName triggerExpression triggerActionExpression)
-    ;
-
-dropTriggerStatement
-@init { pushMsg("drop trigger statement", state); }
-@after { popMsg(state); }
-    : KW_DROP KW_TRIGGER rpName=identifier DOT triggerName=identifier
-    -> ^(TOK_DROP_TRIGGER $rpName $triggerName)
-    ;
-
 createDatabaseStatement
 @init { pushMsg("create database statement", state); }
 @after { popMsg(state); }
@@ -1775,8 +1660,8 @@ showStatement
     | KW_SHOW KW_CONF StringLiteral -> ^(TOK_SHOWCONF StringLiteral)
     | KW_SHOW KW_RESOURCE
       (
-        (KW_PLAN rp_name=identifier -> ^(TOK_SHOWRESOURCEPLAN $rp_name))
-        | (KW_PLANS -> ^(TOK_SHOWRESOURCEPLAN))
+        (KW_PLAN rp_name=identifier -> ^(TOK_SHOW_RP $rp_name))
+        | (KW_PLANS -> ^(TOK_SHOW_RP))
       )
     ;
 

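The SHOW syntax is unchanged apart from the token rename, e.g. (illustrative):

  SHOW RESOURCE PLANS;
  SHOW RESOURCE PLAN rp1;
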
http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index a0eca4b..f1ca301 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -830,8 +830,8 @@ nonReserved
     | KW_ZONE
     | KW_TIMESTAMPTZ
     | KW_DEFAULT
-    | KW_POOL
-
+    | KW_RESOURCE | KW_PLAN | KW_PLANS | KW_QUERY_PARALLELISM | KW_ACTIVATE | KW_MOVE | KW_DO
+    | KW_POOL | KW_ALLOC_FRACTION | KW_SCHEDULING_POLICY | KW_PATH | KW_MAPPING
 ;
 
 //The following SQL2011 reserved keywords are used as function name only, but not as identifiers.

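Because the new keywords are added to nonReserved, they remain usable as plain
identifiers, e.g. (illustrative):

  CREATE TABLE t1 (pool STRING, path STRING, mapping STRING);
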
http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g
new file mode 100644
index 0000000..95c8725
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g
@@ -0,0 +1,230 @@
+/**
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*/
+parser grammar ResourcePlanParser;
+
+options
+{
+  output=AST;
+  ASTLabelType=ASTNode;
+  backtrack=false;
+  k=3;
+}
+
+resourcePlanDdlStatements
+    : createResourcePlanStatement
+    | alterResourcePlanStatement
+    | dropResourcePlanStatement
+    | createTriggerStatement
+    | alterTriggerStatement
+    | dropTriggerStatement
+    | createPoolStatement
+    | alterPoolStatement
+    | dropPoolStatement
+    | createMappingStatement
+    | alterMappingStatement
+    | dropMappingStatement
+    ;
+
+rpAssign
+@init { gParent.pushMsg("rpAssign", state); }
+@after { gParent.popMsg(state); }
+  : (
+      (KW_QUERY_PARALLELISM EQUAL parallelism=Number) -> ^(TOK_QUERY_PARALLELISM $parallelism)
+    | (KW_DEFAULT KW_POOL EQUAL poolPath) -> ^(TOK_DEFAULT_POOL poolPath)
+    )
+  ;
+
+rpAssignList
+@init { gParent.pushMsg("rpAssignList", state); }
+@after { gParent.popMsg(state); }
+  : rpAssign (COMMA rpAssign)* -> rpAssign+
+  ;
+
+createResourcePlanStatement
+@init { gParent.pushMsg("create resource plan statement", state); }
+@after { gParent.popMsg(state); }
+    : KW_CREATE KW_RESOURCE KW_PLAN name=identifier (KW_WITH rpAssignList)?
+    -> ^(TOK_CREATE_RP $name rpAssignList?)
+    ;
+
+activate : KW_ACTIVATE -> ^(TOK_ACTIVATE);
+enable : KW_ENABLE -> ^(TOK_ENABLE);
+
+alterResourcePlanStatement
+@init { gParent.pushMsg("alter resource plan statement", state); }
+@after { gParent.popMsg(state); }
+    : KW_ALTER KW_RESOURCE KW_PLAN name=identifier (
+          (KW_VALIDATE -> ^(TOK_ALTER_RP $name TOK_VALIDATE))
+        | (KW_DISABLE -> ^(TOK_ALTER_RP $name TOK_DISABLE))
+        | (KW_SET rpAssignList -> ^(TOK_ALTER_RP $name rpAssignList))
+        | (KW_RENAME KW_TO newName=identifier -> ^(TOK_ALTER_RP $name TOK_RENAME $newName))
+        | ((activate enable? | enable activate?) -> ^(TOK_ALTER_RP $name activate? enable?))
+      )
+    ;
+
+dropResourcePlanStatement
+@init { gParent.pushMsg("drop resource plan statement", state); }
+@after { gParent.popMsg(state); }
+    : KW_DROP KW_RESOURCE KW_PLAN name=identifier -> ^(TOK_DROP_RP $name)
+    ;
+
+poolPath
+@init { gParent.pushMsg("poolPath", state); }
+@after { gParent.popMsg(state); }
+    : identifier^ (DOT identifier)*
+    ;
+
+triggerExpression
+@init { gParent.pushMsg("triggerExpression", state); }
+@after { gParent.popMsg(state); }
+    : triggerOrExpression -> ^(TOK_TRIGGER_EXPRESSION triggerOrExpression)
+    ;
+
+triggerOrExpression
+@init { gParent.pushMsg("triggerOrExpression", state); }
+@after { gParent.popMsg(state); }
+    : triggerAndExpression (KW_OR triggerAndExpression)*
+    ;
+
+triggerAndExpression
+@init { gParent.pushMsg("triggerAndExpression", state); }
+@after { gParent.popMsg(state); }
+    : triggerAtomExpression (KW_AND triggerAtomExpression)*
+    ;
+
+triggerAtomExpression
+@init { gParent.pushMsg("triggerAtomExpression", state); }
+@after { gParent.popMsg(state); }
+    : (identifier comparisonOperator triggerLiteral)
+    | (LPAREN triggerOrExpression RPAREN)
+    ;
+
+triggerLiteral
+@init { gParent.pushMsg("triggerLiteral", state); }
+@after { gParent.popMsg(state); }
+    : (Number (KW_HOUR|KW_MINUTE|KW_SECOND)?)
+    | ByteLengthLiteral
+    | StringLiteral
+    ;
+
+comparisonOperator
+@init { gParent.pushMsg("comparisonOperator", state); }
+@after { gParent.popMsg(state); }
+    : EQUAL | LESSTHAN | LESSTHANOREQUALTO | GREATERTHAN | GREATERTHANOREQUALTO
+    ;
+
+triggerActionExpression
+@init { gParent.pushMsg("triggerActionExpression", state); }
+@after { gParent.popMsg(state); }
+    : KW_KILL
+    | (KW_MOVE^ KW_TO! poolPath)
+    ;
+
+createTriggerStatement
+@init { gParent.pushMsg("create trigger statement", state); }
+@after { gParent.popMsg(state); }
+    : KW_CREATE KW_TRIGGER rpName=identifier DOT triggerName=identifier
+      KW_WHEN triggerExpression KW_DO triggerActionExpression
+    -> ^(TOK_CREATE_TRIGGER $rpName $triggerName triggerExpression triggerActionExpression)
+    ;
+
+alterTriggerStatement
+@init { gParent.pushMsg("alter trigger statement", state); }
+@after { gParent.popMsg(state); }
+    : KW_ALTER KW_TRIGGER rpName=identifier DOT triggerName=identifier
+      KW_WHEN triggerExpression KW_DO triggerActionExpression
+    -> ^(TOK_ALTER_TRIGGER $rpName $triggerName triggerExpression triggerActionExpression)
+    ;
+
+dropTriggerStatement
+@init { gParent.pushMsg("drop trigger statement", state); }
+@after { gParent.popMsg(state); }
+    : KW_DROP KW_TRIGGER rpName=identifier DOT triggerName=identifier
+    -> ^(TOK_DROP_TRIGGER $rpName $triggerName)
+    ;
+
+poolAssign
+@init { gParent.pushMsg("poolAssign", state); }
+@after { gParent.popMsg(state); }
+    : (
+        (KW_ALLOC_FRACTION EQUAL allocFraction=Number) -> ^(TOK_ALLOC_FRACTION $allocFraction)
+      | (KW_QUERY_PARALLELISM EQUAL parallelism=Number) -> ^(TOK_QUERY_PARALLELISM $parallelism)
+      | (KW_SCHEDULING_POLICY EQUAL policy=StringLiteral) -> ^(TOK_SCHEDULING_POLICY $policy)
+      | (KW_PATH EQUAL path=poolPath) -> ^(TOK_PATH $path)
+      )
+    ;
+
+poolAssignList
+@init { gParent.pushMsg("poolAssignList", state); }
+@after { gParent.popMsg(state); }
+    : poolAssign (COMMA poolAssign)* -> poolAssign+
+    ;
+
+createPoolStatement
+@init { gParent.pushMsg("create pool statement", state); }
+@after { gParent.popMsg(state); }
+    : KW_CREATE KW_POOL rpName=identifier DOT poolPath
+      KW_WITH poolAssignList
+    -> ^(TOK_CREATE_POOL $rpName poolPath poolAssignList)
+    ;
+
+alterPoolStatement
+@init { gParent.pushMsg("alter pool statement", state); }
+@after { gParent.popMsg(state); }
+    : KW_ALTER KW_POOL rpName=identifier DOT poolPath (
+        (KW_SET poolAssignList -> ^(TOK_ALTER_POOL $rpName poolPath poolAssignList))
+        | (KW_ADD KW_TRIGGER triggerName=identifier
+            -> ^(TOK_ALTER_POOL $rpName poolPath ^(TOK_ADD_TRIGGER $triggerName)))
+        | (KW_DROP KW_TRIGGER triggerName=identifier
+            -> ^(TOK_ALTER_POOL $rpName poolPath ^(TOK_DROP_TRIGGER $triggerName)))
+      )
+    ;
+
+dropPoolStatement
+@init { gParent.pushMsg("drop pool statement", state); }
+@after { gParent.popMsg(state); }
+    : KW_DROP KW_POOL rpName=identifier DOT poolPath
+    -> ^(TOK_DROP_POOL $rpName poolPath)
+    ;
+
+createMappingStatement
+@init { gParent.pushMsg("create mapping statement", state); }
+@after { gParent.popMsg(state); }
+    : (KW_CREATE mappingType=(KW_USER | KW_GROUP)
+         KW_MAPPING name=StringLiteral
+         KW_IN rpName=identifier KW_TO poolPath
+         (KW_WITH KW_ORDER order=Number)?)
+    -> ^(TOK_CREATE_MAPPING $rpName $mappingType $name poolPath $order?)
+    ;
+
+alterMappingStatement
+@init { gParent.pushMsg("alter mapping statement", state); }
+@after { gParent.popMsg(state); }
+    : (KW_ALTER mappingType=(KW_USER | KW_GROUP)
+         KW_MAPPING name=StringLiteral
+         KW_IN rpName=identifier KW_TO poolPath
+         (KW_WITH KW_ORDER order=Number)?)
+    -> ^(TOK_ALTER_MAPPING $rpName $mappingType $name poolPath $order?)
+    ;
+
+dropMappingStatement
+@init { gParent.pushMsg("drop mapping statement", state); }
+@after { gParent.popMsg(state); }
+    : KW_DROP mappingType=(KW_USER | KW_GROUP) KW_MAPPING
+         name=StringLiteral KW_IN rpName=identifier
+    -> ^(TOK_DROP_MAPPING $rpName $mappingType $name)
+    ;

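Putting the trigger rules together, statements of the following shapes parse
(counter names and thresholds illustrative; any identifier is accepted as a
counter at this stage):

  CREATE TRIGGER rp1.trig1 WHEN ELAPSED_TIME > 30 SECOND DO KILL;
  ALTER TRIGGER rp1.trig1 WHEN BYTES_READ > '1GB' OR ELAPSED_TIME > 1 HOUR DO MOVE TO pool1;
  DROP TRIGGER rp1.trig1;
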
http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index a3b3287..a25e78c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -137,13 +137,19 @@ public final class SemanticAnalyzerFactory {
     commandType.put(HiveParser.TOK_REPL_LOAD, HiveOperation.REPLLOAD);
     commandType.put(HiveParser.TOK_REPL_STATUS, HiveOperation.REPLSTATUS);
     commandType.put(HiveParser.TOK_KILL_QUERY, HiveOperation.KILL_QUERY);
-    commandType.put(HiveParser.TOK_CREATERESOURCEPLAN, HiveOperation.CREATE_RESOURCEPLAN);
-    commandType.put(HiveParser.TOK_SHOWRESOURCEPLAN, HiveOperation.SHOW_RESOURCEPLAN);
+    commandType.put(HiveParser.TOK_CREATE_RP, HiveOperation.CREATE_RESOURCEPLAN);
+    commandType.put(HiveParser.TOK_SHOW_RP, HiveOperation.SHOW_RESOURCEPLAN);
     commandType.put(HiveParser.TOK_ALTER_RP, HiveOperation.ALTER_RESOURCEPLAN);
     commandType.put(HiveParser.TOK_DROP_RP, HiveOperation.DROP_RESOURCEPLAN);
     commandType.put(HiveParser.TOK_CREATE_TRIGGER, HiveOperation.CREATE_TRIGGER);
     commandType.put(HiveParser.TOK_ALTER_TRIGGER, HiveOperation.ALTER_TRIGGER);
     commandType.put(HiveParser.TOK_DROP_TRIGGER, HiveOperation.DROP_TRIGGER);
+    commandType.put(HiveParser.TOK_CREATE_POOL, HiveOperation.CREATE_POOL);
+    commandType.put(HiveParser.TOK_ALTER_POOL, HiveOperation.ALTER_POOL);
+    commandType.put(HiveParser.TOK_DROP_POOL, HiveOperation.DROP_POOL);
+    commandType.put(HiveParser.TOK_CREATE_MAPPING, HiveOperation.CREATE_MAPPING);
+    commandType.put(HiveParser.TOK_ALTER_MAPPING, HiveOperation.ALTER_MAPPING);
+    commandType.put(HiveParser.TOK_DROP_MAPPING, HiveOperation.DROP_MAPPING);
   }
 
   static {
@@ -337,13 +343,19 @@ public final class SemanticAnalyzerFactory {
       case HiveParser.TOK_SHOW_SET_ROLE:
       case HiveParser.TOK_CACHE_METADATA:
       case HiveParser.TOK_KILL_QUERY:
-      case HiveParser.TOK_CREATERESOURCEPLAN:
-      case HiveParser.TOK_SHOWRESOURCEPLAN:
+      case HiveParser.TOK_CREATE_RP:
+      case HiveParser.TOK_SHOW_RP:
       case HiveParser.TOK_ALTER_RP:
       case HiveParser.TOK_DROP_RP:
       case HiveParser.TOK_CREATE_TRIGGER:
       case HiveParser.TOK_ALTER_TRIGGER:
       case HiveParser.TOK_DROP_TRIGGER:
+      case HiveParser.TOK_CREATE_POOL:
+      case HiveParser.TOK_ALTER_POOL:
+      case HiveParser.TOK_DROP_POOL:
+      case HiveParser.TOK_CREATE_MAPPING:
+      case HiveParser.TOK_ALTER_MAPPING:
+      case HiveParser.TOK_DROP_MAPPING:
         return new DDLSemanticAnalyzer(queryState);
 
       case HiveParser.TOK_CREATEFUNCTION:

