ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jonathanhur...@apache.org
Subject git commit: AMBARI-6717 - Alerts: Add alert schema tables for Oracle, MySQL, PostgreSQL remote (jonathanhurley)
Date Mon, 04 Aug 2014 17:19:22 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk d072327ef -> 5423110a3


AMBARI-6717 - Alerts: Add alert schema tables for Oracle, MySQL, PostgreSQL remote (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5423110a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5423110a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5423110a

Branch: refs/heads/trunk
Commit: 5423110a38818b6fea6527dfe6a2712e4993a258
Parents: d072327
Author: Jonathan Hurley <jhurley@hortonworks.com>
Authored: Fri Aug 1 15:48:52 2014 -0400
Committer: Jonathan Hurley <jhurley@hortonworks.com>
Committed: Mon Aug 4 13:14:18 2014 -0400

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog170.java       | 329 +++++++++++++++----
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  | 110 ++++++-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql | 105 ++++++
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   2 +-
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   2 +-
 .../server/upgrade/UpgradeCatalog170Test.java   |  79 +++--
 6 files changed, 528 insertions(+), 99 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5423110a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index c13b742..9fbed00 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -28,11 +28,9 @@ import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.controller.ConfigurationRequest;
-import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
@@ -50,6 +48,15 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
   private static final String CONTENT_FIELD_NAME = "content";
   private static final String ENV_CONFIGS_POSTFIX = "-env";
 
+  private static final String ALERT_TABLE_DEFINITION = "alert_definition";
+  private static final String ALERT_TABLE_HISTORY = "alert_history";
+  private static final String ALERT_TABLE_CURRENT = "alert_current";
+  private static final String ALERT_TABLE_GROUP = "alert_group";
+  private static final String ALERT_TABLE_TARGET = "alert_target";
+  private static final String ALERT_TABLE_GROUP_TARGET = "alert_group_target";
+  private static final String ALERT_TABLE_GROUPING = "alert_grouping";
+  private static final String ALERT_TABLE_NOTICE = "alert_notice";
+
   //SourceVersion is only for book-keeping purpos
   @Override
   public String getSourceVersion() {
@@ -66,7 +73,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
    */
   private static final Logger LOG = LoggerFactory.getLogger
       (UpgradeCatalog170.class);
-  
+
   // ----- Constructors ------------------------------------------------------
 
   @Inject
@@ -80,15 +87,15 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
 
   @Override
   protected void executeDDLUpdates() throws AmbariException, SQLException {
-    // !!! TODO: alerting DDL upgrade
-
-    List<DBAccessor.DBColumnInfo> columns;
+    List<DBColumnInfo> columns;
     String dbType = getDbType();
 
     // add admin tables and initial values prior to adding referencing columns on existing
tables
-    columns = new ArrayList<DBAccessor.DBColumnInfo>();
-    columns.add(new DBAccessor.DBColumnInfo("principal_type_id", Integer.class, 1, null,
false));
-    columns.add(new DBAccessor.DBColumnInfo("principal_type_name", String.class, null, null,
false));
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("principal_type_id", Integer.class, 1, null,
+        false));
+    columns.add(new DBColumnInfo("principal_type_name", String.class, null,
+        null, false));
 
     dbAccessor.createTable("adminprincipaltype", columns, "principal_type_id");
 
@@ -97,18 +104,21 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
         "  union all\n" +
         "  select 2, 'GROUP'", true);
 
-    columns = new ArrayList<DBAccessor.DBColumnInfo>();
-    columns.add(new DBAccessor.DBColumnInfo("principal_id", Long.class, null, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("principal_type_id", Integer.class, 1, null,
false));
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("principal_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("principal_type_id", Integer.class, 1, null,
+        false));
 
     dbAccessor.createTable("adminprincipal", columns, "principal_id");
 
     dbAccessor.executeQuery("insert into adminprincipal (principal_id, principal_type_id)\n"
+
         "  select 1, 1", true);
 
-    columns = new ArrayList<DBAccessor.DBColumnInfo>();
-    columns.add(new DBAccessor.DBColumnInfo("resource_type_id", Integer.class, 1, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("resource_type_name", String.class, null, null,
false));
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("resource_type_id", Integer.class, 1, null,
+        false));
+    columns.add(new DBColumnInfo("resource_type_name", String.class, null,
+        null, false));
 
     dbAccessor.createTable("adminresourcetype", columns, "resource_type_id");
 
@@ -119,19 +129,22 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
         "  union all\n" +
         "  select 3, 'VIEW'", true);
 
-    columns = new ArrayList<DBAccessor.DBColumnInfo>();
-    columns.add(new DBAccessor.DBColumnInfo("resource_id", Long.class, null, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("resource_type_id", Integer.class, 1, null, false));
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("resource_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("resource_type_id", Integer.class, 1, null,
+        false));
 
     dbAccessor.createTable("adminresource", columns, "resource_id");
 
     dbAccessor.executeQuery("insert into adminresource (resource_id, resource_type_id)\n"
+
         "  select 1, 1", true);
 
-    columns = new ArrayList<DBAccessor.DBColumnInfo>();
-    columns.add(new DBAccessor.DBColumnInfo("permission_id", Long.class, null, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("permission_name", String.class, null, null,
false));
-    columns.add(new DBAccessor.DBColumnInfo("resource_type_id", Integer.class, 1, null, false));
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("permission_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("permission_name", String.class, null, null,
+        false));
+    columns.add(new DBColumnInfo("resource_type_id", Integer.class, 1, null,
+        false));
 
     dbAccessor.createTable("adminpermission", columns, "permission_id");
 
@@ -144,11 +157,11 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
         "  union all\n" +
         "  select 4, 'VIEW.USE', 3", true);
 
-    columns = new ArrayList<DBAccessor.DBColumnInfo>();
-    columns.add(new DBAccessor.DBColumnInfo("privilege_id", Long.class, null, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("permission_id", Long.class, null, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("resource_id", Long.class, null, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("principal_id", Long.class, null, null, false));
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("privilege_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("permission_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("resource_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("principal_id", Long.class, null, null, false));
 
     dbAccessor.createTable("adminprivilege", columns, "privilege_id");
 
@@ -156,28 +169,30 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
         "  select 1, 1, 1, injector1", true);
 
 
-    DBAccessor.DBColumnInfo clusterConfigAttributesColumn = new DBAccessor.DBColumnInfo(
+    DBColumnInfo clusterConfigAttributesColumn = new DBColumnInfo(
         "config_attributes", String.class, 32000, null, true);
     dbAccessor.addColumn("clusterconfig", clusterConfigAttributesColumn);
 
     // Add columns
-    dbAccessor.addColumn("viewmain", new DBAccessor.DBColumnInfo("mask",
+    dbAccessor.addColumn("viewmain", new DBColumnInfo("mask",
       String.class, 255, null, true));
-    dbAccessor.addColumn("viewparameter", new DBAccessor.DBColumnInfo("masked",
+    dbAccessor.addColumn("viewparameter", new DBColumnInfo("masked",
       Character.class, 1, null, true));
-    dbAccessor.addColumn("users", new DBAccessor.DBColumnInfo("active",
+    dbAccessor.addColumn("users", new DBColumnInfo("active",
       Integer.class, 1, 1, false));
-    dbAccessor.addColumn("users", new DBAccessor.DBColumnInfo("principal_id",
+    dbAccessor.addColumn("users", new DBColumnInfo("principal_id",
         Long.class, 1, 1, false));
-    dbAccessor.addColumn("viewmain", new DBAccessor.DBColumnInfo("resource_type_id",
+    dbAccessor.addColumn("viewmain", new DBColumnInfo("resource_type_id",
         Integer.class, 1, 1, false));
-    dbAccessor.addColumn("viewinstance", new DBAccessor.DBColumnInfo("resource_id",
+    dbAccessor.addColumn("viewinstance", new DBColumnInfo("resource_id",
         Long.class, 1, 1, false));
-    dbAccessor.addColumn("clusters", new DBAccessor.DBColumnInfo("resource_id",
+    dbAccessor.addColumn("clusters", new DBColumnInfo("resource_id",
         Long.class, 1, 1, false));
 
-    dbAccessor.addColumn("host_role_command", new DBAccessor.DBColumnInfo("output_log", String.class,
255, null, true));
-    dbAccessor.addColumn("host_role_command", new DBAccessor.DBColumnInfo("error_log", String.class,
255, null, true));
+    dbAccessor.addColumn("host_role_command", new DBColumnInfo("output_log",
+        String.class, 255, null, true));
+    dbAccessor.addColumn("host_role_command", new DBColumnInfo("error_log",
+        String.class, 255, null, true));
 
     // Update historic records with the log paths, but only enough so as to not prolong the
upgrade process
     if (dbType.equals(Configuration.POSTGRES_DB_NAME) || dbType.equals(Configuration.ORACLE_DB_NAME))
{
@@ -189,6 +204,8 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
       dbAccessor.executeQuery("UPDATE host_role_command SET output_log = CONCAT('/var/lib/ambari-agent/data/output-',
task_id, '.txt') WHERE task_id IN (SELECT task_id FROM host_role_command WHERE output_log
IS NULL OR output_log = '' ORDER BY task_id DESC LIMIT 1000);");
       dbAccessor.executeQuery("UPDATE host_role_command SET error_log = CONCAT('/var/lib/ambari-agent/data/errors-',
task_id, '.txt') WHERE task_id IN (SELECT task_id FROM host_role_command WHERE error_log IS
NULL OR error_log = '' ORDER BY task_id DESC LIMIT 1000);");
     }
+
+    addAlertingFrameworkDDL();
   }
 
 
@@ -198,7 +215,6 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     // !!! TODO: create admin principals for existing users and groups.
     // !!! TODO: create admin resources for existing clusters and view instances
-    // !!! TODO: alerting DML updates (sequences)
 
     String dbType = getDbType();
 
@@ -210,30 +226,205 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     }
 
     dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
-        + valueColumnName + ") " + "VALUES('alert_definition_id_seq', 0)", true);
+        + valueColumnName + ") " + "VALUES('alert_definition_id_seq', 0)",
+        false);
 
     dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
-        + valueColumnName + ") " + "VALUES('alert_group_id_seq', 0)", true);
+        + valueColumnName + ") " + "VALUES('alert_group_id_seq', 0)", false);
 
     dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
-        + valueColumnName + ") " + "VALUES('alert_target_id_seq', 0)", true);
+        + valueColumnName + ") " + "VALUES('alert_target_id_seq', 0)", false);
 
     dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
-        + valueColumnName + ") " + "VALUES('alert_history_id_seq', 0)", true);
+        + valueColumnName + ") " + "VALUES('alert_history_id_seq', 0)", false);
 
     dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
-        + valueColumnName + ") " + "VALUES('alert_notice_id_seq', 0)", true);
-    
+        + valueColumnName + ") " + "VALUES('alert_notice_id_seq', 0)", false);
+
+    dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+        + valueColumnName + ") " + "VALUES('alert_current_id_seq', 0)", false);
+
     moveGlobalsToEnv();
     addEnvContentFields();
     addMissingConfigs();
   }
 
-protected void addMissingConfigs() throws AmbariException {
-  updateConfigurationProperties("hbase-env", Collections.singletonMap("hbase_regionserver_xmn_max",
"512"), false, false);
-  updateConfigurationProperties("hbase-env", Collections.singletonMap("hbase_regionserver_xmn_ratio",
"0.2"), false, false);
-}
-  
+  /**
+   * Adds the alert tables and constraints.
+   */
+  private void addAlertingFrameworkDDL() throws AmbariException, SQLException {
+    // alert_definition
+    ArrayList<DBColumnInfo> columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("definition_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("cluster_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("definition_name", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("service_name", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("component_name", String.class, 255, null, true));
+    columns.add(new DBColumnInfo("scope", String.class, 255, null, true));
+    columns.add(new DBColumnInfo("enabled", Short.class, 1, 1, false));
+    columns.add(new DBColumnInfo("schedule_interval", Integer.class, null, null, false));
+    columns.add(new DBColumnInfo("source_type", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("alert_source", String.class, 4000, null, false));
+    columns.add(new DBColumnInfo("hash", String.class, 64, null, false));
+    dbAccessor.createTable(ALERT_TABLE_DEFINITION, columns, "definition_id");
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_DEFINITION,
+        "fk_alert_def_cluster_id",
+        "cluster_id", "clusters", "cluster_id", false);
+
+    dbAccessor.executeQuery(
+        "ALTER TABLE "
+            + ALERT_TABLE_DEFINITION
+            + " ADD CONSTRAINT uni_alert_def_name UNIQUE (cluster_id,definition_name)",
+        false);
+
+    // alert_history
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("alert_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("cluster_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("alert_definition_id", Long.class, null, null,
+        false));
+    columns.add(new DBColumnInfo("service_name", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("component_name", String.class, 255, null, true));
+    columns.add(new DBColumnInfo("host_name", String.class, 255, null, true));
+    columns.add(new DBColumnInfo("alert_instance", String.class, 255, null,
+        true));
+    columns.add(new DBColumnInfo("alert_timestamp", Long.class, null, null,
+        false));
+    columns.add(new DBColumnInfo("alert_label", String.class, 1024, null, true));
+    columns.add(new DBColumnInfo("alert_state", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("alert_text", String.class, 4000, null, true));
+    dbAccessor.createTable(ALERT_TABLE_HISTORY, columns, "alert_id");
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_HISTORY, "fk_alert_history_def_id",
+        "alert_definition_id", ALERT_TABLE_DEFINITION, "definition_id", false);
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_HISTORY,
+        "fk_alert_history_cluster_id",
+        "cluster_id", "clusters", "cluster_id", false);
+
+    // alert_current
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("alert_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("definition_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("history_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("maintenance_state", String.class, 255, null,
+        true));
+    columns.add(new DBColumnInfo("original_timestamp", Long.class, 0, null,
+        false));
+    columns.add(new DBColumnInfo("latest_timestamp", Long.class, 0, null, false));
+    dbAccessor.createTable(ALERT_TABLE_CURRENT, columns, "alert_id");
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_CURRENT, "fk_alert_current_def_id",
+        "definition_id", ALERT_TABLE_DEFINITION, "definition_id", false);
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_CURRENT,
+        "fk_alert_current_history_id", "history_id", ALERT_TABLE_HISTORY,
+        "alert_id", false);
+
+    dbAccessor.executeQuery("ALTER TABLE " + ALERT_TABLE_CURRENT
+        + " ADD CONSTRAINT uni_alert_current_hist_id UNIQUE (history_id)",
+        false);
+
+    // alert_group
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("group_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("cluster_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("group_name", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("is_default", Short.class, 1, 1, false));
+    dbAccessor.createTable(ALERT_TABLE_GROUP, columns, "group_id");
+
+    dbAccessor.executeQuery(
+        "ALTER TABLE "
+            + ALERT_TABLE_GROUP
+            + " ADD CONSTRAINT uni_alert_group_name UNIQUE (cluster_id,group_name)",
+        false);
+
+    // alert_target
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("target_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("target_name", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("notification_type", String.class, 64, null, false));
+    columns.add(new DBColumnInfo("properties", String.class, 4000, null, true));
+    columns.add(new DBColumnInfo("description", String.class, 1024, null, true));
+    dbAccessor.createTable(ALERT_TABLE_TARGET, columns, "target_id");
+
+    dbAccessor.executeQuery("ALTER TABLE " + ALERT_TABLE_TARGET
+        + " ADD CONSTRAINT uni_alert_target_name UNIQUE (target_name)",
+        false);
+
+    // alert_group_target
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("group_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("target_id", Long.class, null, null, false));
+    dbAccessor.createTable(ALERT_TABLE_GROUP_TARGET, columns, "group_id",
+        "target_id");
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_GROUP_TARGET,
+        "fk_alert_gt_group_id", "group_id", ALERT_TABLE_GROUP, "group_id",
+        false);
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_GROUP_TARGET,
+        "fk_alert_gt_target_id", "target_id", ALERT_TABLE_TARGET, "target_id",
+        false);
+
+    // alert_grouping
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("definition_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("group_id", Long.class, null, null, false));
+    dbAccessor.createTable(ALERT_TABLE_GROUPING, columns, "group_id",
+        "definition_id");
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_GROUPING,
+        "fk_alert_grouping_def_id", "definition_id", ALERT_TABLE_DEFINITION,
+        "definition_id", false);
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_GROUPING,
+        "fk_alert_grouping_group_id", "group_id", ALERT_TABLE_GROUP,
+        "group_id", false);
+
+    // alert_notice
+    columns = new ArrayList<DBColumnInfo>();
+    columns.add(new DBColumnInfo("notification_id", Long.class, null, null,
+        false));
+    columns.add(new DBColumnInfo("target_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("history_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("notify_state", String.class, 255, null, false));
+    dbAccessor.createTable(ALERT_TABLE_NOTICE, columns, "notification_id");
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_NOTICE, "fk_alert_notice_target_id",
+        "target_id", ALERT_TABLE_TARGET, "target_id", false);
+
+    dbAccessor.addFKConstraint(ALERT_TABLE_NOTICE, "fk_alert_notice_hist_id",
+        "history_id", ALERT_TABLE_HISTORY, "alert_id", false);
+
+    // Indexes
+    dbAccessor.createIndex("idx_alert_history_def_id", ALERT_TABLE_HISTORY,
+        "alert_definition_id");
+    dbAccessor.createIndex("idx_alert_history_service", ALERT_TABLE_HISTORY,
+        "service_name");
+    dbAccessor.createIndex("idx_alert_history_host", ALERT_TABLE_HISTORY,
+        "host_name");
+    dbAccessor.createIndex("idx_alert_history_time", ALERT_TABLE_HISTORY,
+        "alert_timestamp");
+    dbAccessor.createIndex("idx_alert_history_state", ALERT_TABLE_HISTORY,
+        "alert_state");
+    dbAccessor.createIndex("idx_alert_group_name", ALERT_TABLE_GROUP,
+        "group_name");
+    dbAccessor.createIndex("idx_alert_notice_state", ALERT_TABLE_NOTICE,
+        "notify_state");
+  }
+
+  protected void addMissingConfigs() throws AmbariException {
+    updateConfigurationProperties("hbase-env",
+        Collections.singletonMap("hbase_regionserver_xmn_max", "512"), false,
+        false);
+
+    updateConfigurationProperties("hbase-env",
+        Collections.singletonMap("hbase_regionserver_xmn_ratio", "0.2"), false,
+        false);
+  }
+
   protected void addEnvContentFields() throws AmbariException {
     ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
     AmbariManagementController ambariManagementController = injector.getInstance(
@@ -243,28 +434,28 @@ protected void addMissingConfigs() throws AmbariException {
     if (clusters == null) {
       return;
     }
- 
+
     Map<String, Cluster> clusterMap = clusters.getClusters();
 
     if (clusterMap != null && !clusterMap.isEmpty()) {
       for (final Cluster cluster : clusterMap.values()) {
-        Set<String> configTypes = configHelper.findConfigTypesByPropertyName(cluster.getCurrentStackVersion(),
CONTENT_FIELD_NAME);  
-        
+        Set<String> configTypes = configHelper.findConfigTypesByPropertyName(cluster.getCurrentStackVersion(),
CONTENT_FIELD_NAME);
+
         for(String configType:configTypes) {
           if(!configType.endsWith(ENV_CONFIGS_POSTFIX)) {
             continue;
           }
-          
+
           String value = configHelper.getPropertyValueFromStackDefenitions(cluster, configType,
CONTENT_FIELD_NAME);
           updateConfigurationProperties(configType, Collections.singletonMap(CONTENT_FIELD_NAME,
value), true, true);
         }
       }
     }
   }
-  
+
   protected void moveGlobalsToEnv() throws AmbariException {
     ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
-    
+
     AmbariManagementController ambariManagementController = injector.getInstance(
         AmbariManagementController.class);
     Clusters clusters = ambariManagementController.getClusters();
@@ -280,47 +471,47 @@ protected void addMissingConfigs() throws AmbariException {
           LOG.info("Config " + Configuration.GLOBAL_CONFIG_TAG + " not found. Assuming upgrade
already done.");
           return;
         }
-        
+
         Map<String, Map<String, String>> newProperties = new HashMap<String,
Map<String, String>>();
         Map<String, String> globalProperites = config.getProperties();
         Map<String, String> unmappedGlobalProperties = new HashMap<String, String>();
-        
+
         for (Map.Entry<String, String> property : globalProperites.entrySet()) {
           String propertyName = property.getKey();
           String propertyValue = property.getValue();
-          
+
           Set<String> newConfigTypes = configHelper.findConfigTypesByPropertyName(cluster.getCurrentStackVersion(),
propertyName);
           // if it's custom user service global.xml can be still there.
           newConfigTypes.remove(Configuration.GLOBAL_CONFIG_TAG);
-          
+
           String newConfigType = null;
           if(newConfigTypes.size() > 0) {
             newConfigType = newConfigTypes.iterator().next();
           } else {
             newConfigType = getAdditionalMappingGlobalToEnv().get(propertyName);
           }
-          
+
           if(newConfigType==null) {
             LOG.warn("Cannot find where to map " + propertyName + " from " + Configuration.GLOBAL_CONFIG_TAG
+
                 " (value="+propertyValue+")");
             unmappedGlobalProperties.put(propertyName, propertyValue);
             continue;
           }
-          
-          LOG.info("Mapping config " + propertyName + " from " + Configuration.GLOBAL_CONFIG_TAG
+ 
+
+          LOG.info("Mapping config " + propertyName + " from " + Configuration.GLOBAL_CONFIG_TAG
+
               " to " + newConfigType +
               " (value="+propertyValue+")");
-          
+
           if(!newProperties.containsKey(newConfigType)) {
             newProperties.put(newConfigType, new HashMap<String, String>());
           }
           newProperties.get(newConfigType).put(propertyName, propertyValue);
         }
-        
+
         for (Entry<String, Map<String, String>> newProperty : newProperties.entrySet())
{
           updateConfigurationProperties(newProperty.getKey(), newProperty.getValue(), true,
true);
         }
-        
+
         // if have some custom properties, for own services etc., leave that as it was
         if(unmappedGlobalProperties.size() != 0) {
           LOG.info("Not deleting globals because have custom properties");
@@ -331,10 +522,10 @@ protected void addMissingConfigs() throws AmbariException {
       }
     }
   }
-  
+
   public static Map<String, String> getAdditionalMappingGlobalToEnv() {
     Map<String, String> result = new HashMap<String, String>();
-    
+
     result.put("smokeuser_keytab","hadoop-env");
     result.put("hdfs_user_keytab","hadoop-env");
     result.put("kerberos_domain","hadoop-env");
@@ -346,7 +537,7 @@ protected void addMissingConfigs() throws AmbariException {
     result.put("zookeeper_keytab_path","zookeeper-env");
     result.put("storm_principal_name","storm-env");
     result.put("storm_keytab","storm-env");
-    
+
     return result;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5423110a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 7333919..c48b78c 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -148,6 +148,105 @@ ALTER TABLE serviceconfigmapping ADD CONSTRAINT FK_scvm_config FOREIGN
KEY (conf
 ALTER TABLE serviceconfigapplication ADD CONSTRAINT FK_scva_scv FOREIGN KEY (service_config_id)
REFERENCES serviceconfig(service_config_id);
 ALTER TABLE clusters ADD CONSTRAINT FK_clusters_resource_id FOREIGN KEY (resource_id) REFERENCES
adminresource(resource_id);
 
+-- Alerting Framework
+CREATE TABLE alert_definition (
+  definition_id BIGINT NOT NULL, 
+  cluster_id BIGINT NOT NULL, 
+  definition_name VARCHAR(255) NOT NULL,
+  service_name VARCHAR(255) NOT NULL,
+  component_name VARCHAR(255),
+  scope VARCHAR(255),
+  enabled SMALLINT DEFAULT 1 NOT NULL,
+  schedule_interval INTEGER NOT NULL,
+  source_type VARCHAR(255) NOT NULL,
+  alert_source TEXT NOT NULL,
+  hash VARCHAR(64) NOT NULL,
+  PRIMARY KEY (definition_id),
+  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
+  CONSTRAINT uni_alert_def_name UNIQUE(cluster_id,definition_name)
+);
+
+CREATE TABLE alert_history (
+  alert_id BIGINT NOT NULL,
+  cluster_id BIGINT NOT NULL,
+  alert_definition_id BIGINT NOT NULL,
+  service_name VARCHAR(255) NOT NULL,
+  component_name VARCHAR(255),
+  host_name VARCHAR(255),
+  alert_instance VARCHAR(255),
+  alert_timestamp BIGINT NOT NULL,
+  alert_label VARCHAR(1024),
+  alert_state VARCHAR(255) NOT NULL,
+  alert_text TEXT,
+  PRIMARY KEY (alert_id),
+  FOREIGN KEY (alert_definition_id) REFERENCES alert_definition(definition_id),
+  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id)
+);
+
+CREATE TABLE alert_current (
+  alert_id BIGINT NOT NULL,
+  definition_id BIGINT NOT NULL,
+  history_id BIGINT NOT NULL UNIQUE,
+  maintenance_state VARCHAR(255),
+  original_timestamp BIGINT NOT NULL,
+  latest_timestamp BIGINT NOT NULL,
+  PRIMARY KEY (alert_id),
+  FOREIGN KEY (definition_id) REFERENCES alert_definition(definition_id),
+  FOREIGN KEY (history_id) REFERENCES alert_history(alert_id)
+);
+
+CREATE TABLE alert_group (
+  group_id BIGINT NOT NULL,
+  cluster_id BIGINT NOT NULL,
+  group_name VARCHAR(255) NOT NULL,
+  is_default SMALLINT NOT NULL DEFAULT 0,
+  PRIMARY KEY (group_id),
+  CONSTRAINT uni_alert_group_name UNIQUE(cluster_id,group_name)
+);
+
+CREATE TABLE alert_target (
+  target_id BIGINT NOT NULL,
+  target_name VARCHAR(255) NOT NULL UNIQUE,
+  notification_type VARCHAR(64) NOT NULL,
+  properties TEXT,
+  description VARCHAR(1024),
+  PRIMARY KEY (target_id)
+);
+
+CREATE TABLE alert_group_target (
+  group_id BIGINT NOT NULL,
+  target_id BIGINT NOT NULL,
+  PRIMARY KEY (group_id, target_id),
+  FOREIGN KEY (group_id) REFERENCES alert_group(group_id),
+  FOREIGN KEY (target_id) REFERENCES alert_target(target_id)
+);
+
+CREATE TABLE alert_grouping (
+  definition_id BIGINT NOT NULL,
+  group_id BIGINT NOT NULL,
+  PRIMARY KEY (group_id, definition_id),
+  FOREIGN KEY (definition_id) REFERENCES alert_definition(definition_id),
+  FOREIGN KEY (group_id) REFERENCES alert_group(group_id)
+);
+
+CREATE TABLE alert_notice (
+  notification_id BIGINT NOT NULL,
+  target_id BIGINT NOT NULL,
+  history_id BIGINT NOT NULL,
+  notify_state VARCHAR(255) NOT NULL,
+  PRIMARY KEY (notification_id),
+  FOREIGN KEY (target_id) REFERENCES alert_target(target_id),  
+  FOREIGN KEY (history_id) REFERENCES alert_history(alert_id)
+);
+
+CREATE INDEX idx_alert_history_def_id on alert_history(alert_definition_id);
+CREATE INDEX idx_alert_history_service on alert_history(service_name);
+CREATE INDEX idx_alert_history_host on alert_history(host_name);
+CREATE INDEX idx_alert_history_time on alert_history(alert_timestamp);
+CREATE INDEX idx_alert_history_state on alert_history(alert_state);
+CREATE INDEX idx_alert_group_name on alert_group(group_name);
+CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
+
 INSERT INTO ambari_sequences(sequence_name, value) values ('cluster_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, value) values ('host_role_command_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, value) values ('user_id_seq', 2);
@@ -168,6 +267,12 @@ INSERT INTO ambari_sequences(sequence_name, value) values ('privilege_id_seq', 1
 INSERT INTO ambari_sequences(sequence_name, value) values ('config_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, value) values ('service_config_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, value) values ('service_config_application_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_definition_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_group_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_target_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_history_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_notice_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_current_id_seq', 0);
 
 insert into adminresourcetype (resource_type_id, resource_type_name)
   select 1, 'AMBARI'
@@ -457,8 +562,3 @@ CREATE TABLE clusterEvent (
   error TEXT, data TEXT ,
   host TEXT, rack TEXT
 );
-
-
-
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/5423110a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 5698c5d..5cb5aa4 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -138,6 +138,105 @@ ALTER TABLE serviceconfigmapping ADD CONSTRAINT FK_scvm_config FOREIGN KEY (conf
 ALTER TABLE serviceconfigapplication ADD CONSTRAINT FK_scva_scv FOREIGN KEY (service_config_id) REFERENCES serviceconfig(service_config_id);
 ALTER TABLE clusters ADD CONSTRAINT FK_clusters_resource_id FOREIGN KEY (resource_id) REFERENCES adminresource(resource_id);
 
+-- Alerting Framework
+CREATE TABLE alert_definition (
+  definition_id NUMBER(19) NOT NULL, 
+  cluster_id NUMBER(19) NOT NULL, 
+  definition_name VARCHAR2(255) NOT NULL,
+  service_name VARCHAR2(255) NOT NULL,
+  component_name VARCHAR2(255),
+  scope VARCHAR2(255),
+  enabled NUMBER(1) DEFAULT 1 NOT NULL,
+  schedule_interval NUMBER(10) NOT NULL,
+  source_type VARCHAR2(255) NOT NULL,
+  alert_source VARCHAR2(4000) NOT NULL,
+  hash VARCHAR2(64) NOT NULL,
+  PRIMARY KEY (definition_id),
+  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
+  CONSTRAINT uni_alert_def_name UNIQUE(cluster_id,definition_name)
+);
+
+CREATE TABLE alert_history (
+  alert_id NUMBER(19) NOT NULL,
+  cluster_id NUMBER(19) NOT NULL,
+  alert_definition_id NUMBER(19) NOT NULL,
+  service_name VARCHAR2(255) NOT NULL,
+  component_name VARCHAR2(255),
+  host_name VARCHAR2(255),
+  alert_instance VARCHAR2(255),
+  alert_timestamp NUMBER(19) NOT NULL,
+  alert_label VARCHAR2(1024),
+  alert_state VARCHAR2(255) NOT NULL,
+  alert_text VARCHAR2(4000),
+  PRIMARY KEY (alert_id),
+  FOREIGN KEY (alert_definition_id) REFERENCES alert_definition(definition_id),
+  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id)
+);
+
+CREATE TABLE alert_current (
+  alert_id NUMBER(19) NOT NULL,
+  definition_id NUMBER(19) NOT NULL,
+  history_id NUMBER(19) NOT NULL UNIQUE,
+  maintenance_state VARCHAR2(255),
+  original_timestamp NUMBER(19) NOT NULL,
+  latest_timestamp NUMBER(19) NOT NULL,
+  PRIMARY KEY (alert_id),
+  FOREIGN KEY (definition_id) REFERENCES alert_definition(definition_id),
+  FOREIGN KEY (history_id) REFERENCES alert_history(alert_id)
+);
+
+CREATE TABLE alert_group (
+  group_id NUMBER(19) NOT NULL,
+  cluster_id NUMBER(19) NOT NULL,
+  group_name VARCHAR2(255) NOT NULL,
+  is_default NUMBER(1) DEFAULT 0 NOT NULL,
+  PRIMARY KEY (group_id),
+  CONSTRAINT uni_alert_group_name UNIQUE(cluster_id,group_name)
+);
+
+CREATE TABLE alert_target (
+  target_id NUMBER(19) NOT NULL,
+  target_name VARCHAR2(255) NOT NULL UNIQUE,
+  notification_type VARCHAR2(64) NOT NULL,
+  properties VARCHAR2(4000),
+  description VARCHAR2(1024),
+  PRIMARY KEY (target_id)
+);
+
+CREATE TABLE alert_group_target (
+  group_id NUMBER(19) NOT NULL,
+  target_id NUMBER(19) NOT NULL,
+  PRIMARY KEY (group_id, target_id),
+  FOREIGN KEY (group_id) REFERENCES alert_group(group_id),
+  FOREIGN KEY (target_id) REFERENCES alert_target(target_id)
+);
+
+CREATE TABLE alert_grouping (
+  definition_id NUMBER(19) NOT NULL,
+  group_id NUMBER(19) NOT NULL,
+  PRIMARY KEY (group_id, definition_id),
+  FOREIGN KEY (definition_id) REFERENCES alert_definition(definition_id),
+  FOREIGN KEY (group_id) REFERENCES alert_group(group_id)
+);
+
+CREATE TABLE alert_notice (
+  notification_id NUMBER(19) NOT NULL,
+  target_id NUMBER(19) NOT NULL,
+  history_id NUMBER(19) NOT NULL,
+  notify_state VARCHAR2(255) NOT NULL,
+  PRIMARY KEY (notification_id),
+  FOREIGN KEY (target_id) REFERENCES alert_target(target_id),  
+  FOREIGN KEY (history_id) REFERENCES alert_history(alert_id)
+);
+
+CREATE INDEX idx_alert_history_def_id on alert_history(alert_definition_id);
+CREATE INDEX idx_alert_history_service on alert_history(service_name);
+CREATE INDEX idx_alert_history_host on alert_history(host_name);
+CREATE INDEX idx_alert_history_time on alert_history(alert_timestamp);
+CREATE INDEX idx_alert_history_state on alert_history(alert_state);
+CREATE INDEX idx_alert_group_name on alert_group(group_name);
+CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
+
 INSERT INTO ambari_sequences(sequence_name, value) values ('host_role_command_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, value) values ('user_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, value) values ('group_id_seq', 0);
@@ -158,6 +257,12 @@ INSERT INTO ambari_sequences(sequence_name, value) values ('privilege_id_seq', 1
 INSERT INTO ambari_sequences(sequence_name, value) values ('config_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, value) values ('service_config_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, value) values ('service_config_application_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_definition_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_group_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_target_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_history_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_notice_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('alert_current_id_seq', 0);
 
 INSERT INTO metainfo("metainfo_key", "metainfo_value") values ('version', '${ambariVersion}');
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5423110a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index dbb55e8..6ab57ac 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -227,7 +227,7 @@ CREATE TABLE alert_group (
   group_name VARCHAR(255) NOT NULL,
   is_default SMALLINT NOT NULL DEFAULT 0,
   PRIMARY KEY (group_id),
-  CONSTRAINT uni_group_name UNIQUE(cluster_id,group_name)
+  CONSTRAINT uni_alert_group_name UNIQUE(cluster_id,group_name)
 );
 
 CREATE TABLE alert_target (

http://git-wip-us.apache.org/repos/asf/ambari/blob/5423110a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 4e0e912..ef70439 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -293,7 +293,7 @@ CREATE TABLE ambari.alert_group (
   group_name VARCHAR(255) NOT NULL,
   is_default SMALLINT NOT NULL DEFAULT 0,
   PRIMARY KEY (group_id),
-  CONSTRAINT uni_group_name UNIQUE(cluster_id,group_name)
+  CONSTRAINT uni_alert_group_name UNIQUE(cluster_id,group_name)
 );
 
 CREATE TABLE ambari.alert_target (

http://git-wip-us.apache.org/repos/asf/ambari/blob/5423110a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
index e74f257..a0f1e65 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
@@ -23,7 +23,6 @@ import static junit.framework.Assert.assertNull;
 import static junit.framework.Assert.assertTrue;
 import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.createMockBuilder;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.createStrictMock;
@@ -40,23 +39,18 @@ import java.sql.SQLException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import javax.persistence.EntityManager;
-import javax.persistence.EntityTransaction;
-import javax.persistence.TypedQuery;
-
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
-import org.apache.ambari.server.orm.entities.ClusterEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.State;
 import org.easymock.Capture;
 import org.junit.Assert;
 import org.junit.Test;
@@ -65,7 +59,6 @@ import com.google.inject.Binder;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
 import com.google.inject.Module;
-import com.google.inject.Provider;
 
 /**
  * UpgradeCatalog170 unit tests.
@@ -83,10 +76,43 @@ public class UpgradeCatalog170Test {
     Capture<DBAccessor.DBColumnInfo> clusterConfigAttributesColumnCapture = new Capture<DBAccessor.DBColumnInfo>();
     Capture<DBAccessor.DBColumnInfo> maskColumnCapture = new Capture<DBAccessor.DBColumnInfo>();
     Capture<DBAccessor.DBColumnInfo> maskedColumnCapture = new Capture<DBAccessor.DBColumnInfo>();
+    Capture<List<DBAccessor.DBColumnInfo>> alertDefinitionColumnCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
+    Capture<List<DBAccessor.DBColumnInfo>> alertHistoryColumnCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
+    Capture<List<DBAccessor.DBColumnInfo>> alertCurrentColumnCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
+    Capture<List<DBAccessor.DBColumnInfo>> alertGroupColumnCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
+    Capture<List<DBAccessor.DBColumnInfo>> alertTargetCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
+    Capture<List<DBAccessor.DBColumnInfo>> alertGroupTargetCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
+    Capture<List<DBAccessor.DBColumnInfo>> alertGroupingCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
+    Capture<List<DBAccessor.DBColumnInfo>> alertNoticeCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
 
     setViewExpectations(dbAccessor, maskColumnCapture);
     setViewParameterExpectations(dbAccessor, maskedColumnCapture);
     setClusterConfigExpectations(dbAccessor, clusterConfigAttributesColumnCapture);
+
+    dbAccessor.createTable(eq("alert_definition"),
+        capture(alertDefinitionColumnCapture), eq("definition_id"));
+
+    dbAccessor.createTable(eq("alert_history"),
+        capture(alertHistoryColumnCapture), eq("alert_id"));
+
+    dbAccessor.createTable(eq("alert_current"),
+        capture(alertCurrentColumnCapture), eq("alert_id"));
+
+    dbAccessor.createTable(eq("alert_group"), capture(alertGroupColumnCapture),
+        eq("group_id"));
+
+    dbAccessor.createTable(eq("alert_target"), capture(alertTargetCapture),
+        eq("target_id"));
+
+    dbAccessor.createTable(eq("alert_group_target"),
+        capture(alertGroupTargetCapture), eq("group_id"), eq("target_id"));
+
+    dbAccessor.createTable(eq("alert_grouping"), capture(alertGroupingCapture),
+        eq("group_id"), eq("definition_id"));
+
+    dbAccessor.createTable(eq("alert_notice"), capture(alertNoticeCapture),
+        eq("notification_id"));
+
     dbAccessor.executeSelect(anyObject(String.class));
     expectLastCall().andReturn(resultSet).anyTimes();
     resultSet.next();
@@ -108,7 +134,14 @@ public class UpgradeCatalog170Test {
     assertViewColumns(maskColumnCapture);
     assertViewParameterColumns(maskedColumnCapture);
 
-    // !!! TODO: alerting DDL upgrade
+    assertEquals(11, alertDefinitionColumnCapture.getValue().size());
+    assertEquals(11, alertHistoryColumnCapture.getValue().size());
+    assertEquals(6, alertCurrentColumnCapture.getValue().size());
+    assertEquals(4, alertGroupColumnCapture.getValue().size());
+    assertEquals(5, alertTargetCapture.getValue().size());
+    assertEquals(2, alertGroupTargetCapture.getValue().size());
+    assertEquals(2, alertGroupingCapture.getValue().size());
+    assertEquals(4, alertNoticeCapture.getValue().size());
   }
 
   @Test
@@ -121,42 +154,42 @@ public class UpgradeCatalog170Test {
     Cluster cluster = createStrictMock(Cluster.class);
     Clusters clusters = createStrictMock(Clusters.class);
     Config config = createStrictMock(Config.class);
-    
+
     Method m = AbstractUpgradeCatalog.class.getDeclaredMethod
         ("updateConfigurationProperties", String.class, Map.class, boolean.class, boolean.class);
 
     UpgradeCatalog170 upgradeCatalog = createMockBuilder(UpgradeCatalog170.class)
       .addMockedMethod(m).createMock();
-    
+
     Map<String, Cluster> clustersMap = new HashMap<String, Cluster>();
     clustersMap.put("c1", cluster);
-    
+
     Map<String, String> globalConfigs = new HashMap<String, String>();
     globalConfigs.put("prop1", "val1");
     globalConfigs.put("smokeuser_keytab", "val2");
-    
+
     Set<String> envDicts = new HashSet<String>();
     envDicts.add("hadoop-env");
     envDicts.add("global");
-    
+
     Map<String, String> contentOfHadoopEnv = new HashMap<String, String>();
     contentOfHadoopEnv.put("content", "env file contents");
 
     upgradeCatalog.updateConfigurationProperties("hadoop-env",
         globalConfigs, true, true);
     expectLastCall();
-    
+
     upgradeCatalog.updateConfigurationProperties("hadoop-env",
         contentOfHadoopEnv, true, true);
-    expectLastCall();   
-    
+    expectLastCall();
+
     upgradeCatalog.updateConfigurationProperties("hbase-env",
         Collections.singletonMap("hbase_regionserver_xmn_max", "512"), false, false);
-    expectLastCall();   
-    
+    expectLastCall();
+
     upgradeCatalog.updateConfigurationProperties("hbase-env",
         Collections.singletonMap("hbase_regionserver_xmn_ratio", "0.2"), false, false);
-    expectLastCall();   
+    expectLastCall();
 
     expect(configuration.getDatabaseUrl()).andReturn(Configuration.JDBC_IN_MEMORY_URL).anyTimes();
     expect(injector.getInstance(ConfigHelper.class)).andReturn(configHelper).anyTimes();
@@ -172,7 +205,7 @@ public class UpgradeCatalog170Test {
     expect(configHelper.getPropertyValueFromStackDefenitions(cluster, "hadoop-env", "content")).andReturn("env file contents").once();
 
     replay(upgradeCatalog, dbAccessor, configuration, injector, cluster, clusters, amc, config, configHelper);
-    
+
     Class<?> c = AbstractUpgradeCatalog.class;
     Field f = c.getDeclaredField("configuration");
     f.setAccessible(true);
@@ -208,7 +241,7 @@ public class UpgradeCatalog170Test {
     Injector injector = Guice.createInjector(module);
     return injector.getInstance(UpgradeCatalog170.class);
   }
-  
+
   private void assertClusterConfigColumns(Capture<DBAccessor.DBColumnInfo> clusterConfigAttributesColumnCapture)
{
     DBAccessor.DBColumnInfo column = clusterConfigAttributesColumnCapture.getValue();
     assertEquals("config_attributes", column.getName());
@@ -224,7 +257,7 @@ public class UpgradeCatalog170Test {
     dbAccessor.addColumn(eq("clusterconfig"),
         capture(clusterConfigAttributesColumnCapture));
   }
-  
+
   @Test
   public void testGetSourceVersion() {
     final DBAccessor dbAccessor     = createNiceMock(DBAccessor.class);


Mime
View raw message