ambari-commits mailing list archives

From nc...@apache.org
Subject [14/14] git commit: AMBARI-2677. Merge from branch-1.4.0 (ncole)
Date Fri, 19 Jul 2013 16:31:47 GMT
AMBARI-2677. Merge from branch-1.4.0 (ncole)


Project: http://git-wip-us.apache.org/repos/asf/incubator-ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ambari/commit/a718fc45
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ambari/tree/a718fc45
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ambari/diff/a718fc45

Branch: refs/heads/trunk
Commit: a718fc45dbf958c7be7158fe498d4ff76dd5feae
Parents: a1322ce
Author: Nate Cole <ncole@hortonworks.com>
Authored: Wed Jul 17 16:26:08 2013 -0400
Committer: Nate Cole <ncole@hortonworks.com>
Committed: Fri Jul 19 08:24:09 2013 -0400

----------------------------------------------------------------------
 .../hdp-hadoop/manifests/hdfs/directory.pp      |     6 +-
 .../puppet/modules/hdp-hadoop/manifests/init.pp |    44 +-
 .../modules/hdp-hadoop/manifests/namenode.pp    |    19 +-
 .../modules/hdp-hadoop/manifests/params.pp      |     4 +-
 .../hdp-hadoop/templates/include_hosts_list.erb |     3 +
 .../puppet/modules/hdp-hbase/manifests/init.pp  |    25 +-
 .../modules/hdp-hbase/manifests/params.pp       |     2 +-
 .../hdp-hive/manifests/jdbc-connector.pp        |    11 +-
 .../templates/hadoop-services.cfg.erb           |    14 +-
 .../modules/hdp-oozie/files/oozieSmoke2.sh      |    98 +
 .../hdp-oozie/manifests/oozie/service_check.pp  |    25 +-
 .../modules/hdp-oozie/manifests/params.pp       |     2 -
 .../modules/hdp-oozie/manifests/service.pp      |    45 +-
 .../modules/hdp-templeton/manifests/server.pp   |    22 +-
 .../files/validateYarnComponentStatus.py        |    87 +-
 .../manifests/historyserver/service_check.pp    |     2 +-
 .../hdp-yarn/manifests/mapred2/service_check.pp |    69 +
 .../modules/hdp-yarn/manifests/nodemanager.pp   |    14 +-
 .../puppet/modules/hdp-yarn/manifests/params.pp |    16 +-
 .../manifests/resourcemanager/service_check.pp  |     2 +-
 .../modules/hdp-yarn/manifests/service.pp       |     3 +-
 .../modules/hdp-yarn/manifests/smoketest.pp     |    13 +-
 .../hdp-yarn/manifests/yarn/service_check.pp    |    37 +
 .../functions/hdp_get_major_stack_version.rb    |    28 +
 .../main/puppet/modules/hdp/manifests/init.pp   |     2 +-
 .../main/puppet/modules/hdp/manifests/params.pp |    16 +-
 .../main/python/ambari_agent/AmbariConfig.py    |     2 +
 .../src/main/python/ambari_agent/Controller.py  |     1 +
 ambari-server/docs/api/v1/index.md              |     1 -
 .../apache/ambari/eventdb/db/DBConnector.java   |    13 +-
 .../ambari/eventdb/db/PostgresConnector.java    |    75 +-
 .../org/apache/ambari/eventdb/model/Apps.java   |   193 +
 .../ambari/eventdb/model/WorkflowContext.java   |     9 +
 .../apache/ambari/eventdb/model/Workflows.java  |    12 +-
 .../eventdb/webservice/WorkflowJsonService.java |    54 +-
 .../java/org/apache/ambari/server/Role.java     |     2 +
 .../ambari/server/agent/HeartbeatMonitor.java   |    19 +-
 .../AmbariManagementControllerImpl.java         |    10 +-
 .../internal/AbstractProviderModule.java        |   183 +-
 .../internal/ClusterControllerImpl.java         |    22 +-
 .../internal/VersioningPropertyProvider.java    |   107 +
 .../controller/utilities/PredicateBuilder.java  |   378 +-
 .../controller/utilities/PropertyHelper.java    |    33 +-
 .../ambari/server/metadata/ActionMetadata.java  |     6 +
 .../server/metadata/RoleCommandOrder.java       |    12 +
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |    17 +-
 .../src/main/resources/ganglia_properties.json  |   126 +-
 .../main/resources/ganglia_properties_2.json    | 21535 +++++++++++++++++
 .../src/main/resources/jmx_properties_2.json    |  8039 ++++++
 .../resources/stacks/HDP/2.0.1/metainfo.xml     |    22 -
 .../stacks/HDP/2.0.1/repos/repoinfo.xml         |    75 -
 .../HDP/2.0.1/services/GANGLIA/metainfo.xml     |    36 -
 .../HBASE/configuration/hbase-policy.xml        |    53 -
 .../services/HBASE/configuration/hbase-site.xml |   350 -
 .../HDP/2.0.1/services/HBASE/metainfo.xml       |    40 -
 .../HDP/2.0.1/services/HCATALOG/metainfo.xml    |    30 -
 .../services/HDFS/configuration/core-site.xml   |   257 -
 .../services/HDFS/configuration/global.xml      |   207 -
 .../HDFS/configuration/hadoop-policy.xml        |   134 -
 .../services/HDFS/configuration/hdfs-site.xml   |   438 -
 .../stacks/HDP/2.0.1/services/HDFS/metainfo.xml |    46 -
 .../services/HIVE/configuration/hive-site.xml   |   138 -
 .../stacks/HDP/2.0.1/services/HIVE/metainfo.xml |    43 -
 .../configuration/container-executor.cfg        |    20 -
 .../MAPREDUCE2/configuration/core-site.xml      |    20 -
 .../configuration/mapred-queue-acls.xml         |    39 -
 .../MAPREDUCE2/configuration/mapred-site.xml    |   549 -
 .../HDP/2.0.1/services/MAPREDUCE2/metainfo.xml  |    32 -
 .../HDP/2.0.1/services/NAGIOS/metainfo.xml      |    30 -
 .../services/OOZIE/configuration/oozie-site.xml |   245 -
 .../HDP/2.0.1/services/OOZIE/metainfo.xml       |    35 -
 .../services/PIG/configuration/pig.properties   |    52 -
 .../stacks/HDP/2.0.1/services/PIG/metainfo.xml  |    30 -
 .../stacks/HDP/2.0.1/services/TEZ/metainfo.xml  |    30 -
 .../WEBHCAT/configuration/webhcat-site.xml      |   126 -
 .../HDP/2.0.1/services/WEBHCAT/metainfo.xml     |    31 -
 .../YARN/configuration/capacity-scheduler.xml   |   120 -
 .../YARN/configuration/container-executor.cfg   |    20 -
 .../services/YARN/configuration/core-site.xml   |    20 -
 .../services/YARN/configuration/yarn-site.xml   |   172 -
 .../stacks/HDP/2.0.1/services/YARN/metainfo.xml |    36 -
 .../HDP/2.0.1/services/ZOOKEEPER/metainfo.xml   |    35 -
 .../resources/stacks/HDP/2.0.3/metainfo.xml     |    22 +
 .../stacks/HDP/2.0.3/repos/repoinfo.xml         |    99 +
 .../HDP/2.0.3/services/GANGLIA/metainfo.xml     |    36 +
 .../HDP/2.0.3/services/HCATALOG/metainfo.xml    |    30 +
 .../services/HDFS/configuration/core-site.xml   |   257 +
 .../services/HDFS/configuration/global.xml      |   192 +
 .../HDFS/configuration/hadoop-policy.xml        |   134 +
 .../services/HDFS/configuration/hdfs-site.xml   |   459 +
 .../stacks/HDP/2.0.3/services/HDFS/metainfo.xml |    46 +
 .../services/HIVE/configuration/hive-site.xml   |   253 +
 .../stacks/HDP/2.0.3/services/HIVE/metainfo.xml |    43 +
 .../configuration/container-executor.cfg        |    20 +
 .../MAPREDUCE2/configuration/core-site.xml      |    20 +
 .../MAPREDUCE2/configuration/global.xml         |    44 +
 .../configuration/mapred-queue-acls.xml         |    39 +
 .../MAPREDUCE2/configuration/mapred-site.xml    |   300 +
 .../HDP/2.0.3/services/MAPREDUCE2/metainfo.xml  |    32 +
 .../HDP/2.0.3/services/NAGIOS/metainfo.xml      |    30 +
 .../services/OOZIE/configuration/oozie-site.xml |   245 +
 .../HDP/2.0.3/services/OOZIE/metainfo.xml       |    35 +
 .../services/PIG/configuration/pig.properties   |    52 +
 .../stacks/HDP/2.0.3/services/PIG/metainfo.xml  |    30 +
 .../stacks/HDP/2.0.3/services/TEZ/metainfo.xml  |    30 +
 .../WEBHCAT/configuration/webhcat-site.xml      |   126 +
 .../HDP/2.0.3/services/WEBHCAT/metainfo.xml     |    31 +
 .../YARN/configuration/capacity-scheduler.xml   |   120 +
 .../YARN/configuration/container-executor.cfg   |    20 +
 .../services/YARN/configuration/core-site.xml   |    20 +
 .../services/YARN/configuration/global.xml      |    49 +
 .../services/YARN/configuration/yarn-site.xml   |   196 +
 .../stacks/HDP/2.0.3/services/YARN/metainfo.xml |    36 +
 .../ddl/Ambari-DDL-Postgres-UPGRADE-1.3.0.sql   |    10 +-
 .../AmbariManagementControllerImplTest.java     |   744 +-
 .../AmbariManagementControllerTest.java         |    23 +-
 .../ganglia/GangliaPropertyProviderTest.java    |    21 +-
 .../GangliaReportPropertyProviderTest.java      |     2 +-
 .../internal/AbstractPropertyProviderTest.java  |     4 +-
 .../VersioningPropertyProviderTest.java         |   144 +
 .../controller/jmx/JMXPropertyProviderTest.java |    45 +-
 .../controller/jmx/TestStreamProvider.java      |     7 +-
 .../utilities/PropertyHelperTest.java           |    28 +
 .../ambari/server/metadata/RoleGraphTest.java   |    10 +
 ambari-server/src/test/resources/deploy_HDP2.sh |    27 +-
 .../src/test/resources/resourcemanager_jmx.json |   865 +
 .../resources/stacks/HDP/2.0.1/metainfo.xml     |    22 +
 .../stacks/HDP/2.0.1/repos/repoinfo.xml         |    75 +
 .../HDP/2.0.1/services/GANGLIA/metainfo.xml     |    36 +
 .../HBASE/configuration/hbase-policy.xml        |    53 +
 .../services/HBASE/configuration/hbase-site.xml |   350 +
 .../HDP/2.0.1/services/HBASE/metainfo.xml       |    40 +
 .../HDP/2.0.1/services/HCATALOG/metainfo.xml    |    30 +
 .../services/HDFS/configuration/core-site.xml   |   257 +
 .../services/HDFS/configuration/global.xml      |   192 +
 .../HDFS/configuration/hadoop-policy.xml        |   134 +
 .../services/HDFS/configuration/hdfs-site.xml   |   438 +
 .../stacks/HDP/2.0.1/services/HDFS/metainfo.xml |    46 +
 .../services/HIVE/configuration/hive-site.xml   |   138 +
 .../stacks/HDP/2.0.1/services/HIVE/metainfo.xml |    43 +
 .../configuration/container-executor.cfg        |    20 +
 .../MAPREDUCE2/configuration/core-site.xml      |    20 +
 .../MAPREDUCE2/configuration/global.xml         |    44 +
 .../configuration/mapred-queue-acls.xml         |    39 +
 .../MAPREDUCE2/configuration/mapred-site.xml    |   300 +
 .../HDP/2.0.1/services/MAPREDUCE2/metainfo.xml  |    32 +
 .../HDP/2.0.1/services/NAGIOS/metainfo.xml      |    30 +
 .../services/OOZIE/configuration/oozie-site.xml |   245 +
 .../HDP/2.0.1/services/OOZIE/metainfo.xml       |    35 +
 .../services/PIG/configuration/pig.properties   |    52 +
 .../stacks/HDP/2.0.1/services/PIG/metainfo.xml  |    30 +
 .../stacks/HDP/2.0.1/services/TEZ/metainfo.xml  |    30 +
 .../WEBHCAT/configuration/webhcat-site.xml      |   126 +
 .../HDP/2.0.1/services/WEBHCAT/metainfo.xml     |    31 +
 .../YARN/configuration/capacity-scheduler.xml   |   120 +
 .../YARN/configuration/container-executor.cfg   |    20 +
 .../services/YARN/configuration/core-site.xml   |    20 +
 .../services/YARN/configuration/global.xml      |    49 +
 .../services/YARN/configuration/yarn-site.xml   |   172 +
 .../stacks/HDP/2.0.1/services/YARN/metainfo.xml |    36 +
 .../HDP/2.0.1/services/ZOOKEEPER/metainfo.xml   |    35 +
 .../apps/apps/mapreduce_201301280808_0001.json  |    16 +
 .../apps/apps/mapreduce_201301280808_0003.json  |    15 +
 .../apps/apps/mapreduce_201301280808_0004.json  |    40 +
 ...ig_c1af4446-64d2-46fb-be69-6363e7ec89fe.json |    89 +
 ...ig_f9957a11-a902-4f01-ac53-9679ce3a4b13.json |    16 +
 .../data/apps/apps/yarn_1371140159837_0001.json |    18 +
 ambari-web/app/assets/data/apps/runs2.json      |   171 +
 .../assets/data/services/metrics/yarn/gc.json   |    16 +
 .../data/services/metrics/yarn/jvm_heap.json    |    19 +
 .../data/services/metrics/yarn/jvm_threads.json |    19 +
 .../assets/data/services/metrics/yarn/rpc.json  |    16 +
 ambari-web/app/assets/test/tests.js             |     4 +-
 ambari-web/app/classes/app_class.js             |    38 +
 ambari-web/app/classes/run_class.js             |     8 +-
 ambari-web/app/controllers.js                   |     3 +
 .../controllers/global/cluster_controller.js    |     4 +-
 .../app/controllers/global/update_controller.js |     4 +-
 .../controllers/main/apps/item_controller.js    |    32 +-
 .../app/controllers/main/apps_controller.js     |    79 +-
 .../app/controllers/main/charts/heatmap.js      |    13 +
 .../heatmap_metrics/heatmap_metric_yarn.js      |    63 +
 .../heatmap_metric_yarn_gctime.js               |    29 +
 .../heatmap_metric_yarn_memHeapUsed.js          |    29 +
 .../controllers/main/service/info/configs.js    |    17 +-
 .../app/controllers/wizard/step12_controller.js |    21 +-
 .../app/controllers/wizard/step14_controller.js |     6 +-
 .../app/controllers/wizard/step3_controller.js  |     9 +
 .../app/controllers/wizard/step6_controller.js  |     4 +-
 .../app/controllers/wizard/step8_controller.js  |    22 +-
 ambari-web/app/data/HDP2/config_mapping.js      |    26 +-
 ambari-web/app/data/HDP2/config_properties.js   |    65 +-
 ambari-web/app/data/HDP2/custom_configs.js      |   127 +
 ambari-web/app/data/custom_configs.js           |    31 +-
 ambari-web/app/data/secure_configs.js           |     8 +-
 ambari-web/app/data/secure_mapping.js           |    11 +-
 ambari-web/app/data/secure_properties.js        |     5 +-
 ambari-web/app/data/service_configs.js          |     6 +-
 ambari-web/app/initialize.js                    |     1 +
 ambari-web/app/mappers/apps_mapper.js           |    63 +
 ambari-web/app/mappers/runs_mapper.js           |     3 +-
 ambari-web/app/mappers/service_mapper.js        |   120 +-
 ambari-web/app/messages.js                      |    31 +-
 ambari-web/app/models.js                        |     4 +-
 ambari-web/app/models/apps.js                   |    42 +
 ambari-web/app/models/host_component.js         |     2 +
 ambari-web/app/models/job.js                    |    18 +-
 ambari-web/app/models/quick_links.js            |    48 +
 ambari-web/app/models/run.js                    |    22 +-
 ambari-web/app/models/service/mapreduce2.js     |    26 +
 ambari-web/app/models/service/yarn.js           |    45 +
 ambari-web/app/routes/main.js                   |    12 +-
 ambari-web/app/routes/reassign_master_routes.js |     2 +
 ambari-web/app/styles/application.less          |     4 +-
 ambari-web/app/styles/apps.less                 |   217 +-
 .../common/configs/capacity_scheduler.hbs       |    21 +-
 ambari-web/app/templates/main/apps.hbs          |     9 +-
 ambari-web/app/templates/main/apps/item/app.hbs |    64 +
 ambari-web/app/templates/main/apps/item/dag.hbs |    13 -
 ambari-web/app/templates/main/apps/list_row.hbs |     3 +-
 .../main/dashboard/service/mapreduce2.hbs       |     1 +
 .../templates/main/dashboard/service/yarn.hbs   |   122 +
 .../app/templates/main/service/info/summary.hbs |     3 +
 .../main/service/info/summary/mapreduce2.hbs    |     6 +
 ambari-web/app/templates/main/service/item.hbs  |     2 +-
 ambari-web/app/templates/wizard/step13.hbs      |     4 +-
 ambari-web/app/utils/ajax.js                    |    20 +
 ambari-web/app/utils/app_graph.js               |   441 +
 ambari-web/app/utils/config.js                  |    94 +-
 ambari-web/app/utils/date.js                    |    73 +-
 ambari-web/app/utils/helper.js                  |    14 +-
 ambari-web/app/views.js                         |    10 +
 .../app/views/common/chart/linear_time.js       |     5 +-
 .../app/views/common/configs/services_config.js |   148 +-
 .../app/views/common/quick_view_link_view.js    |     9 +
 ambari-web/app/views/common/table_view.js       |     2 +-
 ambari-web/app/views/main/apps/item/app_view.js |   136 +
 ambari-web/app/views/main/apps/item_view.js     |    42 +-
 ambari-web/app/views/main/apps_view.js          |    30 +-
 ambari-web/app/views/main/dashboard.js          |     4 +
 .../views/main/dashboard/service/mapreduce.js   |     2 +-
 .../app/views/main/dashboard/service/yarn.js    |   106 +
 ambari-web/app/views/main/menu.js               |     9 +-
 .../views/main/service/info/metrics/yarn/gc.js  |    56 +
 .../service/info/metrics/yarn/jobs_status.js    |    68 +
 .../main/service/info/metrics/yarn/jvm_heap.js  |    71 +
 .../service/info/metrics/yarn/jvm_threads.js    |    66 +
 .../main/service/info/metrics/yarn/map_slots.js |    59 +
 .../service/info/metrics/yarn/reduce_slots.js   |    59 +
 .../views/main/service/info/metrics/yarn/rpc.js |    56 +
 .../info/metrics/yarn/tasks_running_waiting.js  |    65 +
 .../app/views/main/service/info/summary.js      |    19 +
 ambari-web/app/views/main/service/menu.js       |     2 +-
 ambari-web/test/installer/step2_test.js         |     1 -
 ambari-web/test/installer/step5_test.js         |     4 -
 ambari-web/vendor/styles/cubism.css             |     2 +-
 .../src/addOns/nagios/scripts/nagios_alerts.php |     7 +
 257 files changed, 42064 insertions(+), 4908 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp b/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp
index 7736d4e..33319e5 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp
@@ -34,9 +34,9 @@ define hdp-hadoop::hdfs::directory(
   $try_sleep = 10
  
   if ($service_state == 'running') {
-  
-  
-    if $stack_version in ("2.0.1") {
+
+
+    if (hdp_get_major_stack_version($stack_version) >= 2) {
       $mkdir_cmd = "fs -mkdir -p ${name}"
     } else {
       $mkdir_cmd = "fs -mkdir ${name}"
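
The guard above swaps an exact match on stack "2.0.1" for a major-version comparison, so every 2.x stack picks up the idempotent -p flag. A minimal shell sketch of the same branch, with an illustrative version string (the real value comes from the Puppet scope):

  stack_version="2.0.3"             # illustrative value
  major="${stack_version%%.*}"      # keep everything before the first dot
  if [ "$major" -ge 2 ]; then
    hadoop fs -mkdir -p /apps/example   # 2.x: succeeds even if the directory exists
  else
    hadoop fs -mkdir /apps/example      # 1.x fs has no -p flag
  fi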

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp b/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp
index b3c08ee..504dbef 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp
@@ -187,6 +187,30 @@ class hdp-hadoop::initialize()
     owner => $hdp-hadoop::params::mapred_user,
     group => $hdp::params::user_group
   }
+
+  if (hdp_get_major_stack_version($stack_version) >= 2) {
+    if (hdp_is_empty($configuration) == false and hdp_is_empty($configuration['hdfs-site']) == false) {
+      if (hdp_is_empty($configuration['hdfs-site']['dfs.hosts.exclude']) == false) {
+        $exlude_file_path = $configuration['hdfs-site']['dfs.hosts.exclude']
+        file { $exlude_file_path :
+        ensure => present,
+        owner => $hdp-hadoop::params::hdfs_user,
+        group => $hdp::params::user_group
+        }
+      }
+      if (hdp_is_empty($hdp::params::slave_hosts) == false and hdp_is_empty($configuration['hdfs-site']['dfs.hosts']) == false) {
+        $include_file_path = $configuration['hdfs-site']['dfs.hosts']
+        $include_hosts_list = $hdp::params::slave_hosts
+        file { $include_file_path :
+        ensure => present,
+        owner => $hdp-hadoop::params::hdfs_user,
+        group => $hdp::params::user_group,
+        content => template('hdp-hadoop/include_hosts_list.erb')
+        }
+      }
+    }
+  }
+
 }
 
 class hdp-hadoop(
@@ -196,7 +220,8 @@ class hdp-hadoop(
   include hdp-hadoop::params
   $hadoop_config_dir = $hdp-hadoop::params::conf_dir
   $mapred_user = $hdp-hadoop::params::mapred_user  
-  $hdfs_user = $hdp-hadoop::params::hdfs_user  
+  $hdfs_user = $hdp-hadoop::params::hdfs_user
+  $hadoop_tmp_dir = $hdp-hadoop::params::hadoop_tmp_dir
 
   anchor{'hdp-hadoop::begin':} 
   anchor{'hdp-hadoop::end':} 
@@ -298,9 +323,24 @@ class hdp-hadoop(
       }
     }
 
-    Anchor['hdp-hadoop::begin'] -> Hdp-hadoop::Package<||> ->  Hdp::User<|title == 'hdfs_user' or title == 'mapred_user'|>  ->
+    if (hdp_get_major_stack_version($stack_version) >= 2) {
+      hdp::directory_recursive_create { "$hadoop_tmp_dir":
+        service_state => $service_state,
+        force => true,
+        owner => $hdfs_user
+      }
+    }
+
+    if (hdp_get_major_stack_version($stack_version) >= 2) {
+      Anchor['hdp-hadoop::begin'] -> Hdp-hadoop::Package<||> ->  Hdp::User<|title == $hdfs_user or title == $mapred_user|>  ->
+      Hdp::Directory_recursive_create[$hadoop_config_dir] -> Hdp-hadoop::Configfile<|tag == 'common'|> ->
+      Hdp::Directory_recursive_create[$logdirprefix] -> Hdp::Directory_recursive_create[$piddirprefix] -> Hdp::Directory_recursive_create["$hadoop_tmp_dir"] -> Anchor['hdp-hadoop::end']
+    } else {
+      Anchor['hdp-hadoop::begin'] -> Hdp-hadoop::Package<||> ->  Hdp::User<|title == $hdfs_user or title == $mapred_user|>  ->
       Hdp::Directory_recursive_create[$hadoop_config_dir] -> Hdp-hadoop::Configfile<|tag == 'common'|> ->
       Hdp::Directory_recursive_create[$logdirprefix] -> Hdp::Directory_recursive_create[$piddirprefix] -> Anchor['hdp-hadoop::end']
+    }
+
   }
 }
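
On 2.x stacks the initialize class now materializes the DataNode include/exclude lists named by dfs.hosts and dfs.hosts.exclude, and pre-creates hadoop.tmp.dir owned by the HDFS user. Roughly this, in shell, assuming stock file names and /tmp/hadoop-hdfs (the actual paths are read from hdfs-site and core-site):

  touch /etc/hadoop/conf/dfs.exclude
  chown hdfs:hadoop /etc/hadoop/conf/dfs.exclude
  printf '%s\n' slave1 slave2 > /etc/hadoop/conf/dfs.include   # rendered from $slave_hosts
  chown hdfs:hadoop /etc/hadoop/conf/dfs.include
  mkdir -p /tmp/hadoop-hdfs && chown hdfs /tmp/hadoop-hdfs     # hadoop.tmp.dir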
 

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/namenode.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/namenode.pp b/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/namenode.pp
index c891958..cfd4e22 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/namenode.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/namenode.pp
@@ -140,12 +140,6 @@ define hdp-hadoop::namenode::create_app_directories($service_state)
         owner         => $hdp::params::hbase_user,
         service_state => $service_state
       }
-     $hbase_staging_dir = $hdp::params::hbase_staging_dir
-     hdp-hadoop::hdfs::directory { $hbase_staging_dir:
-       owner         => $hdp::params::hbase_user,
-       service_state => $service_state,
-       mode             => '711',
-     }
     }
 
     if ($hdp::params::hive_server_host != "") {
@@ -172,7 +166,7 @@ define hdp-hadoop::namenode::create_app_directories($service_state)
       }
     }
 
-    if $stack_version in ("2.0.1") {
+    if (hdp_get_major_stack_version($stack_version) >= 2) {
       if ($hdp::params::nm_hosts != "") {
         if ($hdp::params::yarn_log_aggregation_enabled == "true") {
           $yarn_user = $hdp::params::yarn_user
@@ -181,7 +175,7 @@ define hdp-hadoop::namenode::create_app_directories($service_state)
           hdp-hadoop::hdfs::directory{ $yarn_nm_app_log_dir:
             service_state => $service_state,
             owner => $yarn_user,
-            mode  => '744',
+            mode  => '1777',
             recursive_chmod => true
           }
         }
@@ -191,20 +185,21 @@ define hdp-hadoop::namenode::create_app_directories($service_state)
       if ($hdp::params::hs_host != "") {
         $mapred_user = $hdp::params::mapred_user
         $mapreduce_jobhistory_intermediate_done_dir = $hdp::params::mapreduce_jobhistory_intermediate_done_dir
+        $group = $hdp::params::user_group
         $mapreduce_jobhistory_done_dir = $hdp::params::mapreduce_jobhistory_done_dir
 
         hdp-hadoop::hdfs::directory{ $mapreduce_jobhistory_intermediate_done_dir:
           service_state => $service_state,
           owner => $mapred_user,
-          mode  => '777',
-          recursive_chmod => true
+          group => $group,
+          mode  => '1777'
         }
 
         hdp-hadoop::hdfs::directory{ $mapreduce_jobhistory_done_dir:
           service_state => $service_state,
           owner => $mapred_user,
-          mode  => '750',
-          recursive_chmod => true
+          group => $group,
+          mode  => '1777'
         }
       }
     }
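
Note the permission changes: the NodeManager log-aggregation dir and both JobHistory done-dirs now get mode 1777, i.e. world-writable with the sticky bit set, so every job can write its own history files but users cannot delete one another's. The manual equivalent, with the usual HDP default path as a placeholder:

  su - hdfs -c "hadoop fs -chmod 1777 /mr-history/tmp"
  su - hdfs -c "hadoop fs -chown mapred:hadoop /mr-history/tmp"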

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp b/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp
index eea8005..fe5efce 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp
@@ -81,7 +81,7 @@ class hdp-hadoop::params(
   
   $mapred_log_dir_prefix = hdp_default("mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
 
-  $mapred_pid_dir_prefix = hdp_default("mapreduce_libs_path","/var/run/hadoop-mapreduce")
+  $mapred_pid_dir_prefix = hdp_default("mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
 
   ### compression related
   if (($hdp::params::lzo_enabled == true) and ($hdp::params::snappy_enabled == true)) {
@@ -106,6 +106,8 @@ class hdp-hadoop::params(
   $fs_checkpoint_dir = hdp_default("core-site/fs.checkpoint.dir","/tmp/hadoop-hdfs/dfs/namesecondary")
 
   $proxyuser_group = hdp_default("core-site/proxyuser.group","users")
+  
+  $hadoop_tmp_dir = hdp_default("core-site/hadoop.tmp.dir","/tmp/hadoop-$hdfs_user")
 
   ### hdfs-site
   $datanode_du_reserved = hdp_default("hdfs-site/datanode.du.reserved",1073741824)

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/include_hosts_list.erb
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/include_hosts_list.erb b/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/include_hosts_list.erb
new file mode 100644
index 0000000..5b519c6
--- /dev/null
+++ b/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/include_hosts_list.erb
@@ -0,0 +1,3 @@
+<% include_hosts_list.each do |val| -%>
+<%= val %>
+<% end -%>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/init.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/init.pp b/ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/init.pp
index 7cb4b10..bdd7fd2 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/init.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/init.pp
@@ -90,18 +90,37 @@ class hdp-hbase(
       override_owner => true
     }
 
-   hdp-hbase::configfile { ['hbase-env.sh','hadoop-metrics.properties']: 
+   hdp-hbase::configfile { ['hbase-env.sh','log4j.properties','hadoop-metrics.properties']: 
       type => $type
     }
 
     hdp-hbase::configfile { 'regionservers':}
 
     if ($security_enabled == true) {
-      if ($type == 'master' and $service_state == 'running') {
+      if ($type == 'master') {
         hdp-hbase::configfile { 'hbase_master_jaas.conf' : }
       } elsif ($type == 'regionserver' and $service_state == 'running') {
+
+        $hbase_grant_premissions_file = '/tmp/hbase_grant_permissions.sh'
+
+        file { $hbase_grant_premissions_file:
+          owner   => $hbase_user,
+          group   => $hdp::params::user_group,
+          mode => '0644',
+          content => template('hdp-hbase/hbase_grant_permissions.erb')
+        }
+
+        hdp::exec { '${smokeuser}_grant_privileges' :
+          command => "su - ${smoke_test_user} -c 'hbase --config $conf_dir shell ${hbase_grant_premissions_file}'",
+          require => File[$hbase_grant_premissions_file]
+        }
+
+        Hdp-hbase::Configfile<||> -> File[$hbase_grant_premissions_file] ->
+        Hdp::Exec['${smokeuser}_grant_privileges'] -> Anchor['hdp-hbase::end']
+
+      } elsif ($type == 'regionserver') {
         hdp-hbase::configfile { 'hbase_regionserver_jaas.conf' : }
-      } elsif ($type == 'client') {
+      } else {
         hdp-hbase::configfile { 'hbase_client_jaas.conf' : }
       }
     }
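
On secure clusters the regionserver branch now renders a grant script and feeds it to the HBase shell non-interactively (hbase shell <file> executes the commands in the file and exits). By hand this amounts to the following, assuming the default smoke user and conf dir:

  su - ambari-qa -c "hbase --config /etc/hbase/conf shell /tmp/hbase_grant_permissions.sh"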

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/params.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/params.pp b/ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/params.pp
index 1e6d6b4..266fe40 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/params.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/params.pp
@@ -89,8 +89,8 @@ class hdp-hbase::params() inherits hdp::params
   $hbase_regionserver_jaas_config_file = hdp_default("hbase_regionserver_jaas_config_file", "${conf_dir}/hbase_regionserver_jaas.conf")
 
   $hbase_master_keytab_path = hdp_default("hbase-site/hbase.master.keytab.file", "${keytab_path}/hbase.service.keytab")
-  $hbase_regionserver_keytab_path = hdp_default("hbase-site/hbase.regionserver.keytab.file", "${keytab_path}/hbase.service.keytab")
   $hbase_master_principal = hdp_default("hbase-site/hbase.master.kerberos.principal", "hbase/_HOST@${kerberos_domain}")
+  $hbase_regionserver_keytab_path = hdp_default("hbase-site/hbase.regionserver.keytab.file", "${keytab_path}/hbase.service.keytab")
   $hbase_regionserver_principal = hdp_default("hbase-site/hbase.regionserver.kerberos.principal", "hbase/_HOST@${kerberos_domain}")
 
   $hbase_primary_name = hdp_default("hbase_primary_name", "hbase")

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-hive/manifests/jdbc-connector.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-hive/manifests/jdbc-connector.pp b/ambari-agent/src/main/puppet/modules/hdp-hive/manifests/jdbc-connector.pp
index e5d8c3c..3bcd270 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-hive/manifests/jdbc-connector.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-hive/manifests/jdbc-connector.pp
@@ -24,13 +24,8 @@ class hdp-hive::jdbc-connector()
 
   $jdbc_jar_name = $hdp-hive::params::jdbc_jar_name
   
-  
-  $java_share_dir = "/usr/share/java"
-  $driver_curl_target = "${java_share_dir}/${jdbc_jar_name}"  
   $hive_lib = $hdp-hive::params::hive_lib
   $target = "${hive_lib}/${jdbc_jar_name}"
-  $jdk_location = $hdp::params::jdk_location
-  $driver_curl_source = "${jdk_location}${jdbc_jar_name}"
   
   anchor { 'hdp-hive::jdbc-connector::begin':}
 
@@ -46,18 +41,18 @@ class hdp-hive::jdbc-connector()
        creates => $target,
        path    => ["/bin","/usr/bin/"],
        require => Hdp::Package['mysql-connector-java'],
-       before  =>  Anchor['hdp-hive::jdbc-connector::end'],
+       notify  =>  Anchor['hdp-hive::jdbc-connector::end'],
    }
   } elsif ($hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver") {
    hdp::exec { 'hive mkdir -p ${artifact_dir} ; curl -kf --retry 10 ${driver_curl_source} -o ${driver_curl_target} &&  cp ${driver_curl_target} ${target}':
        command => "mkdir -p ${artifact_dir} ; curl -kf --retry 10 ${driver_curl_source} -o ${driver_curl_target} &&  cp ${driver_curl_target} ${target}",
        unless  => "test -f ${target}",
        path    => ["/bin","/usr/bin/"],
-       before  =>  Anchor['hdp-hive::jdbc-connector::end'],
+       notify  =>  Anchor['hdp-hive::jdbc-connector::end'],
      }  
   }
 
 
    anchor { 'hdp-hive::jdbc-connector::end':}
-   
+
 }
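
The connector jar is no longer curl'ed from jdk_location; it comes from the OS package under /usr/share/java. For the MySQL case the remaining exec reduces to roughly this, assuming the stock hive_lib of /usr/lib/hive/lib:

  test -f /usr/lib/hive/lib/mysql-connector-java.jar || \
    cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib/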

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-nagios/templates/hadoop-services.cfg.erb
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-nagios/templates/hadoop-services.cfg.erb b/ambari-agent/src/main/puppet/modules/hdp-nagios/templates/hadoop-services.cfg.erb
index 3eb55b9..3af8895 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-nagios/templates/hadoop-services.cfg.erb
+++ b/ambari-agent/src/main/puppet/modules/hdp-nagios/templates/hadoop-services.cfg.erb
@@ -180,7 +180,7 @@ define service {
 
 <%if scope.function_hdp_nagios_members_exist('resorcemanager')-%>
 define service {
-        hostgroup_name          resorcemanager
+        hostgroup_name          ganglia-server
         use                     hadoop-service
         service_description     GANGLIA::Ganglia Collector [gmond] process down alert for Resource Manager
         servicegroups           GANGLIA
@@ -193,7 +193,7 @@ define service {
 
 <%if scope.function_hdp_nagios_members_exist('nodemanagers')-%>
 define service {
-        hostgroup_name          nodemanagers
+        hostgroup_name          ganglia-server
         use                     hadoop-service
         service_description     GANGLIA::Ganglia Collector [gmond] process down alert for Node Manager
         servicegroups           GANGLIA
@@ -206,7 +206,7 @@ define service {
 
 <%if scope.function_hdp_nagios_members_exist('historyserver2')-%>
 define service {
-        hostgroup_name          historyserver2
+        hostgroup_name          ganglia-server
         use                     hadoop-service
         service_description     GANGLIA::Ganglia Collector [gmond] process down alert for History Server 2
         servicegroups           GANGLIA
@@ -450,11 +450,11 @@ define service {
 <% end %>
 
 <%if scope.function_hdp_nagios_members_exist('historyserver2')-%>
-# MAPREDUCE::HISTORYSERVER2 Checks
+# MAPREDUCE::JOBHISTORY Checks
 define service {
         hostgroup_name          historyserver2
         use                     hadoop-service
-        service_description     HISTORYSERVER2::History Server 2 Web UI down
+        service_description     JOBHISTORY::History Server 2 Web UI down
         servicegroups           MAPREDUCE
         check_command           check_webui!historyserver2!<%=scope.function_hdp_template_var("hs_port")%>
         normal_check_interval   1
@@ -465,7 +465,7 @@ define service {
 define service {
         hostgroup_name          historyserver2
         use                     hadoop-service
-        service_description     HISTORYSERVER::History Server 2 CPU utilization
+        service_description     JOBHISTORY::History Server 2 CPU utilization
         servicegroups           MAPREDUCE
         check_command           check_cpu!200%!250%
         normal_check_interval   5
@@ -476,7 +476,7 @@ define service {
 define service {
         hostgroup_name          historyserver2
         use                     hadoop-service
-        service_description     HISTORYSERVER::History Server 2 RPC latency
+        service_description     JOBHISTORY::History Server 2 RPC latency
         servicegroups           MAPREDUCE
         check_command           check_rpcq_latency!JobHistoryServer!<%=scope.function_hdp_template_var("hs_port")%>!3000!5000
         normal_check_interval   5

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-oozie/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-oozie/files/oozieSmoke2.sh b/ambari-agent/src/main/puppet/modules/hdp-oozie/files/oozieSmoke2.sh
new file mode 100644
index 0000000..e1f31f5
--- /dev/null
+++ b/ambari-agent/src/main/puppet/modules/hdp-oozie/files/oozieSmoke2.sh
@@ -0,0 +1,98 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+function getValueFromField {
+  xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
+  return $?
+}
+
+function checkOozieJobStatus {
+  local job_id=$1
+  local num_of_tries=$2
+  #default num_of_tries to 10 if not present
+  num_of_tries=${num_of_tries:-10}
+  local i=0
+  local rc=1
+  local cmd="source ${oozie_conf_dir}/oozie-env.sh ; /usr/bin/oozie job -oozie ${OOZIE_SERVER} -info $job_id"
+  su - ${smoke_test_user} -c "$cmd"
+  while [ $i -lt $num_of_tries ] ; do
+    cmd_output=`su - ${smoke_test_user} -c "$cmd"`
+    (IFS='';echo $cmd_output)
+    act_status=$(IFS='';echo $cmd_output | grep ^Status | cut -d':' -f2 | sed 's| ||g')
+    echo "workflow_status=$act_status"
+    if [ "RUNNING" == "$act_status" ]; then
+      #increment the couner and get the status again after waiting for 15 secs
+      sleep 15
+      (( i++ ))
+      elif [ "SUCCEEDED" == "$act_status" ]; then
+        rc=0;
+        break;
+      else
+        rc=1
+        break;
+      fi
+    done
+    return $rc
+}
+
+export oozie_conf_dir=$1
+export hadoop_conf_dir=$2
+export smoke_test_user=$3
+export security_enabled=$4
+export smoke_user_keytab=$5
+export realm=$6
+export JTHOST=$7
+export NNHOST=$8
+
+export OOZIE_EXIT_CODE=0
+export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
+export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.default.name`
+export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.base.url`
+export OOZIE_EXAMPLES_DIR=`rpm -ql oozie-client | grep 'oozie-examples.tar.gz$' | xargs dirname`
+cd $OOZIE_EXAMPLES_DIR
+
+tar -zxf oozie-examples.tar.gz
+sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
+sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
+sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
+
+if [[ $security_enabled == "true" ]]; then
+  kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smoke_test_user}; "
+  echo "dfs.namenode.kerberos.principal=nn/`echo ${NNHOST} | tr '[:upper:]' '[:lower:]'`@${realm}" >> examples/apps/map-reduce/job.properties
+  echo "mapreduce.jobtracker.kerberos.principal=jt/`echo ${JTHOST} | tr '[:upper:]' '[:lower:]'`@${realm}" >> examples/apps/map-reduce/job.properties
+else 
+  kinitcmd=""
+fi
+
+su - ${smoke_test_user} -c "hdfs dfs -rm -r examples"
+su - ${smoke_test_user} -c "hdfs dfs -rm -r input-data"
+su - ${smoke_test_user} -c "hdfs dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
+su - ${smoke_test_user} -c "hdfs dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
+
+cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; /usr/bin/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
+echo $cmd
+job_info=`su - ${smoke_test_user} -c "$cmd" | grep "job:"`
+job_id="`echo $job_info | cut -d':' -f2`"
+checkOozieJobStatus "$job_id"
+OOZIE_EXIT_CODE="$?"
+exit $OOZIE_EXIT_CODE
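
getValueFromField pulls a single property value out of a Hadoop-style *-site.xml with xmllint. Inside the script it is used like this (output is illustrative):

  getValueFromField /etc/hadoop/conf/core-site.xml fs.default.name
  # -> hdfs://nn-host.example.com:8020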

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/oozie/service_check.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/oozie/service_check.pp b/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/oozie/service_check.pp
index 97dd50e..833a5f2 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/oozie/service_check.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/oozie/service_check.pp
@@ -23,14 +23,25 @@ class hdp-oozie::oozie::service_check()
   include hdp-oozie::params
 
   $smoke_shell_files = ['oozieSmoke.sh']
+
+  if (hdp_get_major_stack_version($stack_version) >= 2) {
+    $smoke_test_file_name = 'oozieSmoke2.sh'
+  } else {
+    $smoke_test_file_name = 'oozieSmoke.sh'
+  }
+
   anchor { 'hdp-oozie::oozie::service_check::begin':}
 
-  hdp-oozie::smoke_shell_file { $smoke_shell_files: }
+  hdp-oozie::smoke_shell_file { $smoke_shell_files:
+    smoke_shell_file_name => $smoke_test_file_name
+  }
 
   anchor{ 'hdp-oozie::oozie::service_check::end':}
 }
 
-define hdp-oozie::smoke_shell_file()
+define hdp-oozie::smoke_shell_file(
+  $smoke_shell_file_name
+)
 {
   $smoke_test_user = $hdp::params::smokeuser
   $conf_dir = $hdp::params::oozie_conf_dir
@@ -47,17 +58,17 @@ define hdp-oozie::smoke_shell_file()
   $nn_principal = $hdp::params::nn_principal
   $jt_principal = $hdp::params::jt_principal
 
-  file { '/tmp/oozieSmoke.sh':
+  file { "/tmp/${smoke_shell_file_name}":
     ensure => present,
-    source => "puppet:///modules/hdp-oozie/oozieSmoke.sh",
+    source => "puppet:///modules/hdp-oozie/${smoke_shell_file_name}",
     mode => '0755'
   }
 
-  exec { '/tmp/oozieSmoke.sh':
-    command   => "sh /tmp/oozieSmoke.sh ${conf_dir} ${hadoopconf_dir} ${smoke_test_user} ${security} ${smoke_user_keytab} ${realm} $jt_principal $nn_principal $kinit_path",
+  exec { "/tmp/${smoke_shell_file_name}":
+    command   => "sh /tmp/${smoke_shell_file_name} ${conf_dir} ${hadoopconf_dir} ${smoke_test_user} ${security} ${smoke_user_keytab} ${realm} $jt_host $nn_host",
     tries     => 3,
     try_sleep => 5,
-    require   => File['/tmp/oozieSmoke.sh'],
+    require   => File["/tmp/${smoke_shell_file_name}"],
     path      => '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
     logoutput => "true"
   }
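
The eight positional arguments now passed to the script line up with the exports at the top of oozieSmoke2.sh ($1 = oozie conf dir through $8 = NameNode host). A hand-run invocation would look like this, with placeholder hosts, realm, and keytab:

  sh /tmp/oozieSmoke2.sh /etc/oozie/conf /etc/hadoop/conf ambari-qa false \
    /etc/security/keytabs/smokeuser.headless.keytab EXAMPLE.COM jt-host.example.com nn-host.example.com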

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/params.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/params.pp b/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/params.pp
index 8cd6725..b74cf7c 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/params.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/params.pp
@@ -43,8 +43,6 @@ class hdp-oozie::params() inherits hdp::params
 
   $oozie_lib_dir = hdp_default("oozie_lib_dir","/var/lib/oozie/")
   
-  $libext_dir = hdp_default("libext_dir","/usr/lib/oozie/libext")
-  
   $oozie_webapps_dir = hdp_default("oozie_webapps_dir","/var/lib/oozie/oozie-server/webapps/")
   
   ### oozie-site

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/service.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/service.pp b/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/service.pp
index 0f694ec..dc82fb3 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/service.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/service.pp
@@ -33,7 +33,11 @@ class hdp-oozie::service(
   $cmd = "env HADOOP_HOME=${hadoop_home} /usr/sbin/oozie_server.sh"
   $pid_file = "${hdp-oozie::params::oozie_pid_dir}/oozie.pid" 
   $jar_location = $hdp::params::hadoop_jar_location
-  $ext_js_path = "/usr/share/HDP-oozie/ext.zip"
+  if (hdp_get_major_stack_version($stack_version) >= 2) {
+    $ext_js_path = "/usr/share/HDP-oozie/ext-2.2.zip"
+  } else {
+    $ext_js_path = "/usr/share/HDP-oozie/ext.zip"
+  }
 
   $security = $hdp::params::security_enabled
   $oozie_keytab = $hdp-oozie::params::oozie_service_keytab
@@ -49,11 +53,9 @@ class hdp-oozie::service(
   
   $jdbc_driver_name = $configuration['oozie-site']['oozie.service.JPAService.jdbc.driver']
   if ($jdbc_driver_name == "com.mysql.jdbc.Driver"){
-    $jdbc_driver_jar = "${java_share_dir}/mysql-connector-java.jar"
-    $jdbc_driver_jar_target = "${libext_dir}/mysql-connector-java.jar"
+    $jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
   } elsif($jdbc_driver_name == "oracle.jdbc.driver.OracleDriver") {
-      $jdbc_driver_jar = "${java_share_dir}/${oracle_driver_jar_name}"
-      $jdbc_driver_jar_target = "${libext_dir}/${oracle_driver_jar_name}"
+      $jdbc_driver_jar = "/usr/share/java/ojdbc6.jar"
   }
   
   file { '/tmp/wrap_ooziedb.sh':
@@ -87,18 +89,24 @@ class hdp-oozie::service(
     $jar_path = "${lzo_jar_suffix}${jdbc_driver_jar}"
   }
 
-              
+       
   $cmd1 = "cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz"
   $cmd2 =  "cd /usr/lib/oozie && mkdir -p ${oozie_tmp}"
   $cmd3 =  "cd /usr/lib/oozie && chown ${user}:${hdp::params::user_group} ${oozie_tmp}" 
      
-  $cmd4 = $jdbc_driver_name ? {
-        /(com.mysql.jdbc.Driver|oracle.jdbc.driver.OracleDriver)/ => "cd ${oozie_tmp} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 $jar_location -extjs $ext_js_path $jar_option $jar_path && cp $jdbc_driver_jar $jdbc_driver_jar_target",
+  if (hdp_get_major_stack_version($stack_version) >= 2) {
+    $cmd4 = $jdbc_driver_name ? {
+        /(com.mysql.jdbc.Driver|oracle.jdbc.driver.OracleDriver)/ => "cd ${oozie_tmp} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 2.x /usr/lib/ -extjs $ext_js_path $jar_option $jar_path",
+        default            => "cd ${oozie_tmp} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 2.x /usr/lib/ -extjs $ext_js_path $jar_option $jar_path",
+    }
+  } else {
+    $cmd4 = $jdbc_driver_name ? {
+        /(com.mysql.jdbc.Driver|oracle.jdbc.driver.OracleDriver)/ => "cd ${oozie_tmp} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 $jar_location -extjs $ext_js_path $jar_option $jar_path",
         default            => "cd ${oozie_tmp} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 $jar_location -extjs $ext_js_path $jar_option $jar_path",
+    }
   }
-  $cmd5 =  "/tmp/wrap_ooziedb.sh create -sqlfile oozie.sql -run "
+  $cmd5 =  "cd ${oozie_tmp} && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run ; echo 0"
   $cmd6 =  "su - ${user} -c '${kinit_if_needed}; hadoop dfs -put /usr/lib/oozie/share ${oozie_hdfs_user_dir} ; hadoop dfs -chmod -R 755 ${oozie_hdfs_user_dir}/share'"
-  #$cmd7 = "/usr/lib/oozie/bin/oozie-start.sh"
 
   if ($ensure == 'installed_and_configured') {
     $sh_cmds = [$cmd1, $cmd2, $cmd3]
@@ -119,15 +127,13 @@ class hdp-oozie::service(
   hdp-oozie::service::directory { $hdp-oozie::params::oozie_data_dir : }
   hdp-oozie::service::directory { $hdp-oozie::params::oozie_lib_dir : }
   hdp-oozie::service::directory { $hdp-oozie::params::oozie_webapps_dir : }
-  hdp-oozie::service::directory { $hdp-oozie::params::libext_dir : }
-  hdp-oozie::service::jdbc-connector-java { $hdp-oozie::params::libext_dir : }
 
   anchor{'hdp-oozie::service::begin':} -> Hdp-oozie::Service::Directory<||> -> anchor{'hdp-oozie::service::end':}
   
   if ($ensure == 'installed_and_configured') {
     hdp-oozie::service::exec_sh{$sh_cmds:}
     hdp-oozie::service::exec_user{$user_cmds:}
-    Anchor['hdp-oozie::service::begin'] -> Hdp-oozie::Service::Jdbc-connector-java[$hdp-oozie::params::libext_dir] -> Hdp-oozie::Service::Directory<||> -> Hdp-oozie::Service::Exec_sh[$cmd1] -> Hdp-oozie::Service::Exec_sh[$cmd2] ->Hdp-oozie::Service::Exec_sh[$cmd3] -> Hdp-oozie::Service::Exec_user[$cmd4] ->Hdp-oozie::Service::Exec_user[$cmd5] -> Anchor['hdp-oozie::service::end']
+    Hdp-oozie::Service::Directory<||> -> Hdp-oozie::Service::Exec_sh[$cmd1] -> Hdp-oozie::Service::Exec_sh[$cmd2] ->Hdp-oozie::Service::Exec_sh[$cmd3] -> Hdp-oozie::Service::Exec_user[$cmd4] ->Hdp-oozie::Service::Exec_user[$cmd5] -> Anchor['hdp-oozie::service::end']
   } elsif ($ensure == 'running') {
     hdp::exec { "exec $cmd6" :
       command => $cmd6,
@@ -149,19 +155,6 @@ class hdp-oozie::service(
   }
 }
 
-define hdp-oozie::service::jdbc-connector-java()
-{
-  if ($jdbc_driver_name == "com.mysql.jdbc.Driver"){
-   hdp::package { 'mysql-connector-java' : }
-  } elsif($jdbc_driver_name == "oracle.jdbc.driver.OracleDriver") {
-    exec{ "${curl_cmd} ${name}":
-      command => $curl_cmd,
-      path    => ["/bin","/usr/bin/"],
-      unless  => "test -e ${java_share_dir}/${oracle_driver_jar_name}",
-    } 
-  }                       
-}                      
-                      
 define hdp-oozie::service::directory()
 {
   hdp::directory_recursive_create { $name: 
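
On a 2.x stack the setup commands reduce to roughly the following (oozie_tmp is assumed to be a scratch dir such as /var/tmp/oozie; the trailing "echo 0" in $cmd5 deliberately swallows the nonzero exit when the Oozie DB already exists):

  cd /var/tmp/oozie
  /usr/lib/oozie/bin/oozie-setup.sh -hadoop 2.x /usr/lib/ -extjs /usr/share/HDP-oozie/ext-2.2.zip
  /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run ; echo 0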

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/server.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/server.pp b/ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/server.pp
index 8671d6d..7c03849 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/server.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/server.pp
@@ -95,11 +95,23 @@ class hdp-templeton::copy-hdfs-directories($service_state)
     path => ['/bin']
   }
 
-  hdp-hadoop::hdfs::copyfromlocal { '/usr/lib/hadoop/contrib/streaming/hadoop-streaming*.jar':
-   service_state => $service_state,
-   owner => $webhcat_user,
-   mode  => '755',
-   dest_dir => "$webhcat_apps_dir/hadoop-streaming.jar",
+  if (hdp_get_major_stack_version($stack_version) >= 2) {
+    hdp-hadoop::hdfs::copyfromlocal { '/usr/lib/hadoop-mapreduce/hadoop-streaming*.jar':
+      service_state => $service_state,
+      owner => $webhcat_user,
+      mode  => '755',
+      dest_dir => "$webhcat_apps_dir/hadoop-streaming.jar",
+      kinit_if_needed => $kinit_if_needed
+    }
+  }
+  else {
+    hdp-hadoop::hdfs::copyfromlocal { '/usr/lib/hadoop/contrib/streaming/hadoop-streaming*.jar':
+      service_state => $service_state,
+      owner => $webhcat_user,
+      mode  => '755',
+      dest_dir => "$webhcat_apps_dir/hadoop-streaming.jar",
+      kinit_if_needed => $kinit_if_needed
+    }
   }
   hdp-hadoop::hdfs::copyfromlocal { '/usr/share/HDP-webhcat/pig.tar.gz' :
     service_state => $service_state,
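
The only difference between the two branches is where the streaming jar lives: MR2 ships it in /usr/lib/hadoop-mapreduce, MR1 under the old contrib path. The copy itself is unchanged, roughly (webhcat user and apps dir are the usual defaults, assumed here):

  su - hcat -c "hadoop fs -copyFromLocal /usr/lib/hadoop-mapreduce/hadoop-streaming*.jar /apps/webhcat/hadoop-streaming.jar"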

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-yarn/files/validateYarnComponentStatus.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-yarn/files/validateYarnComponentStatus.py b/ambari-agent/src/main/puppet/modules/hdp-yarn/files/validateYarnComponentStatus.py
index 7cc3230..9eb3444 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-yarn/files/validateYarnComponentStatus.py
+++ b/ambari-agent/src/main/puppet/modules/hdp-yarn/files/validateYarnComponentStatus.py
@@ -23,11 +23,14 @@ import urllib2, urllib
 import json
 
 RESOURCEMANAGER = 'rm'
-HISTORYSERVER ='hs'
+NODEMANAGER = 'nm'
+HISTORYSERVER = 'hs'
 
 STARTED_STATE = 'STARTED'
+RUNNING_STATE = 'RUNNING'
 
-def validate(component, path, address):
+#Return reponse for given path and address
+def getResponse(path, address):
 
   try:
     url = 'http://' + address + path
@@ -36,23 +39,42 @@ def validate(component, path, address):
     request = urllib2.Request(url)
     handler = urllib2.urlopen(request)
     response = json.loads(handler.read())
-    is_valid = validateResponse(component, response)
-    if is_valid:
-      exit(0)
-    else:
+    if response == None:
+      print 'There is no response for url: ' + str(url)
       exit(1)
+    return response
   except Exception as e:
-    print 'Error checking status of component', e
+    print 'Error getting response for url:' + str(url), e
     exit(1)
 
+#Verify that REST api is available for given component
+def validateAvailability(component, path, address):
+
+  try:
+    response = getResponse(path, address)
+    is_valid = validateAvailabilityResponse(component, response)
+    if not is_valid:
+      exit(1)
+  except Exception as e:
+    print 'Error checking availability status of component', e
+    exit(1)
 
-def validateResponse(component, response):
+#Validate component-specific response
+def validateAvailabilityResponse(component, response):
   try:
     if component == RESOURCEMANAGER:
       rm_state = response['clusterInfo']['state']
       if rm_state == STARTED_STATE:
         return True
       else:
+        print 'Resourcemanager is not started'
+        return False
+
+    elif component == NODEMANAGER:
+      node_healthy = bool(response['nodeInfo']['nodeHealthy'])
+      if node_healthy:
+        return True
+      else:
         return False
     elif component == HISTORYSERVER:
       hs_start_time = response['historyInfo']['startedOn']
@@ -63,7 +85,44 @@ def validateResponse(component, response):
     else:
       return False
   except Exception as e:
-    print 'Error validation of response', e
+    print 'Error validation of availability response for ' + str(component), e
+    return False
+
+#Verify that component has required resources to work
+def validateAbility(component, path, address):
+
+  try:
+    response = getResponse(path, address)
+    is_valid = validateAbilityResponse(component, response)
+    if not is_valid:
+      exit(1)
+  except Exception as e:
+    print 'Error checking ability of component', e
+    exit(1)
+
+#Validate component-specific response that it has required resources to work
+def validateAbilityResponse(component, response):
+  try:
+    if component == RESOURCEMANAGER:
+      nodes = []
+      if response.has_key('nodes') and not response['nodes'] == None and response['nodes'].has_key('node'):
+        nodes = response['nodes']['node']
+      connected_nodes_count = len(nodes)
+      if connected_nodes_count == 0:
+        print 'There is no connected nodemanagers to resourcemanager'
+        return False
+      active_nodes = filter(lambda x: x['state'] == RUNNING_STATE, nodes)
+      active_nodes_count = len(active_nodes)
+
+      if connected_nodes_count == 0:
+        print 'There is no connected active nodemanagers to resourcemanager'
+        return False
+      else:
+        return True
+    else:
+      return False
+  except Exception as e:
+    print 'Error validation of ability response', e
     return False
 
 #
@@ -73,21 +132,25 @@ def main():
   parser = optparse.OptionParser(usage="usage: %prog [options] component ")
   parser.add_option("-p", "--port", dest="address", help="Host:Port for REST API of a desired component")
 
-
   (options, args) = parser.parse_args()
 
   component = args[0]
   
   address = options.address
-  
   if component == RESOURCEMANAGER:
     path = '/ws/v1/cluster/info'
+  elif component == NODEMANAGER:
+    path = '/ws/v1/node/info'
   elif component == HISTORYSERVER:
     path = '/ws/v1/history/info'
   else:
     parser.error("Invalid component")
 
-  validate(component, path, address)
+  validateAvailability(component, path, address)
+
+  if component == RESOURCEMANAGER:
+    path = '/ws/v1/cluster/nodes'
+    validateAbility(component, path, address)
 
 if __name__ == "__main__":
   main()
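
The script now distinguishes "the REST API answers" from "the component can actually do work". The endpoints it polls can be checked by hand with curl, assuming the stock YARN web ports (8088 for the ResourceManager, 8042 for the NodeManager):

  curl -s http://rm-host:8088/ws/v1/cluster/info    # expect "state":"STARTED"
  curl -s http://rm-host:8088/ws/v1/cluster/nodes   # expect at least one node with "state":"RUNNING"
  curl -s http://nm-host:8042/ws/v1/node/info       # expect "nodeHealthy":true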

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/historyserver/service_check.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/historyserver/service_check.pp b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/historyserver/service_check.pp
index d88cdec..885e24b 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/historyserver/service_check.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/historyserver/service_check.pp
@@ -20,5 +20,5 @@
 #
 class hdp-yarn::historyserver::service_check() inherits hdp-yarn::params
 {
-  class { 'hdp-yarn::smoketest': component_name => 'historyserver'}
+  hdp-yarn::smoketest{'hdp-yarn::smoketest:rm': component_name => 'historyserver'}
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/mapred2/service_check.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/mapred2/service_check.pp b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/mapred2/service_check.pp
new file mode 100644
index 0000000..3ada16e
--- /dev/null
+++ b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/mapred2/service_check.pp
@@ -0,0 +1,69 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+class hdp-yarn::mapred2::service_check() inherits hdp-yarn::params
+{
+  $smoke_test_user = $hdp::params::smokeuser
+  $hadoopMapredExamplesJarName = $hdp-yarn::params::hadoopMapredExamplesJarName
+  $jar_path = "$hdp::params::hadoop_mapred2_jar_location/$hadoopMapredExamplesJarName"
+  $input_file = "/user/${smoke_test_user}/mapredsmokeinput"
+  $output_file = "/user/${smoke_test_user}/mapredsmokeoutput"
+  $hadoop_conf_dir = $hdp::params::hadoop_conf_dir
+
+  $cleanup_cmd = "fs -rm -r -f ${output_file} ${input_file}"
+  $create_file_cmd = "fs -put /etc/passwd ${input_file}"
+  $test_cmd = "fs -test -e ${output_file}"
+  $run_wordcount_job = "jar $jar_path wordcount ${input_file} ${output_file}"
+
+  anchor { 'hdp-yarn::mapred2::service_check::begin':}
+
+  hdp-hadoop::exec-hadoop { 'mapred::service_check::cleanup_before':
+    command   => $cleanup_cmd,
+    tries     => 1,
+    try_sleep => 5,
+    user      => $smoke_test_user
+  }
+
+  hdp-hadoop::exec-hadoop { 'mapred::service_check::create_file':
+    command   => $create_file_cmd,
+    tries     => 1,
+    try_sleep => 5,
+    user      => $smoke_test_user
+  }
+
+  hdp-hadoop::exec-hadoop { 'mapred::service_check::run_wordcount':
+    command   => $run_wordcount_job,
+    tries     => 1,
+    try_sleep => 5,
+    user      => $smoke_test_user,
+    logoutput => "true"
+  }
+
+  hdp-hadoop::exec-hadoop { 'mapred::service_check::test':
+    command     => $test_cmd,
+    refreshonly => true,
+    user        => $smoke_test_user
+  }
+
+  anchor { 'hdp-yarn::mapred2::service_check::end':}
+
+  Anchor['hdp-yarn::mapred2::service_check::begin'] -> Hdp-hadoop::Exec-hadoop['mapred::service_check::cleanup_before'] -> Hdp-hadoop::Exec-hadoop['mapred::service_check::create_file'] -> Hdp-hadoop::Exec-hadoop['mapred::service_check::run_wordcount'] -> Hdp-hadoop::Exec-hadoop['mapred::service_check::test'] -> Anchor['hdp-yarn::mapred2::service_check::end']
+
+}

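Assuming hdp-hadoop::exec-hadoop shells out to the hadoop client as the smoke user ("ambari-qa" below is a hypothetical value for $hdp::params::smokeuser, and the jar path follows the hadoop_mapred2_jar_location default later in this patch), the anchored chain above boils down to this sequence, sketched in Python:

    import subprocess

    def hadoop(args):
        # run "hadoop <args>" as the smoke user; the glob in the jar path is
        # expanded by the shell that su spawns
        subprocess.check_call("su - ambari-qa -c 'hadoop %s'" % args, shell=True)

    jar = '/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples-2.*.jar'
    hadoop('fs -rm -r -f /user/ambari-qa/mapredsmokeoutput /user/ambari-qa/mapredsmokeinput')
    hadoop('fs -put /etc/passwd /user/ambari-qa/mapredsmokeinput')
    hadoop('jar %s wordcount /user/ambari-qa/mapredsmokeinput /user/ambari-qa/mapredsmokeoutput' % jar)
    hadoop('fs -test -e /user/ambari-qa/mapredsmokeoutput')
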
http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/nodemanager.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/nodemanager.pp b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/nodemanager.pp
index df852ad..7f40ff3 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/nodemanager.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/nodemanager.pp
@@ -25,7 +25,8 @@ class hdp-yarn::nodemanager(
 {
   $yarn_user = $hdp-yarn::params::yarn_user
   $nm_local_dirs = $hdp-yarn::params::nm_local_dirs
-  
+  $nm_log_dirs = $hdp-yarn::params::nm_log_dirs
+
   if ($service_state == 'no_op') {
   } elsif ($service_state in 'installed_and_configured') {
   
@@ -45,14 +46,21 @@ class hdp-yarn::nodemanager(
       force => true
     }
 
+    hdp::directory_recursive_create { $nm_log_dirs: 
+      owner       => $yarn_user,
+      context_tag => 'yarn_service',
+      service_state => $service_state,
+      force => true
+    }
+
     hdp-yarn::service{ 'nodemanager':
       ensure       => $service_state,
       user         => $yarn_user
     }
 
-    Hdp::Directory_recursive_create[$nm_local_dirs] -> Hdp-yarn::Service['nodemanager']
+    anchor{"hdp-yarn::nodemanager::begin":} -> Hdp::Directory_recursive_create[$nm_local_dirs] -> Hdp-yarn::Service['nodemanager'] -> anchor{"hdp-yarn::nodemanager::end":}
 
   } else {
     hdp_fail("TODO not implemented yet: service_state = ${service_state}")
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/params.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/params.pp b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/params.pp
index c1faf1c..9cb799f 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/params.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/params.pp
@@ -23,22 +23,28 @@ class hdp-yarn::params(
 {
 
   $conf_dir = $hdp::params::yarn_conf_dir 
+  $stack_version = $hdp::params::stack_version
     
   ## yarn-env 
   $hadoop_libexec_dir = hdp_default("yarn/yarn-env/hadoop_libexec_dir","/usr/lib/hadoop/libexec")
   
   $hadoop_common_home = hdp_default("yarn/yarn-env/hadoop_common_home","/usr/lib/hadoop")
   $hadoop_hdfs_home = hdp_default("yarn/yarn-env/hadoop_hdfs_home","/usr/lib/hadoop-hdfs")
-  $hadoop_mapred_home = hdp_default("yarn/yarn-env/hadoop_mapred_home","/usr/lib/hadoop-yarn")
+  $hadoop_mapred_home = hdp_default("yarn/yarn-env/hadoop_mapred_home","/usr/lib/hadoop-mapreduce")
   $hadoop_yarn_home = hdp_default("yarn/yarn-env/hadoop_yarn_home","/usr/lib/hadoop-yarn")
   
   $yarn_log_dir_prefix = hdp_default("hadoop/yarn-env/yarn_log_dir_prefix","/var/log/hadoop-yarn")
   $yarn_pid_dir_prefix = hdp_default("hadoop/yarn-env/yarn_pid_dir_prefix","/var/run/hadoop-yarn")
   
   ## yarn-site
-  $rm_webui_address = hdp_default("yarn-site/yarn.resourcemanager.webapp.address", "localhost:8088")
-  $nm_webui_address = hdp_default("yarn-site/yarn.nodemanager.webapp.address", "localhost:8042")
-  $hs_webui_address = hdp_default("mapred-site/mapreduce.jobhistory.webapp.address", "localhost:19888")
+  $rm_webui_address = hdp_default("yarn-site/yarn.resourcemanager.webapp.address", "0.0.0.0:8088")
+  $nm_webui_address = hdp_default("yarn-site/yarn.nodemanager.webapp.address", "0.0.0.0:8042")
+  $hs_webui_address = hdp_default("mapred-site/mapreduce.jobhistory.webapp.address", "0.0.0.0:19888")
   
-  $nm_local_dirs = hdp_default("yarn-site/yarn.nodemanager.local-dirs", "/yarn/loc/dir") 
+  $nm_local_dirs = hdp_default("yarn-site/yarn.nodemanager.local-dirs", "$hadoop_tmp_dir/nm-local-dir")
+  $nm_log_dirs = hdp_default("yarn-site/yarn.nodemanager.log-dirs", "/var/log/hadoop-yarn/yarn")
+
+  ##smoke test configs
+  $distrAppJarName = "hadoop-yarn-applications-distributedshell-2.*.jar"
+  $hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"
 }

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/resourcemanager/service_check.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/resourcemanager/service_check.pp b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/resourcemanager/service_check.pp
index 8f42c0c..c5386c3 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/resourcemanager/service_check.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/resourcemanager/service_check.pp
@@ -20,5 +20,5 @@
 #
 class hdp-yarn::resourcemanager::service_check() inherits hdp-yarn::params
 {
-  class { 'hdp-yarn::smoketest': component_name => 'resourcemanager'}
+  hdp-yarn::smoketest{'hdp-yarn::smoketest:rm': component_name => 'resourcemanager'}
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/service.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/service.pp b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/service.pp
index cd479b0..ab0f2ec 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/service.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/service.pp
@@ -98,8 +98,7 @@ define hdp-yarn::service(
   anchor{"hdp-yarn::service::${name}::begin":}
   anchor{"hdp-yarn::service::${name}::end":}
   if ($daemon_cmd != undef) {
-    Anchor["hdp-yarn::service::${name}::begin"] -> Hdp::Directory_recursive_create<|context_tag == 'yarn_service'|> ->
-      Hdp::Exec[$daemon_cmd] -> Anchor["hdp-yarn::service::${name}::end"]
+    Anchor["hdp-yarn::service::${name}::begin"] -> Hdp::Directory_recursive_create<|title == $pid_dir or title == $log_dir|> -> Hdp::Exec[$daemon_cmd] -> Anchor["hdp-yarn::service::${name}::end"]
 
   }
   if ($ensure == 'running') {

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/smoketest.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/smoketest.pp b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/smoketest.pp
index 383138f..6ef7bf8 100644
--- a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/smoketest.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/smoketest.pp
@@ -18,7 +18,7 @@
 # under the License.
 #
 #
-class hdp-yarn::smoketest(
+define hdp-yarn::smoketest(
   $component_name = undef
 )
 {
@@ -29,15 +29,18 @@ class hdp-yarn::smoketest(
   if ($component_name == 'resourcemanager') {
     $component_type = 'rm'
     $component_address = $rm_webui_address
+  } elsif ($component_name == 'nodemanager') {
+    $component_type = 'nm'
+    $component_address = $nm_webui_address
   } elsif ($component_name == 'historyserver') {
-    $component_type = 'hs' 
+    $component_type = 'hs'
     $component_address = $hs_webui_address
   } else {
     hdp_fail("Unsupported component name: $component_name")
   }
 
   $smoke_test_user = $hdp::params::smokeuser
-  
+
   $validateStatusFileName = "validateYarnComponentStatus.py"
   $validateStatusFilePath = "/tmp/$validateStatusFileName"
 
@@ -55,6 +58,6 @@ class hdp-yarn::smoketest(
     try_sleep => 5,
     path      => '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
     logoutput => "true"
-  }
-  File[$validateStatusFilePath] -> Exec[$validateStatusFilePath]
+  }
+  anchor{"hdp-yarn::smoketest::begin":} -> File[$validateStatusFilePath] -> Exec[$validateStatusFilePath] -> anchor{"hdp-yarn::smoketest::end":}
 }

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/yarn/service_check.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/yarn/service_check.pp b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/yarn/service_check.pp
new file mode 100644
index 0000000..fe053ac
--- /dev/null
+++ b/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/yarn/service_check.pp
@@ -0,0 +1,37 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+class hdp-yarn::yarn::service_check() inherits hdp-yarn::params
+{
+
+  $smoke_test_user = $hdp::params::smokeuser
+  $jar_path = "$hadoop_yarn_home/$distrAppJarName"
+  $run_dist_shell_app_cmd = "jar $jar_path -appname yarnservicecheck -master_memory 512 -container_memory 128 -num_containers 2 -shell_command \"ls\" -jar $jar_path"
+  
+  ## Check availability of REST api
+  hdp-yarn::smoketest{'hdp-yarn::smoketest:rm': component_name => 'resourcemanager'}
+  
+  ## Run distributed shell application check
+  hdp-hadoop::exec-hadoop { 'hdp-yarn::yarn::service_check':
+    command     => $run_dist_shell_app_cmd,
+    user        => $smoke_test_user
+  }
+  
+  anchor{"hdp-yarn::yarn::service_check::begin":} -> Hdp-yarn::Smoketest['hdp-yarn::smoketest:rm'] ->  Hdp-hadoop::Exec-hadoop['hdp-yarn::yarn::service_check'] -> anchor{"hdp-yarn::yarn::service_check::end":}
+}
\ No newline at end of file

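With the hdp-yarn::params defaults above ($hadoop_yarn_home = /usr/lib/hadoop-yarn, $distrAppJarName = hadoop-yarn-applications-distributedshell-2.*.jar), the distributed-shell check interpolates to roughly the following command line (exec-hadoop is assumed to prepend the hadoop client and run it as the smoke user):

    jar = '/usr/lib/hadoop-yarn/hadoop-yarn-applications-distributedshell-2.*.jar'
    cmd = ('hadoop jar ' + jar + ' -appname yarnservicecheck -master_memory 512'
           ' -container_memory 128 -num_containers 2 -shell_command "ls" -jar ' + jar)
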
http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_get_major_stack_version.rb
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_get_major_stack_version.rb b/ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_get_major_stack_version.rb
new file mode 100644
index 0000000..859af6f
--- /dev/null
+++ b/ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_get_major_stack_version.rb
@@ -0,0 +1,28 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# Returns the major component of a stack version string (e.g. "2.0.1" -> 2) as an integer.
+module Puppet::Parser::Functions
+  newfunction(:hdp_get_major_stack_version, :type => :rvalue) do |args|
+    stack_version = args[0]
+    major_stack_version = stack_version.split('.')[0]
+    major_stack_version.to_i
+  end
+end
\ No newline at end of file

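The same logic in Python, for illustration (note the Ruby version leans on String#to_i, so a malformed version string yields 0 instead of raising):

    def get_major_stack_version(stack_version):
        # "2.0.1" -> 2; this drives the major-version branches in
        # hdp::params below (hdp_get_major_stack_version($stack_version) >= 2)
        return int(stack_version.split('.')[0])

    assert get_major_stack_version('2.0.1') == 2
    assert get_major_stack_version('1.3.2') == 1
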
http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp/manifests/init.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp/manifests/init.pp b/ambari-agent/src/main/puppet/modules/hdp/manifests/init.pp
index bc7405b..11a4d4f 100644
--- a/ambari-agent/src/main/puppet/modules/hdp/manifests/init.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp/manifests/init.pp
@@ -69,7 +69,7 @@ class hdp(
   $ganglia_collector_jobtracker_port = hdp_default("ganglia_collector_jobtracker_port","8662")
   $ganglia_collector_hbase_port = hdp_default("ganglia_collector_hbase_port","8663")
   $ganglia_collector_rm_port = hdp_default("ganglia_collector_rm_port","8664")
-  $ganglia_collector_nm_port = hdp_default("ganglia_collector_nm_port","8665")
+  $ganglia_collector_nm_port = hdp_default("ganglia_collector_nm_port","8660")
   $ganglia_collector_hs_port = hdp_default("ganglia_collector_hs_port","8666")
 
   $oozie_server_port = hdp_default("oozie_server_port","11000")

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp b/ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp
index 715fc46..de23776 100644
--- a/ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp
+++ b/ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp
@@ -230,7 +230,6 @@ class hdp::params()
   $hive_apps_whs_dir = hdp_default("hive_apps_whs_dir", "/apps/hive/warehouse")
   $webhcat_apps_dir = hdp_default("webhcat_apps_dir", "/apps/webhcat")
   $hbase_hdfs_root_dir = hdp_default("hbase-site/hbase.hdfs.root.dir","/apps/hbase/data")
-  $hbase_staging_dir = hdp_default("hbase-site/hbase.bulkload.staging.dir","/apps/hbase/staging")
 
   $yarn_nm_app_log_dir = hdp_default("yarn-site/yarn.nodemanager.remote-app-log-dir","/app-logs")
 
@@ -317,6 +316,7 @@ class hdp::params()
     $pig_conf_dir = "/etc/pig"
     $oozie_conf_dir = "/etc/oozie"
     $hadoop_jar_location = "/usr/share/hadoop"
+    $hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
     $hbase_daemon_script = "/usr/bin/hbase-daemon.sh"
     $use_32_bits_on_slaves = false
     $zk_bin = '/usr/sbin'
@@ -332,10 +332,12 @@ class hdp::params()
    
     $mapred_smoke_test_script = "/usr/lib/hadoop/sbin/hadoop-validate-setup.sh"
 
-    if $stack_version in ("2.0.1") {
+    if (hdp_get_major_stack_version($stack_version) >= 2) {
       $hadoop_bin = "/usr/lib/hadoop/sbin"
+      $hadoop_deps = ['hadoop','hadoop-libhdfs','hadoop-lzo', 'hadoop-lzo-native']
     } else {
       $hadoop_bin = "/usr/lib/hadoop/bin"
+      $hadoop_deps = ['hadoop','hadoop-libhdfs','hadoop-native','hadoop-pipes','hadoop-sbin','hadoop-lzo', 'hadoop-lzo-native']
     }
     $yarn_bin = "/usr/lib/hadoop-yarn/sbin"
     $mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
@@ -349,6 +351,7 @@ class hdp::params()
     $hive_conf_dir = "/etc/hive/conf"
     $hcat_conf_dir = "/etc/hcatalog/conf"
     $hadoop_jar_location = "/usr/lib/hadoop/"
+    $hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
     $hbase_daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
     $use_32_bits_on_slaves = false
     $zk_bin = '/usr/lib/zookeeper/bin'
@@ -384,7 +387,7 @@ class hdp::params()
      suse => 'htpasswd2'} 
 
     }
-    
+
     # StackId => Arch => Os
     $package_names = 
     {
@@ -419,12 +422,7 @@ class hdp::params()
             'ALL' => ['hadoop','hadoop-libhdfs.i386','hadoop-native.i386','hadoop-pipes.i386','hadoop-sbin.i386','hadoop-lzo', 'hadoop-lzo-native.i386']
           },
           64 => {
-            'ALL' => ['hadoop','hadoop-libhdfs','hadoop-native','hadoop-pipes','hadoop-sbin','hadoop-lzo', 'hadoop-lzo-native']
-          }
-        },
-        '2.0.1' => {
-          64 => {
-            'ALL' => ['hadoop','hadoop-libhdfs','hadoop-lzo', 'hadoop-lzo-native']
+            'ALL' => $hadoop_deps
           }
         }
       },

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
index e621f4f..1fcb718 100644
--- a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
+++ b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
@@ -115,6 +115,7 @@ rolesToClass = {
   'HUE_SERVER': 'hdp-hue::server',
   'HDFS_SERVICE_CHECK': 'hdp-hadoop::hdfs::service_check',
   'MAPREDUCE_SERVICE_CHECK': 'hdp-hadoop::mapred::service_check',
+  'MAPREDUCE2_SERVICE_CHECK': 'hdp-yarn::mapred2::service_check',
   'ZOOKEEPER_SERVICE_CHECK': 'hdp-zookeeper::zookeeper::service_check',
   'ZOOKEEPER_QUORUM_SERVICE_CHECK': 'hdp-zookeeper::quorum::service_check',
   'HBASE_SERVICE_CHECK': 'hdp-hbase::hbase::service_check',
@@ -130,6 +131,7 @@ rolesToClass = {
   'RESOURCEMANAGER_SERVICE_CHECK': 'hdp-yarn::resourcemanager::service_check',
   'HISTORYSERVER_SERVICE_CHECK': 'hdp-yarn::historyserver::service_check',
   'TEZ_CLIENT': 'hdp-tez::tez_client',
+  'YARN_SERVICE_CHECK': 'hdp-yarn::yarn::service_check',
   'FLUME_SERVER': 'hdp-flume'
 }
 

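The agent resolves an incoming command's role through this map before handing control to Puppet, so the two new checks become reachable roughly like this (a sketch; the dispatch code itself is outside this diff):

    role = 'YARN_SERVICE_CHECK'             # role name sent by the server
    puppet_class = rolesToClass[role]       # -> 'hdp-yarn::yarn::service_check'
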
http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-agent/src/main/python/ambari_agent/Controller.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/Controller.py b/ambari-agent/src/main/python/ambari_agent/Controller.py
index 67ff920..3e59882 100644
--- a/ambari-agent/src/main/python/ambari_agent/Controller.py
+++ b/ambari-agent/src/main/python/ambari_agent/Controller.py
@@ -141,6 +141,7 @@ class Controller(threading.Thread):
         if not retry:
           data = json.dumps(
               self.heartbeat.build(self.responseId, int(hb_interval), self.hasMappedComponents))
+          logger.debug("Sending request: " + data)
           pass
         else:
           self.DEBUG_HEARTBEAT_RETRIES += 1

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-server/docs/api/v1/index.md
----------------------------------------------------------------------
diff --git a/ambari-server/docs/api/v1/index.md b/ambari-server/docs/api/v1/index.md
index de2de58..8157ae7 100644
--- a/ambari-server/docs/api/v1/index.md
+++ b/ambari-server/docs/api/v1/index.md
@@ -1080,7 +1080,6 @@ The "end" keyword indicates the end of the set of resources and is equivalent to
 
 The default ordering of the resources (by the natural ordering of the resource key properties) is implied.	
 	
-
 HTTP Return Codes
 ----
 

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-server/src/main/java/org/apache/ambari/eventdb/db/DBConnector.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/eventdb/db/DBConnector.java b/ambari-server/src/main/java/org/apache/ambari/eventdb/db/DBConnector.java
index b859114..7a2913c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/eventdb/db/DBConnector.java
+++ b/ambari-server/src/main/java/org/apache/ambari/eventdb/db/DBConnector.java
@@ -19,11 +19,8 @@ package org.apache.ambari.eventdb.db;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.ambari.eventdb.model.DataTable;
+import org.apache.ambari.eventdb.model.*;
 import org.apache.ambari.eventdb.model.Jobs.JobDBEntry;
-import org.apache.ambari.eventdb.model.TaskAttempt;
-import org.apache.ambari.eventdb.model.WorkflowContext;
-import org.apache.ambari.eventdb.model.Workflows;
 import org.apache.ambari.eventdb.model.Workflows.WorkflowDBEntry.WorkflowFields;
 
 public interface DBConnector {
@@ -37,9 +34,11 @@ public interface DBConnector {
   
   public DataTable fetchWorkflows(int offset, int limit, String searchTerm, int echo, WorkflowFields field, boolean sortAscending, String searchWorkflowId,
       String searchWorkflowName, String searchWorkflowType, String searchUserName, int minJobs, int maxJobs, long minInputBytes, long maxInputBytes,
-      long minOutputBytes, long maxOutputBytes, long minDuration, long maxDuration, long minStartTime, long maxStartTime, long minFinishTime, long maxFinishTime)
-      throws IOException;
-  
+      long minOutputBytes, long maxOutputBytes, long minDuration, long maxDuration, long minStartTime, long maxStartTime, long minFinishTime, 
+      long maxFinishTime, String tagSearchTerm) throws IOException;
+
+  public List<Apps.AppDBEntry> fetchAppDetails(String workflowId) throws IOException;
+
   public List<JobDBEntry> fetchJobDetails(String workflowID) throws IOException;
   
   public List<JobDBEntry> fetchJobDetails(long minFinishTime, long maxStartTime) throws IOException;

http://git-wip-us.apache.org/repos/asf/incubator-ambari/blob/a718fc45/ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java b/ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java
index 9c5d7ee..6b0d36c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java
+++ b/ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java
@@ -27,20 +27,18 @@ import java.util.EnumMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.eventdb.model.DataTable;
+import org.apache.ambari.eventdb.model.*;
 import org.apache.ambari.eventdb.model.DataTable.AvgData;
 import org.apache.ambari.eventdb.model.DataTable.Summary;
 import org.apache.ambari.eventdb.model.DataTable.Summary.SummaryFields;
 import org.apache.ambari.eventdb.model.DataTable.Times;
 import org.apache.ambari.eventdb.model.Jobs.JobDBEntry;
 import org.apache.ambari.eventdb.model.Jobs.JobDBEntry.JobFields;
-import org.apache.ambari.eventdb.model.TaskAttempt;
 import org.apache.ambari.eventdb.model.TaskAttempt.TaskAttemptFields;
-import org.apache.ambari.eventdb.model.WorkflowContext;
-import org.apache.ambari.eventdb.model.Workflows;
 import org.apache.ambari.eventdb.model.Workflows.WorkflowDBEntry;
 import org.apache.ambari.eventdb.model.Workflows.WorkflowDBEntry.WorkflowFields;
 import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.codehaus.jackson.JsonParseException;
@@ -50,6 +48,7 @@ import org.codehaus.jackson.map.ObjectMapper;
 public class PostgresConnector implements DBConnector {
   private static Log LOG = LogFactory.getLog(PostgresConnector.class);
   private static final String WORKFLOW_TABLE_NAME = "workflow";
+  private static final String APP_TABLE_NAME = "application";
   private static final String JOB_TABLE_NAME = "job";
   private static final String TASK_ATTEMPT_TABLE_NAME = "taskattempt";
   public static final String SORT_ASC = "ASC";
@@ -75,6 +74,7 @@ public class PostgresConnector implements DBConnector {
         + getAvg(WorkflowFields.DURATION, SummaryFields.avgDuration, SummaryFields.minDuration, SummaryFields.maxDuration) + ", min("
         + WorkflowFields.STARTTIME + ") as " + SummaryFields.youngest + ", max(" + WorkflowFields.STARTTIME + ") as " + SummaryFields.oldest + " FROM "
         + WORKFLOW_TABLE_NAME),
+    FAD_PS("SELECT " + Apps.AppDBEntry.APP_FIELDS + " FROM " + APP_TABLE_NAME + " WHERE " + Apps.AppDBEntry.AppFields.WORKFLOWID.toString() + " = ?"),
     FJD_PS("SELECT " + JobDBEntry.JOB_FIELDS + " FROM " + JOB_TABLE_NAME + " WHERE " + JobFields.WORKFLOWID.toString() + " = ?"),
     FJD_TIMERANGE_PS("SELECT " + JobDBEntry.JOB_FIELDS + " FROM " + JOB_TABLE_NAME + " WHERE " + JobFields.FINISHTIME.toString() + " >= ? AND "
         + JobFields.SUBMITTIME.toString() + " <= ? ORDER BY " + JobFields.WORKFLOWID + ", " + JobFields.JOBID),
@@ -213,6 +213,7 @@ public class PostgresConnector implements DBConnector {
     w.setOutputBytes(WorkflowFields.OUTPUTBYTES.getLong(rs));
     w.setNumJobsCompleted(WorkflowFields.NUMJOBSCOMPLETED.getInt(rs));
     w.setWorkflowContext(jsonMapper.readValue(WorkflowFields.WORKFLOWCONTEXT.getString(rs), WorkflowContext.class));
+    w.setWorkflowTags(WorkflowFields.WORKFLOWTAGS.getString(rs));
     return w;
   }
   
@@ -227,8 +228,8 @@ public class PostgresConnector implements DBConnector {
   @Override
   public DataTable fetchWorkflows(int offset, int limit, String searchTerm, int echo, WorkflowFields col, boolean sortAscending, String searchWorkflowId,
       String searchWorkflowName, String searchWorkflowType, String searchUserName, int minJobs, int maxJobs, long minInputBytes, long maxInputBytes,
-      long minOutputBytes, long maxOutputBytes, long minDuration, long maxDuration, long minStartTime, long maxStartTime, long minFinishTime, long maxFinishTime)
-      throws IOException {
+      long minOutputBytes, long maxOutputBytes, long minDuration, long maxDuration, long minStartTime, long maxStartTime, long minFinishTime, 
+      long maxFinishTime, String tagSearchTerm) throws IOException {
     int total = 0;
     PreparedStatement ps = getPS(Statements.FW_COUNT_PS);
     ResultSet rs = null;
@@ -248,7 +249,8 @@ public class PostgresConnector implements DBConnector {
     }
     
     String searchClause = buildSearchClause(searchTerm, searchWorkflowId, searchWorkflowName, searchWorkflowType, searchUserName, minJobs, maxJobs,
-        minInputBytes, maxInputBytes, minOutputBytes, maxOutputBytes, minDuration, maxDuration, minStartTime, maxStartTime, minFinishTime, maxFinishTime);
+        minInputBytes, maxInputBytes, minOutputBytes, maxOutputBytes, minDuration, maxDuration, minStartTime, maxStartTime, minFinishTime, maxFinishTime,
+        tagSearchTerm);
     List<WorkflowDBEntry> workflows = fetchWorkflows(getQualifiedPS(Statements.FW_PS, searchClause, col, sortAscending, offset, limit));
     Summary summary = fetchSummary(getQualifiedPS(Statements.FW_SUMMARY_PS, searchClause));
     DataTable table = new DataTable();
@@ -266,7 +268,54 @@ public class PostgresConnector implements DBConnector {
     table.setSummary(summary);
     return table;
   }
-  
+
+  private static Apps.AppDBEntry getAppDBEntry(ResultSet rs) throws SQLException {
+    Apps.AppDBEntry a = new Apps.AppDBEntry();
+    a.setWorkflowId(Apps.AppDBEntry.AppFields.WORKFLOWID.getString(rs));
+    a.setWorkflowEntityName(Apps.AppDBEntry.AppFields.WORKFLOWENTITYNAME.getString(rs));
+    a.setAppId(Apps.AppDBEntry.AppFields.APPID.getString(rs));
+    a.setAppName(Apps.AppDBEntry.AppFields.APPNAME.getString(rs));
+    a.setAppType(Apps.AppDBEntry.AppFields.APPTYPE.getString(rs));
+    a.setFinishTime(Apps.AppDBEntry.AppFields.FINISHTIME.getLong(rs));
+    a.setLaunchTime(Apps.AppDBEntry.AppFields.LAUNCHTIME.getLong(rs));
+    a.setQueue(Apps.AppDBEntry.AppFields.QUEUE.getString(rs));
+    String[] stageStrings = StringUtils.split(Apps.AppDBEntry.AppFields.APPINFO.getString(rs), "-");
+    List<Integer> stages = new ArrayList<Integer>();
+    for (String s : stageStrings)
+      stages.add(Integer.parseInt(s));
+    a.setStages(stages);
+    a.setStatus(Apps.AppDBEntry.AppFields.STATUS.getString(rs));
+    a.setSubmitTime(Apps.AppDBEntry.AppFields.SUBMITTIME.getLong(rs));
+    a.setUserName(Apps.AppDBEntry.AppFields.USERNAME.getString(rs));
+    return a;
+  }
+
+  @Override
+  public List<Apps.AppDBEntry> fetchAppDetails(String workflowId) throws IOException {
+    PreparedStatement ps = getPS(Statements.FAD_PS);
+    List<Apps.AppDBEntry> apps = new ArrayList<Apps.AppDBEntry>();
+    ResultSet rs = null;
+    try {
+      ps.setString(1, workflowId);
+      rs = ps.executeQuery();
+      while (rs.next()) {
+        apps.add(getAppDBEntry(rs));
+      }
+      rs.close();
+    } catch (SQLException e) {
+      throw new IOException(e);
+    } finally {
+      if (rs != null)
+        try {
+          rs.close();
+        } catch (SQLException e) {
+          LOG.error("Exception while closing ResultSet", e);
+        }
+
+    }
+    return apps;
+  }
+
   private static JobDBEntry getJobDBEntry(ResultSet rs) throws SQLException {
     JobDBEntry j = new JobDBEntry();
     j.setConfPath(JobFields.CONFPATH.getString(rs));
@@ -288,7 +337,7 @@ public class PostgresConnector implements DBConnector {
     j.setWorkflowId(JobFields.WORKFLOWID.getString(rs));
     return j;
   }
-  
+
   @Override
   public List<JobDBEntry> fetchJobDetails(String workflowId) throws IOException {
     PreparedStatement ps = getPS(Statements.FJD_PS);
@@ -310,11 +359,11 @@ public class PostgresConnector implements DBConnector {
         } catch (SQLException e) {
           LOG.error("Exception while closing ResultSet", e);
         }
-      
+
     }
     return jobs;
   }
-  
+
   @Override
   public List<JobDBEntry> fetchJobDetails(long minFinishTime, long maxStartTime) throws IOException {
     PreparedStatement ps = getPS(Statements.FJD_TIMERANGE_PS);
@@ -547,7 +596,7 @@ public class PostgresConnector implements DBConnector {
   
   private static String buildSearchClause(String searchTerm, String searchWorkflowId, String searchWorkflowName, String searchWorkflowType,
       String searchUserName, int minJobs, int maxJobs, long minInputBytes, long maxInputBytes, long minOutputBytes, long maxOutputBytes, long minDuration,
-      long maxDuration, long minStartTime, long maxStartTime, long minFinishTime, long maxFinishTime) {
+      long maxDuration, long minStartTime, long maxStartTime, long minFinishTime, long maxFinishTime, String searchTags) {
     StringBuilder sb = new StringBuilder();
     sb.append(WHERE);
     if (searchTerm != null && searchTerm.length() > 0) {
@@ -567,6 +616,8 @@ public class PostgresConnector implements DBConnector {
       append(sb, startsWith(WorkflowFields.WORKFLOWID, searchWorkflowType));
     if (searchUserName != null)
       append(sb, equals(WorkflowFields.USERNAME, searchUserName));
+    if (searchTags != null)
+      append(sb, like(WorkflowFields.WORKFLOWTAGS, searchTags));
     addRangeSearch(sb, WorkflowFields.NUMJOBSTOTAL, minJobs, maxJobs);
     addRangeSearch(sb, WorkflowFields.INPUTBYTES, minInputBytes, maxInputBytes);
     addRangeSearch(sb, WorkflowFields.OUTPUTBYTES, minOutputBytes, maxOutputBytes);

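A toy rendering of how the new tag filter slots into the WHERE-clause assembly (the real like()/append() helpers live elsewhere in PostgresConnector, so the predicate spelling below is an assumption):

    def like(field, term):
        # assumed shape of the generated predicate
        return "%s LIKE '%%%s%%'" % (field, term)

    clauses = []
    search_tags = 'etl'                      # hypothetical request parameter
    if search_tags is not None:
        clauses.append(like('workflowTags', search_tags))
    where = (' WHERE ' + ' AND '.join(clauses)) if clauses else ''
    print where                              # " WHERE workflowTags LIKE '%etl%'"
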
