ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From odiache...@apache.org
Subject ambari git commit: AMBARI-12320. Develop HAWQ plugin for HDP and PHD stacks under common-services (Newton Alex via odiachenko).
Date Tue, 20 Oct 2015 21:54:15 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 476d2700d -> f19a8da03


AMBARI-12320. Develop HAWQ plugin for HDP and PHD stacks under common-services (Newton Alex via odiachenko).


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f19a8da0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f19a8da0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f19a8da0

Branch: refs/heads/trunk
Commit: f19a8da034698060f75afabb42ee96f27c7aec05
Parents: 476d270
Author: Oleksandr Diachenko <odiachenko@pivotal.io>
Authored: Tue Oct 20 14:56:53 2015 -0700
Committer: Oleksandr Diachenko <odiachenko@pivotal.io>
Committed: Tue Oct 20 14:56:53 2015 -0700

----------------------------------------------------------------------
 ambari-server/pom.xml                           |   3 +
 .../HAWQ/2.0.0.0/configuration/gpcheck-env.xml  |  86 +++++++
 .../HAWQ/2.0.0.0/configuration/hawq-site.xml    | 167 ++++++++++++
 .../common-services/HAWQ/2.0.0.0/metainfo.xml   | 129 ++++++++++
 .../HAWQ/2.0.0.0/package/scripts/common.py      | 253 +++++++++++++++++++
 .../HAWQ/2.0.0.0/package/scripts/constants.py   |  62 +++++
 .../HAWQ/2.0.0.0/package/scripts/hawqmaster.py  |  55 ++++
 .../HAWQ/2.0.0.0/package/scripts/hawqsegment.py | 102 ++++++++
 .../HAWQ/2.0.0.0/package/scripts/hawqstandby.py |  58 +++++
 .../HAWQ/2.0.0.0/package/scripts/hawqstatus.py  |  65 +++++
 .../2.0.0.0/package/scripts/master_helper.py    | 194 ++++++++++++++
 .../HAWQ/2.0.0.0/package/scripts/params.py      |  88 +++++++
 .../2.0.0.0/package/scripts/service_check.py    | 102 ++++++++
 .../HAWQ/2.0.0.0/package/scripts/utils.py       | 108 ++++++++
 .../2.0.0.0/package/templates/hawq-hosts.j2     |   5 +
 .../package/templates/hawq-profile.sh.j2        |   8 +
 .../package/templates/hawq.limits.conf.j2       |   7 +
 .../package/templates/hawq.sysctl.conf.j2       |  27 ++
 .../HAWQ/2.0.0.0/package/templates/slaves.j2    |   3 +
 .../common-services/PXF/3.0.0.0/metainfo.xml    |  71 ++++++
 .../PXF/3.0.0.0/package/scripts/pxfservice.py   |  41 +++
 21 files changed, 1634 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 17e9ea9..5a7ddc4 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -203,6 +203,9 @@
             <exclude>src/main/resources/stacks/HDP/2.0._/services/HBASE/package/templates/regionservers.j2</exclude>
             <exclude>src/main/resources/stacks/HDPWIN/2.1/services/*/configuration*/*</exclude>
 
+            <!-- HAWQ Stack definitions -->
+            <exclude>src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/*.j2</exclude>
+
             <!--test samples -->
             <exclude>src/test/resources/TestAmbaryServer.samples/**</exclude>
             <exclude>src/test/resources/*.txt</exclude>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/gpcheck-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/gpcheck-env.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/gpcheck-env.xml
new file mode 100755
index 0000000..a61a34f
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/gpcheck-env.xml
@@ -0,0 +1,86 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration>
+  <!-- gpcheck.cnf -->
+    <property>
+      <name>content</name>
+      <display-name>Content</display-name>
+      <description>Contents of the configuration file /usr/local/hawq/etc/gpcheck.cnf. This file is used by 'hawq check' command, which can be run manually by gpadmin user on the HAWQ master host. This command validates the system parameters and HDFS parameters mentioned in this file to ensure optimal HAWQ operation.</description>
+        <value>
+[global]
+configfile_version = 4
+
+[linux.mount]
+mount.points = /
+
+[linux.sysctl]
+sysctl.kernel.shmmax = 500000000
+sysctl.kernel.shmmni = 4096
+sysctl.kernel.shmall = 4000000000
+sysctl.kernel.sem = 250 512000 100 2048
+sysctl.kernel.sysrq = 1
+sysctl.kernel.core_uses_pid = 1
+sysctl.kernel.msgmnb = 65536
+sysctl.kernel.msgmax = 65536
+sysctl.kernel.msgmni = 2048
+sysctl.net.ipv4.tcp_syncookies = 0
+sysctl.net.ipv4.ip_forward = 0
+sysctl.net.ipv4.conf.default.accept_source_route = 0
+sysctl.net.ipv4.tcp_tw_recycle = 1
+sysctl.net.ipv4.tcp_max_syn_backlog = 200000
+sysctl.net.ipv4.conf.all.arp_filter = 1
+sysctl.net.ipv4.ip_local_port_range = 1281 65535
+sysctl.net.core.netdev_max_backlog = 200000
+sysctl.vm.overcommit_memory = 2
+sysctl.fs.nr_open = 3000000
+sysctl.kernel.threads-max = 798720
+sysctl.kernel.pid_max = 798720
+# increase network
+sysctl.net.core.rmem_max = 2097152
+sysctl.net.core.wmem_max = 2097152
+
+[linux.limits]
+soft.nofile = 2900000
+hard.nofile = 2900000
+soft.nproc  = 131072
+hard.nproc  = 131072
+
+[linux.diskusage]
+diskusage.monitor.mounts = /
+diskusage.monitor.usagemax = 90%
+
+[hdfs]
+dfs.mem.namenode.heap = 40960
+dfs.mem.datanode.heap = 6144
+# in hdfs-site.xml
+dfs.support.append = true
+dfs.client.enable.read.from.local = true
+dfs.block.local-path-access.user = gpadmin
+dfs.datanode.max.transfer.threads = 40960
+dfs.client.socket-timeout = 300000000
+dfs.datanode.socket.write.timeout = 7200000
+dfs.namenode.handler.count = 60
+ipc.server.handler.queue.size = 3300
+dfs.datanode.handler.count = 60
+ipc.client.connection.maxidletime = 3600000
+dfs.namenode.accesstime.precision = -1
+    </value>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-site.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-site.xml
new file mode 100644
index 0000000..a322a5b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-site.xml
@@ -0,0 +1,167 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>hawq_master_address_host</name>
+    <display-name>HAWQ Master</display-name>
+    <value>localhost</value>
+    <description>The host name of HAWQ master.</description>
+  </property>
+
+  <property>
+    <name>hawq_standby_address_host</name>
+    <display-name>HAWQ Standby Master</display-name>
+    <value>localhost</value>
+    <description>The host name of HAWQ standby.</description>
+  </property>
+
+  <property>
+    <name>hawq_master_address_port</name>
+    <display-name>HAWQ Master Port</display-name>
+    <value>5432</value>
+    <description>The port of HAWQ master.</description>
+  </property>
+
+  <property>
+    <name>hawq_segment_address_port</name>
+    <display-name>HAWQ Segment Port</display-name>
+    <value>40000</value>
+    <description>The port of HAWQ segment.</description>
+  </property>
+
+  <property>
+    <name>hawq_dfs_url</name>
+    <display-name>HAWQ DFS URL</display-name>
+    <value>localhost:8020/hawq_default</value>
+    <description>URL for accessing HDFS.</description>
+  </property>
+
+  <property>
+    <name>hawq_master_directory</name>
+    <display-name>HAWQ Master Directory</display-name>
+    <value>/data/hawq/master</value>
+    <description>The directory of HAWQ master.</description>
+  </property>
+
+  <property>
+    <name>hawq_segment_directory</name>
+    <display-name>HAWQ Segment Directory</display-name>
+    <value>/data/hawq/segment</value>
+    <description>The directory of HAWQ segment.</description>
+  </property> 
+
+  <property>
+    <name>hawq_master_temp_directory</name>
+    <display-name>HAWQ Master Temp Directory</display-name>
+    <value>/tmp</value>
+    <description>The temporary directory reserved for HAWQ master.</description>
+  </property>
+
+  <property>
+    <name>hawq_segment_temp_directory</name>
+    <display-name>HAWQ Segment Temp Directory</display-name>
+    <value>/tmp</value>
+    <description>The temporary directory reserved for HAWQ segment.</description>
+  </property>
+
+  <!-- HAWQ resource manager parameters -->
+  <property>
+    <name>hawq_resourcemanager_server_type</name>
+    <value>none</value>
+    <description>The resource manager type to start for allocating resource. 
+      'none' means HAWQ resource manager exclusively uses whole
+      cluster; 'yarn' means HAWQ resource manager contacts YARN
+      resource manager to negotiate resource.
+    </description>
+  </property>
+
+  <property>
+    <name>hawq_resourcemanager_segment_limit_memory_use</name>
+    <value>64GB</value>
+    <description>The limit of memory usage in a HAWQ segment when 
+      HAWQ is set 'none'.
+    </description>
+  </property>
+
+  <property>
+    <name>hawq_resourcemanager_segment_limit_core_use</name>
+    <value>16</value>
+    <description>The limit of virtual core usage in a HAWQ segment when 
+      HAWQ is set 'none'.
+    </description>
+  </property>
+
+  <property>
+    <name>hawq_resourcemanager_yarn_resourcemanager_address</name>
+    <value>localhost:8032</value>
+    <description>The address of YARN resource manager server.</description>
+  </property>
+
+  <property>
+    <name>hawq_resourcemanager_yarn_resourcemanager_scheduler_address</name>
+    <value>localhost:8030</value>
+    <description>The address of YARN scheduler server.</description>
+  </property>
+
+  <property>
+    <name>hawq_resourcemanager_yarn_queue</name>
+    <value>default</value>
+    <description>The YARN queue name to register HAWQ resource manager.</description>
+  </property>
+
+  <property>
+    <name>hawq_resourcemanager_yarn_application_name</name>
+    <value>hawq</value>
+    <description>The application name to register HAWQ resource manager in YARN.</description>
+  </property>
+  <property>
+    <name>hawq_resourcemanager_log_level</name>
+    <value>10</value>
+  </property>
+
+  <property>
+    <name>hawq_resourceenforcer_cpu_enable</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>hawq_resourceenforcer_cgroup_mount_point</name>
+    <value>/sys/fs/cgroup</value>
+  </property>
+
+  <property>
+    <name>hawq_resourceenforcer_cgroup_hierarchy_name</name>
+    <value>hadoop-yarn</value>
+  </property>
+
+  <property>
+    <name>hawq_resourceenforcer_cleanup_period</name>
+    <value>180</value>
+  </property>
+
+  <property>
+    <name>hawq_resourceenforcer_cpu_weight</name>
+    <value>1024.0</value>
+  </property>
+
+  <property>
+    <name>hawq_resourceenforcer_vcore_pcore_ratio</name>
+    <value>1.0</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/metainfo.xml
new file mode 100644
index 0000000..2ceff96
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/metainfo.xml
@@ -0,0 +1,129 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HAWQ</name>
+      <displayName>HAWQ</displayName>
+      <comment>Apache HAWQ - Apache Hadoop Native SQL</comment>
+      <version>2.0</version>
+      <components>
+
+        <component>
+          <name>HAWQMASTER</name>
+          <displayName>HAWQ Master</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <commandScript>
+            <script>scripts/hawqmaster.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>1200</timeout>
+          </commandScript>
+          <dependencies>
+            <dependency>
+              <name>HDFS/NAMENODE</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>false</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                  <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+        </component>
+
+      <component>
+          <name>HAWQSTANDBY</name>
+          <displayName>HAWQ Standby Master</displayName>
+          <category>MASTER</category>
+          <cardinality>0-1</cardinality>
+          <commandScript>
+              <script>scripts/hawqstandby.py</script>
+              <scriptType>PYTHON</scriptType>
+              <timeout>600</timeout>
+          </commandScript>
+          <dependencies>
+            <dependency>
+                <name>HDFS/HDFS_CLIENT</name>
+                <scope>host</scope>
+                <auto-deploy>
+                    <enabled>true</enabled>
+                </auto-deploy>
+            </dependency>
+          </dependencies>
+      </component>
+
+        <component>
+          <name>HAWQSEGMENT</name>
+          <displayName>HAWQ Segment</displayName>
+          <category>SLAVE</category>
+          <cardinality>1+</cardinality>
+          <commandScript>
+            <script>scripts/hawqsegment.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+           <dependencies>
+            <dependency>
+              <name>HDFS/DATANODE</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>false</enabled>
+                <co-locate>HDFS/DATANODE</co-locate>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+        </component>
+      </components> 
+      <requiredServices>
+          <service>HDFS</service>
+      </requiredServices>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+           <packages>
+            <package>
+              <name>hawq</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <configuration-dependencies>
+        <config-type>hawq-site</config-type>
+        <config-type>gpcheck-env</config-type>
+      </configuration-dependencies>
+    </service>
+
+  </services>
+</metainfo>
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/common.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/common.py
new file mode 100644
index 0000000..defd87c
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/common.py
@@ -0,0 +1,253 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import re
+import os
+import time
+import crypt
+import filecmp
+from resource_management.libraries.resources.xml_config import XmlConfig
+from resource_management.core.resources.system import Execute, Directory, File
+from resource_management.core.logger import Logger
+from resource_management.core.system import System
+from resource_management.core.exceptions import Fail
+from resource_management.core.resources.accounts import Group, User
+from resource_management.core.source import Template
+import xml.etree.ElementTree as ET
+
+import utils
+import constants
+
+
+def update_bashrc(source_file, target_file):
+  """
+  Updates the hawq_user's .bashrc file with HAWQ env variables like
+  MASTER_DATA_DIRECTORY, PGHOST, PGPORT and PGUSER. 
+  And sources the greenplum_path file.
+  """
+  append_src_cmd = "echo 'source {0}' >> {1}".format(source_file, target_file)
+  src_cmd_exists = "grep 'source {0}' {1}".format(source_file, target_file)
+  Execute(append_src_cmd, user=constants.hawq_user, timeout=constants.default_exec_timeout, not_if=src_cmd_exists)
+
+
+def setup_user():
+  """
+  Creates HAWQ user home directory and sets up the correct ownership.
+  """
+  __create_hawq_user()
+  __set_home_dir_ownership()
+
+
+def __create_hawq_user():
+  """
+  Creates HAWQ user with default password and group.
+  """
+  import params
+  Group(constants.hawq_group, ignore_failures=True)
+
+  User(constants.hawq_user,
+       gid=constants.hawq_group,
+       password=crypt.crypt(constants.hawq_password, "salt"),
+       groups=[constants.hawq_group, params.user_group],
+       ignore_failures=True)
+
+
+def __set_home_dir_ownership():
+  """
+  Updates the HAWQ user home directory to be owned by gpadmin:gpadmin.
+  """
+  command = "chown -R {0}:{1} {2}".format(constants.hawq_user, constants.hawq_group, constants.hawq_home_dir)
+  Execute(command, timeout=constants.default_exec_timeout)
+
+
+def setup_common_configurations():
+  """
+  Sets up the config files common to master, standby and segment nodes.
+  """
+  import params
+
+  substituted_conf_dict = __substitute_hostnames_in_hawq_site()
+  XmlConfig("hawq-site.xml",
+            conf_dir=constants.hawq_config_dir,
+            configurations=substituted_conf_dict,
+            configuration_attributes=params.config['configuration_attributes']['hawq-site'],
+            owner=constants.hawq_user,
+            group=constants.hawq_group,
+            mode=0644)
+  __set_osparams()
+
+
+def __substitute_hostnames_in_hawq_site():
+  """
+  Temporary function to replace localhost with actual HAWQ component hostnames.
+  This function will be in place till the entire HAWQ plugin code along with the UI
+  changes are submitted to the trunk.
+  """
+  import params
+
+  LOCALHOST = "localhost"
+  
+  # in case there is no standby
+  hawqstandby_host_desired_value = params.hawqstandby_host if params.hawqstandby_host is not None else 'none' 
+  
+  substituted_hawq_site = params.hawq_site.copy()
+  hawq_site_property_map = {"hawq_master_address_host": params.hawqmaster_host,
+                            "hawq_standby_address_host": hawqstandby_host_desired_value,
+                            "hawq_resourcemanager_yarn_resourcemanager_address": params.rm_host,
+                            "hawq_resourcemanager_yarn_resourcemanager_scheduler_address": params.rm_host,
+                            "hawq_dfs_url": params.namenode_host
+                            }
+
+  for property, desired_value in hawq_site_property_map.iteritems():
+    if desired_value is not None:
+      # Replace localhost with required component hostname
+      substituted_hawq_site[property] = re.sub(LOCALHOST, desired_value, substituted_hawq_site[property])
+
+  return substituted_hawq_site
+
+
+def __set_osparams():
+  """
+  Updates parameters in sysctl.conf and limits.conf required by HAWQ.
+  """
+  # Create a temp scratchpad directory
+  utils.create_dir_as_hawq_user(constants.hawq_tmp_dir)
+
+  # Suse doesn't supports loading values from files in /etc/sysctl.d
+  # So we will have to directly edit the sysctl file
+  if System.get_instance().os_family == "suse":
+    # Update /etc/sysctl.conf
+    __update_sysctl_file_suse()
+  else:
+    # Update /etc/sysctl.d/hawq.conf
+    __update_sysctl_file()
+
+  __update_limits_file()
+
+
+def __update_limits_file():
+  """
+  Updates /etc/security/limits.d/hawq.conf file with the HAWQ parameters.
+  """
+  # Ensure limits directory exists
+  Directory(constants.limits_conf_dir, recursive=True, owner=constants.root_user, group=constants.root_user
+            )
+
+  # Generate limits for hawq user
+  File('{0}/{1}.conf'.format(constants.limits_conf_dir, constants.hawq_user), content=Template("hawq.limits.conf.j2"),
+       owner=constants.hawq_user, group=constants.hawq_group)
+
+
+def __update_sysctl_file():
+  """
+  Updates /etc/sysctl.d/hawq_sysctl.conf file with the HAWQ parameters on CentOS/RHEL.
+  """
+  # Ensure sys ctl sub-directory exists
+  Directory(constants.sysctl_conf_dir, recursive=True, owner=constants.root_user, group=constants.root_user)
+
+  # Generate temporary file with kernel parameters needed by hawq
+  File(constants.hawq_sysctl_tmp_file, content=Template("hawq.sysctl.conf.j2"), owner=constants.hawq_user,
+       group=constants.hawq_group)
+
+  is_changed = True
+  if os.path.exists(constants.hawq_sysctl_tmp_file) and os.path.exists(constants.hawq_sysctl_file):
+    is_changed = not filecmp.cmp(constants.hawq_sysctl_file, constants.hawq_sysctl_tmp_file)
+
+  if is_changed:
+    # Generate file with kernel parameters needed by hawq, only if something
+    # has been changed by user
+    Execute("cp -p {0} {1}".format(constants.hawq_sysctl_tmp_file, constants.hawq_sysctl_file))
+
+    # Reload kernel sysctl parameters from hawq file.
+    Execute("sysctl -e -p {0}".format(constants.hawq_sysctl_file), timeout=constants.default_exec_timeout)
+
+  # Wipe out temp file
+  File(constants.hawq_sysctl_tmp_file, action='delete')
+
+
+def __update_sysctl_file_suse():
+  """
+  Updates /etc/sysctl.conf file with the HAWQ parameters on SUSE.
+  """
+  # Backup file
+  backup_file_name = constants.sysctl_backup_file.format(str(int(time.time())))
+  try:
+    # Generate file with kernel parameters needed by hawq to temp file
+
+    File(constants.hawq_sysctl_tmp_file, content=Template("hawq.sysctl.conf.j2"), owner=constants.hawq_user,
+        group=constants.hawq_group)
+
+    sysctl_file_dict = utils.read_file_to_dict(constants.sysctl_suse_file)
+    sysctl_file_dict_original = sysctl_file_dict.copy()
+    hawq_sysctl_dict = utils.read_file_to_dict(constants.hawq_sysctl_tmp_file)
+
+    # Merge common system file with hawq specific file
+    sysctl_file_dict.update(hawq_sysctl_dict)
+
+    if sysctl_file_dict_original != sysctl_file_dict:
+      # Backup file
+      Execute("cp {0} {1}".format(constants.sysctl_suse_file, backup_file_name), timeout=constants.default_exec_timeout)
+      # Write merged properties to file
+      utils.write_dict_to_file(sysctl_file_dict, constants.sysctl_suse_file)
+      # Reload kernel sysctl parameters from /etc/sysctl.conf
+      Execute("sysctl -e -p", timeout=constants.default_exec_timeout)
+
+  except Exception as e:
+    Logger.error("Error occurred while updating sysctl.conf file, reverting the contents" + str(e))
+    Execute("cp {0} {1}".format(constants.sysctl_suse_file, constants.hawq_sysctl_tmp_file))
+    Execute("mv {0} {1}".format(backup_file_name, constants.sysctl_suse_file), timeout=constants.default_exec_timeout)
+    Logger.error("Please execute `sysctl -e -p` on the command line manually to reload the contents of file {0}".format(
+      constants.hawq_sysctl_tmp_file))
+    raise Fail("Failed to update sysctl.conf file ")
+
+
+def get_local_hawq_site_property(property_name):
+  """
+  Fetches the value of the property specified, from the local hawq-site.xml.
+  """
+  hawq_site_path = None
+  try:
+    hawq_site_path = os.path.join(constants.hawq_config_dir, "hawq-site.xml")
+    hawq_site_root = ET.parse(hawq_site_path).getroot()
+    for property in hawq_site_root.findall("property"):
+      for item in property:
+        if item.tag == 'name':
+          current_property_name = item.text.strip() if item and item.text else item.text
+        elif item.tag == 'value':
+          current_property_value = item.text.strip() if item and item.text else item.text
+      if property_name == current_property_name:
+          return current_property_value
+    raise #If property has not been found
+  except Exception:
+    raise Fail("Unable to read property {0} from local {1}".format(property_name, hawq_site_path))
+
+def validate_configuration():
+  """
+  Validates if YARN is present in the configuration when the user specifies YARN as HAWQ's resource manager.
+  """
+  import params
+
+  # At this point, hawq should be included.
+  if 'hawq-site' not in params.config['configurations']:
+    raise Fail("Configurations does not contain hawq-site. Please include HAWQ")
+
+  # If HAWQ is set to use YARN and YARN is not configured, error.
+  rm_type = params.config["configurations"]["hawq-site"].get("hawq_resourcemanager_server_type")
+  if rm_type == "yarn" and "yarn-site" not in params.config["configurations"]:
+    raise Fail("HAWQ is set to use YARN but YARN is not deployed. " + 
+               "hawq_resourcemanager_server_type property in hawq-site is set to 'yarn' but YARN is not configured. " + 
+               "Please deploy YARN before starting HAWQ or change the value of hawq_resourcemanager_server_type property to 'none'")

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/constants.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/constants.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/constants.py
new file mode 100644
index 0000000..78a636e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/constants.py
@@ -0,0 +1,62 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import os
+
+MASTER = "master"
+STANDBY = "standby"
+SEGMENT = "segment"
+START = "start"
+INIT = "init"
+STOP = "stop"
+
+# Users
+root_user = "root"
+gpadmin_user = "gpadmin"
+hawq_user = gpadmin_user
+hawq_group = gpadmin_user
+hawq_password = gpadmin_user
+
+# Directories
+hawq_home_dir = "/usr/local/hawq/"
+hawq_config_dir = "/usr/local/hawq/etc/"
+hawq_pid_dir = "/var/run/hawq/"
+hawq_tmp_dir = '/tmp/hawq/'
+hawq_user_home_dir = os.path.expanduser("~{0}".format(hawq_user))
+limits_conf_dir = "/etc/security/limits.d"
+sysctl_conf_dir = "/etc/sysctl.d"
+
+# Files
+hawq_slaves_file = os.path.join(hawq_config_dir, "slaves")
+hawq_user_bashrc_file = os.path.join(hawq_user_home_dir, ".bashrc")
+hawq_greenplum_path_file = os.path.join(hawq_home_dir, "greenplum_path.sh")
+hawq_hosts_file = "/tmp/hawq_hosts"
+hawq_check_file = os.path.join(hawq_config_dir, "gpcheck.cnf")
+sysctl_suse_file = "/etc/sysctl.conf"
+sysctl_backup_file = "/etc/sysctl.conf.backup.{0}"
+hawq_sysctl_filename = "hawq_sysctl.conf"
+hawq_sysctl_tmp_file = os.path.join(hawq_tmp_dir, hawq_sysctl_filename)
+hawq_sysctl_file = os.path.join(sysctl_conf_dir, hawq_sysctl_filename)
+postmaster_opts_filename = "postmaster.opts"
+postmaster_pid_filename = "postmaster.pid"
+
+# Smoke check table
+smoke_check_table_name = "ambari_hawq_smoke_test"
+
+# Timeouts
+default_exec_timeout = 600
+hawq_operation_exec_timeout = 900

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqmaster.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqmaster.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqmaster.py
new file mode 100644
index 0000000..4443264
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqmaster.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from resource_management import Script
+from resource_management.libraries.functions.check_process_status import check_process_status
+
+import master_helper
+import common
+import constants
+
class HawqMaster(Script):
  """
  Ambari lifecycle handler (install, configure, start, stop, status) for the
  HAWQ Master component.  All real work is delegated to master_helper/common.
  """

  def install(self, env):
    """Install HAWQ packages, then apply the master configuration."""
    self.install_packages(env)
    self.configure(env)

  def configure(self, env):
    """Publish params/constants to the environment and configure this master."""
    import params
    env.set_params(params)
    env.set_params(constants)
    master_helper.configure_master()

  def start(self, env):
    """(Re)configure, validate the configuration, then start the master."""
    self.configure(env)
    common.validate_configuration()
    master_helper.start_master()

  def stop(self, env):
    """Stop the HAWQ Master."""
    master_helper.stop_master()

  def status(self, env):
    """Raise ComponentIsNotRunning via check_process_status when the pid is dead."""
    from hawqstatus import get_pid_file
    check_process_status(get_pid_file())


if __name__ == "__main__":
  HawqMaster().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqsegment.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqsegment.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqsegment.py
new file mode 100644
index 0000000..b4be502
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqsegment.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import os
+from resource_management import Script
+from resource_management.core.resources.system import Execute
+from resource_management.libraries.functions.check_process_status import check_process_status
+
+import utils
+import common
+import constants
+
class HawqSegment(Script):
  """
  Ambari lifecycle handler (install, configure, start, stop, status) for a
  HAWQ Segment.
  """

  def install(self, env):
    self.install_packages(env)
    self.configure(env)

  def configure(self, env):
    import params
    env.set_params(params)
    env.set_params(constants)
    common.setup_user()
    common.setup_common_configurations()
    common.update_bashrc(constants.hawq_greenplum_path_file, constants.hawq_user_bashrc_file)

  def start(self, env):
    self.configure(env)
    common.validate_configuration()
    if not self.__is_segment_initialized():
      # Initialization also starts the segment process, so no explicit start follows.
      self.__init_segment()
    else:
      self.__start_segment()

  def stop(self, env):
    import params
    running_check = utils.chk_postgres_status_cmd(params.hawq_segment_address_port)
    utils.exec_hawq_operation(constants.STOP, "{0} -a".format(constants.SEGMENT),
                              only_if=running_check)

  def status(self, env):
    from hawqstatus import get_pid_file
    check_process_status(get_pid_file())

  def __start_segment(self):
    import params
    # Skip the start when postgres is already listening on the segment port.
    running_check = utils.chk_postgres_status_cmd(params.hawq_segment_address_port)
    return utils.exec_hawq_operation(constants.START, "{0} -a".format(constants.SEGMENT),
                                     not_if=running_check)

  @staticmethod
  def __init_segment():
    import params

    # Create the segment data directory and the comma-separated temp directories.
    utils.create_dir_as_hawq_user(params.hawq_segment_dir)
    utils.create_dir_as_hawq_user(params.hawq_segment_temp_dir.split(','))

    # The parent of the data directory must also belong to gpadmin.
    Execute("chown {0}:{1} {2}".format(constants.hawq_user, constants.hawq_group, os.path.dirname(params.hawq_segment_dir)),
            user=constants.root_user, timeout=constants.default_exec_timeout)

    # 'hawq init segment' both initializes and starts the segment.
    utils.exec_hawq_operation(constants.INIT, "{0} -a -v".format(constants.SEGMENT))

  def __is_segment_initialized(self):
    """
    A segment counts as initialized once postmaster.opts exists in its data directory.
    """
    import params
    return os.path.exists(os.path.join(params.hawq_segment_dir, constants.postmaster_opts_filename))


if __name__ == "__main__":
  HawqSegment().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstandby.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstandby.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstandby.py
new file mode 100644
index 0000000..d8254f9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstandby.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from resource_management import Script
+from resource_management.libraries.functions.check_process_status import check_process_status
+
+import master_helper
+import common
+import constants
+
class HawqStandby(Script):
  """
  Ambari lifecycle handler (install, configure, start, stop, status) for the
  HAWQ Standby Master.  Shares its implementation with the active master via
  master_helper.
  """

  def install(self, env):
    """Install HAWQ packages, then apply the standby configuration."""
    self.install_packages(env)
    self.configure(env)

  def configure(self, env):
    """Publish params/constants to the environment and configure this node."""
    import params
    env.set_params(params)
    env.set_params(constants)
    master_helper.configure_master()

  def start(self, env):
    """(Re)configure, validate the configuration, then start the standby."""
    self.configure(env)
    common.validate_configuration()
    master_helper.start_master()

  def stop(self, env):
    """Stop the HAWQ Standby Master."""
    master_helper.stop_master()

  def status(self, env):
    """Raise ComponentIsNotRunning via check_process_status when the pid is dead."""
    from hawqstatus import get_pid_file
    check_process_status(get_pid_file())

  def activatestandby(self, env):
    # Custom-command hook; standby activation is not implemented yet.
    pass


if __name__ == "__main__":
  HawqStandby().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstatus.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstatus.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstatus.py
new file mode 100644
index 0000000..59742bd
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstatus.py
@@ -0,0 +1,65 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+
+from resource_management import Script
+from resource_management.core.resources.system import File
+from resource_management.core.exceptions import Fail
+
+import utils
+import common
+import constants
+
+
def get_pid_file():
  """
  Fetches the pid file, which will be used to get the status of the HAWQ Master, Standby
  or Segments

  Reads the postmaster pid from the component's local data directory, rewrites
  it into a pid file under hawq_pid_dir, and returns that pid file's path.
  Raises Fail when no pid can be read.
  """

  config = Script.get_config()

  # HAWQMASTER and HAWQSTANDBY both run a "master" postmaster; everything else is a segment.
  component_name = config['componentName']
  component = "master" if component_name in ["HAWQMASTER", "HAWQSTANDBY"] else "segment"
  hawq_pid_file = os.path.join(constants.hawq_pid_dir, "hawq-{0}.pid".format(component))

  # Drop any stale pid file before regenerating it below.
  File(hawq_pid_file, action='delete')
  utils.create_dir_as_hawq_user(constants.hawq_pid_dir)

  #Get hawq_master_directory or hawq_segment_directory value from hawq-site.xml depending
  #on the component
  hawq_site_directory_property = "hawq_{0}_directory".format(component)

  #hawq-site content from Ambari server will not be available when the
  #command type is STATUS_COMMAND. Hence, reading it directly from the local file
  postmaster_pid_file = os.path.join(common.get_local_hawq_site_property(
      hawq_site_directory_property), constants.postmaster_pid_filename)

  # The first line of postmaster.pid is the postmaster's pid.
  pid = ""
  if os.path.exists(postmaster_pid_file):
    with open(postmaster_pid_file, 'r') as fh:
      pid = fh.readline().strip()

  if not pid:
    raise Fail("Failed to fetch pid from {0}".format(postmaster_pid_file))

  File(hawq_pid_file, content=pid, owner=constants.gpadmin_user, group=constants.gpadmin_user)

  return hawq_pid_file
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/master_helper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/master_helper.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/master_helper.py
new file mode 100644
index 0000000..82a5168
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/master_helper.py
@@ -0,0 +1,194 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import os
+from resource_management.core.resources.system import File, Execute
+from resource_management.core.source import Template
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+
+import utils
+import common
+import constants
+
def __setup_master_specific_conf_files():
  """
  Sets up config files only applicable for HAWQ Master and Standby nodes
  """
  import params

  # gpcheck.cnf content is supplied by the Ambari-managed gpcheck-env configuration.
  File(constants.hawq_check_file, content=params.gpcheck_content, owner=constants.hawq_user, group=constants.hawq_group,
      mode=0644)

  # 'slaves' lists the segment hosts, rendered from the slaves.j2 template.
  File(constants.hawq_slaves_file, content=Template("slaves.j2"), owner=constants.hawq_user, group=constants.hawq_group,
       mode=0644)

  # hawq_hosts lists master, standby and all segments; consumed by the ssh key exchange.
  File(constants.hawq_hosts_file, content=Template("hawq-hosts.j2"), owner=constants.hawq_user, group=constants.hawq_group,
       mode=0644)
+
+
def __setup_passwordless_ssh():
  """
  Exchanges ssh keys to setup passwordless ssh for the hawq_user between the HAWQ Master and the HAWQ Segment nodes
  """
  # 'hawq ssh-exkeys' distributes gpadmin keys to every host listed in hawq_hosts_file.
  utils.exec_hawq_operation("ssh-exkeys",
                            "-f {0} -p {1}".format(constants.hawq_hosts_file, constants.hawq_password))

  # The hosts file is only needed during the key exchange; remove it afterwards.
  File(constants.hawq_hosts_file, action='delete')
+
+
def __setup_hawq_user_profile():
  """
  Sets up the ENV variables for hawq_user as a convenience for the command line users
  """
  home_dir = os.path.expanduser("~{0}".format(constants.hawq_user))
  profile_path = os.path.join(home_dir, ".hawq-profile.sh")
  File(profile_path, content=Template("hawq-profile.sh.j2"), owner=constants.hawq_user, group=constants.hawq_group)
  # Source the profile from .bashrc so interactive shells pick it up.
  common.update_bashrc(profile_path, constants.hawq_user_bashrc_file)
+
+
def configure_master():
  """
  Configures the master node after rpm install

  Runs on both the active HAWQ Master and the Standby (both components call
  this from their configure step): sets up the gpadmin user, common config,
  master-only config files, passwordless ssh, the user profile and local dirs.
  """
  common.setup_user()
  common.setup_common_configurations()
  __setup_master_specific_conf_files()
  __setup_passwordless_ssh()
  __setup_hawq_user_profile()
  __create_local_dirs()
+
+
def __create_local_dirs():
  """
  Creates the required local directories for HAWQ 
  """
  import params
  # Create Master directories: the data dir plus the comma-separated temp dirs.
  utils.create_dir_as_hawq_user(params.hawq_master_dir)
  utils.create_dir_as_hawq_user(params.hawq_master_temp_dir.split(','))

  # The parent of the data directory must also belong to gpadmin.
  Execute("chown {0}:{1} {2}".format(constants.hawq_user, constants.hawq_group, os.path.dirname(params.hawq_master_dir)),
          user=constants.root_user, timeout=constants.default_exec_timeout)

  # Restrict the data directory to its owner only.
  Execute("chmod 700 {0}".format(params.hawq_master_dir), user=constants.root_user, timeout=constants.default_exec_timeout)
+
+
def __create_hdfs_dirs():
  """
  Creates the required HDFS directories for HAWQ
  """
  import params
  # hawq_hdfs_data_dir is derived from hawq_dfs_url in params; owned by gpadmin.
  params.HdfsResource(params.hawq_hdfs_data_dir, type="directory", action="create_on_execute", owner=constants.hawq_user, group=constants.hawq_group, mode=0755)
  # Flush the queued HdfsResource operations.
  params.HdfsResource(None, action="execute")
+
+
def __init_active():
  """
  Initializes the active master

  Creates the HDFS data directories, then runs 'hawq init master -a -v'.
  """
  __create_hdfs_dirs()
  utils.exec_hawq_operation(constants.INIT, "{0} -a -v".format(constants.MASTER))
+
+
def __init_standby():
  """
  Initializes the HAWQ Standby Master

  Runs 'hawq init standby -a -v' on this host.
  """
  # Fix: the body was indented deeper than the docstring, which raised an
  # IndentationError as soon as this module was imported.
  utils.exec_hawq_operation(constants.INIT, "{0} -a -v".format(constants.STANDBY))
+
+
def __get_component_name():
  """
  Identifies current node as either HAWQ Master or HAWQ Standby Master
  """
  if __is_active_master():
    return constants.MASTER
  return constants.STANDBY
+
+
def __start_local_master():
  """
  Starts HAWQ Master or HAWQ Standby Master component on the host
  """
  import params
  # Skip the start when postgres is already listening on the master port.
  running_check = utils.chk_postgres_status_cmd(params.hawq_master_address_port)
  utils.exec_hawq_operation(constants.START,
                            "{0} -a".format(__get_component_name()),
                            not_if=running_check)
+
+  
def __is_local_initialized():
  """
  Checks if the local node has been initialized

  Initialization is detected via the presence of postmaster.opts in the
  master data directory.
  """
  import params
  marker = os.path.join(params.hawq_master_dir, constants.postmaster_opts_filename)
  return os.path.exists(marker)
+
+
def __get_standby_host():
  """
  Returns the name of the HAWQ Standby Master host from hawq-site.xml, or None if no standby is configured
  """
  standby_host = common.get_local_hawq_site_property("hawq_standby_address_host")
  # The property may be absent or explicitly set to the literal string 'none'.
  if standby_host is None or standby_host.lower() == 'none':
    return None
  return standby_host
+
+
def __is_standby_initialized():
  """
  Returns True if HAWQ Standby Master is initialized, False otherwise

  Probes (over ssh) for postmaster.opts in the standby's master data directory.
  """
  import params

  marker = os.path.join(params.hawq_master_dir, constants.postmaster_opts_filename)
  retcode = utils.exec_ssh_cmd(__get_standby_host(), "[ -f {0} ]".format(marker))[0]
  return retcode == 0
+
+
def start_master():
  """
  Initializes HAWQ Master/Standby if not already done and starts them

  Raises:
    Fail: when this host is neither the HAWQ Master nor the HAWQ Standby.
  """
  import params

  # Fix: Fail(...) was previously constructed but never raised, so the
  # host-validation guard was a silent no-op.
  if params.hostname not in [params.hawqmaster_host, params.hawqstandby_host]:
    raise Fail("Host should be either active Hawq master or Hawq standby.")

  is_active_master = __is_active_master()
  if __is_local_initialized():
    __start_local_master()

  elif is_active_master:
    # NOTE(review): no explicit start after init — initialization presumably
    # also starts the process, mirroring the segment flow; confirm with hawq docs.
    __init_active()

  # Only the active master initializes a configured-but-uninitialized standby.
  if is_active_master and __get_standby_host() is not None and not __is_standby_initialized():
    __init_standby()
+
+
def stop_master():
  """
  Stops the HAWQ Master/Standby
  """
  import params
  # Only attempt the stop when postgres is actually listening on the master port.
  running_check = utils.chk_postgres_status_cmd(params.hawq_master_address_port)
  component = __get_component_name()
  utils.exec_hawq_operation(constants.STOP, "{0} -a".format(component), only_if=running_check)
+
+
def __is_active_master():
  """
  Finds if this node is the active master
  """
  import params
  active_master = common.get_local_hawq_site_property("hawq_master_address_host")
  return active_master == params.hostname

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/params.py
new file mode 100644
index 0000000..df33d0b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/params.py
@@ -0,0 +1,88 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import functools
+from resource_management import Script
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+
+config = Script.get_config()
+
+
def __get_component_host(component):
  """
  Returns the first host where the given component is deployed, None if the component is not deployed
  """
  cluster_hosts = config['clusterHostInfo']
  if component not in cluster_hosts or not cluster_hosts[component]:
    return None
  return cluster_hosts[component][0]
+
+
def __get_namenode_host():
  """
  Gets the namenode host; active namenode in case of HA
  """
  # When HDFS HA is enabled, the explicitly-configured active namenode wins.
  ha_active = default('/configurations/hadoop-env/dfs_ha_initial_namenode_active', None)
  if ha_active is not None:
    return ha_active
  return __get_component_host('namenode_host')
+
+
hostname = config['hostname']

# Users and Groups
hdfs_superuser = config['configurations']['hadoop-env']['hdfs_user']
user_group = config['configurations']['cluster-env']['user_group']

# HAWQ Hostnames (first listed host per component; None when the component is absent)
hawqmaster_host = __get_component_host('hawqmaster_hosts')
hawqstandby_host = __get_component_host('hawqstandby_hosts')
hawqsegment_hosts = default('/clusterHostInfo/hawqsegment_hosts', [])

# HDFS
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']

# HDFSResource partial function with cluster-wide arguments pre-bound;
# call sites only supply the path/type/action specifics.
HdfsResource = functools.partial(HdfsResource, user=hdfs_superuser, hdfs_site=hdfs_site, default_fs=default_fs)

namenode_host= __get_namenode_host()

# YARN
# Note: YARN is not mandatory for HAWQ. It is required only when the users set HAWQ to use YARN as resource manager
rm_host = __get_component_host('rm_host')

# Config files
gpcheck_content = config['configurations']['gpcheck-env']['content']

# Values derived from hawq-site.xml
hawq_site = config['configurations']['hawq-site']
hawq_master_dir = hawq_site.get('hawq_master_directory')
hawq_segment_dir = hawq_site.get('hawq_segment_directory')
hawq_master_temp_dir = hawq_site.get('hawq_master_temp_directory')
hawq_segment_temp_dir = hawq_site.get('hawq_segment_temp_directory')
# Extract hawq hdfs directory from hdfs url. Ex: /hawq/hawq_default from
# host:8080/hawq/hawq_default
hawq_hdfs_data_dir = "/{0}".format(hawq_site.get('hawq_dfs_url').split('/', 1)[1])
hawq_master_address_port = hawq_site.get('hawq_master_address_port')
hawq_segment_address_port = hawq_site.get('hawq_segment_address_port')
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/service_check.py
new file mode 100644
index 0000000..7a99f5e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/service_check.py
@@ -0,0 +1,102 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import common
+import constants
+from utils import exec_psql_cmd, exec_ssh_cmd
+from resource_management.libraries.script import Script
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+
+import sys
+
class HAWQServiceCheck(Script):
  """
  Runs a set of simple HAWQ tests to verify if the service has been setup correctly

  All checks are executed on the active master host over ssh, regardless of the
  node on which Ambari runs the service check.
  """

  def __init__(self):
    self.active_master_host = common.get_local_hawq_site_property("hawq_master_address_host")


  def service_check(self, env):
    """
    Drives the smoke test: state check, then create/insert/query/validate on a
    throw-away table, which is dropped again on the way out.
    """
    Logger.info("Starting HAWQ service checks..")
    # All the tests are run on the active_master_host using ssh irrespective of the node on which service check
    # is executed by Ambari
    try:
      self.check_state()
      self.drop_table()
      self.create_table()
      self.insert_data()
      self.query_data()
      self.check_data_correctness()
    except Exception as e:
      # Fix: a bare 'except:' previously discarded the failure cause (and even
      # swallowed SystemExit/KeyboardInterrupt). Log the reason before exiting.
      Logger.error("Service check failed: {0}".format(e))
      sys.exit(1)
    finally:
      # Always clean up the smoke-test table, pass or fail.
      self.drop_table()

    Logger.info("Service check completed successfully")


  def drop_table(self):
    Logger.info("Dropping {0} table if exists".format(constants.smoke_check_table_name))
    sql_cmd = "drop table if exists {0}".format(constants.smoke_check_table_name)
    exec_psql_cmd(sql_cmd, self.active_master_host)


  def create_table(self):
    Logger.info("Creating table {0}".format(constants.smoke_check_table_name))
    sql_cmd = "create table {0} (col1 int) distributed randomly".format(constants.smoke_check_table_name)
    exec_psql_cmd(sql_cmd, self.active_master_host)


  def insert_data(self):
    Logger.info("Inserting data to table {0}".format(constants.smoke_check_table_name))
    sql_cmd = "insert into {0} select * from generate_series(1,10)".format(constants.smoke_check_table_name)
    exec_psql_cmd(sql_cmd, self.active_master_host)


  def query_data(self):
    Logger.info("Querying data from table {0}".format(constants.smoke_check_table_name))
    sql_cmd = "select * from {0}".format(constants.smoke_check_table_name)
    exec_psql_cmd(sql_cmd, self.active_master_host)


  def check_data_correctness(self):
    # 1+2+...+10 = 55 for the generate_series(1,10) insert above.
    expected_data = "55"
    Logger.info("Validating data inserted, finding sum of all the inserted entries. Expected output: {0}".format(expected_data))
    sql_cmd = "select sum(col1) from {0}".format(constants.smoke_check_table_name)
    _, stdout, _ = exec_psql_cmd(sql_cmd, self.active_master_host, tuples_only=False)
    if expected_data != stdout.strip():
      Logger.error("Incorrect data returned. Expected Data: {0} Actual Data: {1}".format(expected_data, stdout))
      raise Fail("Incorrect data returned.")


  def check_state(self):
    import params
    command = "source {0} && hawq state -d {1}".format(constants.hawq_greenplum_path_file, params.hawq_master_dir)
    Logger.info("Executing hawq status check..")
    (retcode, out, err) = exec_ssh_cmd(self.active_master_host, command)
    if retcode:
      Logger.error("hawq state command returned non-zero result: {0}. Out: {1} Error: {2}".format(retcode, out, err))
      raise Fail("Unexpected result of hawq state command.")
    Logger.info("Output of command:\n{0}".format(str(out) + "\n"))


if __name__ == "__main__":
  HAWQServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/utils.py
new file mode 100644
index 0000000..b42b3a6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/utils.py
@@ -0,0 +1,108 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import subprocess
+
+from resource_management.core.resources.system import Execute, Directory
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+
+import constants
+
def chk_postgres_status_cmd(port):
  """
  Builds a shell pipeline that succeeds when a postgres process is listening
  on the given port (used as a not_if/only_if guard for Execute).
  """
  # Raw string so the '\s' in the egrep pattern is unambiguous in source.
  return r"netstat -tupln | egrep ':{0}\s' | egrep postgres".format(port)
+
+
def create_dir_as_hawq_user(directory):
  """
  Creates directories with hawq_user and hawq_group (defaults to gpadmin:gpadmin)

  Args:
    directory: a single path, or a list of paths (callers pass both forms).
  """
  # recursive=True behaves like mkdir -p: missing parents are created too.
  Directory(directory, recursive=True, owner=constants.hawq_user, group=constants.hawq_group)
+
+
def exec_hawq_operation(operation, option, not_if=None, only_if=None, logoutput=True):
  """
  Sets up execution environment and runs a given command as HAWQ user

  Args:
    operation: hawq verb, e.g. constants.START/STOP/INIT or "ssh-exkeys".
    option: remaining command-line arguments, pre-formatted as one string.
    not_if/only_if: optional shell guard commands forwarded to Execute.
    logoutput: whether Execute logs the command output.
  """
  # greenplum_path.sh is sourced first to set up the hawq environment.
  hawq_cmd = "source {0} && hawq {1} {2}".format(constants.hawq_greenplum_path_file, operation, option)
  Execute(
        hawq_cmd,
        user=constants.hawq_user,
        timeout=constants.hawq_operation_exec_timeout,
        not_if=not_if,
        only_if=only_if,
        logoutput=logoutput)
+
+
def read_file_to_dict(file_name):
  """
  Converts a file with key=value format to dictionary

  Lines without '=' are skipped.  Only the FIRST '=' on a line acts as the
  separator, so values may themselves contain '=' (the previous unbounded
  split("=") raised ValueError on such lines, e.g. sysctl entries like
  "opt=a=b").

  NOTE(review): values keep their trailing newline, and write_dict_to_file
  appends its own '\\n' when writing — callers appear to rely on the existing
  round-trip behavior, so the newline is deliberately not stripped here.
  """
  with open(file_name, "r") as fh:
    lines = [item for item in fh.readlines() if '=' in item]
  return dict(item.split("=", 1) for item in lines)
+
+
def write_dict_to_file(source_dict, dest_file):
  """
  Writes a dictionary into a file with key=value format

  Entries whose value is None are written as a bare key with no '=' sign.
  """
  with open(dest_file, "w") as out:
    for key, value in source_dict.items():
      if value is None:
        out.write("{0}\n".format(key))
      else:
        out.write("{0}={1}\n".format(key, value))
+
+
def exec_ssh_cmd(hostname, cmd):
  """
  Runs the command on the remote host as gpadmin user

  When hostname is the local host, the ssh hop is skipped and the command runs
  locally under 'su - gpadmin'.

  Returns:
    (returncode, stdout, stderr) from the spawned process.
  """
  import params
  # Only gpadmin should be allowed to run command via ssh, thus not exposing user as a parameter
  if params.hostname != hostname:
    cmd = "su - {0} -c 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null {1} \"{2} \" '".format(constants.gpadmin_user, hostname, cmd)
  else:
    cmd = "su - {0} -c \"{1}\"".format(constants.gpadmin_user, cmd)
  Logger.info("Command executed: {0}".format(cmd))
  # NOTE(review): cmd is interpolated into a shell string and run with shell=True —
  # no quoting/escaping is applied, so callers must pass trusted input only.
  process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
  (stdout, stderr) = process.communicate()
  return process.returncode, stdout, stderr
+
+
def exec_psql_cmd(command, host, db="template1", tuples_only=True):
  """
  Sets up execution environment and runs the HAWQ queries

  Args:
    command: SQL statement (without trailing semicolon; one is appended).
    host: host the query is executed on via exec_ssh_cmd.
    db: database name, defaults to template1.
    tuples_only: see NOTE below — the -t flag is applied when this is False.

  Returns:
    (returncode, stdout, stderr); raises Fail on a non-zero return code.
  """
  src_cmd = "source {0}".format(constants.hawq_greenplum_path_file)
  # NOTE(review): psql's -t (tuples-only) flag is added when tuples_only is
  # False, which looks inverted relative to the parameter name. Callers (e.g.
  # check_data_correctness with tuples_only=False) depend on the current
  # behavior, so the quirk is documented here rather than changed.
  if tuples_only:
    cmd = src_cmd + " && psql -d {0} -c \\\"{1};\\\"".format(db, command)
  else:
    cmd = src_cmd + " && psql -t -d {0} -c \\\"{1};\\\"".format(db, command)
  retcode, out, err = exec_ssh_cmd(host, cmd)
  if retcode:
    Logger.error("SQL command executed failed: {0}\nReturncode: {1}\nStdout: {2}\nStderr: {3}".format(cmd, retcode, out, err))
    raise Fail("SQL command executed failed.")

  Logger.info("Output:\n{0}".format(out))
  return retcode, out, err

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-hosts.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-hosts.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-hosts.j2
new file mode 100644
index 0000000..9bdaa75
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-hosts.j2
@@ -0,0 +1,5 @@
+{{hawqmaster_host}}
+{{hawqstandby_host}}
+{% for host in hawqsegment_hosts %}
+{{host}}
+{% endfor %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-profile.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-profile.sh.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-profile.sh.j2
new file mode 100644
index 0000000..e28e416
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-profile.sh.j2
@@ -0,0 +1,8 @@
+######################################
+##### HAWQ env #######################
+######################################
+source {{hawq_greenplum_path_file}}
+export MASTER_DATA_DIRECTORY={{hawq_master_dir}}
+export PGPORT={{hawq_master_address_port}}
+export PGHOST={{hawqmaster_host}}
+export PGUSER={{hawq_user}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq.limits.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq.limits.conf.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq.limits.conf.j2
new file mode 100644
index 0000000..93e027c
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq.limits.conf.j2
@@ -0,0 +1,7 @@
+#######################################
+#### HAWQ Limits Parameters ###########
+#######################################
+{{hawq_user}} soft nofile 2900000
+{{hawq_user}} hard nofile 2900000
+{{hawq_user}} soft nproc 131072
+{{hawq_user}} hard nproc 131072
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq.sysctl.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq.sysctl.conf.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq.sysctl.conf.j2
new file mode 100644
index 0000000..480228e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq.sysctl.conf.j2
@@ -0,0 +1,27 @@
+#######################################
+#### HAWQ System Parameters ###########
+#######################################
+kernel.shmmax = 500000000
+kernel.shmmni = 4096
+kernel.shmall = 4000000000
+kernel.sem = 250 512000 100 2048
+kernel.sysrq = 1
+kernel.core_uses_pid = 1
+kernel.msgmnb = 65536
+kernel.msgmax = 65536
+kernel.msgmni = 2048
+net.ipv4.tcp_syncookies = 0
+net.ipv4.ip_forward = 0
+net.ipv4.conf.default.accept_source_route = 0
+net.ipv4.tcp_tw_recycle = 1
+net.ipv4.tcp_max_syn_backlog = 200000
+net.ipv4.conf.all.arp_filter = 1
+net.ipv4.ip_local_port_range = 1281 65535
+net.core.netdev_max_backlog = 200000
+fs.nr_open = 3000000
+kernel.threads-max = 798720
+kernel.pid_max = 798720
+# increase network
+net.core.rmem_max=2097152
+net.core.wmem_max=2097152
+vm.overcommit_memory = 1

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/slaves.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/slaves.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/slaves.j2
new file mode 100644
index 0000000..591d92b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/slaves.j2
@@ -0,0 +1,3 @@
+{% for host in hawqsegment_hosts %}
+{{host}}
+{% endfor %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/metainfo.xml b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/metainfo.xml
new file mode 100644
index 0000000..4df1bd6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/metainfo.xml
@@ -0,0 +1,71 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>PXF</name>
+      <displayName>PXF</displayName>
+      <comment>HAWQ Extension Framework</comment>
+      <version>3.0</version>
+      <components>
+
+        <component>
+          <name>PXF</name>
+          <displayName>PXF</displayName>
+          <category>SLAVE</category>
+          <cardinality>1+</cardinality>
+          <commandScript>
+            <script>scripts/pxfservice.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+      </components>
+
+      <requiredServices>
+         <service>HAWQ</service>
+      </requiredServices>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+           <packages>
+             <package>
+               <name>pxf-service</name>
+             </package>
+             <package>
+               <name>apache-tomcat</name>
+             </package>
+             <package>
+               <name>pxf-hive</name>
+             </package>
+             <package>
+               <name>pxf-hdfs</name>
+             </package>
+             <package>
+               <name>pxf-hbase</name>
+             </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f19a8da0/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/pxfservice.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/pxfservice.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/pxfservice.py
new file mode 100644
index 0000000..c267794
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/pxfservice.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from resource_management import Script
+
+class PxfService(Script):
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    pass
+
+  def start(self, env):
+    pass
+
+  def stop(self, env):
+    pass
+
+  def status(self, env):
+    pass
+
+if __name__ == "__main__":
+  PxfService().execute()
\ No newline at end of file


Mime
View raw message