ambari-commits mailing list archives

From dmitriu...@apache.org
Subject [12/21] ambari git commit: AMBARI-9068. Remove HDP 1.3 stack defn from Ambari. (dlysnichenko)
Date Mon, 12 Jan 2015 11:53:07 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/configuration/oozie-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/configuration/oozie-site.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/configuration/oozie-site.xml
deleted file mode 100644
index cb9cb0e..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/configuration/oozie-site.xml
+++ /dev/null
@@ -1,236 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-        
-       http://www.apache.org/licenses/LICENSE-2.0
-  
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-
-<configuration supports_final="true">
-
-  <!--
-      Refer to the oozie-default.xml file for the complete list of
-      Oozie configuration properties and their default values.
-  -->
-  <property>
-    <name>oozie.base.url</name>
-    <value>http://localhost:11000/oozie</value>
-    <description>Base Oozie URL.</description>
-  </property>
-
-  <property>
-    <name>oozie.system.id</name>
-    <value>oozie-${user.name}</value>
-    <description>
-      The Oozie system ID.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.systemmode</name>
-    <value>NORMAL</value>
-    <description>
-      System mode for Oozie at startup.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.AuthorizationService.authorization.enabled</name>
-    <value>true</value>
-    <description>
-      Specifies whether security (user name/admin role) is enabled or not.
-      If disabled any user can manage Oozie system and manage any job.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.PurgeService.older.than</name>
-    <value>30</value>
-    <description>
-      Jobs older than this value, in days, will be purged by the PurgeService.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.PurgeService.purge.interval</name>
-    <value>3600</value>
-    <description>
-      Interval at which the purge service will run, in seconds.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.CallableQueueService.queue.size</name>
-    <value>1000</value>
-    <description>Max callable queue size</description>
-  </property>
-
-  <property>
-    <name>oozie.service.CallableQueueService.threads</name>
-    <value>10</value>
-    <description>Number of threads used for executing callables</description>
-  </property>
-
-  <property>
-    <name>oozie.service.CallableQueueService.callable.concurrency</name>
-    <value>3</value>
-    <description>
-      Maximum concurrency for a given callable type.
-      Each command is a callable type (submit, start, run, signal, job, jobs, suspend,resume, etc).
-      Each action type is a callable type (Map-Reduce, Pig, SSH, FS, sub-workflow, etc).
-      All commands that use action executors (action-start, action-end, action-kill and action-check) use
-      the action type as the callable type.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.coord.normal.default.timeout</name>
-    <value>120</value>
-    <description>Default timeout for a coordinator action input check (in minutes) for normal job.
-      -1 means infinite timeout
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.db.schema.name</name>
-    <value>oozie</value>
-    <description>
-      Oozie DataBase Name
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.authentication.type</name>
-    <value>simple</value>
-    <description>
-      Authentication used for Oozie HTTP endpoint, the supported values are: simple | kerberos |
-      #AUTHENTICATION_HANDLER_CLASSNAME#.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.WorkflowAppService.system.libpath</name>
-    <value>/user/${user.name}/share/lib</value>
-    <description>
-      System library path to use for workflow applications.
-      This path is added to workflow application if their job properties sets
-      the property 'oozie.use.system.libpath' to true.
-    </description>
-  </property>
-
-  <property>
-    <name>use.system.libpath.for.mapreduce.and.pig.jobs</name>
-    <value>false</value>
-    <description>
-      If set to true, submissions of MapReduce and Pig jobs will include
-      automatically the system library path, thus not requiring users to
-      specify where the Pig JAR files are. Instead, the ones from the system
-      library path are used.
-    </description>
-  </property>
-  <property>
-    <name>oozie.authentication.kerberos.name.rules</name>
-    <value>DEFAULT</value>
-    <description>The mapping from kerberos principal names to local OS user names.</description>
-  </property>
-  <property>
-    <name>oozie.service.HadoopAccessorService.hadoop.configurations</name>
-    <value>*=/etc/hadoop/conf</value>
-    <description>
-      Comma separated AUTHORITY=HADOOP_CONF_DIR, where AUTHORITY is the HOST:PORT of
-      the Hadoop service (JobTracker, HDFS). The wildcard '*' configuration is
-      used when there is no exact match for an authority. The HADOOP_CONF_DIR contains
-      the relevant Hadoop *-site.xml files. If the path is relative is looked within
-      the Oozie configuration directory; though the path can be absolute (i.e. to point
-      to Hadoop client conf/ directories in the local filesystem.
-    </description>
-  </property>
-  <property>
-    <name>oozie.service.ActionService.executor.ext.classes</name>
-    <value>org.apache.oozie.action.email.EmailActionExecutor,
-      org.apache.oozie.action.hadoop.HiveActionExecutor,
-      org.apache.oozie.action.hadoop.ShellActionExecutor,
-      org.apache.oozie.action.hadoop.SqoopActionExecutor,
-      org.apache.oozie.action.hadoop.DistcpActionExecutor
-    </value>
-    <description>
-      List of ActionExecutors extension classes (separated by commas). Only action types with associated executors can
-      be used in workflows. This property is a convenience property to add extensions to the built in executors without
-      having to include all the built in ones.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.SchemaService.wf.ext.schemas</name>
-    <value>shell-action-0.1.xsd,email-action-0.1.xsd,hive-action-0.2.xsd,sqoop-action-0.2.xsd,ssh-action-0.1.xsd,distcp-action-0.1.xsd,hive-action-0.3.xsd</value>
-    <description>
-      Schemas for additional actions types. IMPORTANT: if there are no schemas leave a 1 space string, the service
-      trims the value, if empty Configuration assumes it is NULL.
-    </description>
-  </property>
-  <property>
-    <name>oozie.service.JPAService.create.db.schema</name>
-    <value>false</value>
-    <description>
-      Creates Oozie DB.
-
-      If set to true, it creates the DB schema if it does not exist. If the DB schema exists is a NOP.
-      If set to false, it does not create the DB schema. If the DB schema does not exist it fails start up.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.JPAService.jdbc.driver</name>
-    <value>org.apache.derby.jdbc.EmbeddedDriver</value>
-    <description>
-      JDBC driver class.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.JPAService.jdbc.url</name>
-    <value>jdbc:derby:${oozie.data.dir}/${oozie.db.schema.name}-db;create=true</value>
-    <description>
-      JDBC URL.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.JPAService.jdbc.username</name>
-    <value>oozie</value>
-    <description>
-      DB user name.
-    </description>
-  </property>
-
-  <property require-input = "true">
-    <name>oozie.service.JPAService.jdbc.password</name>
-    <value> </value>
-    <property-type>PASSWORD</property-type>
-    <description>
-      DB user password.
-
-      IMPORTANT: if password is empty leave a 1 space string, the service trims the value,
-      if empty Configuration assumes it is NULL.
-    </description>
-  </property>
-
-  <property>
-    <name>oozie.service.JPAService.pool.max.active.conn</name>
-    <value>10</value>
-    <description>
-      Max number of connections.
-    </description>
-  </property>
-</configuration>
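
For readers skimming the removed stack definition: the oozie-site.xml above uses the standard Ambari configuration schema, a flat list of property elements with name, value and description children. A minimal standalone sketch (Python 3, not part of this commit; the file path is illustrative) of reading such a file with the standard library:

import xml.etree.ElementTree as ET

def read_properties(path):
    """Return {name: value} for every <property> in an Ambari-style *-site.xml."""
    props = {}
    for prop in ET.parse(path).getroot().findall("property"):
        name = prop.findtext("name")
        if name:
            props[name.strip()] = (prop.findtext("value") or "").strip()
    return props

# example usage (path is a placeholder):
# print(read_properties("oozie-site.xml").get("oozie.base.url"))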

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
deleted file mode 100644
index cc86c8e..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
+++ /dev/null
@@ -1,135 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>OOZIE</name>
-      <displayName>Oozie</displayName>
-      <comment>System for workflow coordination and execution of Apache Hadoop jobs.  This also includes the installation of the optional Oozie Web Console which relies on and will install the &lt;a target="_blank" href="http://www.sencha.com/legal/open-source-faq/"&gt;ExtJS&lt;/a&gt; Library.
-      </comment>
-      <version>3.3.2.1.3.3.0</version>
-      <components>
-        <component>
-          <name>OOZIE_SERVER</name>
-          <displayName>Oozie Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/oozie_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-
-        <component>
-          <name>OOZIE_CLIENT</name>
-          <displayName>Oozie Client</displayName>
-          <category>CLIENT</category>
-          <cardinality>1+</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/oozie_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>xml</type>
-              <fileName>oozie-site.xml</fileName>
-              <dictionaryName>oozie-site</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>oozie-env.sh</fileName>
-              <dictionaryName>oozie-env</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>oozie-log4j.properties</fileName>
-              <dictionaryName>oozie-log4j</dictionaryName>
-            </configFile>            
-          </configFiles>
-        </component>
-      </components>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>oozie.noarch</name>
-            </package>
-            <package>
-              <name>oozie-client.noarch</name>
-            </package>
-            <package>
-              <name>extjs-2.2-1</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>MAPREDUCE</service>
-      </requiredServices>
-
-      <configuration-dependencies>
-        <config-type>oozie-site</config-type>
-        <config-type>oozie-env</config-type>
-        <config-type>oozie-log4j</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>
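
The metainfo.xml deleted above is what told Ambari which components the OOZIE service provides, their category and cardinality, and which Python script handles each one. A rough standalone sketch (Python 3, not part of this commit) of extracting that structure:

import xml.etree.ElementTree as ET

def list_components(metainfo_path):
    """Yield (name, category, cardinality, script) for each component in a metainfo.xml."""
    root = ET.parse(metainfo_path).getroot()
    for comp in root.iter("component"):
        yield (comp.findtext("name"),
               comp.findtext("category"),
               comp.findtext("cardinality"),
               comp.findtext("commandScript/script"))

# for the file above this yields, e.g.:
# ('OOZIE_SERVER', 'MASTER', '1', 'scripts/oozie_server.py')
# ('OOZIE_CLIENT', 'CLIENT', '1+', 'scripts/oozie_client.py')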

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/alerts/alert_check_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/alerts/alert_check_oozie_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/alerts/alert_check_oozie_server.py
deleted file mode 100644
index 4e3e6ae..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/alerts/alert_check_oozie_server.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from resource_management import *
-from resource_management.libraries.functions import format
-from resource_management.libraries.functions import get_kinit_path
-from resource_management.core.environment import Environment
-
-RESULT_CODE_OK = 'OK'
-RESULT_CODE_CRITICAL = 'CRITICAL'
-RESULT_CODE_UNKNOWN = 'UNKNOWN'
-
-OOZIE_URL_KEY = '{{oozie-site/oozie.base.url}}'
-SECURITY_ENABLED = '{{cluster-env/security_enabled}}'
-SMOKEUSER_KEY = '{{cluster-env/smokeuser}}'
-SMOKEUSER_KEYTAB_KEY = '{{cluster-env/smokeuser_keytab}}'
-
-def get_tokens():
-  """
-  Returns a tuple of tokens in the format {{site/property}} that will be used
-  to build the dictionary passed into execute
-  """
-  return (OOZIE_URL_KEY, SMOKEUSER_KEY, SECURITY_ENABLED,SMOKEUSER_KEYTAB_KEY)
-
-def execute(parameters=None, host_name=None):
-  """
-  Returns a tuple containing the result code and a pre-formatted result label
-
-  Keyword arguments:
-  parameters (dictionary): a mapping of parameter key to value
-  host_name (string): the name of this host where the alert is running
-  """
-
-  if parameters is None:
-    return (RESULT_CODE_UNKNOWN, ['There were no parameters supplied to the script.'])
-
-  security_enabled = False
-  if set([OOZIE_URL_KEY, SMOKEUSER_KEY, SECURITY_ENABLED]).issubset(parameters):
-    oozie_url = parameters[OOZIE_URL_KEY]
-    smokeuser = parameters[SMOKEUSER_KEY]
-    security_enabled = str(parameters[SECURITY_ENABLED]).upper() == 'TRUE'
-  else:
-    return (RESULT_CODE_UNKNOWN, ['The Oozie URL and Smokeuser are required parameters.'])
-
-  try:
-    if security_enabled:
-      if set([SMOKEUSER_KEYTAB_KEY]).issubset(parameters):
-        smokeuser_keytab = parameters[SMOKEUSER_KEYTAB_KEY]
-      else:
-        return (RESULT_CODE_UNKNOWN, ['The Smokeuser keytab is required when security is enabled.'])
-      kinit_path_local = get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-      kinitcmd = format("{kinit_path_local} -kt {smokeuser_keytab} {smokeuser}; ")
-
-      Execute(kinitcmd,
-              user=smokeuser,
-              )
-
-    Execute(format("source /etc/oozie/conf/oozie-env.sh ; oozie admin -oozie {oozie_url} -status"),
-            user=smokeuser,
-            )
-    return (RESULT_CODE_OK, ["Oozie check success"])
-
-  except Exception, ex:
-    return (RESULT_CODE_CRITICAL, [str(ex)])
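
The alert above returns a (result_code, [label]) tuple and is driven by a parameters dictionary keyed by the {{site/property}} tokens from get_tokens(). Stripped of the resource_management wrappers, the check it performs is roughly the following standalone sketch (Python 3; the URL is a placeholder, and the real script additionally runs the command as the smoke user):

import subprocess

def check_oozie(oozie_url, kinit_cmd=""):
    """Return ('OK', output) if `oozie admin -status` succeeds, else ('CRITICAL', output)."""
    cmd = kinit_cmd + "oozie admin -oozie %s -status" % oozie_url
    try:
        out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        return ("OK", out.decode().strip())
    except subprocess.CalledProcessError as err:
        return ("CRITICAL", err.output.decode().strip())

# example: print(check_oozie("http://localhost:11000/oozie"))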

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/oozieSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/oozieSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/oozieSmoke.sh
deleted file mode 100644
index 68bc22c..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/oozieSmoke.sh
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/sh
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-function getValueFromField {
-  xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
-  return $?
-}
-
-function checkOozieJobStatus {
-  local job_id=$1
-  local num_of_tries=$2
-  #default num_of_tries to 10 if not present
-  num_of_tries=${num_of_tries:-10}
-  local i=0
-  local rc=1
-  local cmd="source ${oozie_conf_dir}/oozie-env.sh ; /usr/bin/oozie job -oozie ${OOZIE_SERVER} -info $job_id"
-  sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"
-  while [ $i -lt $num_of_tries ] ; do
-    cmd_output=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
-    (IFS='';echo $cmd_output)
-    act_status=$(IFS='';echo $cmd_output | grep ^Status | cut -d':' -f2 | sed 's| ||g')
-    echo "workflow_status=$act_status"
-    if [ "RUNNING" == "$act_status" ]; then
-      #increment the counter and get the status again after waiting for 15 secs
-      sleep 15
-      (( i++ ))
-      elif [ "SUCCEEDED" == "$act_status" ]; then
-        rc=0;
-        break;
-      else
-        rc=1
-        break;
-      fi
-    done
-    return $rc
-}
-
-export oozie_conf_dir=$1
-export hadoop_conf_dir=$2
-export smoke_test_user=$3
-export security_enabled=$4
-export smoke_user_keytab=$5
-export kinit_path_local=$6
-
-export OOZIE_EXIT_CODE=0
-export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/mapred-site.xml mapred.job.tracker`
-export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.default.name`
-export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.base.url | tr '[:upper:]' '[:lower:]'`
-export OOZIE_EXAMPLES_DIR=`rpm -ql oozie-client | grep 'oozie-examples.tar.gz$' | xargs dirname`
-cd $OOZIE_EXAMPLES_DIR
-
-sudo tar -zxf oozie-examples.tar.gz
-sudo sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
-sudo sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
-sudo sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-sudo sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-sudo sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
-
-if [[ $security_enabled == "True" ]]; then
-  kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smoke_test_user}; "
-else 
-  kinitcmd=""
-fi
-
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -rmr examples"
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -rmr input-data"
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
-
-cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; /usr/bin/oozie job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
-job_info=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd" | grep "job:"`
-job_id="`echo $job_info | cut -d':' -f2`"
-checkOozieJobStatus "$job_id"
-OOZIE_EXIT_CODE="$?"
-exit $OOZIE_EXIT_CODE
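
checkOozieJobStatus above polls `oozie job -info` until the workflow leaves the RUNNING state, sleeping 15 seconds between tries. The same loop as a standalone Python 3 sketch (not part of this commit; oozie_url and job_id are whatever the smoke test produced):

import re
import subprocess
import time

def wait_for_workflow(oozie_url, job_id, tries=10, interval=15):
    """Poll `oozie job -info` and return True once the workflow reports SUCCEEDED."""
    for _ in range(tries):
        out = subprocess.check_output(
            ["oozie", "job", "-oozie", oozie_url, "-info", job_id]).decode()
        match = re.search(r"^Status\s*:\s*(\S+)", out, re.MULTILINE)
        status = match.group(1) if match else "UNKNOWN"
        if status == "SUCCEEDED":
            return True
        if status != "RUNNING":
            return False
        time.sleep(interval)
    return False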

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/wrap_ooziedb.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/wrap_ooziedb.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/wrap_ooziedb.sh
deleted file mode 100644
index 97a513c..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/wrap_ooziedb.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/sh
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-OUT=`cd /var/tmp/oozie && /usr/lib/oozie/bin/ooziedb.sh "$@" 2>&1`
-EC=$?
-echo $OUT
-GRVAR=`echo ${OUT} | grep -o "java.lang.Exception: DB schema exists"`
-if [ ${EC} -ne 0 ] && [ -n "$GRVAR" ]
-then
-  exit 0
-else
-  exit $EC
-fi  
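
wrap_ooziedb.sh exists only to make schema creation idempotent: a non-zero exit from ooziedb.sh is downgraded to success when the output contains "java.lang.Exception: DB schema exists". The same guard as a standalone Python 3 sketch (the ooziedb.sh path and working directory are taken from the script above):

import subprocess
import sys

def run_ooziedb(*args):
    """Run ooziedb.sh and treat an already-existing schema as success."""
    proc = subprocess.run(["/usr/lib/oozie/bin/ooziedb.sh"] + list(args),
                          cwd="/var/tmp/oozie",
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    sys.stdout.write(proc.stdout.decode())
    if proc.returncode != 0 and b"java.lang.Exception: DB schema exists" in proc.stdout:
        return 0
    return proc.returncode

# example: sys.exit(run_ooziedb("create", "-sqlfile", "oozie.sql", "-run"))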

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
deleted file mode 100644
index 4975c42..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
+++ /dev/null
@@ -1,167 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import os
-from resource_management import *
-
-def oozie(is_server=False
-              ):
-  import params
-
-  if is_server:
-    params.HdfsDirectory(params.oozie_hdfs_user_dir,
-                         action="create",
-                         owner=params.oozie_user,
-                         mode=params.oozie_hdfs_user_mode
-    )
-  XmlConfig( "oozie-site.xml",
-    conf_dir = params.conf_dir, 
-    configurations = params.config['configurations']['oozie-site'],
-    configuration_attributes=params.config['configuration_attributes']['oozie-site'],
-    owner = params.oozie_user,
-    group = params.user_group,
-    mode = 0664
-  )
-  
-  Directory( params.conf_dir,
-    owner = params.oozie_user,
-    group = params.user_group
-  )
-  
-  File(format("{conf_dir}/oozie-env.sh"),
-       owner=params.oozie_user,
-       content=InlineTemplate(params.oozie_env_sh_template)
-  )  
-
-  if (params.log4j_props != None):
-    File(format("{params.conf_dir}/oozie-log4j.properties"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.oozie_user,
-         content=params.log4j_props
-    )
-  elif (os.path.exists(format("{params.conf_dir}/oozie-log4j.properties"))):
-    File(format("{params.conf_dir}/oozie-log4j.properties"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.oozie_user
-    )
-
-  environment = {
-    "no_proxy": format("{ambari_server_hostname}")
-  }
-
-  if params.jdbc_driver_name == "com.mysql.jdbc.Driver" or \
-     params.jdbc_driver_name == "org.postgresql.Driver" or \
-     params.jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
-    Execute(format("/bin/sh -c 'cd /usr/lib/ambari-agent/ &&\
-    curl -kf -x \"\" \
-    --retry 5 {jdk_location}{check_db_connection_jar_name}\
-     -o {check_db_connection_jar_name}'"),
-      not_if  = format("[ -f {check_db_connection_jar} ]"),
-      environment=environment
-    )
-    
-  oozie_ownership( )
-  
-  if is_server:      
-    oozie_server_specific( )
-  
-def oozie_ownership(
-):
-  import params
-  
-  File ( format("{conf_dir}/adminusers.txt"),
-    owner = params.oozie_user,
-    group = params.user_group
-  )
-
-  File ( format("{conf_dir}/hadoop-config.xml"),
-    owner = params.oozie_user,
-    group = params.user_group
-  )
-
-  File ( format("{conf_dir}/oozie-default.xml"),
-    owner = params.oozie_user,
-    group = params.user_group
-  )
-
-  Directory ( format("{conf_dir}/action-conf"),
-    owner = params.oozie_user,
-    group = params.user_group
-  )
-
-  File ( format("{conf_dir}/action-conf/hive.xml"),
-    owner = params.oozie_user,
-    group = params.user_group
-  )
-  
-def oozie_server_specific(
-):
-  import params
-
-  File(params.pid_file,
-    action="delete",
-    not_if="ls {pid_file} >/dev/null 2>&1 && !(ps `cat {pid_file}` >/dev/null 2>&1)"
-  )
-  
-  oozie_server_directorties = [params.oozie_pid_dir, params.oozie_log_dir, params.oozie_tmp_dir, params.oozie_data_dir, params.oozie_lib_dir, params.oozie_webapps_dir]            
-  Directory( oozie_server_directorties,
-    owner = params.oozie_user,
-    mode = 0755,
-    recursive = True
-  )
-  
-  Directory(params.oozie_libext_dir,
-            recursive=True,
-  )
-  
-  no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
-       
-  Execute(('tar','-xvf', format('{oozie_home}/oozie-sharelib.tar.gz'), '-C', params.oozie_home),
-    not_if  = no_op_test,
-    sudo = True
-  )
-
-  if params.jdbc_driver_name=="com.mysql.jdbc.Driver" or params.jdbc_driver_name=="oracle.jdbc.driver.OracleDriver":
-
-    environment = {
-      "no_proxy": format("{ambari_server_hostname}")
-    }
-
-    Execute(('curl', '-kf', '-x', "", '--retry', '10', params.driver_curl_source, '-o',
-             params.downloaded_custom_connector),
-            not_if=format("test -f {downloaded_custom_connector}"),
-            path=["/bin", "/usr/bin/"],
-            environment=environment,
-            sudo = True)
-
-
-    Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.target),
-            #creates=params.target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
-            path=["/bin", "/usr/bin/"],
-            sudo = True)
-
-  oozie_setup_cmd = format("cd {oozie_tmp_dir} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 {hadoop_jar_location} -extjs {ext_js_path} {jar_option} {jar_path}")
-  Execute( oozie_setup_cmd,
-    user = params.oozie_user,
-    not_if  = no_op_test
-  )
-  
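
oozie.py above fetches DBConnectionVerification.jar (and, on the server, the JDBC connector) with curl, guarded by a not_if test so the download only runs when the target file is missing. The same download-if-missing idiom as a standalone Python 3 sketch (URL and destination below are placeholders):

import os
import urllib.request

def fetch_if_missing(url, dest, retries=5):
    """Download url to dest unless dest already exists, retrying on transient errors."""
    if os.path.isfile(dest):
        return dest
    last_error = None
    for _ in range(retries):
        try:
            urllib.request.urlretrieve(url, dest)
            return dest
        except OSError as err:
            last_error = err
    raise last_error

# example (placeholder URL and path):
# fetch_if_missing("http://ambari-server:8080/resources/DBConnectionVerification.jar",
#                  "/usr/lib/ambari-agent/DBConnectionVerification.jar")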

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_client.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_client.py
deleted file mode 100644
index d38bfb8..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_client.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-from oozie import oozie
-from oozie_service import oozie_service
-
-         
-class OozieClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-    
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    oozie(is_server=False)
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-    
-
-if __name__ == "__main__":
-  OozieClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py
deleted file mode 100644
index 502a9d3..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-from oozie import oozie
-from oozie_service import oozie_service
-
-         
-class OozieServer(Script):
-  def install(self, env):
-    self.install_packages(env)
-    
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    oozie(is_server=True)
-    
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env)
-    oozie_service(action='start')
-    
-  def stop(self, env):
-    import params
-    env.set_params(params)
-    oozie_service(action='stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_process_status(status_params.pid_file)
-    
-
-if __name__ == "__main__":
-  OozieServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_service.py
deleted file mode 100644
index aaabcbd..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_service.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-def oozie_service(action = 'start'): # 'start' or 'stop'
-  import params
-
-  kinit_if_needed = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal};") if params.security_enabled else ""
-  no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
-  
-  if action == 'start':
-    start_cmd = format("cd {oozie_tmp_dir} && /usr/lib/oozie/bin/oozie-start.sh")
-    
-    if params.jdbc_driver_name == "com.mysql.jdbc.Driver" or \
-       params.jdbc_driver_name == "org.postgresql.Driver" or \
-       params.jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
-      db_connection_check_command = format("{java_home}/bin/java -cp {check_db_connection_jar}:{target} org.apache.ambari.server.DBConnectionVerification '{oozie_jdbc_connection_url}' {oozie_metastore_user_name} {oozie_metastore_user_passwd!p} {jdbc_driver_name}")
-    else:
-      db_connection_check_command = None
-      
-    cmd1 =  format("cd {oozie_tmp_dir} && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run")
-    cmd2 =  format("{kinit_if_needed} hadoop dfs -put /usr/lib/oozie/share {oozie_hdfs_user_dir} ; hadoop dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
-      
-    if db_connection_check_command:
-      Execute( db_connection_check_command, tries=5, try_sleep=10)
-                  
-    Execute( cmd1,
-      user = params.oozie_user,
-      not_if  = no_op_test,
-      ignore_failures = True
-    ) 
-    
-    Execute( cmd2,
-      user = params.oozie_user,       
-      not_if = format("{kinit_if_needed} hadoop dfs -ls /user/oozie/share | awk 'BEGIN {{count=0;}} /share/ {{count++}} END {{if (count > 0) {{exit 0}} else {{exit 1}}}}'")
-    )
-    
-    Execute( start_cmd,
-      user = params.oozie_user,
-      not_if  = no_op_test,
-    )
-  elif action == 'stop':
-    stop_cmd  = format("cd {oozie_tmp_dir} && {oozie_home}/bin/oozie-stop.sh")
-    Execute(stop_cmd,
-      only_if  = no_op_test,
-      user = params.oozie_user
-    )
-    File(params.pid_file,
-         action = "delete",
-    )
-
-  
-  
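
Both oozie.py and oozie_service.py gate their actions on the same no_op_test: the pid file exists and the process it names is still alive. That check, as a standalone Python 3 sketch (the pid file path below is a placeholder for whatever {oozie_pid_dir}/oozie.pid resolves to on a real host):

import os

def oozie_is_running(pid_file):
    """True if pid_file exists and the recorded process is alive (the no_op_test guard)."""
    try:
        with open(pid_file) as handle:
            pid = int(handle.read().strip())
    except (OSError, ValueError):
        return False
    try:
        os.kill(pid, 0)  # signal 0 only tests for existence
        return True
    except OSError:
        return False

# example (placeholder path):
# if not oozie_is_running("/var/run/oozie/oozie.pid"): start the server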

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
deleted file mode 100644
index 8525b38..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import status_params
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-oozie_user = config['configurations']['oozie-env']['oozie_user']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-conf_dir = "/etc/oozie/conf"
-hadoop_conf_dir = "/etc/hadoop/conf"
-user_group = config['configurations']['cluster-env']['user_group']
-jdk_location = config['hostLevelParams']['jdk_location']
-check_db_connection_jar_name = "DBConnectionVerification.jar"
-check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
-hadoop_prefix = "/usr"
-oozie_tmp_dir = "/var/tmp/oozie"
-oozie_hdfs_user_dir = format("/user/{oozie_user}")
-oozie_pid_dir = status_params.oozie_pid_dir
-pid_file = status_params.pid_file
-hadoop_jar_location = "/usr/lib/hadoop/"
-# for HDP2 it's "/usr/share/HDP-oozie/ext-2.2.zip"
-ext_js_path = "/usr/share/HDP-oozie/ext.zip"
-oozie_home = "/usr/lib/oozie"
-oozie_libext_dir = "/usr/lib/oozie/libext"
-lzo_enabled = config['configurations']['mapred-env']['lzo_enabled']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-oozie_service_keytab = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.keytab.file']
-oozie_principal = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.kerberos.principal']
-smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-oozie_keytab = config['configurations']['oozie-env']['oozie_keytab']
-
-oracle_driver_jar_name = "ojdbc6.jar"
-java_share_dir = "/usr/share/java"
-
-java_home = config['hostLevelParams']['java_home']
-oozie_metastore_user_name = config['configurations']['oozie-site']['oozie.service.JPAService.jdbc.username']
-oozie_metastore_user_passwd = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.password","")
-oozie_jdbc_connection_url = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.url", "")
-oozie_log_dir = config['configurations']['oozie-env']['oozie_log_dir']
-oozie_data_dir = config['configurations']['oozie-env']['oozie_data_dir']
-oozie_server_port = get_port_from_url(config['configurations']['oozie-site']['oozie.base.url'])
-oozie_server_admin_port = config['configurations']['oozie-env']['oozie_admin_port']
-oozie_lib_dir = "/var/lib/oozie/"
-oozie_webapps_dir = "/var/lib/oozie/oozie-server/webapps/"
-
-jdbc_driver_name = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.driver", "")
-oozie_env_sh_template = config['configurations']['oozie-env']['content']
-
-if jdbc_driver_name == "com.mysql.jdbc.Driver":
-  jdbc_driver_jar = "mysql-connector-java.jar"
-  jdbc_symlink_name = "mysql-jdbc-driver.jar"
-elif jdbc_driver_name == "org.postgresql.Driver":
-  jdbc_driver_jar = format("{oozie_home}/libtools/postgresql-9.0-801.jdbc4.jar") #oozie using it's own postgres jdbc
-  jdbc_symlink_name = "postgres-jdbc-driver.jar"
-elif jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
-  jdbc_driver_jar = "ojdbc.jar"
-  jdbc_symlink_name = "oracle-jdbc-driver.jar"
-else:
-  jdbc_driver_jar = ""
-  jdbc_symlink_name = ""
-
-driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-driver_curl_target = format("{java_share_dir}/{jdbc_driver_jar}")
-downloaded_custom_connector = format("{tmp_dir}/{jdbc_driver_jar}")
-if jdbc_driver_name == "org.postgresql.Driver":
-  target = jdbc_driver_jar
-else:
-  target = format("{oozie_libext_dir}/{jdbc_driver_jar}")
-
-if lzo_enabled or jdbc_driver_name:
-  jar_option = "-jars"         
-else:
-  jar_option = ""
-  
-lzo_jar_suffix = '`LZO_JARS=($(find /usr/lib/hadoop/lib/ -name "hadoop-lzo-*")); echo ${LZO_JARS[0]}`' if lzo_enabled else ""
-  
-if lzo_enabled and jdbc_driver_name:
-    jar_path = format("{lzo_jar_suffix}:{target}")
-else:
-    jar_path = "{lzo_jar_suffix}{target}"
-
-#oozie-log4j.properties
-if (('oozie-log4j' in config['configurations']) and ('content' in config['configurations']['oozie-log4j'])):
-  log4j_props = config['configurations']['oozie-log4j']['content']
-else:
-  log4j_props = None
-
-#hdfs directories
-oozie_hdfs_user_dir = format("/user/{oozie_user}")
-oozie_hdfs_user_mode = 0775
-#for create_hdfs_directory
-hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
-)
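
params.py above derives oozie_server_port from oozie.base.url via the stack's get_port_from_url helper. The extraction itself is ordinary URL parsing; a rough standalone sketch (Python 3, not the actual helper), using the default URL from oozie-site.xml earlier in this commit:

from urllib.parse import urlparse

def port_from_url(url, default=11000):
    """Return the port component of a URL such as oozie.base.url."""
    port = urlparse(url).port
    return port if port is not None else default

# print(port_from_url("http://localhost:11000/oozie"))  -> 11000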

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/service_check.py
deleted file mode 100644
index 470d898..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/service_check.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-class OozieServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-    
-    # on HDP2 this file is different
-    smoke_test_file_name = 'oozieSmoke.sh'
-
-    oozie_smoke_shell_file( smoke_test_file_name)
-  
-def oozie_smoke_shell_file(
-  file_name
-):
-  import params
-
-  File( format("{tmp_dir}/{file_name}"),
-    content = StaticFile(file_name),
-    mode = 0755
-  )
-  
-  if params.security_enabled:
-    sh_cmd = format("sh {tmp_dir}/{file_name} {conf_dir} {hadoop_conf_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local}")
-  else:
-    sh_cmd = format("sh {tmp_dir}/{file_name} {conf_dir} {hadoop_conf_dir} {smokeuser} {security_enabled}")
-
-  Execute( format("{tmp_dir}/{file_name}"),
-    command   = sh_cmd,
-    tries     = 3,
-    try_sleep = 5,
-    logoutput = True
-  )
-
-if __name__ == "__main__":
-  OozieServiceCheck().execute()
-  

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/status_params.py
deleted file mode 100644
index a665449..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/status_params.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-config = Script.get_config()
-
-oozie_pid_dir = config['configurations']['oozie-env']['oozie_pid_dir']
-pid_file = format("{oozie_pid_dir}/oozie.pid")

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-env.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-env.xml
deleted file mode 100644
index e564ccb..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-env.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <!-- pig-env.sh -->
-  <property>
-    <name>content</name>
-    <description>This is the jinja template for pig-env.sh file</description>
-    <value>
-JAVA_HOME={{java64_home}}
-HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
-    </value>
-  </property>
-  
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-log4j.xml
deleted file mode 100644
index 4fe323c..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-log4j.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="false">
-
-  <property>
-    <name>content</name>
-    <description>Custom log4j.properties</description>
-    <value>
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-#
-
-# ***** Set root logger level to DEBUG and its only appender to A.
-log4j.logger.org.apache.pig=info, A
-
-# ***** A is set to be a ConsoleAppender.
-log4j.appender.A=org.apache.log4j.ConsoleAppender
-# ***** A uses PatternLayout.
-log4j.appender.A.layout=org.apache.log4j.PatternLayout
-log4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
-    </value>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-properties.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-properties.xml
deleted file mode 100644
index 2277a40..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/configuration/pig-properties.xml
+++ /dev/null
@@ -1,83 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="false">
-
-  <property>
-    <name>content</name>
-    <description>Describe all the Pig agent configurations</description>
-    <value>
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-
-# Pig configuration file. All values can be overwritten by command line arguments.
-
-# log4jconf log4j configuration file
-# log4jconf=./conf/log4j.properties
-
-# a file that contains pig script
-#file=
-
-# load jarfile, colon separated
-#jar=
-
-#verbose print all log messages to screen (default to print only INFO and above to screen)
-#verbose=true
-
-#exectype local|mapreduce, mapreduce is default
-#exectype=local
-
-#pig.logfile=
-
-#Do not spill temp files smaller than this size (bytes)
-#pig.spill.size.threshold=5000000
-#EXPERIMENT: Activate garbage collection when spilling a file bigger than this size (bytes)
-#This should help reduce the number of files being spilled.
-#pig.spill.gc.activation.size=40000000
-
-#the following two parameters are to help estimate the reducer number
-#pig.exec.reducers.bytes.per.reducer=1000000000
-#pig.exec.reducers.max=999
-
-#Use this option only when your Pig job will otherwise die because of
-#using more counters than the Hadoop-configured limit
-#pig.disable.counter=true
-hcat.bin=/usr/bin/hcat
-    </value>
-  </property>
-
-</configuration>
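
The content value above is only a template; the pig.py script deleted further down in this commit writes it to /etc/pig/conf/pig.properties through a resource_management File resource. A minimal sketch of that step, reusing the names from the deleted package scripts (it only runs inside the Ambari agent context):

from resource_management import *
import params

File(format("{params.pig_conf_dir}/pig.properties"),
     mode=0644,
     group=params.user_group,
     owner=params.hdfs_user,
     content=params.pig_properties)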

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
deleted file mode 100644
index 85d5961..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
+++ /dev/null
@@ -1,85 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>PIG</name>
-      <displayName>Pig</displayName>
-      <comment>Scripting platform for analyzing large datasets</comment>
-      <version>0.11.1.1.3.3.0</version>
-      <components>
-        <component>
-          <name>PIG</name>
-          <displayName>Pig</displayName>
-          <category>CLIENT</category>
-          <cardinality>1+</cardinality>
-          <commandScript>
-            <script>scripts/pig_client.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>env</type>
-              <fileName>pig-env.sh</fileName>
-              <dictionaryName>pig-env</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>log4j.properties</fileName>
-              <dictionaryName>pig-log4j</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>pig.properties</fileName>
-              <dictionaryName>pig-properties</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>MAPREDUCE</service>
-      </requiredServices>
-
-      <configuration-dependencies>
-        <config-type>pig-env</config-type>
-        <config-type>pig-log4j</config-type>
-        <config-type>pig-properties</config-type>
-      </configuration-dependencies>
-
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/files/pigSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/files/pigSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/files/pigSmoke.sh
deleted file mode 100644
index a22456e..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/files/pigSmoke.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-/*Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License */
-
-A = load 'passwd' using PigStorage(':');
-B = foreach A generate \$0 as id;
-store B into 'pigsmoke.out';
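
Read together with the service_check.py deleted below, the smoke test boils down to roughly this shell sequence, run as the smoke user (paths simplified; the real script stages pigSmoke.sh under the agent tmp_dir and retries up to three times):

hadoop dfs -rmr pigsmoke.out passwd    # clear stale files left by a previous attempt
hadoop dfs -put /etc/passwd passwd     # stage the input for the script above
pig /tmp/pigSmoke.sh                   # run the two-statement Pig Latin job
hadoop fs -test -e pigsmoke.out        # exit code 0 means the output exists and the check passes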

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/params.py
deleted file mode 100644
index 111e158..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/params.py
+++ /dev/null
@@ -1,48 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-pig_conf_dir = "/etc/pig/conf"
-hadoop_conf_dir = "/etc/hadoop/conf"
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-user_group = config['configurations']['cluster-env']['user_group']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-pig_env_sh_template = config['configurations']['pig-env']['content']
-
-# not supporting 32 bit jdk.
-java64_home = config['hostLevelParams']['java_home']
-hadoop_home = "/usr"
-
-#log4j.properties
-if (('pig-log4j' in config['configurations']) and ('content' in config['configurations']['pig-log4j'])):
-  log4j_props = config['configurations']['pig-log4j']['content']
-else:
-  log4j_props = None
-
-pig_properties = config['configurations']['pig-properties']['content']
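
The pig-log4j guard above can also be written as a single expression with identical behavior (sketch only; the shipped code keeps the explicit if/else):

log4j_props = config['configurations'].get('pig-log4j', {}).get('content')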

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/pig.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/pig.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/pig.py
deleted file mode 100644
index 0b61dd3..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/pig.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import os
-
-from resource_management import *
-
-def pig():
-  import params
-
-  Directory( params.pig_conf_dir,
-    owner = params.hdfs_user,
-    group = params.user_group
-  )
-
-  File(format("{pig_conf_dir}/pig-env.sh"),
-       owner=params.hdfs_user,
-       content=InlineTemplate(params.pig_env_sh_template)
-  )
-
-  File(format("{params.pig_conf_dir}/pig.properties"),
-       mode=0644,
-       group=params.user_group,
-       owner=params.hdfs_user,
-       content=params.pig_properties
-  )
-
-  if (params.log4j_props != None):
-    File(format("{params.pig_conf_dir}/log4j.properties"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hdfs_user,
-         content=params.log4j_props
-    )
-  elif (os.path.exists(format("{params.pig_conf_dir}/log4j.properties"))):
-    File(format("{params.pig_conf_dir}/log4j.properties"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hdfs_user
-    )

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/pig_client.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/pig_client.py
deleted file mode 100644
index 931dceb..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/pig_client.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-import sys
-from resource_management import *
-from pig import pig
-
-
-class PigClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    pig()
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-if __name__ == "__main__":
-  PigClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/service_check.py
deleted file mode 100644
index 8431b6d..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/service_check.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-
-class PigServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-
-    input_file = 'passwd'
-    output_file = "pigsmoke.out"
-
-    cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
-    #cleanup runs first to handle retries; a retry would otherwise hit a stale output file; the overall exit code is that of the second command
-    create_file_cmd = format("{cleanup_cmd}; hadoop dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that the second command needs the "hadoop" prefix
-    test_cmd = format("fs -test -e {output_file}")
-
-    ExecuteHadoop( create_file_cmd,
-      tries     = 3,
-      try_sleep = 5,
-      user      = params.smokeuser,
-      conf_dir = params.hadoop_conf_dir,
-      # for kinit run
-      keytab = params.smoke_user_keytab,
-      security_enabled = params.security_enabled,
-      kinit_path_local = params.kinit_path_local
-    )
-
-    File( format("{tmp_dir}/pigSmoke.sh"),
-      content = StaticFile("pigSmoke.sh"),
-      mode = 0755
-    )
-
-    Execute( format("pig {tmp_dir}/pigSmoke.sh"),
-      tries     = 3,
-      try_sleep = 5,
-      path      = '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
-      user      = params.smokeuser
-    )
-
-    ExecuteHadoop( test_cmd,
-      user      = params.smokeuser,
-      conf_dir = params.hadoop_conf_dir
-    )
-
-if __name__ == "__main__":
-  PigServiceCheck().execute()
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml
deleted file mode 100644
index 6edbfdc..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml
+++ /dev/null
@@ -1,55 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <!-- sqoop-env.sh -->
-  <property>
-    <name>content</name>
-    <description>This is the jinja template for the sqoop-env.sh file</description>
-    <value>
-# Set Hadoop-specific environment variables here.
-
-#Set path to where bin/hadoop is available
-export HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}
-
-#set the path to where bin/hbase is available
-export HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}
-
-#Set the path to where bin/hive is available
-export HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}
-
-#Set the path to where the zookeeper config dir is
-export ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}
-
-# add libthrift in hive to sqoop class path first so hive imports work
-export SQOOP_USER_CLASSPATH="`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}"
-    </value>
-  </property>
-  <property>
-    <name>sqoop_user</name>
-    <description>User to run Sqoop as</description>
-    <property-type>USER</property-type>
-    <value>sqoop</value>
-  </property>
-  
-</configuration>
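
The SQOOP_USER_CLASSPATH line above prepends Hive's libthrift jar, when one is present, so that Sqoop's Hive imports work. On a typical install it expands to something like the following (jar version is illustrative):

export SQOOP_USER_CLASSPATH="/usr/lib/hive/lib/libthrift-0.9.0.jar:${SQOOP_USER_CLASSPATH}"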

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
deleted file mode 100644
index 1893de9..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>SQOOP</name>
-      <displayName>Sqoop</displayName>
-      <comment>Tool for transferring bulk data between Apache Hadoop and
-        structured data stores such as relational databases
-      </comment>
-      <version>1.4.3.1.3.3.0</version>
-
-      <components>
-        <component>
-          <name>SQOOP</name>
-          <displayName>Sqoop</displayName>
-          <category>CLIENT</category>
-          <cardinality>1+</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/sqoop_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>env</type>
-              <fileName>sqoop-env.sh</fileName>
-              <dictionaryName>sqoop-env</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>sqoop</name>
-            </package>
-            <package>
-              <name>mysql-connector-java</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>HDFS</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>sqoop-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/__init__.py
deleted file mode 100644
index 3860581..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py
deleted file mode 100644
index 5c31aae..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from resource_management import *
-
-config = Script.get_config()
-
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-user_group = config['configurations']['cluster-env']['user_group']
-sqoop_env_sh_template = config['configurations']['sqoop-env']['content']
-
-sqoop_conf_dir = "/usr/lib/sqoop/conf"
-hbase_home = "/usr"
-hive_home = "/usr"
-zoo_conf_dir = "/etc/zookeeper"
-sqoop_lib = "/usr/lib/sqoop/lib"
-sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
-
-keytab_path = config['configurations']['cluster-env']['keytab_path']
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/service_check.py
deleted file mode 100644
index c548c2d..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/service_check.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-
-from resource_management import *
-
-
-class SqoopServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-    if params.security_enabled:
-      Execute(format("{kinit_path_local}  -kt {smoke_user_keytab} {smokeuser}"),
-              user = params.smokeuser,
-      )
-    Execute("sqoop version",
-            user = params.smokeuser,
-            logoutput = True
-    )
-
-if __name__ == "__main__":
-  SqoopServiceCheck().execute()
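
On a secure cluster the check above amounts to two commands run as the smoke user (keytab path and principal are illustrative; the real values come from cluster-env):

kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa
sqoop version    # a zero exit code passes the service check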

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/sqoop.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/sqoop.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/sqoop.py
deleted file mode 100644
index b96be94..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/sqoop.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from resource_management import *
-import sys
-
-def sqoop(type=None):
-  import params
-  Link(params.sqoop_lib + "/mysql-connector-java.jar",
-       to = '/usr/share/java/mysql-connector-java.jar'
-  )
-  Directory(params.sqoop_conf_dir,
-            owner = params.sqoop_user,
-            group = params.user_group
-  )
-  File(format("{sqoop_conf_dir}/sqoop-env.sh"),
-       owner=params.sqoop_user,
-       content=InlineTemplate(params.sqoop_env_sh_template)
-  )
-  
-  File (params.sqoop_conf_dir + "/sqoop-env-template.sh",
-          owner = params.sqoop_user,
-          group = params.user_group
-  )
-  File (params.sqoop_conf_dir + "/sqoop-site-template.xml",
-         owner = params.sqoop_user,
-         group = params.user_group
-  )
-  File (params.sqoop_conf_dir + "/sqoop-site.xml",
-         owner = params.sqoop_user,
-         group = params.user_group
-  )
-  pass
-
-def sqoop_TemplateConfig(name, tag=None):
-  import params
-  TemplateConfig( format("{sqoop_conf_dir}/{name}"),
-                  owner = params.sqoop_user,
-                  template_tag = tag
-  )
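
The Link resource at the top of sqoop() is what makes the MySQL JDBC driver visible to Sqoop; its shell equivalent is roughly the following (same paths as in the code above):

ln -sfn /usr/share/java/mysql-connector-java.jar /usr/lib/sqoop/lib/mysql-connector-java.jar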

http://git-wip-us.apache.org/repos/asf/ambari/blob/23b7c110/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/sqoop_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/sqoop_client.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/sqoop_client.py
deleted file mode 100644
index bd2863c..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/sqoop_client.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import sys
-from resource_management import *
-
-from sqoop import sqoop
-
-
-class SqoopClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    sqoop(type='client')
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-if __name__ == "__main__":
-  SqoopClient().execute()

