ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From vbrodets...@apache.org
Subject ambari git commit: AMBARI-12875. BE: Modify Hive stack definition to support SQL Anywhere.(vbrodetskyi)
Date Wed, 26 Aug 2015 11:34:34 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 76990f4a8 -> adc32c91e


AMBARI-12875. BE: Modify Hive stack definition to support SQL Anywhere.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/adc32c91
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/adc32c91
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/adc32c91

Branch: refs/heads/trunk
Commit: adc32c91e7ebf316bd04b50bfac63e88db2133d5
Parents: 76990f4
Author: Vitaly Brodetskyi <vbrodetskyi@hortonworks.com>
Authored: Wed Aug 26 14:34:51 2015 +0300
Committer: Vitaly Brodetskyi <vbrodetskyi@hortonworks.com>
Committed: Wed Aug 26 14:34:51 2015 +0300

----------------------------------------------------------------------
 .../HIVE/0.12.0.2.0/configuration/hive-env.xml  |  3 -
 .../services/HIVE/configuration/hive-env.xml    |  3 -
 .../services/HIVE/configuration/hive-env.xml    | 71 ++++++++++++++++++++
 .../services/HIVE/configuration/hive-site.xml   | 12 ++++
 .../stacks/HDP/2.3/services/stack_advisor.py    | 34 +++++++++-
 .../stacks/2.3/common/test_stack_advisor.py     |  3 +-
 6 files changed, 118 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/adc32c91/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml
index 21c52e1..784f06b 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml
@@ -153,9 +153,6 @@ else
 fi
 export METASTORE_PORT={{hive_metastore_port}}
 
-{% if sqla_db_used %}
-export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:{{jdbc_libs_dir}}"
-{% endif %}
     </value>
   </property>
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/adc32c91/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml
index 6420333..ec0381a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml
@@ -72,9 +72,6 @@ fi
 
 export METASTORE_PORT={{hive_metastore_port}}
 
-{% if sqla_db_used %}
-export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:{{jdbc_libs_dir}}"
-{% endif %}
     </value>
   </property>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/adc32c91/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-env.xml
new file mode 100644
index 0000000..b7b3127
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-env.xml
@@ -0,0 +1,71 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+
+  <!-- hive-env.sh -->
+  <property>
+    <name>content</name>
+    <description>This is the jinja template for hive-env.sh file</description>
+    <value>
+ if [ "$SERVICE" = "cli" ]; then
+   if [ -z "$DEBUG" ]; then
+     export HADOOP_OPTS="$HADOOP_OPTS -XX:NewRatio=12 -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseNUMA -XX:+UseParallelGC -XX:-UseGCOverheadLimit"
+   else
+     export HADOOP_OPTS="$HADOOP_OPTS -XX:NewRatio=12 -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit"
+   fi
+ fi
+
+# The heap size of the jvm stared by hive shell script can be controlled via:
+
+# Larger heap size may be required when running queries over large number of files or partitions.
+# By default hive shell scripts use a heap size of 256 (MB).  Larger heap size would also be
+# appropriate for hive server (hwi etc).
+
+
+# Set HADOOP_HOME to point to a specific hadoop install directory
+HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+
+# Hive Configuration Directory can be controlled by:
+export HIVE_CONF_DIR={{hive_config_dir}}
+
+# Folder containing extra libraries required for hive compilation/execution can be controlled by:
+if [ "${HIVE_AUX_JARS_PATH}" != "" ]; then
+  if [ -f "${HIVE_AUX_JARS_PATH}" ]; then
+    export HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}
+  elif [ -d "/usr/hdp/current/hive-webhcat/share/hcatalog" ]; then
+    export HIVE_AUX_JARS_PATH=/usr/hdp/current/hive-webhcat/share/hcatalog/hive-hcatalog-core.jar
+  fi
+elif [ -d "/usr/hdp/current/hive-webhcat/share/hcatalog" ]; then
+  export HIVE_AUX_JARS_PATH=/usr/hdp/current/hive-webhcat/share/hcatalog/hive-hcatalog-core.jar
+fi
+
+export METASTORE_PORT={{hive_metastore_port}}
+
+{% if sqla_db_used %}
+export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:{{jdbc_libs_dir}}"
+export JAVA_LIBRARY_PATH="$JAVA_LIBRARY_PATH:{{jdbc_libs_dir}}"
+{% endif %}
+    </value>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/adc32c91/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-site.xml
index eaba2bc..e65c9ef 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-site.xml
@@ -29,4 +29,16 @@ limitations under the License.
     </description>
   </property>
 
+  <property>
+    <name>datanucleus.rdbms.datastoreAdapterClassName</name>
+    <value>org.datanucleus.store.rdbms.adapter.SQLAnywhereAdapter</value>
+    <description>Datanucleus Class, This property used only when hive db is SQLA</description>
+    <depends-on>
+      <property>
+        <type>hive-site</type>
+        <name>javax.jdo.option.ConnectionDriverName</name>
+      </property>
+    </depends-on>
+  </property>
+
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/adc32c91/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index 9329032..9a6602a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -161,6 +161,8 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     super(HDP23StackAdvisor, self).recommendHIVEConfigurations(configurations, clusterData, services, hosts)
     putHiveSiteProperty = self.putProperty(configurations, "hive-site", services)
     putHiveServerProperty = self.putProperty(configurations, "hiveserver2-site", services)
+    hive_site_properties = getSiteProperties(configurations, "hive-site")
+    putHiveSitePropertyAttribute = self.putPropertyAttribute(configurations, "hive-site")
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
     # hive_security_authorization == 'ranger'
     if str(configurations["hive-env"]["properties"]["hive_security_authorization"]).lower() == "ranger":
@@ -178,6 +180,13 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
           jvmGCParams = "-XX:+UseG1GC -XX:+ResizeTLAB"
     putHiveSiteProperty('hive.tez.java.opts', "-server -Djava.net.preferIPv4Stack=true -XX:NewRatio=8 -XX:+UseNUMA " + jvmGCParams + " -XX:+PrintGCDetails -verbose:gc -XX:+PrintGCTimeStamps")
 
+    # if hive using sqla db, then we should add DataNucleus property
+    sqla_db_used = 'javax.jdo.option.ConnectionDriverName' in hive_site_properties and \
+                   hive_site_properties['javax.jdo.option.ConnectionDriverName'] == 'sap.jdbc4.sqlanywhere.IDriver'
+    if sqla_db_used:
+      putHiveSiteProperty('datanucleus.rdbms.datastoreAdapterClassName','org.datanucleus.store.rdbms.adapter.SQLAnywhereAdapter')
+    else:
+      putHiveSitePropertyAttribute('datanucleus.rdbms.datastoreAdapterClassName', 'delete', 'true')
 
   def recommendHDFSConfigurations(self, configurations, clusterData, services, hosts):
     super(HDP23StackAdvisor, self).recommendHDFSConfigurations(configurations, clusterData, services, hosts)
@@ -202,7 +211,8 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
       parentValidators = super(HDP23StackAdvisor, self).getServiceConfigurationValidators()
       childValidators = {
         "HDFS": {"hdfs-site": self.validateHDFSConfigurations},
-        "HIVE": {"hiveserver2-site": self.validateHiveServer2Configurations},
+        "HIVE": {"hiveserver2-site": self.validateHiveServer2Configurations,
+                 "hive-site": self.validateHiveConfigurations},
         "HBASE": {"hbase-site": self.validateHBASEConfigurations},
         "KAKFA": {"kafka-broker": self.validateKAFKAConfigurations}        
       }
@@ -227,6 +237,28 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     return self.toConfigurationValidationProblems(validationItems, "hdfs-site")
 
 
+  def validateHiveConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
+    super(HDP23StackAdvisor, self).validateHiveConfigurations(properties, recommendedDefaults, configurations, services, hosts)
+    hive_site = properties
+    validationItems = []
+    sqla_db_used = "javax.jdo.option.ConnectionDriverName" in hive_site and \
+                   hive_site['javax.jdo.option.ConnectionDriverName'] == 'sap.jdbc4.sqlanywhere.IDriver'
+    prop_name = "datanucleus.rdbms.datastoreAdapterClassName"
+    prop_value = "org.datanucleus.store.rdbms.adapter.SQLAnywhereAdapter"
+    if sqla_db_used:
+      if not prop_name in hive_site:
+        validationItems.append({"config-name": prop_name,
+                              "item": self.getWarnItem(
+                              "If Hive using SQLA db." \
+                              " {0} needs to be added with value {1}".format(prop_name,prop_value))})
+      elif prop_name in hive_site and hive_site[prop_name] != "org.datanucleus.store.rdbms.adapter.SQLAnywhereAdapter":
+        validationItems.append({"config-name": prop_name,
+                                "item": self.getWarnItem(
+                                  "If Hive using SQLA db." \
+                                  " {0} needs to be set to {1}".format(prop_name,prop_value))})
+    return self.toConfigurationValidationProblems(validationItems, "hive-site")
+
+
   def validateHiveServer2Configurations(self, properties, recommendedDefaults, configurations, services, hosts):
     super(HDP23StackAdvisor, self).validateHiveServer2Configurations(properties, recommendedDefaults, configurations, services, hosts)
     hive_server2 = properties

http://git-wip-us.apache.org/repos/asf/ambari/blob/adc32c91/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index 7f948bd..08c48d8 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -331,7 +331,8 @@ class TestHDP23StackAdvisor(TestCase):
          'hive.server2.authentication.ldap.url': {'delete': 'true'},
          'hive.server2.tez.default.queues': {
           'entries': [{'value': 'queue1', 'label': 'queue1 queue'}, {'value': 'queue2', 'label': 'queue2 queue'}]
-          }
+          },
+         'datanucleus.rdbms.datastoreAdapterClassName': {'delete': 'true'}
         }
       },
       'hiveserver2-site': {


Mime
View raw message