falcon-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From srik...@apache.org
Subject [2/3] git commit: FALCON-88 Add embedded hive and webhcat for integration tests. Contributed by Venkatesh Seetharam
Date Fri, 30 Aug 2013 05:41:34 GMT
FALCON-88 Add embedded hive and webhcat for integration tests. Contributed by Venkatesh Seetharam


Project: http://git-wip-us.apache.org/repos/asf/incubator-falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-falcon/commit/19bcf999
Tree: http://git-wip-us.apache.org/repos/asf/incubator-falcon/tree/19bcf999
Diff: http://git-wip-us.apache.org/repos/asf/incubator-falcon/diff/19bcf999

Branch: refs/heads/master
Commit: 19bcf9994125bba86bf5ed79ef3d44c5bf0a2f8a
Parents: 8b09e1b
Author: srikanth.sundarrajan <srikanth.sundarrajan@inmobi.com>
Authored: Fri Aug 30 10:52:39 2013 +0530
Committer: srikanth.sundarrajan <srikanth.sundarrajan@inmobi.com>
Committed: Fri Aug 30 10:52:39 2013 +0530

----------------------------------------------------------------------
 hadoop-webapp/pom.xml                           |  21 +++
 .../falcon/listener/HadoopStartupListener.java  | 140 +++++++++++++------
 hadoop-webapp/src/main/resources/hive-site.xml  |  27 ++++
 .../src/main/resources/webhcat-site.xml         |  29 ++++
 pom.xml                                         |  30 +++-
 webapp/pom.xml                                  |   1 +
 6 files changed, 204 insertions(+), 44 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/19bcf999/hadoop-webapp/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-webapp/pom.xml b/hadoop-webapp/pom.xml
index 312a71b..c8c51d6 100644
--- a/hadoop-webapp/pom.xml
+++ b/hadoop-webapp/pom.xml
@@ -91,6 +91,27 @@
             <groupId>org.apache.activemq</groupId>
             <artifactId>activemq-core</artifactId>
         </dependency>
+
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-metastore</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hcatalog</groupId>
+            <artifactId>webhcat</artifactId>
+        </dependency>
+
+        <!-- Hive Metastore and WebHcat fail without these dependencies -->
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+        </dependency>
     </dependencies>
 
     <build>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/19bcf999/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java b/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
index 5114a4b..570ba02 100644
--- a/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
+++ b/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
@@ -19,6 +19,8 @@
 package org.apache.falcon.listener;
 
 import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 
 import javax.servlet.ServletContextEvent;
 import javax.servlet.ServletContextListener;
@@ -28,7 +30,9 @@ import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hcatalog.templeton.AppConfig;
 import org.apache.log4j.Logger;
 
 /**
@@ -48,54 +52,18 @@ public class HadoopStartupListener implements ServletContextListener {
             final String[] emptyArgs = {};
             String hadoopProfle = System.getProperty("hadoop.profile", "1");
             if (hadoopProfle.equals("1")) {
-                NameNode.createNameNode(emptyArgs, conf);
-                DataNode.createDataNode(emptyArgs, conf);
-                JobConf jobConf = new JobConf(conf);
-                /**
-                 * Reflection code:
-                 * JobTracker jt = JobTracker.startTracker(jobConf);
-                 * jt.offerService();
-                 * TaskTracker tt = new TaskTracker(jobConf);
-                 * tt.run();
-                 */
-                Object jt = Class.forName("org.apache.hadoop.mapred.JobTracker")
-                                .getMethod("startTracker", JobConf.class).invoke(null, jobConf);
-                startService(jt, "offerService");
-                Object tt = Class.forName("org.apache.hadoop.mapred.TaskTracker")
-                                .getConstructor(JobConf.class).newInstance(jobConf);
-                startService(tt, "run");
+                startHadoop1Services(conf, emptyArgs);
             } else if (hadoopProfle.equals("2")) {
-                /**
-                 * Reflection code:
-                 * DefaultMetricsSystem.setMiniClusterMode(true);
-                 * ResourceManager resourceManager = new ResourceManager(new MemStore());
-                 * YarnConfiguration yarnConf = new YarnConfiguration(conf);
-                 * resourceManager.init(yarnConf);
-                 * resourceManager.start();
-                 * NodeManager nodeManager = new NodeManager();
-                 * nodeManager.init(yarnConf);
-                 * nodeManager.start();
-                 */
-                Class.forName("org.apache.hadoop.metrics2.lib.DefaultMetricsSystem")
-                                .getMethod("setMiniClusterMode", boolean.class).invoke(null, true);
-                NameNode.createNameNode(emptyArgs, conf);
-                DataNode.createDataNode(emptyArgs, conf);
-
-                Object memStore = instance("org.apache.hadoop.yarn.server.resourcemanager.recovery.MemStore");
-                Object resourceManager = Class.forName("org.apache.hadoop.yarn.server.resourcemanager.ResourceManager")
-                        .getConstructor(Class.forName("org.apache.hadoop.yarn.server.resourcemanager.recovery.Store"))
-                        .newInstance(memStore);
-                Object yarnConf = Class.forName("org.apache.hadoop.yarn.conf.YarnConfiguration")
-                        .getConstructor(Configuration.class).newInstance(conf);
-                invoke(resourceManager, "init", Configuration.class, yarnConf);
-                startService(resourceManager, "start");
-                Object nodeManager = instance("org.apache.hadoop.yarn.server.nodemanager.NodeManager");
-                invoke(nodeManager, "init", Configuration.class, yarnConf);
-                startService(nodeManager, "start");
+                startHadoop2Services(conf, emptyArgs);
             } else {
                 throw new RuntimeException("Unhandled hadoop profile " + hadoopProfle);
             }
+
             startBroker();
+
+            startHiveMetaStore();
+            startHiveWebMetaStore();
+
         } catch (Exception e) {
             e.printStackTrace();
             LOG.error("Unable to start hadoop cluster", e);
@@ -103,6 +71,59 @@ public class HadoopStartupListener implements ServletContextListener {
         }
     }
 
+    private void startHadoop1Services(Configuration conf, String[] emptyArgs)
+        throws IOException, IllegalAccessException, InvocationTargetException,
+               NoSuchMethodException, ClassNotFoundException, InstantiationException {
+
+        NameNode.createNameNode(emptyArgs, conf);
+        DataNode.createDataNode(emptyArgs, conf);
+
+        JobConf jobConf = new JobConf(conf);
+        // JobTracker jt = JobTracker.startTracker(jobConf);
+        // jt.offerService();
+        // TaskTracker tt = new TaskTracker(jobConf);
+        // tt.run();
+
+        Object jt = Class.forName("org.apache.hadoop.mapred.JobTracker")
+                        .getMethod("startTracker", JobConf.class).invoke(null, jobConf);
+        startService(jt, "offerService");
+
+        Object tt = Class.forName("org.apache.hadoop.mapred.TaskTracker")
+                        .getConstructor(JobConf.class).newInstance(jobConf);
+        startService(tt, "run");
+    }
+
+    private void startHadoop2Services(Configuration conf, String[] emptyArgs) throws Exception {
+
+        // DefaultMetricsSystem.setMiniClusterMode(true);
+        // ResourceManager resourceManager = new ResourceManager(new MemStore());
+        // YarnConfiguration yarnConf = new YarnConfiguration(conf);
+        // resourceManager.init(yarnConf);
+        // resourceManager.start();
+        // NodeManager nodeManager = new NodeManager();
+        // nodeManager.init(yarnConf);
+        // nodeManager.start();
+
+        Class.forName("org.apache.hadoop.metrics2.lib.DefaultMetricsSystem")
+                        .getMethod("setMiniClusterMode", boolean.class).invoke(null, true);
+
+        NameNode.createNameNode(emptyArgs, conf);
+        DataNode.createDataNode(emptyArgs, conf);
+
+        Object memStore = instance("org.apache.hadoop.yarn.server.resourcemanager.recovery.MemStore");
+        Object resourceManager = Class.forName("org.apache.hadoop.yarn.server.resourcemanager.ResourceManager")
+                .getConstructor(Class.forName("org.apache.hadoop.yarn.server.resourcemanager.recovery.Store"))
+                .newInstance(memStore);
+        Object yarnConf = Class.forName("org.apache.hadoop.yarn.conf.YarnConfiguration")
+                .getConstructor(Configuration.class).newInstance(conf);
+        invoke(resourceManager, "init", Configuration.class, yarnConf);
+        startService(resourceManager, "start");
+
+        Object nodeManager = instance("org.apache.hadoop.yarn.server.nodemanager.NodeManager");
+        invoke(nodeManager, "init", Configuration.class, yarnConf);
+        startService(nodeManager, "start");
+    }
+
     private void startBroker() throws Exception {
         broker = new BrokerService();
         broker.setUseJmx(false);
@@ -112,6 +133,39 @@ public class HadoopStartupListener implements ServletContextListener {
         broker.start();
     }
 
+    public static final String META_STORE_PORT = "49083";
+    private void startHiveMetaStore() {
+        try {
+            new Thread(new Runnable() {
+                @Override
+                public void run() {
+                    try {
+                        String[] args = new String[]{
+                            "-v",
+                            "-p", META_STORE_PORT,
+                        };
+
+                        HiveMetaStore.main(args);
+                    } catch (Throwable t) {
+                        throw new RuntimeException(t);
+                    }
+                }
+            }).start();
+        } catch (Exception e) {
+            throw new RuntimeException("Unable to start hive metastore server.", e);
+        }
+    }
+
+    public static final String WEB_HCAT_PORT = "48080";
+    private void startHiveWebMetaStore() {
+        String[] args = new String[]{
+            "-D" + AppConfig.PORT + "=" + WEB_HCAT_PORT,
+            "-D" + AppConfig.HADOOP_CONF_DIR + "=./target/webapps/hadoop/conf",
+        };
+
+        org.apache.hcatalog.templeton.Main.main(args);
+    }
+
     private Object instance(String clsName) throws Exception {
         return Class.forName(clsName).newInstance();
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/19bcf999/hadoop-webapp/src/main/resources/hive-site.xml
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/main/resources/hive-site.xml b/hadoop-webapp/src/main/resources/hive-site.xml
new file mode 100644
index 0000000..89d739b
--- /dev/null
+++ b/hadoop-webapp/src/main/resources/hive-site.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<configuration>
+    <!-- Forcing the creation of the db dir under target -->
+    <property>
+        <name>javax.jdo.option.ConnectionURL</name>
+        <value>jdbc:derby:;databaseName=./target/metastore_db;create=true</value>
+    </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/19bcf999/hadoop-webapp/src/main/resources/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/main/resources/webhcat-site.xml b/hadoop-webapp/src/main/resources/webhcat-site.xml
new file mode 100644
index 0000000..6d0772c
--- /dev/null
+++ b/hadoop-webapp/src/main/resources/webhcat-site.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<configuration>
+
+    <property>
+        <name>templeton.hadoop.conf.dir</name>
+        <value>${env.HADOOP_CONF_DIR}</value>
+        <description>The path to the Hadoop configuration.</description>
+    </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/19bcf999/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d211e21..7498a69 100644
--- a/pom.xml
+++ b/pom.xml
@@ -98,10 +98,11 @@
 
         <hadoop.profile>1</hadoop.profile>
         <hadoop.version>1.1.2</hadoop.version>
-        <slf4j.version>1.2</slf4j.version>
+        <slf4j.version>1.6.1</slf4j.version>
         <oozie.version>3.2.2</oozie.version>
         <activemq.version>5.4.3</activemq.version>
         <hadoop-distcp.version>0.3</hadoop-distcp.version>
+        <hive.version>0.11.0</hive.version>
         <jetty.version>6.1.26</jetty.version>
         <internal.maven.repo>file:///tmp/falcontemprepo</internal.maven.repo>
         <skipCheck>false</skipCheck>
@@ -703,6 +704,30 @@
             </dependency>
 
             <dependency>
+                <groupId>org.apache.hive</groupId>
+                <artifactId>hive-metastore</artifactId>
+                <version>${hive.version}</version>
+                <exclusions>
+                    <exclusion> <!--Oozie already imports this-->
+                        <groupId>org.apache.derby</groupId>
+                        <artifactId>derby</artifactId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.hcatalog</groupId>
+                <artifactId>webhcat</artifactId>
+                <version>${hive.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>xerces</groupId>
+                        <artifactId>xercesImpl</artifactId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+
+            <dependency>
                 <groupId>net.sourceforge.findbugs</groupId>
                 <artifactId>annotations</artifactId>
                 <version>1.3.2</version>
@@ -881,6 +906,9 @@
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-javadoc-plugin</artifactId>
+                <configuration>
+                    <skip>${skipCheck}</skip>
+                </configuration>
                 <executions>
                     <execution>
                         <id>attach-javadocs</id>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/19bcf999/webapp/pom.xml
----------------------------------------------------------------------
diff --git a/webapp/pom.xml b/webapp/pom.xml
index ffeeb46..fc7f9e6 100644
--- a/webapp/pom.xml
+++ b/webapp/pom.xml
@@ -174,6 +174,7 @@
                             <dependencySourceIncludes>
                                 <dependencySourceInclude>org.apache.falcon:*</dependencySourceInclude>
                             </dependencySourceIncludes>
+                            <skip>${skipCheck}</skip>
                         </configuration>
                     </execution>
                 </executions>


Mime
View raw message