falcon-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From venkat...@apache.org
Subject [2/5] git commit: FALCON-757 Discontinue support for Hadoop-1.x. Contributed by Sowmya Ramesh
Date Tue, 07 Oct 2014 00:15:44 GMT
FALCON-757 Discontinue support for Hadoop-1.x. Contributed by Sowmya Ramesh


Project: http://git-wip-us.apache.org/repos/asf/incubator-falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-falcon/commit/b9c7ffd5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-falcon/tree/b9c7ffd5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-falcon/diff/b9c7ffd5

Branch: refs/heads/master
Commit: b9c7ffd5153d95531dd3af1586d746c269951c6c
Parents: a00b2ef
Author: Venkatesh Seetharam <venkatesh@apache.org>
Authored: Mon Oct 6 16:05:50 2014 -0700
Committer: Venkatesh Seetharam <venkatesh@apache.org>
Committed: Mon Oct 6 16:05:50 2014 -0700

----------------------------------------------------------------------
 CHANGES.txt                                     |   3 +
 Installation-steps.txt                          |   4 +-
 .../resources/hdfs-replication-workflow.xml     |   4 -
 client/pom.xml                                  |  11 +-
 .../org/apache/falcon/recipe/RecipeTool.java    |   2 +-
 client/src/main/resources/cluster-0.1.xsd       |   6 +-
 common/pom.xml                                  |  17 +-
 .../org/apache/falcon/entity/ClusterHelper.java |   2 +-
 .../falcon/hadoop/HadoopClientFactory.java      |   7 +-
 .../apache/falcon/entity/AbstractTestBase.java  |   4 +-
 .../falcon/entity/FileSystemStorageTest.java    |   4 +-
 .../entity/parser/ClusterEntityParserTest.java  |   6 +-
 .../retention/EvictedInstanceSerDeTest.java     |   3 +-
 .../apache/falcon/update/UpdateHelperTest.java  |   5 +-
 docs/src/site/twiki/EntitySpecification.twiki   |   4 +-
 docs/src/site/twiki/InstallationSteps.twiki     |  12 +-
 docs/src/site/twiki/OnBoarding.twiki            |   6 +-
 falcon-regression/README.md                     |   8 +-
 falcon-regression/merlin-core/pom.xml           |  31 +--
 falcon-regression/merlin/pom.xml                |  29 ---
 .../cluster-0.1.xml                             |   6 +-
 .../merlin/src/test/resources/cluster-0.1.xsd   |   2 +-
 falcon-regression/pom.xml                       |  37 ----
 hadoop-dependencies/pom.xml                     |  12 +-
 .../apache/falcon/hadoop/JailedFileSystem.java  |  12 +-
 .../mapred/ClassicClientProtocolProvider.java   |   3 +-
 hadoop-webapp/pom.xml                           |  35 +---
 .../falcon/listener/HadoopStartupListener.java  |  42 ----
 hadoop-webapp/src/main/resources/core-site.xml  |   2 +-
 hadoop-webapp/src/main/resources/hive-site.xml  |   2 +-
 .../src/main/resources/mapred-site.xml          |   2 +-
 .../apache/hadoop/mapred/LocalRunnerTest.java   |  11 +-
 .../org/apache/hadoop/mapred/LocalRunnerV1.java | 208 -------------------
 messaging/pom.xml                               |  16 +-
 .../falcon/messaging/FeedProducerTest.java      |   3 +-
 oozie-el-extensions/pom.xml                     |  11 +-
 oozie/pom.xml                                   |  34 +--
 .../OozieOrchestrationWorkflowBuilder.java      |   2 +-
 .../action/feed/replication-action.xml          |   4 -
 .../feed/OozieFeedWorkflowBuilderTest.java      |   5 +-
 .../OozieProcessWorkflowBuilderTest.java        |   3 +-
 .../falcon/logging/v1/TaskLogRetrieverV1.java   |  90 --------
 pom.xml                                         |  92 +-------
 prism/pom.xml                                   |  13 +-
 replication/pom.xml                             |  12 +-
 rerun/pom.xml                                   |  12 +-
 .../falcon/rerun/handler/TestLateData.java      |   4 +-
 retention/pom.xml                               |  15 +-
 .../falcon/retention/FeedEvictorTest.java       |  30 +--
 test-util/pom.xml                               |  17 +-
 .../falcon/cluster/util/EmbeddedCluster.java    |   6 +-
 .../falcon/cluster/util/StandAloneCluster.java  |   4 +-
 webapp/pom.xml                                  |  14 +-
 53 files changed, 106 insertions(+), 823 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index d258dae..3f77dfb 100755
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -33,6 +33,9 @@ Trunk (Unreleased)
    FALCON-263 API to get workflow parameters. (pavan kumar kolamuri via Shwetha GS)
 
   IMPROVEMENTS
+   FALCON-757 Discontinue support for Hadoop-1.x (Sowmya Ramesh via
+   Venkatesh Seetharam)
+
    FALCON-748 Falcon throws '413 Full Head' error message when kerberos is
    enabled with AD (Balu Vellanki via Venkatesh Seetharam)
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/Installation-steps.txt
----------------------------------------------------------------------
diff --git a/Installation-steps.txt b/Installation-steps.txt
index cd24a04..85eabed 100644
--- a/Installation-steps.txt
+++ b/Installation-steps.txt
@@ -38,6 +38,7 @@ a. Building falcon from the source release
 * export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=256m" && mvn clean install
 
 [optionally -Dhadoop.version=<<hadoop.version>> can be appended to build for a specific version of hadoop]
+*Note:* Falcon drops support for Hadoop-1 and only supports Hadoop-2 from Falcon 0.6 onwards
 
 
 b. Building falcon from the source repository
@@ -48,6 +49,7 @@ b. Building falcon from the source repository
 * export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=256m" && mvn clean install
 
 [optionally -Dhadoop.version=<<hadoop.version>> can be appended to build for a specific version of hadoop]
+*Note:* Falcon drops support for Hadoop-1 and only supports Hadoop-2 from Falcon 0.6 onwards
 
 
 2. Deploying Falcon
@@ -122,5 +124,5 @@ d. Stopping Falcon Server
 
 * cd <<project home>>
 * src/bin/pacakge.sh <<hadoop-version>>
-  >> ex. src/bin/pacakge.sh 1.1.2 or src/bin/pacakge.sh 0.20.2-cdh3u5
+  >> ex. src/bin/package.sh 2.5.0
   >> oozie bundle available in target/package/oozie-4.0.0/distro/target/oozie-4.0.0-distro.tar.gz

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/addons/recipes/hdfs-replication/src/main/resources/hdfs-replication-workflow.xml
----------------------------------------------------------------------
diff --git a/addons/recipes/hdfs-replication/src/main/resources/hdfs-replication-workflow.xml b/addons/recipes/hdfs-replication/src/main/resources/hdfs-replication-workflow.xml
index 7043e42..6868011 100644
--- a/addons/recipes/hdfs-replication/src/main/resources/hdfs-replication-workflow.xml
+++ b/addons/recipes/hdfs-replication/src/main/resources/hdfs-replication-workflow.xml
@@ -27,10 +27,6 @@
                     <name>oozie.launcher.mapreduce.job.user.classpath.first</name>
                     <value>true</value>
                 </property>
-                <property> <!-- hadoop 1 parameter -->
-                    <name>oozie.launcher.mapreduce.user.classpath.first</name>
-                    <value>true</value>
-                </property>
                 <property>
                     <name>mapred.job.queue.name</name>
                     <value>${queueName}</value>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/client/pom.xml
----------------------------------------------------------------------
diff --git a/client/pom.xml b/client/pom.xml
index e76865d..2423442 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -34,22 +34,13 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-auth</artifactId>
                 </dependency>
                 <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java b/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java
index 0e94b62..bd91e8e 100644
--- a/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java
+++ b/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java
@@ -258,7 +258,7 @@ public class RecipeTool extends Configured implements Tool {
 
     private static Configuration getConfiguration(final String storageEndpoint) throws Exception {
         Configuration conf = new Configuration();
-        conf.set("fs.default.name", storageEndpoint);
+        conf.set("fs.defaultFS", storageEndpoint);
         return conf;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/client/src/main/resources/cluster-0.1.xsd
----------------------------------------------------------------------
diff --git a/client/src/main/resources/cluster-0.1.xsd b/client/src/main/resources/cluster-0.1.xsd
index 79d9fdd..6fd9de7 100644
--- a/client/src/main/resources/cluster-0.1.xsd
+++ b/client/src/main/resources/cluster-0.1.xsd
@@ -105,7 +105,7 @@
                 entities in workflow engine, to save and read data from hadoop and to
                 publish messages to messaging engine.
                 endpoint: is the url for each interface; examples: for write it is the
-                url of hdfs (fs.default.name) and
+                url of hdfs (fs.defaultFS) and
                 for workflow it is url of workflow engine like oozie.
                 version: The current runtime version of each interface.
             </xs:documentation>
@@ -158,9 +158,9 @@
                 readonly specifies the hadoop's hftp address, it's endpoint is the value of
                 dfs.http.address.ex: hftp://corp.namenode:50070/
                 write specifies the interface to write to hdfs, it's endpoint is the value
-                of fs.default.name.ex: hdfs://corp.namenode:8020
+                of fs.defaultFS. ex: hdfs://corp.namenode:8020
                 execute specifies the interface for job tracker, it's endpoint is the value
-                of mapred.job.tracker. ex:corp.jt:8021
+                of mapreduce.jobtracker.address. ex:corp.jt:8021
                 workflow specifies the interface for workflow engine, example of it's
                 endpoint is value for OOZIE_URL.ex: http://corp.oozie:11000/oozie
                 messaging specifies the interface for sending feed availability messages, it's

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index 31280e4..39d3782 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -34,28 +34,13 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-                
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-test</artifactId>
-                    <scope>test</scope>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>
                 </dependency>
                 <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/common/src/main/java/org/apache/falcon/entity/ClusterHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/ClusterHelper.java b/common/src/main/java/org/apache/falcon/entity/ClusterHelper.java
index 2689cb7..6945cea 100644
--- a/common/src/main/java/org/apache/falcon/entity/ClusterHelper.java
+++ b/common/src/main/java/org/apache/falcon/entity/ClusterHelper.java
@@ -47,7 +47,7 @@ public final class ClusterHelper {
         conf.set(HadoopClientFactory.FS_DEFAULT_NAME_KEY, storageUrl);
 
         final String executeEndPoint = getMREndPoint(cluster);
-        conf.set(HadoopClientFactory.MR_JOB_TRACKER_KEY, executeEndPoint);
+        conf.set(HadoopClientFactory.MR_JT_ADDRESS_KEY, executeEndPoint);
         conf.set(HadoopClientFactory.YARN_RM_ADDRESS_KEY, executeEndPoint);
 
         if (cluster.getProperties() != null) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/common/src/main/java/org/apache/falcon/hadoop/HadoopClientFactory.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/hadoop/HadoopClientFactory.java b/common/src/main/java/org/apache/falcon/hadoop/HadoopClientFactory.java
index 48b45a2..ecdbf14 100644
--- a/common/src/main/java/org/apache/falcon/hadoop/HadoopClientFactory.java
+++ b/common/src/main/java/org/apache/falcon/hadoop/HadoopClientFactory.java
@@ -24,6 +24,7 @@ import org.apache.falcon.security.CurrentUser;
 import org.apache.falcon.security.SecurityUtil;
 import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
@@ -39,8 +40,8 @@ import java.security.PrivilegedExceptionAction;
  */
 public final class HadoopClientFactory {
 
-    public static final String FS_DEFAULT_NAME_KEY = "fs.default.name";
-    public static final String MR_JOB_TRACKER_KEY = "mapred.job.tracker";
+    public static final String FS_DEFAULT_NAME_KEY = CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
+    public static final String MR_JT_ADDRESS_KEY = "mapreduce.jobtracker.address";
     public static final String YARN_RM_ADDRESS_KEY = "yarn.resourcemanager.address";
 
     private static final HadoopClientFactory INSTANCE = new HadoopClientFactory();
@@ -173,7 +174,7 @@ public final class HadoopClientFactory {
      */
     public static void validateJobClient(String executeUrl) throws IOException {
         final JobConf jobConf = new JobConf();
-        jobConf.set(MR_JOB_TRACKER_KEY, executeUrl);
+        jobConf.set(MR_JT_ADDRESS_KEY, executeUrl);
         jobConf.set(YARN_RM_ADDRESS_KEY, executeUrl);
 
         UserGroupInformation loginUser = UserGroupInformation.getLoginUser();

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/common/src/test/java/org/apache/falcon/entity/AbstractTestBase.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/AbstractTestBase.java b/common/src/test/java/org/apache/falcon/entity/AbstractTestBase.java
index 2140335..2d41661 100644
--- a/common/src/test/java/org/apache/falcon/entity/AbstractTestBase.java
+++ b/common/src/test/java/org/apache/falcon/entity/AbstractTestBase.java
@@ -28,6 +28,7 @@ import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.process.Process;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.security.CurrentUser;
 import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.conf.Configuration;
@@ -97,7 +98,8 @@ public class AbstractTestBase {
         case CLUSTER:
             Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass().getResource(CLUSTER_XML));
             cluster.setName(name);
-            ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(conf.get("fs.default.name"));
+            ClusterHelper.getInterface(cluster, Interfacetype.WRITE)
+                    .setEndpoint(conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY));
             store.publish(type, cluster);
             break;
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/common/src/test/java/org/apache/falcon/entity/FileSystemStorageTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/FileSystemStorageTest.java b/common/src/test/java/org/apache/falcon/entity/FileSystemStorageTest.java
index 4bb7772..b97564d 100644
--- a/common/src/test/java/org/apache/falcon/entity/FileSystemStorageTest.java
+++ b/common/src/test/java/org/apache/falcon/entity/FileSystemStorageTest.java
@@ -23,6 +23,7 @@ import org.apache.falcon.cluster.util.EmbeddedCluster;
 import org.apache.falcon.entity.v0.AccessControlList;
 import org.apache.falcon.entity.v0.feed.Location;
 import org.apache.falcon.entity.v0.feed.LocationType;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.security.CurrentUser;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -167,7 +168,8 @@ public class FileSystemStorageTest {
         FileSystem fs = cluster.getFileSystem();
         fs.mkdirs(path);
 
-        FileSystemStorage storage = new FileSystemStorage(cluster.getConf().get("fs.default.name"), locations);
+        FileSystemStorage storage = new FileSystemStorage(
+                cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY), locations);
         storage.validateACL(new TestACL(user, user, "0x755"));
 
         //-ve case

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/common/src/test/java/org/apache/falcon/entity/parser/ClusterEntityParserTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/parser/ClusterEntityParserTest.java b/common/src/test/java/org/apache/falcon/entity/parser/ClusterEntityParserTest.java
index 41f041a..08c4c29 100644
--- a/common/src/test/java/org/apache/falcon/entity/parser/ClusterEntityParserTest.java
+++ b/common/src/test/java/org/apache/falcon/entity/parser/ClusterEntityParserTest.java
@@ -34,6 +34,7 @@ import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.cluster.Interface;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.util.StartupProperties;
 import org.testng.Assert;
 import org.testng.annotations.AfterClass;
@@ -54,7 +55,8 @@ public class ClusterEntityParserTest extends AbstractTestBase {
         InputStream stream = this.getClass().getResourceAsStream(CLUSTER_XML);
 
         Cluster cluster = parser.parse(stream);
-        ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(conf.get("fs.default.name"));
+        ClusterHelper.getInterface(cluster, Interfacetype.WRITE)
+                .setEndpoint(conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY));
 
         Assert.assertNotNull(cluster);
         Assert.assertEquals(cluster.getName(), "testCluster");
@@ -69,7 +71,7 @@ public class ClusterEntityParserTest extends AbstractTestBase {
         Assert.assertEquals(readonly.getVersion(), "0.20.2");
 
         Interface write = ClusterHelper.getInterface(cluster, Interfacetype.WRITE);
-        //assertEquals(write.getEndpoint(), conf.get("fs.default.name"));
+        //assertEquals(write.getEndpoint(), conf.get("fs.defaultFS"));
         Assert.assertEquals(write.getVersion(), "0.20.2");
 
         Interface workflow = ClusterHelper.getInterface(cluster, Interfacetype.WORKFLOW);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/common/src/test/java/org/apache/falcon/retention/EvictedInstanceSerDeTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/retention/EvictedInstanceSerDeTest.java b/common/src/test/java/org/apache/falcon/retention/EvictedInstanceSerDeTest.java
index 4fa38eb..0f2ee7b 100644
--- a/common/src/test/java/org/apache/falcon/retention/EvictedInstanceSerDeTest.java
+++ b/common/src/test/java/org/apache/falcon/retention/EvictedInstanceSerDeTest.java
@@ -19,6 +19,7 @@
 package org.apache.falcon.retention;
 
 import org.apache.falcon.cluster.util.EmbeddedCluster;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
@@ -47,7 +48,7 @@ public class EvictedInstanceSerDeTest {
     @BeforeClass
     public void start() throws Exception {
         cluster = EmbeddedCluster.newCluster("test");
-        String hdfsUrl = cluster.getConf().get("fs.default.name");
+        String hdfsUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
 
         fs = FileSystem.get(cluster.getConf());
         csvFilePath = new Path(hdfsUrl + "/falcon/staging/feed/instancePaths-2014-10-01-01-00.csv");

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/common/src/test/java/org/apache/falcon/update/UpdateHelperTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/update/UpdateHelperTest.java b/common/src/test/java/org/apache/falcon/update/UpdateHelperTest.java
index 71f251b..ef0d769 100644
--- a/common/src/test/java/org/apache/falcon/update/UpdateHelperTest.java
+++ b/common/src/test/java/org/apache/falcon/update/UpdateHelperTest.java
@@ -41,6 +41,7 @@ import org.apache.falcon.entity.v0.feed.Properties;
 import org.apache.falcon.entity.v0.feed.Property;
 import org.apache.falcon.entity.v0.process.PolicyType;
 import org.apache.falcon.entity.v0.process.Process;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -141,8 +142,8 @@ public class UpdateHelperTest extends AbstractTestBase {
         //Don't Update if the lib is not updated
         fs.delete(new Path(staging, "checksums"), true);
         FSDataOutputStream stream = fs.create(new Path(staging, "checksums"));
-        stream.write((dfsCluster.getConf().get("fs.default.name") + lib.toString() + "="
-                + fs.getFileChecksum(lib).toString() + "\n").getBytes());
+        stream.write((dfsCluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY)
+                + lib.toString() + "=" + fs.getFileChecksum(lib).toString() + "\n").getBytes());
         stream.close();
         Assert.assertFalse(UpdateHelper.isWorkflowUpdated(cluster, process, staging));
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/docs/src/site/twiki/EntitySpecification.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/EntitySpecification.twiki b/docs/src/site/twiki/EntitySpecification.twiki
index df572cb..ffc350c 100644
--- a/docs/src/site/twiki/EntitySpecification.twiki
+++ b/docs/src/site/twiki/EntitySpecification.twiki
@@ -29,14 +29,14 @@ this would be used in the context of feed replication.
 <verbatim>
 <interface type="write" endpoint="hdfs://localhost:8020" version="0.20.2" />
 </verbatim>
-A write interface specifies the interface to write to hdfs, it's endpoint is the value of fs.default.name.
+A write interface specifies the interface to write to hdfs; its endpoint is the value of fs.defaultFS.
 Falcon uses this interface to write system data to hdfs and feeds referencing this cluster are written to hdfs
 using the same write interface.
 
 <verbatim>
 <interface type="execute" endpoint="localhost:8021" version="0.20.2" />
 </verbatim>
-An execute interface specifies the interface for job tracker, it's endpoint is the value of mapred.job.tracker. 
+An execute interface specifies the interface for the job tracker; its endpoint is the value of mapreduce.jobtracker.address.
 Falcon uses this interface to submit the processes as jobs on !JobTracker defined here.
 
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/docs/src/site/twiki/InstallationSteps.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/InstallationSteps.twiki b/docs/src/site/twiki/InstallationSteps.twiki
index 70f2cf6..2813c6c 100644
--- a/docs/src/site/twiki/InstallationSteps.twiki
+++ b/docs/src/site/twiki/InstallationSteps.twiki
@@ -8,10 +8,10 @@ git clone https://git-wip-us.apache.org/repos/asf/incubator-falcon.git falcon
 
 cd falcon
 
-export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=256m" && mvn clean install [For hadoop 1]
-export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=256m" && mvn clean install -Phadoop-2 [For hadoop 2]
+export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=256m" && mvn clean install
 
 [optionally -Dhadoop.version=<<hadoop.version>> can be appended to build for a specific version of hadoop]
+*Note:* Falcon drops support for Hadoop-1 and only supports Hadoop-2 from Falcon 0.6 onwards
 [optionally -Doozie.version=<<oozie version>> can be appended to build with a specific version of oozie. Oozie versions >= 3.2.0-incubating are supported]
 
 </verbatim>
@@ -21,8 +21,7 @@ Once the build successfully completes, artifacts can be packaged for deployment.
 *Embedded Mode*
 <verbatim>
 
-mvn clean assembly:assembly -DskipTests -DskipCheck=true [For hadoop 1]
-mvn clean assembly:assembly -DskipTests -DskipCheck=true -P hadoop-2 [For hadoop 2]
+mvn clean assembly:assembly -DskipTests -DskipCheck=true
 
 </verbatim>
 
@@ -63,8 +62,7 @@ Tar is structured as follows
 
 <verbatim>
 
-mvn clean assembly:assembly -DskipTests -DskipCheck=true -Pdistributed,hadoop-1 [For hadoop 1]
-mvn clean assembly:assembly -DskipTests -DskipCheck=true -Pdistributed,hadoop-2 [For hadoop 2]
+mvn clean assembly:assembly -DskipTests -DskipCheck=true -Pdistributed,hadoop-2
 
 </verbatim>
 
@@ -249,7 +247,7 @@ bin/prism-stop
 cd <<project home>>
 src/bin/package.sh <<hadoop-version>> <<oozie-version>>
 
->> ex. src/bin/package.sh 1.1.2 3.1.3-incubating or src/bin/package.sh 0.20.2-cdh3u5 4.0.0
+>> ex. src/bin/package.sh 2.5.0 4.0.0
 >> Falcon package is available in <<falcon home>>/target/falcon-<<version>>-bin.tar.gz
 >> Oozie package is available in <<falcon home>>/target/oozie-3.3.2-distro.tar.gz
 </verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/docs/src/site/twiki/OnBoarding.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/OnBoarding.twiki b/docs/src/site/twiki/OnBoarding.twiki
index fd5bec7..7348665 100644
--- a/docs/src/site/twiki/OnBoarding.twiki
+++ b/docs/src/site/twiki/OnBoarding.twiki
@@ -24,11 +24,11 @@ Cluster definition that contains end points for name node, job tracker, oozie an
 <cluster colo="ua2" description="" name="corp" xmlns="uri:falcon:cluster:0.1"
     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">    
     <interfaces>
-        <interface type="readonly" endpoint="hftp://name-node.com:50070" version="0.20.2-cdh3u0" />
+        <interface type="readonly" endpoint="hftp://name-node.com:50070" version="2.5.0" />
 
-        <interface type="write" endpoint="hdfs://name-node.com:54310" version="0.20.2-cdh3u0" />
+        <interface type="write" endpoint="hdfs://name-node.com:54310" version="2.5.0" />
 
-        <interface type="execute" endpoint="job-tracker:54311" version="0.20.2-cdh3u0" />
+        <interface type="execute" endpoint="job-tracker:54311" version="2.5.0" />
 
         <interface type="workflow" endpoint="http://oozie.com:11000/oozie/" version="3.1.4" />
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/falcon-regression/README.md
----------------------------------------------------------------------
diff --git a/falcon-regression/README.md b/falcon-regression/README.md
index 659657c..3a0a3cc 100644
--- a/falcon-regression/README.md
+++ b/falcon-regression/README.md
@@ -1,4 +1,3 @@
-
  Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements.  See the NOTICE file
  distributed with this work for additional information
@@ -29,7 +28,6 @@ merlin-core has all the utils used by merlin
 Build Command : 
 ------------------
 
-Fast Build : mvn clean install -DskipTests -DskipCheck=true -Phadoop-1
-Regression build : mvn clean install -Phadoop-1
-Profiles Supported: hadoop-1,hadoop-2
-(hadoop-1 is by default for chd repo)
+Fast Build : mvn clean install -DskipTests -DskipCheck=true -Phadoop-2
+Regression build : mvn clean install -Phadoop-2
+Profiles Supported: hadoop-2

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/falcon-regression/merlin-core/pom.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/pom.xml b/falcon-regression/merlin-core/pom.xml
index 5caaab9..21641a3 100644
--- a/falcon-regression/merlin-core/pom.xml
+++ b/falcon-regression/merlin-core/pom.xml
@@ -33,35 +33,6 @@
     <packaging>jar</packaging>
     <profiles>
         <profile>
-            <id>hadoop-1</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                    <exclusions>
-                        <exclusion>
-                            <groupId>org.eclipse.jdt</groupId>
-                            <artifactId>core</artifactId>
-                        </exclusion>
-                        <exclusion>
-                            <groupId>tomcat</groupId>
-                            <artifactId>jasper-runtime</artifactId>
-                        </exclusion>
-                        <exclusion>
-                            <groupId>tomcat</groupId>
-                            <artifactId>jasper-compiler</artifactId>
-                        </exclusion>
-                    </exclusions>
-                </dependency>
-
-                <dependency>
-                    <groupId>org.apache.hive.hcatalog</groupId>
-                    <artifactId>webhcat-java-client</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-
-        <profile>
             <id>hadoop-2</id>
             <dependencies>
             	<dependency>
@@ -189,4 +160,4 @@
             <artifactId>falcon-client</artifactId>
         </dependency>
     </dependencies>
-</project>
\ No newline at end of file
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/falcon-regression/merlin/pom.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/pom.xml b/falcon-regression/merlin/pom.xml
index b01aaaa..9766bf6 100644
--- a/falcon-regression/merlin/pom.xml
+++ b/falcon-regression/merlin/pom.xml
@@ -31,35 +31,6 @@
     <packaging>jar</packaging>
     <profiles>
         <profile>
-            <id>hadoop-1</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                    <exclusions>
-                        <exclusion>
-                            <groupId>org.eclipse.jdt</groupId>
-                            <artifactId>core</artifactId>
-                        </exclusion>
-                        <exclusion>
-                            <groupId>tomcat</groupId>
-                            <artifactId>jasper-runtime</artifactId>
-                        </exclusion>
-                        <exclusion>
-                            <groupId>tomcat</groupId>
-                            <artifactId>jasper-compiler</artifactId>
-                        </exclusion>
-                    </exclusions>
-                </dependency>
-
-                <dependency>
-                    <groupId>org.apache.hive.hcatalog</groupId>
-                    <artifactId>webhcat-java-client</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-
-        <profile>
             <id>hadoop-2</id>
             <dependencies>
                 <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/falcon-regression/merlin/src/test/resources/LocalDC_feedReplicaltion_BillingRC/cluster-0.1.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/resources/LocalDC_feedReplicaltion_BillingRC/cluster-0.1.xml b/falcon-regression/merlin/src/test/resources/LocalDC_feedReplicaltion_BillingRC/cluster-0.1.xml
index afa86c4..1b16836 100755
--- a/falcon-regression/merlin/src/test/resources/LocalDC_feedReplicaltion_BillingRC/cluster-0.1.xml
+++ b/falcon-regression/merlin/src/test/resources/LocalDC_feedReplicaltion_BillingRC/cluster-0.1.xml
@@ -19,9 +19,9 @@
 
 <cluster name="ivory2-stg4" description="" colo="stg" xmlns="uri:falcon:cluster:0.1">
     <interfaces>
-        <interface type="readonly" endpoint="hftp://stg-nn.blue.ua2.inmobi.com:50070" version="0.20.2-cdh3u0"/>
-        <interface type="execute" endpoint="stg-jt.blue.ua2.inmobi.com:54311" version="0.20.2-cdh3u0"/>
-        <interface type="write" endpoint="hdfs://stg-nn.blue.ua2.inmobi.com:54310" version="0.20.2-cdh3u0"/>
+        <interface type="readonly" endpoint="hftp://stg-nn.blue.ua2.inmobi.com:50070" version="2.5.0"/>
+        <interface type="execute" endpoint="stg-jt.blue.ua2.inmobi.com:54311" version="2.5.0"/>
+        <interface type="write" endpoint="hdfs://stg-nn.blue.ua2.inmobi.com:54310" version="2.5.0"/>
         <interface type="messaging" endpoint="tcp://gs1134.blue.ua2.inmobi.com:61618?daemon=true" version="5.1.6"/>
         <interface type="workflow" endpoint="http://gs1134.blue.ua2.inmobi.com:11002/oozie/" version="3.1.4"/>
     </interfaces>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/falcon-regression/merlin/src/test/resources/cluster-0.1.xsd
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/resources/cluster-0.1.xsd b/falcon-regression/merlin/src/test/resources/cluster-0.1.xsd
index a7b2750..9da349a 100644
--- a/falcon-regression/merlin/src/test/resources/cluster-0.1.xsd
+++ b/falcon-regression/merlin/src/test/resources/cluster-0.1.xsd
@@ -159,7 +159,7 @@
                 write specifies the interface to write to hdfs, it's endpoint is the value
                 of fs.default.name.ex: hdfs://corp.namenode:8020
                 execute specifies the interface for job tracker, it's endpoint is the value
-                of mapred.job.tracker. ex:corp.jt:8021
+                of mapreduce.jobtracker.address. ex:corp.jt:8021
                 workflow specifies the interface for workflow engine, example of it's
                 endpoint is value for OOZIE_URL.ex: http://corp.oozie:11000/oozie
                 messaging specifies the interface for sending feed availability messages, it's

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/falcon-regression/pom.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/pom.xml b/falcon-regression/pom.xml
index b1931d8..0424cb8 100644
--- a/falcon-regression/pom.xml
+++ b/falcon-regression/pom.xml
@@ -46,43 +46,6 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
-            <properties>
-                <hadoop1.version>0.20.2-cdh3u3</hadoop1.version>
-                <hive.version>0.12.0</hive.version>
-            </properties>
-            <dependencyManagement>
-                <dependencies>
-                    <dependency>
-                        <groupId>org.apache.hadoop</groupId>
-                        <artifactId>hadoop-core</artifactId>
-                        <version>${hadoop1.version}</version>
-                        <exclusions>
-                            <exclusion>
-                                <groupId>org.eclipse.jdt</groupId>
-                                <artifactId>core</artifactId>
-                            </exclusion>
-                            <exclusion>
-                                <groupId>tomcat</groupId>
-                                <artifactId>jasper-runtime</artifactId>
-                            </exclusion>
-                            <exclusion>
-                                <groupId>tomcat</groupId>
-                                <artifactId>jasper-compiler</artifactId>
-                            </exclusion>
-                        </exclusions>
-                    </dependency>
-
-                    <dependency>
-                        <groupId>org.apache.hive.hcatalog</groupId>
-                        <artifactId>webhcat-java-client</artifactId>
-                        <version>${hive.version}</version>
-                    </dependency>
-                </dependencies>
-            </dependencyManagement>
-        </profile>
-
-        <profile>
             <id>hadoop-2</id>
             <dependencyManagement>
                 <dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/hadoop-dependencies/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-dependencies/pom.xml b/hadoop-dependencies/pom.xml
index 2ebc20f..b3e3ebc 100644
--- a/hadoop-dependencies/pom.xml
+++ b/hadoop-dependencies/pom.xml
@@ -32,23 +32,13 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>
                     <scope>compile</scope>
                 </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/hadoop-dependencies/src/main/java/org/apache/falcon/hadoop/JailedFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-dependencies/src/main/java/org/apache/falcon/hadoop/JailedFileSystem.java b/hadoop-dependencies/src/main/java/org/apache/falcon/hadoop/JailedFileSystem.java
index 584929f..72e390e 100644
--- a/hadoop-dependencies/src/main/java/org/apache/falcon/hadoop/JailedFileSystem.java
+++ b/hadoop-dependencies/src/main/java/org/apache/falcon/hadoop/JailedFileSystem.java
@@ -98,11 +98,6 @@ public class JailedFileSystem extends FileSystem {
     }
 
     @Override
-    public boolean delete(Path f) throws IOException {
-        return delete(toLocalPath(f), false);
-    }
-
-    @Override
     public boolean delete(Path f, boolean recursive) throws IOException {
         Path localPath = toLocalPath(f);
         if (localPath.toUri().getPath().trim().equals("/")) {
@@ -121,10 +116,11 @@ public class JailedFileSystem extends FileSystem {
             FileStatus[] jailFileStatuses = new FileStatus[fileStatuses.length];
             for (int index = 0; index < fileStatuses.length; index++) {
                 FileStatus status = fileStatuses[index];
-                jailFileStatuses[index] = new FileStatus(status.getLen(), status.isDir(),
+                jailFileStatuses[index] = new FileStatus(status.getLen(), status.isDirectory(),
                         status.getReplication(), status.getBlockSize(), status.getModificationTime(),
                         status.getAccessTime(), status.getPermission(), status.getOwner(), status.getGroup(),
-                        fromLocalPath(status.getPath()).makeQualified(this));
+                        fromLocalPath(status.getPath())
+                                .makeQualified(this.getUri(), this.getWorkingDirectory()));
             }
             return jailFileStatuses;
         }
@@ -181,7 +177,7 @@ public class JailedFileSystem extends FileSystem {
         if (status == null) {
             return null;
         }
-        return new FileStatus(status.getLen(), status.isDir(),
+        return new FileStatus(status.getLen(), status.isDirectory(),
                 status.getReplication(), status.getBlockSize(), status.getModificationTime(),
                 status.getAccessTime(), status.getPermission(), status.getOwner(), status.getGroup(),
                 fromLocalPath(status.getPath()).makeQualified(this.getUri(), this.getWorkingDirectory()));

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/hadoop-dependencies/src/versioned-src/v2/java/org/apache/hadoop/mapred/ClassicClientProtocolProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-dependencies/src/versioned-src/v2/java/org/apache/hadoop/mapred/ClassicClientProtocolProvider.java b/hadoop-dependencies/src/versioned-src/v2/java/org/apache/hadoop/mapred/ClassicClientProtocolProvider.java
index 079eca9..2167375 100644
--- a/hadoop-dependencies/src/versioned-src/v2/java/org/apache/hadoop/mapred/ClassicClientProtocolProvider.java
+++ b/hadoop-dependencies/src/versioned-src/v2/java/org/apache/hadoop/mapred/ClassicClientProtocolProvider.java
@@ -35,7 +35,8 @@ public class ClassicClientProtocolProvider extends ClientProtocolProvider {
     @Override
     public ClientProtocol create(Configuration conf) throws IOException {
         String framework = conf.get(MRConfig.FRAMEWORK_NAME, "unittests");
-        String tracker = conf.get("mapred.job.tracker", conf.get("yarn.resourcemanager.address", LOCALHOST));
+        String tracker = conf.get("mapreduce.jobtracker.address",
+                conf.get("yarn.resourcemanager.address", LOCALHOST));
         if (!"unittests".equals(framework) || !tracker.startsWith(LOCALHOST)) {
             return null;
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/hadoop-webapp/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-webapp/pom.xml b/hadoop-webapp/pom.xml
index 7712afa..1228074 100644
--- a/hadoop-webapp/pom.xml
+++ b/hadoop-webapp/pom.xml
@@ -33,46 +33,13 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                    <scope>compile</scope>
-                </dependency>
-            </dependencies>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.codehaus.mojo</groupId>
-                        <artifactId>build-helper-maven-plugin</artifactId>
-                        <version>1.5</version>
-                        <executions>
-                            <execution>
-                                <id>add-source</id>
-                                <phase>generate-sources</phase>
-                                <goals>
-                                    <goal>add-source</goal>
-                                </goals>
-                                <configuration>
-                                    <sources>
-                                        <source>${project.basedir}/src/versioned-src/v1/java</source>
-                                    </sources>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>
                     <scope>compile</scope>
                 </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java b/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
index 8dea54c..a203290 100644
--- a/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
+++ b/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
@@ -19,7 +19,6 @@
 package org.apache.falcon.listener;
 
 import org.apache.activemq.broker.BrokerService;
-import org.apache.falcon.JobTrackerService;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -37,8 +36,6 @@ public class HadoopStartupListener implements ServletContextListener {
     @Override
     public void contextInitialized(ServletContextEvent sce) {
         try {
-            startLocalJobRunner();
-
             startBroker();
             startHiveMetaStore();
 
@@ -48,18 +45,6 @@ public class HadoopStartupListener implements ServletContextListener {
         }
     }
 
-    @SuppressWarnings("unchecked")
-    private void startLocalJobRunner() throws Exception {
-        String className = "org.apache.hadoop.mapred.LocalRunnerV1";
-        try {
-            Class<? extends JobTrackerService>  runner = (Class<? extends JobTrackerService>) Class.forName(className);
-            JobTrackerService service = runner.newInstance();
-            service.start();
-        } catch (ClassNotFoundException e) {
-            LOG.warn("v1 Hadoop components not found. Assuming v2", e);
-        }
-    }
-
     private void startBroker() throws Exception {
         broker = new BrokerService();
         broker.setUseJmx(false);
@@ -92,33 +77,6 @@ public class HadoopStartupListener implements ServletContextListener {
         }
     }
 
-    private Object instance(String clsName) throws Exception {
-        return Class.forName(clsName).newInstance();
-    }
-
-    @SuppressWarnings("rawtypes")
-    private void invoke(Object service, String methodName, Class argCls, Object arg) throws Exception {
-        if (argCls == null) {
-            service.getClass().getMethod(methodName).invoke(service);
-        } else {
-            service.getClass().getMethod(methodName, argCls).invoke(service, arg);
-        }
-    }
-
-    private void startService(final Object service, final String method) {
-        new Thread(new Runnable() {
-            @Override
-            public void run() {
-                try {
-                    LOG.info("Starting service {}", service.getClass().getName());
-                    invoke(service, method, null, null);
-                } catch(Exception e) {
-                    throw new RuntimeException(e);
-                }
-            }
-        }).start();
-    }
-
     @Override
     public void contextDestroyed(ServletContextEvent sce) {
         try {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/hadoop-webapp/src/main/resources/core-site.xml
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/main/resources/core-site.xml b/hadoop-webapp/src/main/resources/core-site.xml
index 615b4d9..ef1558e 100644
--- a/hadoop-webapp/src/main/resources/core-site.xml
+++ b/hadoop-webapp/src/main/resources/core-site.xml
@@ -30,7 +30,7 @@
     </property>
 
     <property>
-        <name>fs.default.name</name>
+        <name>fs.defaultFS</name>
         <value>jail://global:00</value>
     </property>
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/hadoop-webapp/src/main/resources/hive-site.xml
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/main/resources/hive-site.xml b/hadoop-webapp/src/main/resources/hive-site.xml
index f03a3a6..6b39592 100644
--- a/hadoop-webapp/src/main/resources/hive-site.xml
+++ b/hadoop-webapp/src/main/resources/hive-site.xml
@@ -30,7 +30,7 @@
     </property>
 
     <property>
-        <name>fs.default.name</name>
+        <name>fs.defaultFS</name>
         <value>jail://global:00</value>
     </property>
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/hadoop-webapp/src/main/resources/mapred-site.xml
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/main/resources/mapred-site.xml b/hadoop-webapp/src/main/resources/mapred-site.xml
index 5328d6b..cf297de 100644
--- a/hadoop-webapp/src/main/resources/mapred-site.xml
+++ b/hadoop-webapp/src/main/resources/mapred-site.xml
@@ -22,7 +22,7 @@
 
 <configuration>
     <property>
-        <name>mapred.job.tracker</name>
+        <name>mapreduce.jobtracker.address</name>
         <value>localhost:41021</value>
     </property>
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java b/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java
index c96bb0e..193fab0 100644
--- a/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java
+++ b/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.apache.falcon.JobTrackerService;
 import org.apache.hadoop.conf.Configuration;
 import org.testng.annotations.Test;
 
@@ -30,16 +29,8 @@ public class LocalRunnerTest {
     @SuppressWarnings("unchecked")
     public void testLocalRunner() throws Exception {
         Configuration conf = new Configuration();
-        conf.set("mapred.job.tracker", "localhost:41021");
+        conf.set("mapreduce.jobtracker.address", "localhost:41021");
         conf.set("mapreduce.framework.name", "unittests");
-        String hadoopProfle = System.getProperty("hadoop.profile", "1");
-        if (hadoopProfle.equals("1")) {
-            String className = "org.apache.hadoop.mapred.LocalRunnerV1";
-            Class<? extends JobTrackerService> runner =
-                    (Class<? extends JobTrackerService>) Class.forName(className);
-            JobTrackerService service = runner.newInstance();
-            service.start();
-        }
         JobClient client = new JobClient(new JobConf(conf));
         System.out.println(client.getSystemDir());
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/hadoop-webapp/src/versioned-src/v1/java/org/apache/hadoop/mapred/LocalRunnerV1.java
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/versioned-src/v1/java/org/apache/hadoop/mapred/LocalRunnerV1.java b/hadoop-webapp/src/versioned-src/v1/java/org/apache/hadoop/mapred/LocalRunnerV1.java
deleted file mode 100644
index 5819cce..0000000
--- a/hadoop-webapp/src/versioned-src/v1/java/org/apache/hadoop/mapred/LocalRunnerV1.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mapred;
-
-import org.apache.falcon.JobTrackerService;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.security.token.Token;
-
-import java.io.IOException;
-
-/**
- * Hosted Local Job runner.
- * Please note that one of org.apache.hadoop.mapred.LocalRunnerV2 or
- * org.apache.hadoop.mapred.LocalRunnerV2 is active in the project depending
- * on the profile chosen.
- */
-public class LocalRunnerV1 implements JobSubmissionProtocol, JobTrackerService {
-
-    private final JobSubmissionProtocol localProxy;
-    private final JobConf conf;
-    private RPC.Server server;
-
-    public LocalRunnerV1() {
-        try {
-            conf = new JobConf();
-            localProxy = new LocalJobRunner(conf);
-        } catch (IOException e) {
-            throw new RuntimeException("Unable to initialize localRunner");
-        }
-    }
-
-    @Override
-    public void start() throws Exception {
-        String[] tracker = conf.get("mapred.job.tracker", "localhost:41021").split(":");
-        server = RPC.getServer(this, tracker[0], Integer.parseInt(tracker[1]), conf);
-        server.start();
-    }
-
-    @Override
-    public void stop() throws Exception {
-        server.stop();
-    }
-
-    @Override
-    public JobID getNewJobId() throws IOException {
-        return localProxy.getNewJobId();
-    }
-
-    @Override
-    public JobStatus submitJob(JobID jobName, String jobSubmitDir, Credentials ts) throws IOException {
-        return localProxy.submitJob(jobName, jobSubmitDir, ts);
-    }
-
-    @Override
-    public ClusterStatus getClusterStatus(boolean detailed) throws IOException {
-        return localProxy.getClusterStatus(detailed);
-    }
-
-    @Override
-    public AccessControlList getQueueAdmins(String queueName) throws IOException {
-        return localProxy.getQueueAdmins(queueName);
-    }
-
-    @Override
-    public void killJob(JobID jobid) throws IOException {
-        localProxy.killJob(jobid);
-    }
-
-    @Override
-    public void setJobPriority(JobID jobid, String priority) throws IOException {
-        localProxy.setJobPriority(jobid, priority);
-    }
-
-    @Override
-    public boolean killTask(TaskAttemptID taskId, boolean shouldFail) throws IOException {
-        return localProxy.killTask(taskId, shouldFail);
-    }
-
-    @Override
-    public JobProfile getJobProfile(JobID jobid) throws IOException {
-        return localProxy.getJobProfile(jobid);
-    }
-
-    @Override
-    public JobStatus getJobStatus(JobID jobid) throws IOException {
-        return localProxy.getJobStatus(jobid);
-    }
-
-    @Override
-    public Counters getJobCounters(JobID jobid) throws IOException {
-        return localProxy.getJobCounters(jobid);
-    }
-
-    @Override
-    public TaskReport[] getMapTaskReports(JobID jobid) throws IOException {
-        return localProxy.getMapTaskReports(jobid);
-    }
-
-    @Override
-    public TaskReport[] getReduceTaskReports(JobID jobid) throws IOException {
-        return localProxy.getReduceTaskReports(jobid);
-    }
-
-    @Override
-    public TaskReport[] getCleanupTaskReports(JobID jobid) throws IOException {
-        return localProxy.getCleanupTaskReports(jobid);
-    }
-
-    @Override
-    public TaskReport[] getSetupTaskReports(JobID jobid) throws IOException {
-        return localProxy.getSetupTaskReports(jobid);
-    }
-
-    @Override
-    public String getFilesystemName() throws IOException {
-        return localProxy.getFilesystemName();
-    }
-
-    @Override
-    public JobStatus[] jobsToComplete() throws IOException {
-        return localProxy.jobsToComplete();
-    }
-
-    @Override
-    public JobStatus[] getAllJobs() throws IOException {
-        return localProxy.getAllJobs();
-    }
-
-    @Override
-    public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobid, int fromEventId, int maxEvents)
-        throws IOException {
-        return localProxy.getTaskCompletionEvents(jobid, fromEventId, maxEvents);
-    }
-
-    @Override
-    public String[] getTaskDiagnostics(TaskAttemptID taskId) throws IOException {
-        return localProxy.getTaskDiagnostics(taskId);
-    }
-
-    @Override
-    public String getSystemDir() {
-        return localProxy.getSystemDir();
-    }
-
-    @Override
-    public String getStagingAreaDir() throws IOException {
-        return localProxy.getStagingAreaDir();
-    }
-
-    @Override
-    public JobQueueInfo[] getQueues() throws IOException {
-        return localProxy.getQueues();
-    }
-
-    @Override
-    public JobQueueInfo getQueueInfo(String queue) throws IOException {
-        return localProxy.getQueueInfo(queue);
-    }
-
-    @Override
-    public JobStatus[] getJobsFromQueue(String queue) throws IOException {
-        return localProxy.getJobsFromQueue(queue);
-    }
-
-    @Override
-    public QueueAclsInfo[] getQueueAclsForCurrentUser() throws IOException {
-        return localProxy.getQueueAclsForCurrentUser();
-    }
-
-    @Override
-    public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer) throws IOException, InterruptedException {
-        return new Token<DelegationTokenIdentifier>(null, null, null, null);
-    }
-
-    @Override
-    public long renewDelegationToken(Token<DelegationTokenIdentifier> token) throws IOException, InterruptedException {
-        return localProxy.renewDelegationToken(token);
-    }
-
-    @Override
-    public void cancelDelegationToken(Token<DelegationTokenIdentifier> token) throws IOException, InterruptedException {
-        localProxy.cancelDelegationToken(token);
-    }
-
-    @Override
-    public long getProtocolVersion(String protocol, long clientVersion) throws IOException {
-        return localProxy.getProtocolVersion(protocol, clientVersion);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/messaging/pom.xml
----------------------------------------------------------------------
diff --git a/messaging/pom.xml b/messaging/pom.xml
index 9d6dd02..4fa839c 100644
--- a/messaging/pom.xml
+++ b/messaging/pom.xml
@@ -33,25 +33,11 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-test</artifactId>
-                    <scope>test</scope>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
                  <dependency>
                     <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/messaging/src/test/java/org/apache/falcon/messaging/FeedProducerTest.java
----------------------------------------------------------------------
diff --git a/messaging/src/test/java/org/apache/falcon/messaging/FeedProducerTest.java b/messaging/src/test/java/org/apache/falcon/messaging/FeedProducerTest.java
index c45ea1e..6c8a926 100644
--- a/messaging/src/test/java/org/apache/falcon/messaging/FeedProducerTest.java
+++ b/messaging/src/test/java/org/apache/falcon/messaging/FeedProducerTest.java
@@ -33,6 +33,7 @@ import org.apache.activemq.ActiveMQConnectionFactory;
 import org.apache.activemq.broker.BrokerService;
 import org.apache.activemq.util.ByteArrayInputStream;
 import org.apache.falcon.cluster.util.EmbeddedCluster;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.workflow.WorkflowExecutionArgs;
 import org.apache.falcon.workflow.WorkflowExecutionContext;
 import org.apache.hadoop.conf.Configuration;
@@ -66,7 +67,7 @@ public class FeedProducerTest {
 
         this.dfsCluster = EmbeddedCluster.newCluster("testCluster");
         conf = dfsCluster.getConf();
-        logFile = new Path(conf.get("fs.default.name"),
+        logFile = new Path(conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY),
                 "/falcon/feed/agg-logs/instance-2012-01-01-10-00.csv");
 
         args = new String[] {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/oozie-el-extensions/pom.xml
----------------------------------------------------------------------
diff --git a/oozie-el-extensions/pom.xml b/oozie-el-extensions/pom.xml
index 8f7695e..c1d8c51 100644
--- a/oozie-el-extensions/pom.xml
+++ b/oozie-el-extensions/pom.xml
@@ -33,22 +33,13 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>
                 </dependency>
             </dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/oozie/pom.xml
----------------------------------------------------------------------
diff --git a/oozie/pom.xml b/oozie/pom.xml
index ff79693..f20f061 100644
--- a/oozie/pom.xml
+++ b/oozie/pom.xml
@@ -33,45 +33,13 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-            </dependencies>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.codehaus.mojo</groupId>
-                        <artifactId>build-helper-maven-plugin</artifactId>
-                        <version>1.5</version>
-                        <executions>
-                            <execution>
-                                <id>add-source</id>
-                                <phase>generate-sources</phase>
-                                <goals>
-                                    <goal>add-source</goal>
-                                </goals>
-                                <configuration>
-                                    <sources>
-                                        <source>${project.basedir}/src/versioned-src/v1/java</source>
-                                    </sources>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>
                 </dependency>
             </dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/oozie/src/main/java/org/apache/falcon/oozie/OozieOrchestrationWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/OozieOrchestrationWorkflowBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/OozieOrchestrationWorkflowBuilder.java
index d232aaf..2339284 100644
--- a/oozie/src/main/java/org/apache/falcon/oozie/OozieOrchestrationWorkflowBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/oozie/OozieOrchestrationWorkflowBuilder.java
@@ -236,7 +236,7 @@ public abstract class OozieOrchestrationWorkflowBuilder<T extends Entity> extend
         }
 
         for (FileStatus lib : libs) {
-            if (lib.isDir()) {
+            if (lib.isDirectory()) {
                 continue;
             }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/oozie/src/main/resources/action/feed/replication-action.xml
----------------------------------------------------------------------
diff --git a/oozie/src/main/resources/action/feed/replication-action.xml b/oozie/src/main/resources/action/feed/replication-action.xml
index 9da0396..dffdd92 100644
--- a/oozie/src/main/resources/action/feed/replication-action.xml
+++ b/oozie/src/main/resources/action/feed/replication-action.xml
@@ -25,10 +25,6 @@
                 <name>oozie.launcher.mapreduce.job.user.classpath.first</name>
                 <value>true</value>
             </property>
-            <property> <!-- hadoop 1 parameter -->
-                <name>oozie.launcher.mapreduce.user.classpath.first</name>
-                <value>true</value>
-            </property>
             <property>
                 <name>mapred.job.queue.name</name>
                 <value>${queueName}</value>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/oozie/src/test/java/org/apache/falcon/oozie/feed/OozieFeedWorkflowBuilderTest.java
----------------------------------------------------------------------
diff --git a/oozie/src/test/java/org/apache/falcon/oozie/feed/OozieFeedWorkflowBuilderTest.java b/oozie/src/test/java/org/apache/falcon/oozie/feed/OozieFeedWorkflowBuilderTest.java
index 379cf34..68911ce 100644
--- a/oozie/src/test/java/org/apache/falcon/oozie/feed/OozieFeedWorkflowBuilderTest.java
+++ b/oozie/src/test/java/org/apache/falcon/oozie/feed/OozieFeedWorkflowBuilderTest.java
@@ -33,6 +33,7 @@ import org.apache.falcon.entity.v0.SchemaHelper;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
 import org.apache.falcon.entity.v0.feed.Feed;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.oozie.OozieCoordinatorBuilder;
 import org.apache.falcon.oozie.OozieEntityBuilder;
 import org.apache.falcon.oozie.OozieOrchestrationWorkflowBuilder;
@@ -94,10 +95,10 @@ public class OozieFeedWorkflowBuilderTest extends AbstractTestBase {
         CurrentUser.authenticate("falcon");
 
         srcMiniDFS = EmbeddedCluster.newCluster("cluster1");
-        String srcHdfsUrl = srcMiniDFS.getConf().get("fs.default.name");
+        String srcHdfsUrl = srcMiniDFS.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
 
         trgMiniDFS = EmbeddedCluster.newCluster("cluster2");
-        String trgHdfsUrl = trgMiniDFS.getConf().get("fs.default.name");
+        String trgHdfsUrl = trgMiniDFS.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
 
         cleanupStore();
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/oozie/src/test/java/org/apache/falcon/oozie/process/OozieProcessWorkflowBuilderTest.java
----------------------------------------------------------------------
diff --git a/oozie/src/test/java/org/apache/falcon/oozie/process/OozieProcessWorkflowBuilderTest.java b/oozie/src/test/java/org/apache/falcon/oozie/process/OozieProcessWorkflowBuilderTest.java
index d6f9b54..3477258 100644
--- a/oozie/src/test/java/org/apache/falcon/oozie/process/OozieProcessWorkflowBuilderTest.java
+++ b/oozie/src/test/java/org/apache/falcon/oozie/process/OozieProcessWorkflowBuilderTest.java
@@ -40,6 +40,7 @@ import org.apache.falcon.entity.v0.process.Output;
 import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.entity.v0.process.Validity;
 import org.apache.falcon.entity.v0.process.Workflow;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.oozie.OozieEntityBuilder;
 import org.apache.falcon.oozie.OozieOrchestrationWorkflowBuilder;
 import org.apache.falcon.oozie.bundle.BUNDLEAPP;
@@ -95,7 +96,7 @@ public class OozieProcessWorkflowBuilderTest extends AbstractTestBase {
         CurrentUser.authenticate("falcon");
 
         Configuration conf = EmbeddedCluster.newCluster("testCluster").getConf();
-        hdfsUrl = conf.get("fs.default.name");
+        hdfsUrl = conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
     }
 
     private void storeEntity(EntityType type, String name, String resource) throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
----------------------------------------------------------------------
diff --git a/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java b/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
deleted file mode 100644
index 881b0c0..0000000
--- a/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.logging.v1;
-
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpStatus;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.falcon.logging.DefaultTaskLogRetriever;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.DefaultJobHistoryParser;
-import org.apache.hadoop.mapred.JobHistory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-
-/**
- * Hadoop v1 task log retriever based on job history
- */
-public final class TaskLogRetrieverV1 extends DefaultTaskLogRetriever {
-    private static final Logger LOG = LoggerFactory.getLogger(TaskLogRetrieverV1.class);
-
-    @Override
-    public String getFromHistory(String jobId) throws IOException {
-        Configuration conf = getConf();
-        String file = getHistoryFile(conf, jobId);
-        if (file == null) return null;
-        JobHistory.JobInfo jobInfo = new JobHistory.JobInfo(jobId);
-        DefaultJobHistoryParser.parseJobTasks(file, jobInfo, new Path(file).getFileSystem(conf));
-        LOG.info("History file: {}", file);
-        LOG.debug("Number of tasks in the history file: {}", jobInfo.getAllTasks().size());
-        for (JobHistory.Task task : jobInfo.getAllTasks().values()) {
-            if (task.get(JobHistory.Keys.TASK_TYPE).equals(JobHistory.Values.MAP.name()) &&
-                    task.get(JobHistory.Keys.TASK_STATUS).equals(JobHistory.Values.SUCCESS.name())) {
-                for (JobHistory.TaskAttempt attempt : task.getTaskAttempts().values()) {
-                    if (attempt.get(JobHistory.Keys.TASK_STATUS).equals(JobHistory.Values.SUCCESS.name())) {
-                        return JobHistory.getTaskLogsUrl(attempt);
-                    }
-                }
-            }
-        }
-        LOG.warn("Unable to find successful map task attempt");
-        return null;
-    }
-
-    private String getHistoryFile(Configuration conf, String jobId) throws IOException {
-        String jtAddress = "scheme://" + conf.get("mapred.job.tracker");
-        String jtHttpAddr = "scheme://" + conf.get("mapred.job.tracker.http.address");
-        try {
-            String host = new URI(jtAddress).getHost();
-            int port = new URI(jtHttpAddr).getPort();
-            HttpClient client = new HttpClient();
-            String jobUrl = "http://" + host + ":" + port + "/jobdetails.jsp";
-            GetMethod get = new GetMethod(jobUrl);
-            get.setQueryString("jobid=" + jobId);
-            get.setFollowRedirects(false);
-            int status = client.executeMethod(get);
-            String file = null;
-            if (status == HttpStatus.SC_MOVED_PERMANENTLY || status == HttpStatus.SC_MOVED_TEMPORARILY) {
-                file = get.getResponseHeader("Location").toString();
-                file = file.substring(file.lastIndexOf('=') + 1);
-                file = JobHistory.JobInfo.decodeJobHistoryFileName(file);
-            } else {
-                LOG.warn("JobURL {} for id: {} returned {}", jobUrl, jobId, status);
-            }
-            return file;
-        } catch (URISyntaxException e) {
-            throw new IOException("JT Address: " + jtAddress + ", http Address: " + jtHttpAddr, e);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index effab13..94eaf7a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -118,100 +118,10 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
-
-            <properties>
-                <hadoop.version>1.1.2</hadoop.version>
-            </properties>
-
-           <dependencyManagement>
-                <dependencies>
-		            <dependency>
-		                <groupId>org.apache.hadoop</groupId>
-		                <artifactId>hadoop-core</artifactId>
-		                <version>${hadoop.version}</version>
-                        <scope>provided</scope>
-		                <exclusions>
-                            <exclusion>
-                                <groupId>com.sun.jersey</groupId>
-                                <artifactId>jersey-server</artifactId>
-                            </exclusion>
-                            <exclusion>
-                                <groupId>com.sun.jersey</groupId>
-                                <artifactId>jersey-core</artifactId>
-                            </exclusion>
-                            <exclusion>
-                                <groupId>com.sun.jersey</groupId>
-                                <artifactId>jersey-json</artifactId>
-                            </exclusion>
-		                    <exclusion>
-		                        <groupId>org.eclipse.jdt</groupId>
-		                        <artifactId>core</artifactId>
-		                    </exclusion>
-		                    <exclusion>
-		                        <groupId>tomcat</groupId>
-		                        <artifactId>jasper-runtime</artifactId>
-		                    </exclusion>
-		                    <exclusion>
-		                        <groupId>tomcat</groupId>
-		                        <artifactId>jasper-compiler</artifactId>
-		                    </exclusion>
-		                </exclusions>
-		            </dependency>
-
-		            <dependency>
-		                <groupId>org.apache.hadoop</groupId>
-		                <artifactId>hadoop-test</artifactId>
-		                <version>${hadoop.version}</version>
-                        <scope>test</scope>
-		                <exclusions>
-		                    <exclusion>
-		                        <groupId>org.slf4j</groupId>
-		                        <artifactId>slf4j-api</artifactId>
-		                    </exclusion>
-		                    <exclusion>
-		                        <groupId>org.apache.ftpserver</groupId>
-		                        <artifactId>ftpserver-core</artifactId>
-		                    </exclusion>
-		                    <exclusion>
-		                        <groupId>org.apache.ftpserver</groupId>
-		                        <artifactId>ftpserver-deprecated</artifactId>
-		                    </exclusion>
-		                </exclusions>
-		            </dependency>
-
-		            <dependency>
-		                <groupId>org.apache.hadoop</groupId>
-		                <artifactId>hadoop-client</artifactId>
-		                <version>${hadoop1.version}</version>
-		                <exclusions>
-                            <exclusion>
-                                <groupId>com.sun.jersey</groupId>
-                                <artifactId>jersey-server</artifactId>
-                            </exclusion>
-                            <exclusion>
-                                <groupId>com.sun.jersey</groupId>
-                                <artifactId>jersey-core</artifactId>
-                            </exclusion>
-                            <exclusion>
-                                <groupId>com.sun.jersey</groupId>
-                                <artifactId>jersey-json</artifactId>
-                            </exclusion>
-		                    <exclusion>
-		                        <groupId>org.glassfish</groupId>
-		                        <artifactId>javax.servlet</artifactId>
-		                    </exclusion>
-		                </exclusions>
-		            </dependency>
-                </dependencies>
-          </dependencyManagement>
-        </profile>
-
-        <profile>
-            <id>hadoop-2</id>
             <properties>
                 <hadoop.version>2.5.0</hadoop.version>
             </properties>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/prism/pom.xml
----------------------------------------------------------------------
diff --git a/prism/pom.xml b/prism/pom.xml
index 0fa4124..9072d1b 100644
--- a/prism/pom.xml
+++ b/prism/pom.xml
@@ -34,24 +34,13 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                    <scope>provided</scope>
-                </dependency>
-            </dependencies>
-        </profile>
-
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>
                     <scope>provided</scope>
                 </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/replication/pom.xml
----------------------------------------------------------------------
diff --git a/replication/pom.xml b/replication/pom.xml
index 0318abf..9dba461 100644
--- a/replication/pom.xml
+++ b/replication/pom.xml
@@ -33,21 +33,11 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-    
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
                  <dependency>
                     <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/rerun/pom.xml
----------------------------------------------------------------------
diff --git a/rerun/pom.xml b/rerun/pom.xml
index dfb6c59..74d6761 100644
--- a/rerun/pom.xml
+++ b/rerun/pom.xml
@@ -34,23 +34,13 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>
                 </dependency>
             </dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/rerun/src/test/java/org/apache/falcon/rerun/handler/TestLateData.java
----------------------------------------------------------------------
diff --git a/rerun/src/test/java/org/apache/falcon/rerun/handler/TestLateData.java b/rerun/src/test/java/org/apache/falcon/rerun/handler/TestLateData.java
index efd51b1..263c18f 100644
--- a/rerun/src/test/java/org/apache/falcon/rerun/handler/TestLateData.java
+++ b/rerun/src/test/java/org/apache/falcon/rerun/handler/TestLateData.java
@@ -27,6 +27,7 @@ import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.process.Process;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.mockito.MockitoAnnotations;
@@ -79,7 +80,8 @@ public class TestLateData {
         case CLUSTER:
             Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass().getResource(CLUSTER_XML));
             cluster.setName(name);
-            ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(conf.get("fs.default.name"));
+            ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(
+                    conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY));
             store.publish(type, cluster);
             break;
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/retention/pom.xml
----------------------------------------------------------------------
diff --git a/retention/pom.xml b/retention/pom.xml
index 92302fb..d9f96a8 100644
--- a/retention/pom.xml
+++ b/retention/pom.xml
@@ -34,26 +34,13 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-test</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-common</artifactId>
                 </dependency>
                 <dependency>


Mime
View raw message