falcon-commits mailing list archives

From: venkat...@apache.org
Subject: [1/5] FALCON-757 Discontinue support for Hadoop-1.x. Contributed by Sowmya Ramesh
Date: Tue, 07 Oct 2014 00:15:43 GMT
Repository: incubator-falcon
Updated Branches:
  refs/heads/master a00b2ef9f -> 74b206319
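Most of the hunks below replace the Hadoop 1.x configuration key "fs.default.name" with its Hadoop 2.x equivalent, either as the literal "fs.defaultFS" or through the HadoopClientFactory.FS_DEFAULT_NAME_KEY constant, and drop the hadoop-1 Maven profiles so that hadoop-2 becomes the default. As a minimal illustrative sketch of the key rename (not part of this patch; it assumes only hadoop-common 2.x on the classpath, and the URI value is a placeholder):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class DefaultFsSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Hadoop 2.x key; "fs.default.name" is the deprecated 1.x spelling and is
        // transparently mapped to this key by Configuration's deprecation handling.
        conf.set("fs.defaultFS", "hdfs://localhost:8020");   // placeholder value
        System.out.println(conf.get("fs.defaultFS"));        // hdfs://localhost:8020
        System.out.println(FileSystem.getDefaultUri(conf));  // same URI via the FileSystem API
    }
}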


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
----------------------------------------------------------------------
diff --git a/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java b/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
index b2f4821..970d381 100644
--- a/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
+++ b/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
@@ -22,6 +22,7 @@ import org.apache.falcon.Pair;
 import org.apache.falcon.cluster.util.EmbeddedCluster;
 import org.apache.falcon.entity.Storage;
 import org.apache.falcon.entity.v0.feed.LocationType;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -59,7 +60,7 @@ public class FeedEvictorTest {
     @BeforeClass
     public void start() throws Exception {
         cluster = EmbeddedCluster.newCluster("test");
-        hdfsUrl = cluster.getConf().get("fs.default.name");
+        hdfsUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
         FeedEvictor.OUT.set(stream);
     }
 
@@ -104,7 +105,7 @@ public class FeedEvictorTest {
 
             Pair<List<String>, List<String>> pair =
                     createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS,
"/data");
-            final String storageUrl = cluster.getConf().get("fs.default.name");
+            final String storageUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
             String dataPath = LocationType.DATA.name() + "="
                     + storageUrl + "/data/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}";
             String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-01-00.csv";
@@ -204,7 +205,7 @@ public class FeedEvictorTest {
 
             Pair<List<String>, List<String>> pair =
                     createTestData("feed2", "yyyyMMddHH/'more'/yyyy", 5, TimeUnit.HOURS,
"/data");
-            final String storageUrl = cluster.getConf().get("fs.default.name");
+            final String storageUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
             String dataPath = LocationType.DATA.name() + "="
                     + storageUrl + "/data/YYYY/feed2/mmHH/dd/MM/?{YEAR}?{MONTH}?{DAY}?{HOUR}/more/?{YEAR}";
             String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-02-00.csv";
@@ -241,12 +242,14 @@ public class FeedEvictorTest {
             Pair<List<String>, List<String>> pair = createTestData("/data");
             FeedEvictor.main(new String[] {
                 "-feedBasePath", LocationType.DATA.name() + "="
-                    + cluster.getConf().get("fs.default.name") + "/data/YYYY/feed3/dd/MM/?{MONTH}/more/?{HOUR}",
+                    + cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY)
+                    + "/data/YYYY/feed3/dd/MM/?{MONTH}/more/?{HOUR}",
                 "-retentionType", "instance",
                 "-retentionLimit", "months(5)",
                 "-timeZone", "UTC",
                 "-frequency", "hourly",
-                "-logFile", conf.get("fs.default.name") + "/falcon/staging/feed/2012-01-01-04-00",
+                "-logFile", conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY)
+                + "/falcon/staging/feed/2012-01-01-04-00",
                 "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
             });
             Assert.assertEquals("instances=NULL", stream.getBuffer());
@@ -255,7 +258,8 @@ public class FeedEvictorTest {
             String dataPath = "/data/YYYY/feed4/dd/MM/02/more/hello";
             String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-02-00.csv";
             FeedEvictor.main(new String[] {
-                "-feedBasePath", LocationType.DATA.name() + "=" + cluster.getConf().get("fs.default.name")
+ dataPath,
+                "-feedBasePath", LocationType.DATA.name() + "="
+                    + cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY) + dataPath,
                 "-retentionType", "instance",
                 "-retentionLimit", "hours(5)",
                 "-timeZone", "UTC",
@@ -285,7 +289,7 @@ public class FeedEvictorTest {
             createTestData("/stats");
             createTestData("/meta");
             createTestData("/tmp");
-            final String storageUrl = cluster.getConf().get("fs.default.name");
+            final String storageUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
             FeedEvictor.main(new String[] {
                 "-feedBasePath", getFeedBasePath(LocationType.DATA, storageUrl)
                 + "#" + getFeedBasePath(LocationType.STATS, storageUrl)
@@ -295,14 +299,16 @@ public class FeedEvictorTest {
                 "-retentionLimit", "months(5)",
                 "-timeZone", "UTC",
                 "-frequency", "hourly",
-                "-logFile", conf.get("fs.default.name") + "/falcon/staging/feed/2012-01-01-04-00",
-                "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
+                "-logFile", conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY)
+                + "/falcon/staging/feed/2012-01-01-04-00", "-falconFeedStorageType",
+                Storage.TYPE.FILESYSTEM.name(),
             });
             Assert.assertEquals("instances=NULL", stream.getBuffer());
 
             stream.clear();
             String dataPath = LocationType.DATA.name() + "="
-                    + cluster.getConf().get("fs.default.name") + "/data/YYYY/feed4/dd/MM/02/more/hello";
+                    + cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY)
+                    + "/data/YYYY/feed4/dd/MM/02/more/hello";
             String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-02-00.csv";
             FeedEvictor.main(new String[]{
                 "-feedBasePath", dataPath,
@@ -336,7 +342,7 @@ public class FeedEvictorTest {
             createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/stats");
             createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/meta");
 
-            final String storageUrl = cluster.getConf().get("fs.default.name");
+            final String storageUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
             String dataPath =
                     "DATA=" + storageUrl + "/data/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}"
                     + "#STATS=" + storageUrl + "/stats/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}"
@@ -373,7 +379,7 @@ public class FeedEvictorTest {
 
             Pair<List<String>, List<String>> pair = generateInstances(fs, "feed1",
                 "yyyy/MM/dd/'more'/yyyy", 10, TimeUnit.DAYS, "/data", false);
-            final String storageUrl = cluster.getConf().get("fs.default.name");
+            final String storageUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
             String dataPath = LocationType.DATA.name() + "="
                 + storageUrl + "/data/YYYY/feed1/mmHH/dd/MM/?{YEAR}/?{MONTH}/?{DAY}/more/?{YEAR}";
             String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-01-00.csv";
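The test hunks above swap the bare "fs.default.name" literal for HadoopClientFactory.FS_DEFAULT_NAME_KEY (note the new import at the top of the file). The real constant is defined in org.apache.falcon.hadoop.HadoopClientFactory; purely as a hypothetical sketch, a constant of this kind would typically delegate to Hadoop's public key, which resolves to "fs.defaultFS" on Hadoop 2.x:

import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

// Hypothetical sketch only; the real definition lives in
// org.apache.falcon.hadoop.HadoopClientFactory and may differ.
public final class HadoopClientFactorySketch {
    private HadoopClientFactorySketch() {}

    // CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY is "fs.defaultFS" in Hadoop 2.x.
    public static final String FS_DEFAULT_NAME_KEY =
            CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
}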

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/test-util/pom.xml
----------------------------------------------------------------------
diff --git a/test-util/pom.xml b/test-util/pom.xml
index 4ade8be..4f388b1 100644
--- a/test-util/pom.xml
+++ b/test-util/pom.xml
@@ -33,28 +33,13 @@
 
     <profiles>
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-test</artifactId>
-                    <scope>compile</scope>
-                </dependency>
-            </dependencies>
-        </profile>
-
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>
                 </dependency>
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
----------------------------------------------------------------------
diff --git a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
index 29c2ec4..b1e518d 100644
--- a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
+++ b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
@@ -84,9 +84,9 @@ public class EmbeddedCluster {
     private static EmbeddedCluster createClusterAsUser(String name, boolean global, String colo,
                                                        String tags) throws IOException {
         EmbeddedCluster cluster = new EmbeddedCluster();
-        cluster.conf.set("fs.default.name", "jail://" + (global ? "global" : name) + ":00");
+        cluster.conf.set("fs.defaultFS", "jail://" + (global ? "global" : name) + ":00");
 
-        String hdfsUrl = cluster.conf.get("fs.default.name");
+        String hdfsUrl = cluster.conf.get("fs.defaultFS");
         LOG.info("Cluster Namenode = {}", hdfsUrl);
         cluster.buildClusterObject(name, colo, tags);
         return cluster;
@@ -109,7 +109,7 @@ public class EmbeddedCluster {
         Interfaces interfaces = new Interfaces();
         interfaces.getInterfaces().add(newInterface(Interfacetype.WORKFLOW,
                 "http://localhost:41000/oozie", "0.1"));
-        String fsUrl = conf.get("fs.default.name");
+        String fsUrl = conf.get("fs.defaultFS");
         interfaces.getInterfaces().add(newInterface(Interfacetype.READONLY, fsUrl, "0.1"));
         interfaces.getInterfaces().add(newInterface(Interfacetype.WRITE, fsUrl, "0.1"));
         interfaces.getInterfaces().add(newInterface(Interfacetype.EXECUTE,

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/test-util/src/main/java/org/apache/falcon/cluster/util/StandAloneCluster.java
----------------------------------------------------------------------
diff --git a/test-util/src/main/java/org/apache/falcon/cluster/util/StandAloneCluster.java b/test-util/src/main/java/org/apache/falcon/cluster/util/StandAloneCluster.java
index e4b5592..0ce0eec 100644
--- a/test-util/src/main/java/org/apache/falcon/cluster/util/StandAloneCluster.java
+++ b/test-util/src/main/java/org/apache/falcon/cluster/util/StandAloneCluster.java
@@ -43,12 +43,12 @@ public final class StandAloneCluster extends EmbeddedCluster {
 
         for (Interface inter : cluster.getCluster().getInterfaces().getInterfaces()) {
             if (inter.getType() == Interfacetype.WRITE) {
-                cluster.getConf().set("fs.default.name", inter.getEndpoint());
+                cluster.getConf().set("fs.defaultFS", inter.getEndpoint());
                 break;
             }
         }
 
-        LOG.info("Cluster Namenode = {}", cluster.getConf().get("fs.default.name"));
+        LOG.info("Cluster Namenode = {}", cluster.getConf().get("fs.defaultFS"));
         return cluster;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b9c7ffd5/webapp/pom.xml
----------------------------------------------------------------------
diff --git a/webapp/pom.xml b/webapp/pom.xml
index d96f7e3..ed5df55 100644
--- a/webapp/pom.xml
+++ b/webapp/pom.xml
@@ -53,26 +53,14 @@
                 </plugins>
             </build>
         </profile>
-
         <profile>
-            <id>hadoop-1</id>
+            <id>hadoop-2</id>
             <activation>
                 <activeByDefault>true</activeByDefault>
             </activation>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                    <scope>test</scope>
-                </dependency>
-            </dependencies>
-        </profile>
-
-        <profile>
-            <id>hadoop-2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-client</artifactId>
                     <scope>test</scope>
                 </dependency>

