falcon-commits mailing list archives

From shweth...@apache.org
Subject [2/3] FALCON-123 Improve build speeds in falcon. Contributed by Srikanth Sundarrajan
Date Wed, 22 Jan 2014 10:23:33 GMT
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 36c5f7a..4e8db45 100644
--- a/pom.xml
+++ b/pom.xml
@@ -111,8 +111,9 @@
         <hcatalog.version>0.11.0</hcatalog.version>
         <hadoop-distcp.version>0.9</hadoop-distcp.version>
         <jetty.version>6.1.26</jetty.version>
+        <jersey.version>1.9</jersey.version>
         <internal.maven.repo>file:///tmp/falcontemprepo</internal.maven.repo>
-        <skipCheck>false</skipCheck>
+        <excluded.test.groups>exhaustive</excluded.test.groups>
     </properties>
 
     <profiles>
@@ -134,6 +135,18 @@
 		                <version>${hadoop.version}</version>
                         <scope>provided</scope>
 		                <exclusions>
+                            <exclusion>
+                                <groupId>com.sun.jersey</groupId>
+                                <artifactId>jersey-server</artifactId>
+                            </exclusion>
+                            <exclusion>
+                                <groupId>com.sun.jersey</groupId>
+                                <artifactId>jersey-core</artifactId>
+                            </exclusion>
+                            <exclusion>
+                                <groupId>com.sun.jersey</groupId>
+                                <artifactId>jersey-json</artifactId>
+                            </exclusion>
 		                    <exclusion>
 		                        <groupId>org.eclipse.jdt</groupId>
 		                        <artifactId>core</artifactId>
@@ -175,6 +188,18 @@
 		                <artifactId>hadoop-client</artifactId>
 		                <version>${hadoop1.version}</version>
 		                <exclusions>
+                            <exclusion>
+                                <groupId>com.sun.jersey</groupId>
+                                <artifactId>jersey-server</artifactId>
+                            </exclusion>
+                            <exclusion>
+                                <groupId>com.sun.jersey</groupId>
+                                <artifactId>jersey-core</artifactId>
+                            </exclusion>
+                            <exclusion>
+                                <groupId>com.sun.jersey</groupId>
+                                <artifactId>jersey-json</artifactId>
+                            </exclusion>
 		                    <exclusion>
 		                        <groupId>org.glassfish</groupId>
 		                        <artifactId>javax.servlet</artifactId>
@@ -197,6 +222,18 @@
 		                <artifactId>hadoop-client</artifactId>
 		                <version>${hadoop.version}</version>
 		                <exclusions>
+                            <exclusion>
+                                <groupId>com.sun.jersey</groupId>
+                                <artifactId>jersey-server</artifactId>
+                            </exclusion>
+                            <exclusion>
+                                <groupId>com.sun.jersey</groupId>
+                                <artifactId>jersey-core</artifactId>
+                            </exclusion>
+                            <exclusion>
+                                <groupId>com.sun.jersey</groupId>
+                                <artifactId>jersey-json</artifactId>
+                            </exclusion>
 		                    <exclusion>
 		                        <groupId>org.glassfish</groupId>
 		                        <artifactId>javax.servlet</artifactId>
@@ -217,6 +254,12 @@
 
 		            <dependency>
 		                <groupId>org.apache.hadoop</groupId>
+		                <artifactId>hadoop-mapreduce-client-common</artifactId>
+		                <version>${hadoop.version}</version>
+		            </dependency>
+
+		            <dependency>
+		                <groupId>org.apache.hadoop</groupId>
 		                <artifactId>hadoop-hdfs</artifactId>
 		                <version>${hadoop.version}</version>
 		                <classifier>tests</classifier>
@@ -247,7 +290,7 @@
 		                <version>${hadoop.version}</version>
 		            </dependency>
                 </dependencies>
-            </dependencyManagement>
+          </dependencyManagement>
         </profile>
 
         <profile>
@@ -266,6 +309,110 @@
                 </plugins>
             </build>
         </profile>
+
+        <profile>
+            <id>test-patch</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.rat</groupId>
+                        <artifactId>apache-rat-plugin</artifactId>
+                        <configuration>
+                            <useDefaultExcludes>true</useDefaultExcludes>
+                            <useMavenDefaultExcludes>true</useMavenDefaultExcludes>
+                            <useIdeaDefaultExcludes>true</useIdeaDefaultExcludes>
+                            <useEclipseDefaultExcludes>true</useEclipseDefaultExcludes>
+                            <excludeSubProjects>true</excludeSubProjects>
+                            <excludes>
+                                <exclude>*.txt</exclude>
+                                <exclude>.git/**</exclude>
+                                <exclude>**/.idea/**</exclude>
+                                <exclude>**/*.twiki</exclude>
+                                <exclude>**/*.iml</exclude>
+                                <exclude>**/target/**</exclude>
+                                <exclude>**/activemq-data/**</exclude>
+                                <exclude>**/build/**</exclude>
+                                <exclude>**/*.patch</exclude>
+                                <exclude>derby.log</exclude>
+                                <exclude>**/logs/**</exclude>
+                                <exclude>**/.classpath</exclude>
+                                <exclude>**/.project</exclude>
+                                <exclude>**/.settings/**</exclude>
+                                <exclude>**/test-output/**</exclude>
+                                <exclude>**/data.txt</exclude>
+                                <exclude>**/maven-eclipse.xml</exclude>
+                                <exclude>**/.externalToolBuilders/**</exclude>
+                                <exclude>html5-ui/**</exclude>
+                            </excludes>
+                        </configuration>
+                        <executions>
+                            <execution>
+                                <id>rat-check</id>
+                                <goals>
+                                    <goal>check</goal>
+                                </goals>
+                                <phase>verify</phase>
+                            </execution>
+                        </executions>
+                    </plugin>
+
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-checkstyle-plugin</artifactId>
+                        <dependencies>
+                            <dependency>
+                                <groupId>org.apache.falcon</groupId>
+                                <artifactId>checkstyle</artifactId>
+                                <version>${project.version}</version>
+                            </dependency>
+                        </dependencies>
+                        <executions>
+                            <execution>
+                                <id>checkstyle-check</id>
+                                <goals>
+                                    <goal>check</goal>
+                                </goals>
+                                <phase>verify</phase>
+                                <configuration>
+                                    <consoleOutput>true</consoleOutput>
+                                    <includeTestSourceDirectory>true</includeTestSourceDirectory>
+                                    <configLocation>falcon/checkstyle.xml</configLocation>
+                                    <failOnViolation>true</failOnViolation>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+
+                    <plugin>
+                        <groupId>org.codehaus.mojo</groupId>
+                        <artifactId>findbugs-maven-plugin</artifactId>
+                        <configuration>
+                            <!--debug>true</debug -->
+                            <xmlOutput>true</xmlOutput>
+                            <excludeFilterFile>${basedir}/../checkstyle/src/main/resources/falcon/findbugs-exclude.xml</excludeFilterFile>
+                            <failOnError>true</failOnError>
+                        </configuration>
+                        <executions>
+                            <execution>
+                                <id>findbugs-check</id>
+                                <goals>
+                                    <goal>check</goal>
+                                </goals>
+                                <phase>verify</phase>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <!-- Source code metrics: mvn javancss:report or mvn site -->
+                    <plugin>
+                        <groupId>org.codehaus.mojo</groupId>
+                        <artifactId>javancss-maven-plugin</artifactId>
+                    </plugin>
+                </plugins>
+            </build>
+            <properties>
+                <excluded.test.groups/>
+            </properties>
+         </profile>
     </profiles>
 
     <modules>
@@ -443,13 +590,13 @@
             <dependency>
                 <groupId>com.sun.jersey</groupId>
                 <artifactId>jersey-client</artifactId>
-                <version>1.8</version>
+                <version>${jersey.version}</version>
             </dependency>
 
             <dependency>
                 <groupId>com.sun.jersey</groupId>
                 <artifactId>jersey-json</artifactId>
-                <version>1.8</version>
+                <version>${jersey.version}</version>
             </dependency>
 
             <dependency>
@@ -466,6 +613,12 @@
 
             <dependency>
                 <groupId>org.apache.falcon</groupId>
+                <artifactId>falcon-hadoop-dependencies</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.falcon</groupId>
                 <artifactId>falcon-metrics</artifactId>
                 <version>${project.version}</version>
             </dependency>
@@ -511,7 +664,7 @@
             <dependency>
                 <groupId>com.sun.jersey</groupId>
                 <artifactId>jersey-server</artifactId>
-                <version>1.8</version>
+                <version>${jersey.version}</version>
             </dependency>
 
             <dependency>
@@ -663,6 +816,12 @@
             </dependency>
 
             <dependency>
+                <groupId>javax.servlet.jsp</groupId>
+                <artifactId>jsp-api</artifactId>
+                <version>2.0</version>
+            </dependency>
+
+            <dependency>
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-auth</artifactId>
                 <version>${hadoop.version}</version>
@@ -845,12 +1004,12 @@
                 </includes>
             </resource>
             <resource>
-              <directory>..</directory>
-              <targetPath>META-INF</targetPath>
-              <includes>
-                <include>LICENSE.txt</include>
-                <include>NOTICE.txt</include>
-              </includes>
+                <directory>..</directory>
+                <targetPath>META-INF</targetPath>
+                <includes>
+                    <include>LICENSE.txt</include>
+                    <include>NOTICE.txt</include>
+                </includes>
             </resource>
         </resources>
         <testResources>
@@ -868,12 +1027,6 @@
                 </plugin>
 
                 <plugin>
-                    <groupId>org.codehaus.mojo</groupId>
-                    <artifactId>build-helper-maven-plugin</artifactId>
-                    <version>1.5</version>
-                </plugin>
-
-                <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-compiler-plugin</artifactId>
                     <version>2.3.2</version>
@@ -891,10 +1044,16 @@
                     <version>2.8.1</version>
                 </plugin>
 
-                <plugin>
+               <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-surefire-plugin</artifactId>
-                    <version>2.14</version>
+                    <version>2.16</version>
+                </plugin>
+
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-failsafe-plugin</artifactId>
+                    <version>2.16</version>
                 </plugin>
 
                 <plugin>
@@ -985,11 +1144,6 @@
             </plugin>
 
             <plugin>
-                <groupId>org.codehaus.mojo</groupId>
-                <artifactId>build-helper-maven-plugin</artifactId>
-            </plugin>
-
-            <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-compiler-plugin</artifactId>
                 <configuration>
@@ -1004,7 +1158,7 @@
                 <executions>
                     <execution>
                         <id>attach-sources</id>
-                        <phase>package</phase>
+                        <phase>site</phase>
                         <goals>
                             <goal>jar-no-fork</goal>
                         </goals>
@@ -1018,19 +1172,13 @@
                 <executions>
                     <execution>
                         <id>attach-javadocs</id>
-                        <phase>package</phase>
+                        <phase>site</phase>
                         <goals>
                             <goal>javadoc</goal>
                             <goal>jar</goal>
                         </goals>
-                        <configuration>
-                            <skip>${skipCheck}</skip>
-                        </configuration>
-                    </execution>
+                     </execution>
                 </executions>
-                <configuration>
-                    <skip>${skipCheck}</skip>
-                </configuration>
             </plugin>
 
             <plugin>
@@ -1047,122 +1195,41 @@
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-surefire-plugin</artifactId>
+                <version>2.16</version>
                 <configuration>
                     <redirectTestOutputToFile>true</redirectTestOutputToFile>
                     <forkMode>always</forkMode>
                     <argLine>-Djava.awt.headless=true -Djava.security.krb5.realm= -Djava.security.krb5.kdc=</argLine>
+                    <excludedGroups>${excluded.test.groups}</excludedGroups>
                 </configuration>
             </plugin>
 
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-deploy-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>deploy</id>
-                        <phase>deploy</phase>
-                        <goals>
-                            <goal>deploy</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-
-            <plugin>
-                <groupId>org.apache.rat</groupId>
-                <artifactId>apache-rat-plugin</artifactId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <version>2.16</version>
                 <configuration>
-                    <useDefaultExcludes>true</useDefaultExcludes>
-                    <useMavenDefaultExcludes>true</useMavenDefaultExcludes>
-                    <useIdeaDefaultExcludes>true</useIdeaDefaultExcludes>
-                    <useEclipseDefaultExcludes>true</useEclipseDefaultExcludes>
-                    <excludeSubProjects>true</excludeSubProjects>
-                    <excludes>
-                        <exclude>*.txt</exclude>
-                        <exclude>.git/**</exclude>
-                        <exclude>.idea/**</exclude>
-                        <exclude>**/*.twiki</exclude>
-                        <exclude>**/*.iml</exclude>
-                        <exclude>**/target/**</exclude>
-                        <exclude>**/activemq-data/**</exclude>
-                        <exclude>**/build/**</exclude>
-                        <exclude>**/*.patch</exclude>
-                        <exclude>derby.log</exclude>
-                        <exclude>**/logs/**</exclude>
-                        <exclude>**/.classpath</exclude>
-                        <exclude>**/.project</exclude>
-                        <exclude>**/.settings/**</exclude>
-                        <exclude>**/test-output/**</exclude>
-                        <exclude>**/data.txt</exclude>
-                        <exclude>**/maven-eclipse.xml</exclude>
-                        <exclude>**/.externalToolBuilders/**</exclude>
-                        <exclude>html5-ui/**</exclude>
-                    </excludes>
+                    <redirectTestOutputToFile>true</redirectTestOutputToFile>
+                    <forkMode>always</forkMode>
+                    <argLine>-Djava.security.krb5.realm= -Djava.security.krb5.kdc=
+                        -Dhadoop.tmp.dir=${project.build.directory}/tmp-hadoop-${user.name}</argLine>
+                    <excludedGroups>${excluded.test.groups}</excludedGroups>
                 </configuration>
                 <executions>
                     <execution>
-                        <id>rat-check</id>
-                        <goals>
-                            <goal>check</goal>
-                        </goals>
-                        <phase>verify</phase>
-                    </execution>
-                </executions>
-            </plugin>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-checkstyle-plugin</artifactId>
-                <dependencies>
-                    <dependency>
-                        <groupId>org.apache.falcon</groupId>
-                        <artifactId>checkstyle</artifactId>
-                        <version>${project.version}</version>
-                    </dependency>
-                </dependencies>
-                <executions>
-                    <execution>
-                        <id>checkstyle-check</id>
+                        <id>integration-test</id>
                         <goals>
-                            <goal>check</goal>
+                            <goal>integration-test</goal>
                         </goals>
-                        <phase>verify</phase>
-                        <configuration>
-                            <consoleOutput>true</consoleOutput>
-                            <includeTestSourceDirectory>true</includeTestSourceDirectory>
-                            <configLocation>falcon/checkstyle.xml</configLocation>
-                            <failOnViolation>true</failOnViolation>
-                            <skip>${skipCheck}</skip>
-                        </configuration>
                     </execution>
-                </executions>
-            </plugin>
-
-            <plugin>
-                <groupId>org.codehaus.mojo</groupId>
-                <artifactId>findbugs-maven-plugin</artifactId>
-                <configuration>
-                    <!--debug>true</debug -->
-                    <xmlOutput>true</xmlOutput>
-                    <excludeFilterFile>${basedir}/../checkstyle/src/main/resources/falcon/findbugs-exclude.xml</excludeFilterFile>
-                    <failOnError>true</failOnError>
-                    <skip>${skipCheck}</skip>
-                </configuration>
-                <executions>
                     <execution>
-                        <id>findbugs-check</id>
+                        <id>verify</id>
                         <goals>
-                            <goal>check</goal>
+                            <goal>verify</goal>
                         </goals>
-                        <phase>verify</phase>
                     </execution>
                 </executions>
             </plugin>
-            <!-- Source code metrics: mvn javancss:report or mvn site -->
-            <plugin>
-                <groupId>org.codehaus.mojo</groupId>
-                <artifactId>javancss-maven-plugin</artifactId>
-            </plugin>
 
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>

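The root pom changes above centralize the Jersey version behind ${jersey.version}, exclude the Jersey artifacts that leak in through hadoop-client, push the javadoc and source jars out of the default package phase into site, and replace the old blanket ${skipCheck} switch with a test-group mechanism: excluded.test.groups defaults to "exhaustive", is wired into surefire and failsafe (both pinned to 2.16, with failsafe promoted from webapp into a shared integration-test runner), and is emptied by the new test-patch profile, which also takes over the rat, checkstyle, findbugs and javancss checks so routine builds skip them. A minimal sketch of the TestNG side of the mechanism (class and method names here are hypothetical):

    import org.testng.annotations.Test;

    // The class-level group marks every public test method as "exhaustive".
    @Test(groups = {"exhaustive"})
    public class ExhaustiveSuiteIT {
        // Skipped when surefire/failsafe run with
        // <excludedGroups>exhaustive</excludedGroups> (the default build);
        // runs again under -Ptest-patch, where excluded.test.groups is empty.
        public void slowEndToEndScenario() {
        }
    }

A plain mvn install therefore leaves these tests and the static checks out, while mvn verify -Ptest-patch (or overriding the property with -Dexcluded.test.groups=) brings them back.
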
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java
----------------------------------------------------------------------
diff --git a/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java b/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java
index 794e585..61ddbdc 100644
--- a/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java
+++ b/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java
@@ -95,12 +95,12 @@ public class OozieProcessMapperTest extends AbstractTestBase {
         Cluster cluster = store.get(EntityType.CLUSTER, "corp");
         ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(hdfsUrl);
         ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY).setEndpoint("thrift://localhost:49083");
-        fs = new Path(hdfsUrl).getFileSystem(new Configuration());
+        fs = new Path(hdfsUrl).getFileSystem(EmbeddedCluster.newConfiguration());
         fs.create(new Path(ClusterHelper.getLocation(cluster, "working"), "libext/PROCESS/ext.jar")).close();
 
         Process process = store.get(EntityType.PROCESS, "clicksummary");
         Path wfpath = new Path(process.getWorkflow().getPath());
-        assert new Path(hdfsUrl).getFileSystem(new Configuration()).mkdirs(wfpath);
+        assert new Path(hdfsUrl).getFileSystem(EmbeddedCluster.newConfiguration()).mkdirs(wfpath);
     }
 
     public void testDefCoordMap(Process process, COORDINATORAPP coord) throws Exception {

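The mapper test now obtains its FileSystem through EmbeddedCluster.newConfiguration() rather than a bare new Configuration(), so the jail:// scheme registered there (see the EmbeddedCluster change below) resolves without any running HDFS.
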
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/test-util/pom.xml
----------------------------------------------------------------------
diff --git a/test-util/pom.xml b/test-util/pom.xml
index 6bd4129..4fe72f6 100644
--- a/test-util/pom.xml
+++ b/test-util/pom.xml
@@ -90,6 +90,11 @@
         </dependency>
 
         <dependency>
+            <groupId>org.apache.falcon</groupId>
+            <artifactId>falcon-hadoop-dependencies</artifactId>
+        </dependency>
+
+        <dependency>
             <groupId>org.testng</groupId>
             <artifactId>testng</artifactId>
         </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
----------------------------------------------------------------------
diff --git a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
index c443e05..2b55407 100644
--- a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
+++ b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
@@ -18,23 +18,21 @@
 
 package org.apache.falcon.cluster.util;
 
-import java.io.File;
-import java.io.IOException;
-import java.security.PrivilegedExceptionAction;
-
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.cluster.Interface;
 import org.apache.falcon.entity.v0.cluster.Interfaces;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
 import org.apache.falcon.entity.v0.cluster.Location;
 import org.apache.falcon.entity.v0.cluster.Locations;
+import org.apache.falcon.hadoop.JailedFileSystem;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.log4j.Logger;
 
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+
 /**
  * A utility class that doles out an embedded Hadoop cluster with DFS and/or MR.
  */
@@ -45,16 +43,26 @@ public class EmbeddedCluster {
     protected EmbeddedCluster() {
     }
 
-    private Configuration conf = new Configuration();
-    private MiniDFSCluster dfsCluster;
+    //private MiniDFSCluster dfsCluster;
+    protected Configuration conf = newConfiguration();
     protected Cluster clusterEntity;
 
     public Configuration getConf() {
         return conf;
     }
 
+    public static Configuration newConfiguration() {
+        Configuration configuration = new Configuration();
+        configuration.set("fs.jail.impl", JailedFileSystem.class.getName());
+        return configuration;
+    }
+
     public static EmbeddedCluster newCluster(final String name) throws Exception {
-        return createClusterAsUser(name);
+        return createClusterAsUser(name, false);
+    }
+
+    public static EmbeddedCluster newCluster(final String name, boolean global) throws Exception {
+        return createClusterAsUser(name, global);
     }
 
     public static EmbeddedCluster newCluster(final String name,
@@ -63,30 +71,16 @@ public class EmbeddedCluster {
         return hdfsUser.doAs(new PrivilegedExceptionAction<EmbeddedCluster>() {
             @Override
             public EmbeddedCluster run() throws Exception {
-                return createClusterAsUser(name);
+                return createClusterAsUser(name, false);
             }
         });
     }
 
-    private static EmbeddedCluster createClusterAsUser(String name) throws IOException {
+    private static EmbeddedCluster createClusterAsUser(String name, boolean global) throws IOException {
         EmbeddedCluster cluster = new EmbeddedCluster();
-        File target = new File("webapp/target");
-        if (!target.exists()) {
-            target = new File("target");
-            System.setProperty("test.build.data", "target/" + name + "/data");
-        } else {
-            System.setProperty("test.build.data", "webapp/target/" + name + "/data");
-        }
-        cluster.conf.set("hadoop.tmp.dir", target.getAbsolutePath());
-        cluster.conf.set("hadoop.log.dir", new File(target, "tmp").getAbsolutePath());
-        cluster.conf.set("hadoop.proxyuser.oozie.groups", "*");
-        cluster.conf.set("hadoop.proxyuser.oozie.hosts", "127.0.0.1");
-        cluster.conf.set("hadoop.proxyuser.hdfs.groups", "*");
-        cluster.conf.set("hadoop.proxyuser.hdfs.hosts", "127.0.0.1");
-        cluster.conf.set("mapreduce.jobtracker.kerberos.principal", "");
-        cluster.conf.set("dfs.namenode.kerberos.principal", "");
-        cluster.dfsCluster = new MiniDFSCluster(cluster.conf, 1, true, null);
-        ProxyUsers.refreshSuperUserGroupsConfiguration(cluster.conf);
+        cluster.conf.set("jail.base", System.getProperty("hadoop.tmp.dir",
+                cluster.conf.get("hadoop.tmp.dir", "/tmp")));
+        cluster.conf.set("fs.default.name", "jail://" + (global ? "global" : name) + ":00");
         String hdfsUrl = cluster.conf.get("fs.default.name");
         LOG.info("Cluster Namenode = " + hdfsUrl);
         cluster.buildClusterObject(name);
@@ -97,7 +91,7 @@ public class EmbeddedCluster {
         return FileSystem.get(conf);
     }
 
-    private void buildClusterObject(String name) {
+    protected void buildClusterObject(String name) {
         clusterEntity = new Cluster();
         clusterEntity.setName(name);
         clusterEntity.setColo("local");
@@ -105,17 +99,16 @@ public class EmbeddedCluster {
 
         Interfaces interfaces = new Interfaces();
         interfaces.getInterfaces().add(newInterface(Interfacetype.WORKFLOW,
-                "http://localhost:11000/oozie", "0.1"));
+                "http://localhost:41000/oozie", "0.1"));
         String fsUrl = conf.get("fs.default.name");
         interfaces.getInterfaces().add(newInterface(Interfacetype.READONLY, fsUrl, "0.1"));
         interfaces.getInterfaces().add(newInterface(Interfacetype.WRITE, fsUrl, "0.1"));
         interfaces.getInterfaces().add(newInterface(Interfacetype.EXECUTE,
-                conf.get("mapred.job.tracker"), "0.1"));
+                "localhost:41021", "0.1"));
         interfaces.getInterfaces().add(
                 newInterface(Interfacetype.REGISTRY, "thrift://localhost:49083", "0.1"));
         interfaces.getInterfaces().add(
                 newInterface(Interfacetype.MESSAGING, "vm://localhost", "0.1"));
-
         clusterEntity.setInterfaces(interfaces);
 
         Location location = new Location();
@@ -125,7 +118,7 @@ public class EmbeddedCluster {
         locs.getLocations().add(location);
         location = new Location();
         location.setName("working");
-        location.setPath("/projects/falcon/working");
+        location.setPath("/project/falcon/working");
         locs.getLocations().add(location);
         clusterEntity.setLocations(locs);
     }
@@ -140,7 +133,7 @@ public class EmbeddedCluster {
     }
 
     public void shutdown() {
-        dfsCluster.shutdown();
+        //dfsCluster.shutdown();
     }
 
     public Cluster getCluster() {

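EmbeddedCluster no longer boots a MiniDFSCluster at all: it points fs.default.name at a jail:// URI served by the new JailedFileSystem, avoiding the NameNode/DataNode startup and proxy-user setup on every test run. A minimal usage sketch, assuming JailedFileSystem confines paths to a local directory under jail.base (its implementation lives in the new falcon-hadoop-dependencies module, which is in another part of this patch):

    import org.apache.falcon.cluster.util.EmbeddedCluster;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class JailedFsExample {
        public static void main(String[] args) throws Exception {
            // newConfiguration() registers fs.jail.impl -> JailedFileSystem
            Configuration conf = EmbeddedCluster.newConfiguration();
            conf.set("fs.default.name", "jail://example:00");

            // Resolves to the jailed filesystem; assumed to operate on
            // local disk under ${jail.base}, so no daemons are required.
            FileSystem fs = FileSystem.get(conf);
            fs.mkdirs(new Path("/projects/falcon/working"));
        }
    }
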
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/test-util/src/main/resources/core-site.xml
----------------------------------------------------------------------
diff --git a/test-util/src/main/resources/core-site.xml b/test-util/src/main/resources/core-site.xml
new file mode 100644
index 0000000..da00644
--- /dev/null
+++ b/test-util/src/main/resources/core-site.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<configuration>
+    <property>
+        <name>fs.jail.impl</name>
+        <value>org.apache.falcon.hadoop.JailedFileSystem</value>
+    </property>
+
+    <property>
+        <name>mapreduce.framework.name</name>
+        <value>unittests</value>
+    </property>
+</configuration>

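This new core-site.xml rides on the test classpath so that even code constructing a plain Configuration picks up the jail scheme mapping. The mapreduce.framework.name value of "unittests" is presumably resolved by a client protocol provider shipped in falcon-hadoop-dependencies, keeping job submission in-process instead of against a real JobTracker or YARN.
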
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/pom.xml
----------------------------------------------------------------------
diff --git a/webapp/pom.xml b/webapp/pom.xml
index 0c2d844..8c37409 100644
--- a/webapp/pom.xml
+++ b/webapp/pom.xml
@@ -143,6 +143,10 @@
                     <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-core</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-test</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
 
@@ -218,19 +222,17 @@
                 <executions>
                     <execution>
                         <id>uber-javadocs</id>
-                        <phase>package</phase>
+                        <phase>site</phase>
                         <goals>
                             <goal>javadoc</goal>
                             <goal>jar</goal>
                         </goals>
                         <configuration>
-                            <skip>${skipCheck}</skip>
                             <includeTransitiveDependencySources>false</includeTransitiveDependencySources>
                             <includeDependencySources>true</includeDependencySources>
                             <dependencySourceIncludes>
                                 <dependencySourceInclude>org.apache.falcon:*</dependencySourceInclude>
                             </dependencySourceIncludes>
-                            <skip>${skipCheck}</skip>
                         </configuration>
                     </execution>
                 </executions>
@@ -317,6 +319,23 @@
                                     <outputDirectory>${project.build.directory}/libext</outputDirectory>
                                     <destFileName>kahadb.jar</destFileName>
                                 </artifactItem>
+                                <artifactItem>
+                                    <groupId>org.apache.falcon</groupId>
+                                    <artifactId>falcon-hadoop-dependencies</artifactId>
+                                    <version>${project.version}</version>
+                                    <overWrite>true</overWrite>
+                                    <outputDirectory>${project.build.directory}/falcon-webapp-${project.version}/WEB-INF/lib</outputDirectory>
+                                    <destFileName>falcon-hadoop-dependencies-${project.version}.jar</destFileName>
+                                </artifactItem>
+                                <artifactItem>
+                                    <groupId>org.apache.pig</groupId>
+                                    <artifactId>pig</artifactId>
+                                    <version>0.11.1</version>
+                                    <type>jar</type>
+                                    <overWrite>false</overWrite>
+                                    <outputDirectory>${project.build.directory}/sharelib</outputDirectory>
+                                    <destFileName>pig.jar</destFileName>
+                                </artifactItem>
                             </artifactItems>
                         </configuration>
                     </execution>
@@ -346,31 +365,6 @@
             </plugin>
 
             <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-failsafe-plugin</artifactId>
-                <version>2.15</version>
-                <configuration>
-                    <redirectTestOutputToFile>true</redirectTestOutputToFile>
-                    <forkMode>always</forkMode>
-                    <argLine>-Djava.security.krb5.realm= -Djava.security.krb5.kdc=</argLine>                    
-                </configuration>
-                <executions>
-                    <execution>
-                        <id>integration-test</id>
-                        <goals>
-                            <goal>integration-test</goal>
-                        </goals>
-                    </execution>
-                    <execution>
-                        <id>verify</id>
-                        <goals>
-                            <goal>verify</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-
-            <plugin>
                 <groupId>org.mortbay.jetty</groupId>
                 <artifactId>maven-jetty-plugin</artifactId>
                 <version>${jetty.version}</version>
@@ -434,7 +428,6 @@
                             <goal>run</goal>
                         </goals>
                         <configuration>
-                            <skip>${skipCheck}</skip>
                             <daemon>true</daemon>
                         </configuration>
                     </execution>
@@ -447,6 +440,7 @@
                     </execution>
                 </executions>
             </plugin>
+
         </plugins>
     </build>
 

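On the webapp side, hadoop-test is excluded so MiniDFSCluster stays off the classpath, the module-private failsafe configuration is deleted in favour of the shared one now in the root pom, and dependency:copy stages the new falcon-hadoop-dependencies jar into the exploded webapp plus pig.jar into a local sharelib directory, apparently to seed the embedded Oozie's sharelib without a download at test time.
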
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/conf/oozie/conf/action-conf/hive.xml
----------------------------------------------------------------------
diff --git a/webapp/src/conf/oozie/conf/action-conf/hive.xml b/webapp/src/conf/oozie/conf/action-conf/hive.xml
index e5aef7d..e734089 100644
--- a/webapp/src/conf/oozie/conf/action-conf/hive.xml
+++ b/webapp/src/conf/oozie/conf/action-conf/hive.xml
@@ -30,7 +30,7 @@
 
     <property>
         <name>fs.default.name</name>
-        <value>hdfs://localhost:41020</value>
+        <value>jail://global:00</value>
     </property>
 
     <!-- Forcing the creation of the db dir under target so mvn clean will clean up -->

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/conf/oozie/conf/hadoop-conf/core-site.xml
----------------------------------------------------------------------
diff --git a/webapp/src/conf/oozie/conf/hadoop-conf/core-site.xml b/webapp/src/conf/oozie/conf/hadoop-conf/core-site.xml
index 35078c7..bc8fa99 100644
--- a/webapp/src/conf/oozie/conf/hadoop-conf/core-site.xml
+++ b/webapp/src/conf/oozie/conf/hadoop-conf/core-site.xml
@@ -36,7 +36,7 @@
 
     <property>
         <name>mapreduce.framework.name</name>
-        <value>yarn</value>
+        <value>unittests</value>
     </property>
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/conf/oozie/conf/oozie-site.xml
----------------------------------------------------------------------
diff --git a/webapp/src/conf/oozie/conf/oozie-site.xml b/webapp/src/conf/oozie/conf/oozie-site.xml
index 48408ba..e5f404a 100644
--- a/webapp/src/conf/oozie/conf/oozie-site.xml
+++ b/webapp/src/conf/oozie/conf/oozie-site.xml
@@ -473,6 +473,15 @@
         </description>
     </property>
 
+    <property>
+   		<name>oozie.service.HadoopAccessorService.supported.filesystems</name>
+   		<value>hdfs,hftp,webhdfs,jail</value>
+   		<description>
+   			Enlist the different filesystems supported for federation. If wildcard "*" is specified,
+   			then ALL file schemes will be allowed.
+   		</description>
+   	</property>
+
     <!-- Proxyuser Configuration -->
     <property>
         <name>oozie.service.ProxyUserService.proxyuser.${user.name}.hosts</name>

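Oozie whitelists filesystem schemes through oozie.service.HadoopAccessorService.supported.filesystems, so the embedded Oozie used by the integration tests needs "jail" added here before it will touch the jailed filesystem.
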
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java b/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java
index c4d6671..9909140 100644
--- a/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java
+++ b/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java
@@ -48,7 +48,7 @@ public class HiveCatalogServiceIT {
     private static final String DATABASE_NAME = "falcon_db";
     private static final String TABLE_NAME = "falcon_table";
     private static final String EXTERNAL_TABLE_NAME = "falcon_external";
-    private static final String EXTERNAL_TABLE_LOCATION = "hdfs://localhost:41020/falcon/staging/falcon_external";
+    private static final String EXTERNAL_TABLE_LOCATION = "jail://global:00/falcon/staging/falcon_external";
 
     private HiveCatalogService hiveCatalogService;
     private HCatClient client;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java b/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
index 4730728..0767a76 100644
--- a/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
+++ b/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
@@ -35,6 +35,7 @@ import java.util.Map;
  *
  * todo: Refactor both the classes to move this methods to helper;
  */
+@Test(groups = {"exhaustive"})
 public class FalconCLIIT {
 
     private InMemoryWriter stream = new InMemoryWriter(System.out);
@@ -47,7 +48,6 @@ public class FalconCLIIT {
         TestContext.prepare();
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testSubmitEntityValidCommands() throws Exception {
 
         FalconCLI.OUT.set(stream);
@@ -60,7 +60,7 @@ public class FalconCLIIT {
         Assert.assertEquals(
                 0,
                 executeWithURL("entity -submit -type cluster -file " + filePath));
-        context.setCluster(filePath);
+        context.setCluster(overlay.get("cluster"));
         Assert.assertEquals(stream.buffer.toString().trim(),
                 "default/Submit successful (cluster) " + context.getClusterName());
 
@@ -90,17 +90,14 @@ public class FalconCLIIT {
                         + overlay.get("processName"));
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testListWithEmptyConfigStore() throws Exception {
         Assert.assertEquals(
                 0,
                 executeWithURL("entity -list -type process "));
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testSubmitAndScheduleEntityValidCommands() throws Exception {
 
-        Thread.sleep(5000);
         String filePath;
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
@@ -109,7 +106,7 @@ public class FalconCLIIT {
         Assert.assertEquals(-1,
                 executeWithURL("entity -submitAndSchedule -type cluster -file "
                         + filePath));
-        context.setCluster(filePath);
+        context.setCluster(overlay.get("cluster"));
 
         filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,
@@ -131,11 +128,8 @@ public class FalconCLIIT {
         Assert.assertEquals(0,
                 executeWithURL("entity -submitAndSchedule -type process -file "
                         + filePath));
-
-        Thread.sleep(5000);
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testValidateValidCommands() throws Exception {
 
         String filePath;
@@ -146,11 +140,11 @@ public class FalconCLIIT {
         Assert.assertEquals(0,
                 executeWithURL("entity -validate -type cluster -file "
                         + filePath));
-        context.setCluster(filePath);
+        context.setCluster(overlay.get("cluster"));
         Assert.assertEquals(
                 0,
                 executeWithURL("entity -submit -type cluster -file " + filePath));
-        context.setCluster(filePath);
+        context.setCluster(overlay.get("cluster"));
 
         filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,
@@ -168,13 +162,12 @@ public class FalconCLIIT {
         Assert.assertEquals(0,
                 executeWithURL("entity -validate -type process -file "
                         + filePath));
+
         Assert.assertEquals(
                 0,
                 executeWithURL("entity -submit -type process -file " + filePath));
-
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testDefinitionEntityValidCommands() throws Exception {
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
@@ -200,7 +193,6 @@ public class FalconCLIIT {
 
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testScheduleEntityValidCommands() throws Exception {
 
         TestContext context = new TestContext();
@@ -222,10 +214,8 @@ public class FalconCLIIT {
 
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testSuspendResumeStatusEntityValidCommands() throws Exception {
 
-        Thread.sleep(5000);
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
         submitTestFiles(context, overlay);
@@ -291,15 +281,12 @@ public class FalconCLIIT {
                 executeWithURL("entity -status -type process -name "
                         + overlay.get("processName")));
 
-        Thread.sleep(5000);
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testSubCommandPresence() throws Exception {
         Assert.assertEquals(-1, executeWithURL("entity -type cluster "));
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testDeleteEntityValidCommands() throws Exception {
 
         TestContext context = new TestContext();
@@ -338,7 +325,6 @@ public class FalconCLIIT {
 
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testInvalidCLIEntitycommands() throws Exception {
 
         TestContext context = new TestContext();
@@ -351,7 +337,6 @@ public class FalconCLIIT {
                 executeWithURL("entity -schedule -type feed -file " + "name"));
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testInstanceRunningAndStatusCommands() throws Exception {
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
@@ -381,9 +366,7 @@ public class FalconCLIIT {
                         + " -start " + START_INSTANCE));
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testInstanceSuspendAndResume() throws Exception {
-        Thread.sleep(5000);
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
         submitTestFiles(context, overlay);
@@ -402,12 +385,10 @@ public class FalconCLIIT {
                 executeWithURL("instance -resume -type process -name "
                         + overlay.get("processName")
                         + " -start " + START_INSTANCE + " -end " + START_INSTANCE));
-        Thread.sleep(5000);
     }
 
     private static final String START_INSTANCE = "2012-04-20T00:00Z";
 
-    @Test(enabled = TEST_ENABLED)
     public void testInstanceKillAndRerun() throws Exception {
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
@@ -432,7 +413,6 @@ public class FalconCLIIT {
                         + createTempJobPropertiesFile()));
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testContinue() throws Exception {
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
@@ -456,7 +436,6 @@ public class FalconCLIIT {
                         + " -start " + START_INSTANCE));
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testInvalidCLIInstanceCommands() throws Exception {
         // no command
         Assert.assertEquals(-1, executeWithURL(" -kill -type process -name "
@@ -475,7 +454,6 @@ public class FalconCLIIT {
 
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testFalconURL() throws Exception {
         Assert.assertEquals(-1, new FalconCLI()
                 .run(("instance -status -type process -name " + "processName"
@@ -491,7 +469,6 @@ public class FalconCLIIT {
 
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testClientProperties() throws Exception {
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
@@ -510,7 +487,6 @@ public class FalconCLIIT {
 
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testGetVersion() throws Exception {
         Assert.assertEquals(0,
                 new FalconCLI().run("admin -version".split("\\s")));
@@ -519,7 +495,6 @@ public class FalconCLIIT {
                 new FalconCLI().run("admin -stack".split("\\s")));
     }
 
-    @Test(enabled = TEST_ENABLED)
     public void testInstanceGetLogs() throws Exception {
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
@@ -536,7 +511,6 @@ public class FalconCLIIT {
 
     }
 
-
     private int executeWithURL(String command) throws Exception {
         return new FalconCLI()
                 .run((command + " -url " + TestContext.BASE_URL).split("\\s+"));
@@ -554,14 +528,14 @@ public class FalconCLIIT {
         return tmpFile.getAbsolutePath();
     }
 
-    public void submitTestFiles(TestContext context, Map<String, String> overlay) throws Exception {
+    private void submitTestFiles(TestContext context, Map<String, String> overlay) throws Exception {
 
         String filePath = context.overlayParametersOverTemplate(context.getClusterFileTemplate(),
                 overlay);
         Assert.assertEquals(
                 0,
                 executeWithURL("entity -submit -type cluster -file " + filePath));
-        context.setCluster(filePath);
+        context.setCluster(overlay.get("cluster"));
 
         filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,

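The per-method @Test(enabled = TEST_ENABLED) annotations collapse into a single class-level @Test(groups = {"exhaustive"}), and the Thread.sleep(5000) padding disappears. Since the root pom excludes the "exhaustive" group by default, this entire CLI suite now runs only under the test-patch profile. The test context is also keyed by the cluster name from the overlay rather than by the template file path.
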
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java b/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java
new file mode 100644
index 0000000..55f240f
--- /dev/null
+++ b/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.cli;
+
+import org.apache.falcon.resource.TestContext;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.util.Map;
+
+/**
+ * Smoke Test for Falcon CLI.
+ */
+public class FalconCLISmokeIT {
+
+    private static final String START_INSTANCE = "2012-04-20T00:00Z";
+
+    @BeforeClass
+    public void prepare() throws Exception {
+        TestContext.prepare();
+    }
+
+    @Test
+    public void testSubmitAndScheduleEntityValidCommands() throws Exception {
+
+        String filePath;
+        TestContext context = new TestContext();
+        Map<String, String> overlay = context.getUniqueOverlay();
+
+        filePath = context.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay);
+        Assert.assertEquals(-1,
+                executeWithURL("entity -submitAndSchedule -type cluster -file "
+                        + filePath));
+        context.setCluster(overlay.get("cluster"));
+
+        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        Assert.assertEquals(0,
+                executeWithURL("entity -submitAndSchedule -type feed -file "
+                        + filePath));
+        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
+        Assert.assertEquals(0,
+                executeWithURL("entity -submitAndSchedule -type feed -file "
+                        + filePath));
+        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        Assert.assertEquals(0,
+                executeWithURL("entity -submit -type feed -file " + filePath));
+
+        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
+        Assert.assertEquals(0,
+                executeWithURL("entity -submit -type feed -file " + filePath));
+
+        filePath = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        Assert.assertEquals(0,
+                executeWithURL("entity -validate -type process -file "
+                        + filePath));
+
+        filePath = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        Assert.assertEquals(0,
+                executeWithURL("entity -submitAndSchedule -type process -file "
+                        + filePath));
+
+        context.waitForProcessWFtoStart();
+
+        Assert.assertEquals(0,
+                executeWithURL("entity -definition -type cluster -name "
+                        + overlay.get("cluster")));
+
+        Assert.assertEquals(0,
+                executeWithURL("instance -status -type feed -name "
+                        + overlay.get("outputFeedName")
+                        + " -start " + START_INSTANCE));
+
+        Assert.assertEquals(0,
+                executeWithURL("instance -running -type process -name "
+                        + overlay.get("processName")));
+
+    }
+
+    private int executeWithURL(String command) throws Exception {
+        return new FalconCLI()
+                .run((command + " -url " + TestContext.BASE_URL).split("\\s+"));
+    }
+}

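In exchange, the new FalconCLISmokeIT keeps a single end-to-end submitAndSchedule scenario in the default build, so every build still exercises the CLI round trip without paying for the full exhaustive suite.
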
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java b/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java
index 9b672f4..37226e2 100644
--- a/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java
+++ b/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java
@@ -71,7 +71,7 @@ public class TableStorageFeedEvictorIT {
     private static final String DATABASE_NAME = "falcon_db";
     private static final String TABLE_NAME = "clicks";
     private static final String EXTERNAL_TABLE_NAME = "clicks_external";
-    private static final String STORAGE_URL = "hdfs://localhost:41020";
+    private static final String STORAGE_URL = "jail://global:00";
     private static final String EXTERNAL_TABLE_LOCATION = STORAGE_URL + "/falcon/staging/clicks_external/";
 
     private final InMemoryWriter stream = new InMemoryWriter(System.out);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/logging/LogMoverIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/logging/LogMoverIT.java b/webapp/src/test/java/org/apache/falcon/logging/LogMoverIT.java
deleted file mode 100644
index e3cd914..0000000
--- a/webapp/src/test/java/org/apache/falcon/logging/LogMoverIT.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.falcon.logging;
-
-import org.apache.falcon.FalconException;
-import org.apache.falcon.cluster.util.EmbeddedCluster;
-import org.apache.falcon.cluster.util.StandAloneCluster;
-import org.apache.falcon.entity.ClusterHelper;
-import org.apache.falcon.entity.EntityUtil;
-import org.apache.falcon.entity.parser.ProcessEntityParser;
-import org.apache.falcon.entity.store.ConfigurationStore;
-import org.apache.falcon.entity.v0.EntityType;
-import org.apache.falcon.entity.v0.process.Process;
-import org.apache.falcon.resource.TestContext;
-import org.apache.falcon.security.CurrentUser;
-import org.apache.falcon.util.StartupProperties;
-import org.apache.falcon.workflow.engine.OozieWorkflowEngine;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.oozie.client.OozieClient;
-import org.apache.oozie.client.WorkflowJob;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test for LogMover.
- * Requires Oozie to be running on localhost.
- */
-@Test
-public class LogMoverIT {
-
-    private static final ConfigurationStore STORE = ConfigurationStore.get();
-    private static final String PROCESS_NAME = "testProcess" + System.currentTimeMillis();
-    private static EmbeddedCluster testCluster = null;
-    private static Process testProcess = null;
-    private static FileSystem fs;
-
-    @BeforeClass
-    public void setup() throws Exception {
-        Map<String, String> overlay = new HashMap<String, String>();
-        overlay.put("cluster", "testCluster");
-        TestContext context = new TestContext();
-        String file = context.
-                overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
-        testCluster = StandAloneCluster.newCluster(file);
-        STORE.publish(EntityType.CLUSTER, testCluster.getCluster());
-/*
-        new File("target/libs").mkdirs();
-        StartupProperties.get().setProperty("system.lib.location", "target/libs");
-        SharedLibraryHostingService listener = new SharedLibraryHostingService();
-        listener.onAdd(testCluster.getCluster());
-*/
-        fs = FileSystem.get(testCluster.getConf());
-        fs.mkdirs(new Path("/workflow/lib"));
-
-        fs.copyFromLocalFile(
-                new Path(LogMoverIT.class.getResource(
-                        "/org/apache/falcon/logging/workflow.xml").toURI()),
-                new Path("/workflow"));
-        fs.copyFromLocalFile(
-                new Path(LogMoverIT.class.getResource(
-                        "/org/apache/falcon/logging/java-test.jar").toURI()),
-                new Path("/workflow/lib"));
-
-        testProcess = new ProcessEntityParser().parse(LogMoverIT.class
-                .getResourceAsStream("/org/apache/falcon/logging/process.xml"));
-        testProcess.setName(PROCESS_NAME);
-    }
-
-    @AfterClass
-    public void tearDown() {
-        testCluster.shutdown();
-    }
-
-    @Test (enabled = false)
-    public void testLogMover() throws Exception {
-        CurrentUser.authenticate(System.getProperty("user.name"));
-        OozieWorkflowEngine engine = new OozieWorkflowEngine();
-        String path = StartupProperties.get().getProperty("system.lib.location");
-        if (!new File("target/libs").exists()) {
-            Assert.assertTrue(new File("target/libs").mkdirs());
-        }
-        StartupProperties.get().setProperty("system.lib.location", "target/libs");
-        engine.schedule(testProcess);
-        StartupProperties.get().setProperty("system.lib.location", path);
-
-        OozieClient client = new OozieClient(
-                ClusterHelper.getOozieUrl(testCluster.getCluster()));
-        List<WorkflowJob> jobs;
-        while (true) {
-            jobs = client.getJobsInfo(OozieClient.FILTER_NAME + "="
-                    + "FALCON_PROCESS_DEFAULT_" + PROCESS_NAME);
-            if (jobs.size() > 0) {
-                break;
-            } else {
-                Thread.sleep(1000);
-            }
-        }
-
-        WorkflowJob job = jobs.get(0);
-        while (true) {
-            if (!(job.getStatus() == WorkflowJob.Status.RUNNING || job
-                    .getStatus() == WorkflowJob.Status.PREP)) {
-                break;
-            } else {
-                Thread.sleep(1000);
-                job = client.getJobInfo(job.getId());
-            }
-        }
-
-        Path oozieLogPath = new Path(getLogPath(),
-                "job-2010-01-01-01-00/000/oozie.log");
-        Assert.assertTrue(fs.exists(oozieLogPath));
-
-        testLogMoverWithNextRunId(job.getId());
-        testLogMoverWithNextRunIdWithEngine(job.getId());
-    }
-
-    private Path getLogPath() throws FalconException {
-        Path stagingPath = EntityUtil.getLogPath(testCluster.getCluster(), testProcess);
-        return new Path(ClusterHelper.getStorageUrl(testCluster
-                .getCluster()), stagingPath);
-    }
-
-    private void testLogMoverWithNextRunId(String jobId) throws Exception {
-        LogMover.main(new String[]{"-workflowEngineUrl",
-                                   ClusterHelper.getOozieUrl(testCluster.getCluster()),
-                                   "-subflowId", jobId + "@user-workflow", "-runId", "1",
-                                   "-logDir", getLogPath().toString() + "/job-2010-01-01-01-00",
-                                   "-status", "SUCCEEDED", "-entityType", "process", });
-
-        Path oozieLogPath = new Path(getLogPath(),
-                "job-2010-01-01-01-00/001/oozie.log");
-        Assert.assertTrue(fs.exists(oozieLogPath));
-    }
-
-    private void testLogMoverWithNextRunIdWithEngine(String jobId) throws Exception {
-        LogMover.main(new String[]{"-workflowEngineUrl",
-                                   ClusterHelper.getOozieUrl(testCluster.getCluster()),
-                                   "-subflowId", jobId + "@user-workflow", "-runId", "1",
-                                   "-logDir", getLogPath().toString() + "/job-2010-01-01-01-00",
-                                   "-status", "SUCCEEDED", "-entityType", "process",
-                                   "-userWorkflowEngine", "oozie", });
-
-        Path oozieLogPath = new Path(getLogPath(),
-                "job-2010-01-01-01-00/001/oozie.log");
-        Assert.assertTrue(fs.exists(oozieLogPath));
-    }
-}
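
LogMoverIT is removed outright rather than trimmed: its own javadoc required
Oozie to be running on localhost, its only test method was already disabled,
and both of its while (true) polling loops had no upper bound, so a stuck job
could hang the build indefinitely. If similar polling is ever reintroduced, a
bounded variant is safer; a minimal sketch against the same OozieClient API:

    // Poll with a fixed budget instead of while (true); the 60 second
    // deadline is an arbitrary illustration, not a Falcon convention.
    long deadline = System.currentTimeMillis() + 60 * 1000;
    List<WorkflowJob> jobs = client.getJobsInfo(
            OozieClient.FILTER_NAME + "=FALCON_PROCESS_DEFAULT_" + PROCESS_NAME);
    while (jobs.isEmpty() && System.currentTimeMillis() < deadline) {
        Thread.sleep(1000);
        jobs = client.getJobsInfo(
                OozieClient.FILTER_NAME + "=FALCON_PROCESS_DEFAULT_" + PROCESS_NAME);
    }
    Assert.assertFalse(jobs.isEmpty(), "workflow did not start before the deadline");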

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/logging/LogProviderIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/logging/LogProviderIT.java b/webapp/src/test/java/org/apache/falcon/logging/LogProviderIT.java
deleted file mode 100644
index 4c3ce97..0000000
--- a/webapp/src/test/java/org/apache/falcon/logging/LogProviderIT.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.falcon.logging;
-
-import org.apache.falcon.FalconException;
-import org.apache.falcon.cluster.util.EmbeddedCluster;
-import org.apache.falcon.cluster.util.StandAloneCluster;
-import org.apache.falcon.entity.parser.ProcessEntityParser;
-import org.apache.falcon.entity.store.ConfigurationStore;
-import org.apache.falcon.entity.v0.EntityType;
-import org.apache.falcon.entity.v0.process.Process;
-import org.apache.falcon.resource.InstancesResult.Instance;
-import org.apache.falcon.resource.InstancesResult.InstanceAction;
-import org.apache.falcon.resource.InstancesResult.WorkflowStatus;
-import org.apache.falcon.resource.TestContext;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Test for LogProvider.
- */
-public class LogProviderIT {
-
-    private static final ConfigurationStore STORE = ConfigurationStore.get();
-    private static EmbeddedCluster testCluster = null;
-    private static Process testProcess = null;
-    private static final String PROCESS_NAME = "testProcess";
-    private static FileSystem fs;
-    private Instance instance;
-
-    @BeforeClass
-    public void setup() throws Exception {
-        Map<String, String> overlay = new HashMap<String, String>();
-        overlay.put("cluster", "logProviderTest");
-        overlay.put("colo", "gs");
-        TestContext context = new TestContext();
-        String file = context.
-                overlayParametersOverTemplate(context.CLUSTER_TEMPLATE, overlay);
-        testCluster = StandAloneCluster.newCluster(file);
-        cleanupStore();
-        STORE.publish(EntityType.CLUSTER, testCluster.getCluster());
-        fs = FileSystem.get(testCluster.getConf());
-        Path instanceLogPath = new Path(
-                "/projects/falcon/staging/falcon/workflows/process/" + PROCESS_NAME
-                        + "/logs/job-2010-01-01-01-00/000");
-        fs.mkdirs(instanceLogPath);
-        fs.createNewFile(new Path(instanceLogPath, "oozie.log"));
-        fs.createNewFile(new Path(instanceLogPath, "pigAction_SUCCEEDED.log"));
-        fs.createNewFile(new Path(instanceLogPath, "mr_Action_FAILED.log"));
-        fs.createNewFile(new Path(instanceLogPath, "mr_Action2_SUCCEEDED.log"));
-
-        fs.mkdirs(new Path("/projects/falcon/staging/falcon/workflows/process/"
-                + PROCESS_NAME + "/logs/job-2010-01-01-01-00/001"));
-        fs.mkdirs(new Path("/projects/falcon/staging/falcon/workflows/process/"
-                + PROCESS_NAME + "/logs/job-2010-01-01-01-00/002"));
-        Path run3 = new Path("/projects/falcon/staging/falcon/workflows/process/"
-                + PROCESS_NAME + "/logs/job-2010-01-01-01-00/003");
-        fs.mkdirs(run3);
-        fs.createNewFile(new Path(run3, "oozie.log"));
-
-        testProcess = new ProcessEntityParser().parse(LogProviderIT.class
-                .getResourceAsStream("/org/apache/falcon/logging/process.xml"));
-        testProcess.setName(PROCESS_NAME);
-        STORE.publish(EntityType.PROCESS, testProcess);
-    }
-
-    @BeforeMethod
-    public void setInstance() {
-        instance = new Instance();
-        instance.status = WorkflowStatus.SUCCEEDED;
-        instance.instance = "2010-01-01T01:00Z";
-        instance.cluster = "logProviderTest";
-        instance.logFile = "http://localhost:41000/oozie/wflog";
-    }
-
-    private void cleanupStore() throws FalconException {
-        for (EntityType type : EntityType.values()) {
-            Collection<String> entities = STORE.getEntities(type);
-            for (String entity : entities) {
-                STORE.remove(type, entity);
-            }
-        }
-    }
-
-    @Test
-    public void testLogProviderWithValidRunId() throws FalconException {
-        LogProvider provider = new LogProvider();
-        Instance instanceWithLog = provider.populateLogUrls(testProcess,
-                instance, "0");
-        Assert.assertEquals(
-                instance.logFile,
-                "http://localhost:50070/data/projects/falcon/staging/falcon/workflows/process/testProcess/logs/"
-                        + "job-2010-01-01-01-00/000/oozie.log");
-
-        InstanceAction action = instanceWithLog.actions[0];
-        Assert.assertEquals(action.action, "mr_Action2");
-        Assert.assertEquals(action.status, "SUCCEEDED");
-        Assert.assertEquals(
-                action.logFile,
-                "http://localhost:50070/data/projects/falcon/staging/falcon/workflows/process/testProcess/logs/"
-                        + "job-2010-01-01-01-00/000/mr_Action2_SUCCEEDED.log");
-
-        action = instanceWithLog.actions[1];
-        Assert.assertEquals(action.action, "mr_Action");
-        Assert.assertEquals(action.status, "FAILED");
-        Assert.assertEquals(
-                action.logFile,
-                "http://localhost:50070/data/projects/falcon/staging/falcon/workflows/process/testProcess/logs/"
-                        + "job-2010-01-01-01-00/000/mr_Action_FAILED.log");
-    }
-
-    @Test
-    public void testLogProviderWithInvalidRunId() throws FalconException {
-        LogProvider provider = new LogProvider();
-        provider.populateLogUrls(testProcess, instance, "x");
-        Assert.assertEquals(instance.logFile,
-                "http://localhost:41000/oozie/wflog");
-    }
-
-    @Test
-    public void testLogProviderWithUnavailableRunId() throws FalconException {
-        LogProvider provider = new LogProvider();
-        instance.logFile = null;
-        provider.populateLogUrls(testProcess, instance, "7");
-        Assert.assertEquals(instance.logFile, "-");
-    }
-
-    @Test
-    public void testLogProviderWithEmptyRunId() throws FalconException {
-        LogProvider provider = new LogProvider();
-        instance.logFile = null;
-        provider.populateLogUrls(testProcess, instance, null);
-        Assert.assertEquals(
-                instance.logFile,
-                "http://localhost:50070/data/projects/falcon/staging/falcon/workflows/process/testProcess/logs/"
-                        + "job-2010-01-01-01-00/003/oozie.log");
-    }
-}
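
LogProviderIT goes the same way. For reference, its assertions pinned down how
a run id maps onto a log directory; distilled into a sketch (the names and
shape are illustrative, this is not LogProvider's actual code):

    // Behaviour the deleted tests encoded:
    //   "0"          -> run dir "000"
    //   null / ""    -> latest existing run dir (e.g. "003" above)
    //   non-numeric ("x") -> keep the instance's original log URL
    //   valid id with no matching dir ("7") -> logFile is set to "-"
    static String runDir(String runId, int latestRun) {
        if (runId == null || runId.isEmpty()) {
            return String.format("%03d", latestRun);
        }
        try {
            return String.format("%03d", Integer.parseInt(runId));
        } catch (NumberFormatException e) {
            return null;   // caller falls back to the original log URL
        }
    }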

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java b/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java
index 58ae4ba..1f4e9e8 100644
--- a/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java
+++ b/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java
@@ -58,8 +58,8 @@ public class PigProcessIT {
 
         overlay = context.getUniqueOverlay();
 
-        String filePath = context.overlayParametersOverTemplate(CLUSTER_TEMPLATE, overlay);
-        context.setCluster(filePath);
+        String filePath = context.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
+        context.setCluster(overlay.get("cluster"));
 
         final Cluster cluster = context.getCluster().getCluster();
         final String storageUrl = ClusterHelper.getStorageUrl(cluster);
@@ -88,7 +88,7 @@ public class PigProcessIT {
     public void testSubmitAndSchedulePigProcess() throws Exception {
         overlay.put("cluster", "primary-cluster");
 
-        String filePath = context.overlayParametersOverTemplate(CLUSTER_TEMPLATE, overlay);
+        String filePath = context.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
         // context.setCluster(filePath);
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java b/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java
index aa059bd..1ceaabf 100644
--- a/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java
+++ b/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java
@@ -17,10 +17,15 @@
  */
 package org.apache.falcon.resource;
 
-import java.io.*;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
 import java.util.regex.Pattern;
 
 import javax.servlet.ServletInputStream;
@@ -28,11 +33,16 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import javax.xml.bind.JAXBException;
 
+import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
 import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.SchemaHelper;
-import org.apache.falcon.entity.v0.feed.*;
+import org.apache.falcon.entity.v0.feed.Cluster;
+import org.apache.falcon.entity.v0.feed.Feed;
+import org.apache.falcon.entity.v0.feed.Location;
+import org.apache.falcon.entity.v0.feed.LocationType;
+import org.apache.falcon.entity.v0.feed.Locations;
 import org.apache.falcon.entity.v0.process.Input;
 import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.entity.v0.process.Property;
@@ -44,7 +54,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
 import org.apache.oozie.client.BundleJob;
-import org.apache.oozie.client.CoordinatorJob;
 import org.apache.oozie.client.Job;
 import org.apache.oozie.client.Job.Status;
 import org.apache.oozie.client.OozieClient;
@@ -53,13 +62,17 @@ import org.testng.annotations.AfterMethod;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
-import com.sun.jersey.api.client.ClientResponse;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringReader;
 
 /**
  * Test class for Entity REST APIs.
  *
  * Tests should be enabled only in local environments as they need running instance of the web server.
  */
+@Test(groups = {"exhaustive"})
 public class EntityManagerJerseyIT {
 
     private static final int ONE_HR = 2 * 24 * 60 * 60 * 1000;
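
The class-level @Test(groups = {"exhaustive"}) added above is what makes the
per-method @Test removals in the hunks below safe: TestNG treats every public
method of an annotated class as a test and applies the class's groups to each
of them. Fast builds can then skip the whole suite by excluding the group; a
minimal sketch (class and method names are hypothetical):

    @Test(groups = {"exhaustive"})
    public class ExampleIT {
        // No method-level @Test needed; TestNG picks this up automatically
        // and tags it with the "exhaustive" group.
        public void runsOnlyInExhaustiveBuilds() { }
    }

    // Skipping the group with maven-surefire, for example:
    //   mvn test -DexcludedGroups=exhaustive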
@@ -69,7 +82,7 @@ public class EntityManagerJerseyIT {
         TestContext.prepare();
     }
 
-    private void assertLibs(FileSystem fs, Path path) throws IOException {
+    static void assertLibs(FileSystem fs, Path path) throws IOException {
         FileStatus[] libs = fs.listStatus(path);
         Assert.assertNotNull(libs);
         Assert.assertEquals(libs.length, 1);
@@ -102,7 +115,7 @@ public class EntityManagerJerseyIT {
         String tmpFileName = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Feed feed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(new File(tmpFileName));
         Location location = new Location();
-        location.setPath("fsext://localhost:41020/falcon/test/input/${YEAR}/${MONTH}/${DAY}/${HOUR}");
+        location.setPath("fsext://global:00/falcon/test/input/${YEAR}/${MONTH}/${DAY}/${HOUR}");
         location.setType(LocationType.DATA);
         Cluster cluster = feed.getClusters().getClusters().get(0);
         cluster.setLocations(new Locations());
@@ -138,7 +151,8 @@ public class EntityManagerJerseyIT {
         Map<String, String> overlay = context.getUniqueOverlay();
         String tmpFileName = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Process process = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName));
-        updateEndtime(process);
+        Validity processValidity = process.getClusters().getClusters().get(0).getValidity();
+        processValidity.setEnd(new Date(new Date().getTime() + 2 * 24 * 60 * 60 * 1000));
         File tmpFile = context.getTempFile();
         EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
         context.scheduleProcess(tmpFile.getAbsolutePath(), overlay);
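
Here and in later hunks the patch inlines what appear to have been small
private helpers (updateEndtime, getDefinition, update) at each call site. The
two added lines above are equivalent to roughly this helper (a sketch; the
removed method's actual body is not shown in this diff):

    private static void updateEndtime(Process process) {
        Validity validity = process.getClusters().getClusters().get(0).getValidity();
        // Push the end time two days out so the coordinator keeps running.
        validity.setEnd(new Date(System.currentTimeMillis() + 2 * 24 * 60 * 60 * 1000));
    }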
@@ -148,11 +162,22 @@ public class EntityManagerJerseyIT {
         Assert.assertEquals(bundles.size(), 1);
         Assert.assertEquals(bundles.get(0).getUser(), TestContext.REMOTE_USER);
 
-        Feed feed = (Feed) getDefinition(context, EntityType.FEED, context.outputFeedName);
+        ClientResponse response = context.service.path("api/entities/definition/feed/"
+                + context.outputFeedName).header(
+                "Remote-User", TestContext.REMOTE_USER)
+                .accept(MediaType.TEXT_XML).get(ClientResponse.class);
+        Feed feed = (Feed) EntityType.FEED.getUnmarshaller()
+                .unmarshal(new StringReader(response.getEntity(String.class)));
 
         //change output feed path and update feed as another user
         feed.getLocations().getLocations().get(0).setPath("/falcon/test/output2/${YEAR}/${MONTH}/${DAY}");
-        update(context, feed);
+        tmpFile = context.getTempFile();
+        EntityType.FEED.getMarshaller().marshal(feed, tmpFile);
+        response = context.service.path("api/entities/update/feed/"
+                + context.outputFeedName).header("Remote-User",
+                TestContext.REMOTE_USER).accept(MediaType.TEXT_XML)
+                .post(ClientResponse.class, context.getServletInputStream(tmpFile.getAbsolutePath()));
+        context.assertSuccessful(response);
 
         bundles = context.getBundles();
         Assert.assertEquals(bundles.size(), 2);
@@ -176,7 +201,6 @@ public class EntityManagerJerseyIT {
         contexts.remove();
     }
 
-    @Test(enabled = false)
     public void testOptionalInput() throws Exception {
         TestContext context = newContext();
         Map<String, String> overlay = context.getUniqueOverlay();
@@ -200,7 +224,6 @@ public class EntityManagerJerseyIT {
         context.waitForWorkflowStart(context.processName);
     }
 
-    @Test
     public void testProcessDeleteAndSchedule() throws Exception {
         //Submit process with invalid property so that coord submit fails and bundle goes to failed state
         TestContext context = newContext();
@@ -214,7 +237,7 @@ public class EntityManagerJerseyIT {
         File tmpFile = context.getTempFile();
         EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
         context.scheduleProcess(tmpFile.getAbsolutePath(), overlay);
-        context.waitForBundleStart(Status.FAILED);
+        context.waitForBundleStart(Status.FAILED, Status.KILLED);
 
         //Delete and re-submit the process with correct workflow
         ClientResponse clientRepsonse = context.service.path("api/entities/delete/process/"
@@ -267,12 +290,18 @@ public class EntityManagerJerseyIT {
         OozieClient ozClient = context.getOozieClient();
         String coordId = ozClient.getBundleJobInfo(bundles.get(0).getId()).getCoordinators().get(0).getId();
 
-        Process process = (Process) getDefinition(context, EntityType.PROCESS, context.processName);
+        ClientResponse response = context.service.path("api/entities/definition/process/"
+                + context.processName).header(
+                "Remote-User", TestContext.REMOTE_USER)
+                .accept(MediaType.TEXT_XML).get(ClientResponse.class);
+        Process process = (Process) EntityType.PROCESS.getUnmarshaller()
+                .unmarshal(new StringReader(response.getEntity(String.class)));
+
         String feed3 = "f3" + System.currentTimeMillis();
         Map<String, String> overlay = new HashMap<String, String>();
         overlay.put("inputFeedName", feed3);
         overlay.put("cluster", context.clusterName);
-        ClientResponse response = context.submitToFalcon(TestContext.FEED_TEMPLATE1, overlay, EntityType.FEED);
+        response = context.submitToFalcon(TestContext.FEED_TEMPLATE1, overlay, EntityType.FEED);
         context.assertSuccessful(response);
 
         Input input = new Input();
@@ -282,34 +311,48 @@ public class EntityManagerJerseyIT {
         input.setEnd("today(20,20)");
         process.getInputs().getInputs().add(input);
 
-        Date endTime = getEndTime();
-        updateEndtime(process);
-        update(context, process, endTime);
+        Validity processValidity = process.getClusters().getClusters().get(0).getValidity();
+        processValidity.setEnd(new Date(new Date().getTime() + 2 * 24 * 60 * 60 * 1000));
+        File tmpFile = context.getTempFile();
+        EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
+        response = context.service.path("api/entities/update/process/"
+                + context.processName).header("Remote-User",
+                TestContext.REMOTE_USER).accept(MediaType.TEXT_XML)
+                .post(ClientResponse.class, context.getServletInputStream(tmpFile.getAbsolutePath()));
+        context.assertSuccessful(response);
 
         //Assert that update creates new bundle and old coord is running
         bundles = context.getBundles();
         Assert.assertEquals(bundles.size(), 2);
-        CoordinatorJob coord = ozClient.getCoordJobInfo(coordId);
-        Assert.assertEquals(coord.getStatus(), Status.RUNNING);
-        Assert.assertEquals(coord.getEndTime(), endTime);
+        Assert.assertEquals(ozClient.getCoordJobInfo(coordId).getStatus(), Status.RUNNING);
     }
 
-    @Test
     public void testProcessEndtimeUpdate() throws Exception {
         TestContext context = newContext();
         context.scheduleProcess();
         context.waitForBundleStart(Job.Status.RUNNING);
 
-        Process process = (Process) getDefinition(context, EntityType.PROCESS, context.processName);
-        updateEndtime(process);
-        update(context, process);
+        ClientResponse response = context.service.path("api/entities/definition/process/"
+                + context.processName).header(
+                "Remote-User", TestContext.REMOTE_USER)
+                .accept(MediaType.TEXT_XML).get(ClientResponse.class);
+        Process process = (Process) EntityType.PROCESS.getUnmarshaller()
+                .unmarshal(new StringReader(response.getEntity(String.class)));
+
+        Validity processValidity = process.getClusters().getClusters().get(0).getValidity();
+        processValidity.setEnd(new Date(new Date().getTime() + 60 * 60 * 1000));
+        File tmpFile = context.getTempFile();
+        EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
+        response = context.service.path("api/entities/update/process/" + context.processName).header("Remote-User",
+                TestContext.REMOTE_USER).accept(MediaType.TEXT_XML)
+                .post(ClientResponse.class, context.getServletInputStream(tmpFile.getAbsolutePath()));
+        context.assertSuccessful(response);
 
         //Assert that update does not create new bundle
         List<BundleJob> bundles = context.getBundles();
         Assert.assertEquals(bundles.size(), 1);
     }
 
-    @Test
     public void testStatus() throws Exception {
         TestContext context = newContext();
         ClientResponse response;
@@ -332,7 +375,6 @@ public class EntityManagerJerseyIT {
 
     }
 
-    @Test
     public void testIdempotentSubmit() throws Exception {
         TestContext context = newContext();
         ClientResponse response;
@@ -345,7 +387,6 @@ public class EntityManagerJerseyIT {
         context.assertSuccessful(response);
     }
 
-    @Test
     public void testNotFoundStatus() {
         TestContext context = newContext();
         ClientResponse response;
@@ -358,7 +399,6 @@ public class EntityManagerJerseyIT {
         Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
     }
 
-    @Test
     public void testVersion() {
         TestContext context = newContext();
         ClientResponse response;
@@ -379,7 +419,6 @@ public class EntityManagerJerseyIT {
                 "No deploy.mode found in /api/admin/version");
     }
 
-    @Test
     public void testValidate() {
         TestContext context = newContext();
         ServletInputStream stream = context.getServletInputStream(getClass().
@@ -394,7 +433,6 @@ public class EntityManagerJerseyIT {
         context.assertFailure(clientRepsonse);
     }
 
-    @Test
     public void testClusterValidate() throws Exception {
         TestContext context = newContext();
         ClientResponse clientRepsonse;
@@ -410,7 +448,6 @@ public class EntityManagerJerseyIT {
         context.assertSuccessful(clientRepsonse);
     }
 
-    @Test
     public void testClusterSubmitScheduleSuspendResumeDelete() throws Exception {
         TestContext context = newContext();
         ClientResponse clientRepsonse;
@@ -448,7 +485,6 @@ public class EntityManagerJerseyIT {
         context.assertSuccessful(clientRepsonse);
     }
 
-    @Test
     public void testSubmit() throws Exception {
         TestContext context = newContext();
         ClientResponse response;
@@ -467,7 +503,6 @@ public class EntityManagerJerseyIT {
         context.assertSuccessful(response);
     }
 
-    @Test
     public void testGetEntityDefinition() throws Exception {
         TestContext context = newContext();
         ClientResponse response;
@@ -494,7 +529,6 @@ public class EntityManagerJerseyIT {
         }
     }
 
-    @Test
     public void testInvalidGetEntityDefinition() {
         TestContext context = newContext();
         ClientResponse clientRepsonse = context.service
@@ -504,7 +538,6 @@ public class EntityManagerJerseyIT {
         context.assertFailure(clientRepsonse);
     }
 
-    @Test
     public void testScheduleSuspendResume() throws Exception {
         TestContext context = newContext();
         context.scheduleProcess();
@@ -522,7 +555,6 @@ public class EntityManagerJerseyIT {
         context.assertSuccessful(clientRepsonse);
     }
 
-    @Test(enabled = true)
     public void testFeedSchedule() throws Exception {
         TestContext context = newContext();
         ClientResponse response;
@@ -543,7 +575,7 @@ public class EntityManagerJerseyIT {
         context.assertSuccessful(clientRepsonse);
     }
 
-    private List<Path> createTestData(TestContext context) throws Exception {
+    static List<Path> createTestData(TestContext context) throws Exception {
         List<Path> list = new ArrayList<Path>();
         FileSystem fs = context.cluster.getFileSystem();
         fs.mkdirs(new Path("/user/guest"));
@@ -593,7 +625,6 @@ public class EntityManagerJerseyIT {
         return list;
     }
 
-    @Test
     public void testDeleteDataSet() throws Exception {
         TestContext context = newContext();
         ClientResponse response;
@@ -612,7 +643,6 @@ public class EntityManagerJerseyIT {
         context.assertSuccessful(response);
     }
 
-    @Test
     public void testDelete() throws Exception {
         TestContext context = newContext();
         ClientResponse response;

