From: shwethags@apache.org
To: commits@falcon.incubator.apache.org
Date: Wed, 22 Jan 2014 10:23:33 -0000
Subject: [2/3] FALCON-123 Improve build speeds in falcon.
Contributed by Srikanth Sundarrajan X-Virus-Checked: Checked by ClamAV on apache.org http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/pom.xml ---------------------------------------------------------------------- diff --git a/pom.xml b/pom.xml index 36c5f7a..4e8db45 100644 --- a/pom.xml +++ b/pom.xml @@ -111,8 +111,9 @@ 0.11.0 0.9 6.1.26 + 1.9 file:///tmp/falcontemprepo - false + exhaustive @@ -134,6 +135,18 @@ ${hadoop.version} provided + + com.sun.jersey + jersey-server + + + com.sun.jersey + jersey-core + + + com.sun.jersey + jersey-json + org.eclipse.jdt core @@ -175,6 +188,18 @@ hadoop-client ${hadoop1.version} + + com.sun.jersey + jersey-server + + + com.sun.jersey + jersey-core + + + com.sun.jersey + jersey-json + org.glassfish javax.servlet @@ -197,6 +222,18 @@ hadoop-client ${hadoop.version} + + com.sun.jersey + jersey-server + + + com.sun.jersey + jersey-core + + + com.sun.jersey + jersey-json + org.glassfish javax.servlet @@ -217,6 +254,12 @@ org.apache.hadoop + hadoop-mapreduce-client-common + ${hadoop.version} + + + + org.apache.hadoop hadoop-hdfs ${hadoop.version} tests @@ -247,7 +290,7 @@ ${hadoop.version} - + @@ -266,6 +309,110 @@ + + + test-patch + + + + org.apache.rat + apache-rat-plugin + + true + true + true + true + true + + *.txt + .git/** + **/.idea/** + **/*.twiki + **/*.iml + **/target/** + **/activemq-data/** + **/build/** + **/*.patch + derby.log + **/logs/** + **/.classpath + **/.project + **/.settings/** + **/test-output/** + **/data.txt + **/maven-eclipse.xml + **/.externalToolBuilders/** + html5-ui/** + + + + + rat-check + + check + + verify + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + + org.apache.falcon + checkstyle + ${project.version} + + + + + checkstyle-check + + check + + verify + + true + true + falcon/checkstyle.xml + true + + + + + + + org.codehaus.mojo + findbugs-maven-plugin + + + true + ${basedir}/../checkstyle/src/main/resources/falcon/findbugs-exclude.xml + true + + + + findbugs-check + + check + + verify + + + + + + org.codehaus.mojo + javancss-maven-plugin + + + + + + + @@ -443,13 +590,13 @@ com.sun.jersey jersey-client - 1.8 + ${jersey.version} com.sun.jersey jersey-json - 1.8 + ${jersey.version} @@ -466,6 +613,12 @@ org.apache.falcon + falcon-hadoop-dependencies + ${project.version} + + + + org.apache.falcon falcon-metrics ${project.version} @@ -511,7 +664,7 @@ com.sun.jersey jersey-server - 1.8 + ${jersey.version} @@ -663,6 +816,12 @@ + javax.servlet.jsp + jsp-api + 2.0 + + + org.apache.hadoop hadoop-auth ${hadoop.version} @@ -845,12 +1004,12 @@ - .. - META-INF - - LICENSE.txt - NOTICE.txt - + .. 
+ META-INF + + LICENSE.txt + NOTICE.txt + @@ -868,12 +1027,6 @@ - org.codehaus.mojo - build-helper-maven-plugin - 1.5 - - - org.apache.maven.plugins maven-compiler-plugin 2.3.2 @@ -891,10 +1044,16 @@ 2.8.1 - + org.apache.maven.plugins maven-surefire-plugin - 2.14 + 2.16 + + + + org.apache.maven.plugins + maven-failsafe-plugin + 2.16 @@ -985,11 +1144,6 @@ - org.codehaus.mojo - build-helper-maven-plugin - - - org.apache.maven.plugins maven-compiler-plugin @@ -1004,7 +1158,7 @@ attach-sources - package + site jar-no-fork @@ -1018,19 +1172,13 @@ attach-javadocs - package + site javadoc jar - - ${skipCheck} - - + - - ${skipCheck} - @@ -1047,122 +1195,41 @@ org.apache.maven.plugins maven-surefire-plugin + 2.16 true always -Djava.awt.headless=true -Djava.security.krb5.realm= -Djava.security.krb5.kdc= + ${excluded.test.groups} org.apache.maven.plugins - maven-deploy-plugin - - - deploy - deploy - - deploy - - - - - - - org.apache.rat - apache-rat-plugin + maven-failsafe-plugin + 2.16 - true - true - true - true - true - - *.txt - .git/** - .idea/** - **/*.twiki - **/*.iml - **/target/** - **/activemq-data/** - **/build/** - **/*.patch - derby.log - **/logs/** - **/.classpath - **/.project - **/.settings/** - **/test-output/** - **/data.txt - **/maven-eclipse.xml - **/.externalToolBuilders/** - html5-ui/** - + true + always + -Djava.security.krb5.realm= -Djava.security.krb5.kdc= + -Dhadoop.tmp.dir=${project.build.directory}/tmp-hadoop-${user.name} + ${excluded.test.groups} - rat-check - - check - - verify - - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - - - org.apache.falcon - checkstyle - ${project.version} - - - - - checkstyle-check + integration-test - check + integration-test - verify - - true - true - falcon/checkstyle.xml - true - ${skipCheck} - - - - - - org.codehaus.mojo - findbugs-maven-plugin - - - true - ${basedir}/../checkstyle/src/main/resources/falcon/findbugs-exclude.xml - true - ${skipCheck} - - - findbugs-check + verify - check + verify - verify - - - org.codehaus.mojo - javancss-maven-plugin - org.apache.maven.plugins http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java ---------------------------------------------------------------------- diff --git a/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java b/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java index 794e585..61ddbdc 100644 --- a/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java +++ b/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java @@ -95,12 +95,12 @@ public class OozieProcessMapperTest extends AbstractTestBase { Cluster cluster = store.get(EntityType.CLUSTER, "corp"); ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(hdfsUrl); ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY).setEndpoint("thrift://localhost:49083"); - fs = new Path(hdfsUrl).getFileSystem(new Configuration()); + fs = new Path(hdfsUrl).getFileSystem(EmbeddedCluster.newConfiguration()); fs.create(new Path(ClusterHelper.getLocation(cluster, "working"), "libext/PROCESS/ext.jar")).close(); Process process = store.get(EntityType.PROCESS, "clicksummary"); Path wfpath = new Path(process.getWorkflow().getPath()); - assert new Path(hdfsUrl).getFileSystem(new Configuration()).mkdirs(wfpath); + assert new Path(hdfsUrl).getFileSystem(EmbeddedCluster.newConfiguration()).mkdirs(wfpath); } public void 
testDefCoordMap(Process process, COORDINATORAPP coord) throws Exception { http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/test-util/pom.xml ---------------------------------------------------------------------- diff --git a/test-util/pom.xml b/test-util/pom.xml index 6bd4129..4fe72f6 100644 --- a/test-util/pom.xml +++ b/test-util/pom.xml @@ -90,6 +90,11 @@ + org.apache.falcon + falcon-hadoop-dependencies + + + org.testng testng http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java ---------------------------------------------------------------------- diff --git a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java index c443e05..2b55407 100644 --- a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java +++ b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java @@ -18,23 +18,21 @@ package org.apache.falcon.cluster.util; -import java.io.File; -import java.io.IOException; -import java.security.PrivilegedExceptionAction; - import org.apache.falcon.entity.v0.cluster.Cluster; import org.apache.falcon.entity.v0.cluster.Interface; import org.apache.falcon.entity.v0.cluster.Interfaces; import org.apache.falcon.entity.v0.cluster.Interfacetype; import org.apache.falcon.entity.v0.cluster.Location; import org.apache.falcon.entity.v0.cluster.Locations; +import org.apache.falcon.hadoop.JailedFileSystem; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.log4j.Logger; +import java.io.IOException; +import java.security.PrivilegedExceptionAction; + /** * A utility class that doles out an embedded Hadoop cluster with DFS and/or MR. 
*/ @@ -45,16 +43,26 @@ public class EmbeddedCluster { protected EmbeddedCluster() { } - private Configuration conf = new Configuration(); - private MiniDFSCluster dfsCluster; + //private MiniDFSCluster dfsCluster; + protected Configuration conf = newConfiguration(); protected Cluster clusterEntity; public Configuration getConf() { return conf; } + public static Configuration newConfiguration() { + Configuration configuration = new Configuration(); + configuration.set("fs.jail.impl", JailedFileSystem.class.getName()); + return configuration; + } + public static EmbeddedCluster newCluster(final String name) throws Exception { - return createClusterAsUser(name); + return createClusterAsUser(name, false); + } + + public static EmbeddedCluster newCluster(final String name, boolean global) throws Exception { + return createClusterAsUser(name, global); } public static EmbeddedCluster newCluster(final String name, @@ -63,30 +71,16 @@ public class EmbeddedCluster { return hdfsUser.doAs(new PrivilegedExceptionAction() { @Override public EmbeddedCluster run() throws Exception { - return createClusterAsUser(name); + return createClusterAsUser(name, false); } }); } - private static EmbeddedCluster createClusterAsUser(String name) throws IOException { + private static EmbeddedCluster createClusterAsUser(String name, boolean global) throws IOException { EmbeddedCluster cluster = new EmbeddedCluster(); - File target = new File("webapp/target"); - if (!target.exists()) { - target = new File("target"); - System.setProperty("test.build.data", "target/" + name + "/data"); - } else { - System.setProperty("test.build.data", "webapp/target/" + name + "/data"); - } - cluster.conf.set("hadoop.tmp.dir", target.getAbsolutePath()); - cluster.conf.set("hadoop.log.dir", new File(target, "tmp").getAbsolutePath()); - cluster.conf.set("hadoop.proxyuser.oozie.groups", "*"); - cluster.conf.set("hadoop.proxyuser.oozie.hosts", "127.0.0.1"); - cluster.conf.set("hadoop.proxyuser.hdfs.groups", "*"); - cluster.conf.set("hadoop.proxyuser.hdfs.hosts", "127.0.0.1"); - cluster.conf.set("mapreduce.jobtracker.kerberos.principal", ""); - cluster.conf.set("dfs.namenode.kerberos.principal", ""); - cluster.dfsCluster = new MiniDFSCluster(cluster.conf, 1, true, null); - ProxyUsers.refreshSuperUserGroupsConfiguration(cluster.conf); + cluster.conf.set("jail.base", System.getProperty("hadoop.tmp.dir", + cluster.conf.get("hadoop.tmp.dir", "/tmp"))); + cluster.conf.set("fs.default.name", "jail://" + (global ? 
"global" : name) + ":00"); String hdfsUrl = cluster.conf.get("fs.default.name"); LOG.info("Cluster Namenode = " + hdfsUrl); cluster.buildClusterObject(name); @@ -97,7 +91,7 @@ public class EmbeddedCluster { return FileSystem.get(conf); } - private void buildClusterObject(String name) { + protected void buildClusterObject(String name) { clusterEntity = new Cluster(); clusterEntity.setName(name); clusterEntity.setColo("local"); @@ -105,17 +99,16 @@ public class EmbeddedCluster { Interfaces interfaces = new Interfaces(); interfaces.getInterfaces().add(newInterface(Interfacetype.WORKFLOW, - "http://localhost:11000/oozie", "0.1")); + "http://localhost:41000/oozie", "0.1")); String fsUrl = conf.get("fs.default.name"); interfaces.getInterfaces().add(newInterface(Interfacetype.READONLY, fsUrl, "0.1")); interfaces.getInterfaces().add(newInterface(Interfacetype.WRITE, fsUrl, "0.1")); interfaces.getInterfaces().add(newInterface(Interfacetype.EXECUTE, - conf.get("mapred.job.tracker"), "0.1")); + "localhost:41021", "0.1")); interfaces.getInterfaces().add( newInterface(Interfacetype.REGISTRY, "thrift://localhost:49083", "0.1")); interfaces.getInterfaces().add( newInterface(Interfacetype.MESSAGING, "vm://localhost", "0.1")); - clusterEntity.setInterfaces(interfaces); Location location = new Location(); @@ -125,7 +118,7 @@ public class EmbeddedCluster { locs.getLocations().add(location); location = new Location(); location.setName("working"); - location.setPath("/projects/falcon/working"); + location.setPath("/project/falcon/working"); locs.getLocations().add(location); clusterEntity.setLocations(locs); } @@ -140,7 +133,7 @@ public class EmbeddedCluster { } public void shutdown() { - dfsCluster.shutdown(); + //dfsCluster.shutdown(); } public Cluster getCluster() { http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/test-util/src/main/resources/core-site.xml ---------------------------------------------------------------------- diff --git a/test-util/src/main/resources/core-site.xml b/test-util/src/main/resources/core-site.xml new file mode 100644 index 0000000..da00644 --- /dev/null +++ b/test-util/src/main/resources/core-site.xml @@ -0,0 +1,31 @@ + + + + + + + fs.jail.impl + org.apache.falcon.hadoop.JailedFileSystem + + + + mapreduce.framework.name + unittests + + http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/pom.xml ---------------------------------------------------------------------- diff --git a/webapp/pom.xml b/webapp/pom.xml index 0c2d844..8c37409 100644 --- a/webapp/pom.xml +++ b/webapp/pom.xml @@ -143,6 +143,10 @@ org.apache.hadoop hadoop-core + + org.apache.hadoop + hadoop-test + @@ -218,19 +222,17 @@ uber-javadocs - package + site javadoc jar - ${skipCheck} false true org.apache.falcon:* - ${skipCheck} @@ -317,6 +319,23 @@ ${project.build.directory}/libext kahadb.jar + + org.apache.falcon + falcon-hadoop-dependencies + ${project.version} + true + ${project.build.directory}/falcon-webapp-${project.version}/WEB-INF/lib + falcon-hadoop-dependencies-${project.version}.jar + + + org.apache.pig + pig + 0.11.1 + jar + false + ${project.build.directory}/sharelib + pig.jar + @@ -346,31 +365,6 @@ - org.apache.maven.plugins - maven-failsafe-plugin - 2.15 - - true - always - -Djava.security.krb5.realm= -Djava.security.krb5.kdc= - - - - integration-test - - integration-test - - - - verify - - verify - - - - - - org.mortbay.jetty maven-jetty-plugin ${jetty.version} @@ -434,7 +428,6 @@ run - ${skipCheck} true @@ -447,6 +440,7 @@ + 
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/conf/oozie/conf/action-conf/hive.xml ---------------------------------------------------------------------- diff --git a/webapp/src/conf/oozie/conf/action-conf/hive.xml b/webapp/src/conf/oozie/conf/action-conf/hive.xml index e5aef7d..e734089 100644 --- a/webapp/src/conf/oozie/conf/action-conf/hive.xml +++ b/webapp/src/conf/oozie/conf/action-conf/hive.xml @@ -30,7 +30,7 @@ fs.default.name - hdfs://localhost:41020 + jail://global:00 http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/conf/oozie/conf/hadoop-conf/core-site.xml ---------------------------------------------------------------------- diff --git a/webapp/src/conf/oozie/conf/hadoop-conf/core-site.xml b/webapp/src/conf/oozie/conf/hadoop-conf/core-site.xml index 35078c7..bc8fa99 100644 --- a/webapp/src/conf/oozie/conf/hadoop-conf/core-site.xml +++ b/webapp/src/conf/oozie/conf/hadoop-conf/core-site.xml @@ -36,7 +36,7 @@ mapreduce.framework.name - yarn + unittests http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/conf/oozie/conf/oozie-site.xml ---------------------------------------------------------------------- diff --git a/webapp/src/conf/oozie/conf/oozie-site.xml b/webapp/src/conf/oozie/conf/oozie-site.xml index 48408ba..e5f404a 100644 --- a/webapp/src/conf/oozie/conf/oozie-site.xml +++ b/webapp/src/conf/oozie/conf/oozie-site.xml @@ -473,6 +473,15 @@ + + oozie.service.HadoopAccessorService.supported.filesystems + hdfs,hftp,webhdfs,jail + + Enlist the different filesystems supported for federation. If wildcard "*" is specified, + then ALL file schemes will be allowed. + + + oozie.service.ProxyUserService.proxyuser.${user.name}.hosts http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java b/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java index c4d6671..9909140 100644 --- a/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java +++ b/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java @@ -48,7 +48,7 @@ public class HiveCatalogServiceIT { private static final String DATABASE_NAME = "falcon_db"; private static final String TABLE_NAME = "falcon_table"; private static final String EXTERNAL_TABLE_NAME = "falcon_external"; - private static final String EXTERNAL_TABLE_LOCATION = "hdfs://localhost:41020/falcon/staging/falcon_external"; + private static final String EXTERNAL_TABLE_LOCATION = "jail://global:00/falcon/staging/falcon_external"; private HiveCatalogService hiveCatalogService; private HCatClient client; http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java b/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java index 4730728..0767a76 100644 --- a/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java +++ b/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java @@ -35,6 +35,7 @@ import java.util.Map; * * todo: Refactor both the classes to move this methods to helper; */ +@Test(groups = {"exhaustive"}) public class FalconCLIIT { private InMemoryWriter stream = new 
InMemoryWriter(System.out); @@ -47,7 +48,6 @@ public class FalconCLIIT { TestContext.prepare(); } - @Test(enabled = TEST_ENABLED) public void testSubmitEntityValidCommands() throws Exception { FalconCLI.OUT.set(stream); @@ -60,7 +60,7 @@ public class FalconCLIIT { Assert.assertEquals( 0, executeWithURL("entity -submit -type cluster -file " + filePath)); - context.setCluster(filePath); + context.setCluster(overlay.get("cluster")); Assert.assertEquals(stream.buffer.toString().trim(), "default/Submit successful (cluster) " + context.getClusterName()); @@ -90,17 +90,14 @@ public class FalconCLIIT { + overlay.get("processName")); } - @Test(enabled = TEST_ENABLED) public void testListWithEmptyConfigStore() throws Exception { Assert.assertEquals( 0, executeWithURL("entity -list -type process ")); } - @Test(enabled = TEST_ENABLED) public void testSubmitAndScheduleEntityValidCommands() throws Exception { - Thread.sleep(5000); String filePath; TestContext context = new TestContext(); Map overlay = context.getUniqueOverlay(); @@ -109,7 +106,7 @@ public class FalconCLIIT { Assert.assertEquals(-1, executeWithURL("entity -submitAndSchedule -type cluster -file " + filePath)); - context.setCluster(filePath); + context.setCluster(overlay.get("cluster")); filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(0, @@ -131,11 +128,8 @@ public class FalconCLIIT { Assert.assertEquals(0, executeWithURL("entity -submitAndSchedule -type process -file " + filePath)); - - Thread.sleep(5000); } - @Test(enabled = TEST_ENABLED) public void testValidateValidCommands() throws Exception { String filePath; @@ -146,11 +140,11 @@ public class FalconCLIIT { Assert.assertEquals(0, executeWithURL("entity -validate -type cluster -file " + filePath)); - context.setCluster(filePath); + context.setCluster(overlay.get("cluster")); Assert.assertEquals( 0, executeWithURL("entity -submit -type cluster -file " + filePath)); - context.setCluster(filePath); + context.setCluster(overlay.get("cluster")); filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(0, @@ -168,13 +162,12 @@ public class FalconCLIIT { Assert.assertEquals(0, executeWithURL("entity -validate -type process -file " + filePath)); + Assert.assertEquals( 0, executeWithURL("entity -submit -type process -file " + filePath)); - } - @Test(enabled = TEST_ENABLED) public void testDefinitionEntityValidCommands() throws Exception { TestContext context = new TestContext(); Map overlay = context.getUniqueOverlay(); @@ -200,7 +193,6 @@ public class FalconCLIIT { } - @Test(enabled = TEST_ENABLED) public void testScheduleEntityValidCommands() throws Exception { TestContext context = new TestContext(); @@ -222,10 +214,8 @@ public class FalconCLIIT { } - @Test(enabled = TEST_ENABLED) public void testSuspendResumeStatusEntityValidCommands() throws Exception { - Thread.sleep(5000); TestContext context = new TestContext(); Map overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); @@ -291,15 +281,12 @@ public class FalconCLIIT { executeWithURL("entity -status -type process -name " + overlay.get("processName"))); - Thread.sleep(5000); } - @Test(enabled = TEST_ENABLED) public void testSubCommandPresence() throws Exception { Assert.assertEquals(-1, executeWithURL("entity -type cluster ")); } - @Test(enabled = TEST_ENABLED) public void testDeleteEntityValidCommands() throws Exception { TestContext context = new TestContext(); @@ -338,7 +325,6 @@ public class FalconCLIIT { } - 
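
Most of the FalconCLIIT edits in these hunks delete unconditional Thread.sleep(5000) pauses; where the tests genuinely must wait, the suite leans on condition-based helpers such as context.waitForProcessWFtoStart(). For illustration only, a generic bounded poll in that style (not part of this patch, names hypothetical) might look like:

import java.util.concurrent.Callable;

/** Sketch of a condition-based wait that replaces fixed sleeps in tests. */
public final class WaitUtil {

    private WaitUtil() {
    }

    /**
     * Polls the condition every 500 ms until it holds or the timeout expires.
     * Returns true if the condition was met within the deadline.
     */
    public static boolean waitFor(Callable<Boolean> condition, long timeoutMillis)
            throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
            if (Boolean.TRUE.equals(condition.call())) {
                return true;
            }
            Thread.sleep(500); // short poll interval instead of a blind 5 s pause
        }
        return false;
    }
}
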
@Test(enabled = TEST_ENABLED) public void testInvalidCLIEntitycommands() throws Exception { TestContext context = new TestContext(); @@ -351,7 +337,6 @@ public class FalconCLIIT { executeWithURL("entity -schedule -type feed -file " + "name")); } - @Test(enabled = TEST_ENABLED) public void testInstanceRunningAndStatusCommands() throws Exception { TestContext context = new TestContext(); Map overlay = context.getUniqueOverlay(); @@ -381,9 +366,7 @@ public class FalconCLIIT { + " -start " + START_INSTANCE)); } - @Test(enabled = TEST_ENABLED) public void testInstanceSuspendAndResume() throws Exception { - Thread.sleep(5000); TestContext context = new TestContext(); Map overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); @@ -402,12 +385,10 @@ public class FalconCLIIT { executeWithURL("instance -resume -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE)); - Thread.sleep(5000); } private static final String START_INSTANCE = "2012-04-20T00:00Z"; - @Test(enabled = TEST_ENABLED) public void testInstanceKillAndRerun() throws Exception { TestContext context = new TestContext(); Map overlay = context.getUniqueOverlay(); @@ -432,7 +413,6 @@ public class FalconCLIIT { + createTempJobPropertiesFile())); } - @Test(enabled = TEST_ENABLED) public void testContinue() throws Exception { TestContext context = new TestContext(); Map overlay = context.getUniqueOverlay(); @@ -456,7 +436,6 @@ public class FalconCLIIT { + " -start " + START_INSTANCE)); } - @Test(enabled = TEST_ENABLED) public void testInvalidCLIInstanceCommands() throws Exception { // no command Assert.assertEquals(-1, executeWithURL(" -kill -type process -name " @@ -475,7 +454,6 @@ public class FalconCLIIT { } - @Test(enabled = TEST_ENABLED) public void testFalconURL() throws Exception { Assert.assertEquals(-1, new FalconCLI() .run(("instance -status -type process -name " + "processName" @@ -491,7 +469,6 @@ public class FalconCLIIT { } - @Test(enabled = TEST_ENABLED) public void testClientProperties() throws Exception { TestContext context = new TestContext(); Map overlay = context.getUniqueOverlay(); @@ -510,7 +487,6 @@ public class FalconCLIIT { } - @Test(enabled = TEST_ENABLED) public void testGetVersion() throws Exception { Assert.assertEquals(0, new FalconCLI().run("admin -version".split("\\s"))); @@ -519,7 +495,6 @@ public class FalconCLIIT { new FalconCLI().run("admin -stack".split("\\s"))); } - @Test(enabled = TEST_ENABLED) public void testInstanceGetLogs() throws Exception { TestContext context = new TestContext(); Map overlay = context.getUniqueOverlay(); @@ -536,7 +511,6 @@ public class FalconCLIIT { } - private int executeWithURL(String command) throws Exception { return new FalconCLI() .run((command + " -url " + TestContext.BASE_URL).split("\\s+")); @@ -554,14 +528,14 @@ public class FalconCLIIT { return tmpFile.getAbsolutePath(); } - public void submitTestFiles(TestContext context, Map overlay) throws Exception { + private void submitTestFiles(TestContext context, Map overlay) throws Exception { String filePath = context.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay); Assert.assertEquals( 0, executeWithURL("entity -submit -type cluster -file " + filePath)); - context.setCluster(filePath); + context.setCluster(overlay.get("cluster")); filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(0, 
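
The grouping convention introduced here is what the new excluded.test.groups property in the parent pom keys off: long-running suites such as FalconCLIIT (and EntityManagerJerseyIT further down) are tagged at class level with the TestNG group "exhaustive", the parent pom wires ${excluded.test.groups} into the surefire and failsafe configurations with the property defaulting to "exhaustive", and the new FalconCLISmokeIT below carries no group, so it runs in every build. A minimal illustration of the mechanism (class name hypothetical):

import org.testng.Assert;
import org.testng.annotations.Test;

// Every public method in this class inherits the class-level "exhaustive"
// group, so the default build (excluded.test.groups=exhaustive in pom.xml)
// skips the whole class. Overriding the property on the command line, e.g.
// "mvn verify -Dexcluded.test.groups=", brings it back into the run.
@Test(groups = {"exhaustive"})
public class ExampleExhaustiveIT {

    public void exerciseEveryCliCommand() throws Exception {
        Assert.assertTrue(true); // stands in for a long end-to-end scenario
    }
}

This is also why the per-method @Test(enabled = TEST_ENABLED) annotations could simply be dropped in the hunks above: the class-level annotation already marks every public method as a test.
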
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java b/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java new file mode 100644 index 0000000..55f240f --- /dev/null +++ b/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java @@ -0,0 +1,100 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.falcon.cli; + +import org.apache.falcon.resource.TestContext; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +import java.util.Map; + +/** + * Smoke Test for Falcon CLI. + */ +public class FalconCLISmokeIT { + + private static final String START_INSTANCE = "2012-04-20T00:00Z"; + + @BeforeClass + public void prepare() throws Exception { + TestContext.prepare(); + } + + @Test + public void testSubmitAndScheduleEntityValidCommands() throws Exception { + + String filePath; + TestContext context = new TestContext(); + Map overlay = context.getUniqueOverlay(); + + filePath = context.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay); + Assert.assertEquals(-1, + executeWithURL("entity -submitAndSchedule -type cluster -file " + + filePath)); + context.setCluster(overlay.get("cluster")); + + filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); + Assert.assertEquals(0, + executeWithURL("entity -submitAndSchedule -type feed -file " + + filePath)); + filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay); + Assert.assertEquals(0, + executeWithURL("entity -submitAndSchedule -type feed -file " + + filePath)); + filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); + Assert.assertEquals(0, + executeWithURL("entity -submit -type feed -file " + filePath)); + + filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay); + Assert.assertEquals(0, + executeWithURL("entity -submit -type feed -file " + filePath)); + + filePath = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); + Assert.assertEquals(0, + executeWithURL("entity -validate -type process -file " + + filePath)); + + filePath = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); + Assert.assertEquals(0, + executeWithURL("entity -submitAndSchedule -type process -file " + + filePath)); + + context.waitForProcessWFtoStart(); + + Assert.assertEquals(0, + executeWithURL("entity -definition -type cluster -name " + + overlay.get("cluster"))); + + Assert.assertEquals(0, + 
executeWithURL("instance -status -type feed -name " + + overlay.get("outputFeedName") + + " -start " + START_INSTANCE)); + + Assert.assertEquals(0, + executeWithURL("instance -running -type process -name " + + overlay.get("processName"))); + + } + + private int executeWithURL(String command) throws Exception { + return new FalconCLI() + .run((command + " -url " + TestContext.BASE_URL).split("\\s+")); + } +} http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java b/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java index 9b672f4..37226e2 100644 --- a/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java +++ b/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java @@ -71,7 +71,7 @@ public class TableStorageFeedEvictorIT { private static final String DATABASE_NAME = "falcon_db"; private static final String TABLE_NAME = "clicks"; private static final String EXTERNAL_TABLE_NAME = "clicks_external"; - private static final String STORAGE_URL = "hdfs://localhost:41020"; + private static final String STORAGE_URL = "jail://global:00"; private static final String EXTERNAL_TABLE_LOCATION = STORAGE_URL + "/falcon/staging/clicks_external/"; private final InMemoryWriter stream = new InMemoryWriter(System.out); http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/logging/LogMoverIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/logging/LogMoverIT.java b/webapp/src/test/java/org/apache/falcon/logging/LogMoverIT.java deleted file mode 100644 index e3cd914..0000000 --- a/webapp/src/test/java/org/apache/falcon/logging/LogMoverIT.java +++ /dev/null @@ -1,171 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.falcon.logging; - -import org.apache.falcon.FalconException; -import org.apache.falcon.cluster.util.EmbeddedCluster; -import org.apache.falcon.cluster.util.StandAloneCluster; -import org.apache.falcon.entity.ClusterHelper; -import org.apache.falcon.entity.EntityUtil; -import org.apache.falcon.entity.parser.ProcessEntityParser; -import org.apache.falcon.entity.store.ConfigurationStore; -import org.apache.falcon.entity.v0.EntityType; -import org.apache.falcon.entity.v0.process.Process; -import org.apache.falcon.resource.TestContext; -import org.apache.falcon.security.CurrentUser; -import org.apache.falcon.util.StartupProperties; -import org.apache.falcon.workflow.engine.OozieWorkflowEngine; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.oozie.client.OozieClient; -import org.apache.oozie.client.WorkflowJob; -import org.testng.Assert; -import org.testng.annotations.AfterClass; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import java.io.File; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Test for LogMover. - * Requires Oozie to be running on localhost. - */ -@Test -public class LogMoverIT { - - private static final ConfigurationStore STORE = ConfigurationStore.get(); - private static final String PROCESS_NAME = "testProcess" + System.currentTimeMillis(); - private static EmbeddedCluster testCluster = null; - private static Process testProcess = null; - private static FileSystem fs; - - @BeforeClass - public void setup() throws Exception { - Map overlay = new HashMap(); - overlay.put("cluster", "testCluster"); - TestContext context = new TestContext(); - String file = context. - overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay); - testCluster = StandAloneCluster.newCluster(file); - STORE.publish(EntityType.CLUSTER, testCluster.getCluster()); -/* - new File("target/libs").mkdirs(); - StartupProperties.get().setProperty("system.lib.location", "target/libs"); - SharedLibraryHostingService listener = new SharedLibraryHostingService(); - listener.onAdd(testCluster.getCluster()); -*/ - fs = FileSystem.get(testCluster.getConf()); - fs.mkdirs(new Path("/workflow/lib")); - - fs.copyFromLocalFile( - new Path(LogMoverIT.class.getResource( - "/org/apache/falcon/logging/workflow.xml").toURI()), - new Path("/workflow")); - fs.copyFromLocalFile( - new Path(LogMoverIT.class.getResource( - "/org/apache/falcon/logging/java-test.jar").toURI()), - new Path("/workflow/lib")); - - testProcess = new ProcessEntityParser().parse(LogMoverIT.class - .getResourceAsStream("/org/apache/falcon/logging/process.xml")); - testProcess.setName(PROCESS_NAME); - } - - @AfterClass - public void tearDown() { - testCluster.shutdown(); - } - - @Test (enabled = false) - public void testLogMover() throws Exception { - CurrentUser.authenticate(System.getProperty("user.name")); - OozieWorkflowEngine engine = new OozieWorkflowEngine(); - String path = StartupProperties.get().getProperty("system.lib.location"); - if (!new File("target/libs").exists()) { - Assert.assertTrue(new File("target/libs").mkdirs()); - } - StartupProperties.get().setProperty("system.lib.location", "target/libs"); - engine.schedule(testProcess); - StartupProperties.get().setProperty("system.lib.location", path); - - OozieClient client = new OozieClient( - ClusterHelper.getOozieUrl(testCluster.getCluster())); - List jobs; - while (true) { - jobs = client.getJobsInfo(OozieClient.FILTER_NAME + "=" - + 
"FALCON_PROCESS_DEFAULT_" + PROCESS_NAME); - if (jobs.size() > 0) { - break; - } else { - Thread.sleep(1000); - } - } - - WorkflowJob job = jobs.get(0); - while (true) { - if (!(job.getStatus() == WorkflowJob.Status.RUNNING || job - .getStatus() == WorkflowJob.Status.PREP)) { - break; - } else { - Thread.sleep(1000); - job = client.getJobInfo(job.getId()); - } - } - - Path oozieLogPath = new Path(getLogPath(), - "job-2010-01-01-01-00/000/oozie.log"); - Assert.assertTrue(fs.exists(oozieLogPath)); - - testLogMoverWithNextRunId(job.getId()); - testLogMoverWithNextRunIdWithEngine(job.getId()); - } - - private Path getLogPath() throws FalconException { - Path stagingPath = EntityUtil.getLogPath(testCluster.getCluster(), testProcess); - return new Path(ClusterHelper.getStorageUrl(testCluster - .getCluster()), stagingPath); - } - - private void testLogMoverWithNextRunId(String jobId) throws Exception { - LogMover.main(new String[]{"-workflowEngineUrl", - ClusterHelper.getOozieUrl(testCluster.getCluster()), - "-subflowId", jobId + "@user-workflow", "-runId", "1", - "-logDir", getLogPath().toString() + "/job-2010-01-01-01-00", - "-status", "SUCCEEDED", "-entityType", "process", }); - - Path oozieLogPath = new Path(getLogPath(), - "job-2010-01-01-01-00/001/oozie.log"); - Assert.assertTrue(fs.exists(oozieLogPath)); - } - - private void testLogMoverWithNextRunIdWithEngine(String jobId) throws Exception { - LogMover.main(new String[]{"-workflowEngineUrl", - ClusterHelper.getOozieUrl(testCluster.getCluster()), - "-subflowId", jobId + "@user-workflow", "-runId", "1", - "-logDir", getLogPath().toString() + "/job-2010-01-01-01-00", - "-status", "SUCCEEDED", "-entityType", "process", - "-userWorkflowEngine", "oozie", }); - - Path oozieLogPath = new Path(getLogPath(), - "job-2010-01-01-01-00/001/oozie.log"); - Assert.assertTrue(fs.exists(oozieLogPath)); - } -} http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/logging/LogProviderIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/logging/LogProviderIT.java b/webapp/src/test/java/org/apache/falcon/logging/LogProviderIT.java deleted file mode 100644 index 4c3ce97..0000000 --- a/webapp/src/test/java/org/apache/falcon/logging/LogProviderIT.java +++ /dev/null @@ -1,161 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.falcon.logging; - -import org.apache.falcon.FalconException; -import org.apache.falcon.cluster.util.EmbeddedCluster; -import org.apache.falcon.cluster.util.StandAloneCluster; -import org.apache.falcon.entity.parser.ProcessEntityParser; -import org.apache.falcon.entity.store.ConfigurationStore; -import org.apache.falcon.entity.v0.EntityType; -import org.apache.falcon.entity.v0.process.Process; -import org.apache.falcon.resource.InstancesResult.Instance; -import org.apache.falcon.resource.InstancesResult.InstanceAction; -import org.apache.falcon.resource.InstancesResult.WorkflowStatus; -import org.apache.falcon.resource.TestContext; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; - -/** - * Test for LogProvider. - */ -public class LogProviderIT { - - private static final ConfigurationStore STORE = ConfigurationStore.get(); - private static EmbeddedCluster testCluster = null; - private static Process testProcess = null; - private static final String PROCESS_NAME = "testProcess"; - private static FileSystem fs; - private Instance instance; - - @BeforeClass - public void setup() throws Exception { - Map overlay = new HashMap(); - overlay.put("cluster", "logProviderTest"); - overlay.put("colo", "gs"); - TestContext context = new TestContext(); - String file = context. - overlayParametersOverTemplate(context.CLUSTER_TEMPLATE, overlay); - testCluster = StandAloneCluster.newCluster(file); - cleanupStore(); - STORE.publish(EntityType.CLUSTER, testCluster.getCluster()); - fs = FileSystem.get(testCluster.getConf()); - Path instanceLogPath = new Path( - "/projects/falcon/staging/falcon/workflows/process/" + PROCESS_NAME - + "/logs/job-2010-01-01-01-00/000"); - fs.mkdirs(instanceLogPath); - fs.createNewFile(new Path(instanceLogPath, "oozie.log")); - fs.createNewFile(new Path(instanceLogPath, "pigAction_SUCCEEDED.log")); - fs.createNewFile(new Path(instanceLogPath, "mr_Action_FAILED.log")); - fs.createNewFile(new Path(instanceLogPath, "mr_Action2_SUCCEEDED.log")); - - fs.mkdirs(new Path("/projects/falcon/staging/falcon/workflows/process/" - + PROCESS_NAME + "/logs/job-2010-01-01-01-00/001")); - fs.mkdirs(new Path("/projects/falcon/staging/falcon/workflows/process/" - + PROCESS_NAME + "/logs/job-2010-01-01-01-00/002")); - Path run3 = new Path("/projects/falcon/staging/falcon/workflows/process/" - + PROCESS_NAME + "/logs/job-2010-01-01-01-00/003"); - fs.mkdirs(run3); - fs.createNewFile(new Path(run3, "oozie.log")); - - testProcess = new ProcessEntityParser().parse(LogProviderIT.class - .getResourceAsStream("/org/apache/falcon/logging/process.xml")); - testProcess.setName(PROCESS_NAME); - STORE.publish(EntityType.PROCESS, testProcess); - } - - @BeforeMethod - public void setInstance() { - instance = new Instance(); - instance.status = WorkflowStatus.SUCCEEDED; - instance.instance = "2010-01-01T01:00Z"; - instance.cluster = "logProviderTest"; - instance.logFile = "http://localhost:41000/oozie/wflog"; - } - - private void cleanupStore() throws FalconException { - for (EntityType type : EntityType.values()) { - Collection entities = STORE.getEntities(type); - for (String entity : entities) { - STORE.remove(type, entity); - } - } - } - - @Test - public void testLogProviderWithValidRunId() throws FalconException { - LogProvider 
provider = new LogProvider(); - Instance instanceWithLog = provider.populateLogUrls(testProcess, - instance, "0"); - Assert.assertEquals( - instance.logFile, - "http://localhost:50070/data/projects/falcon/staging/falcon/workflows/process/testProcess/logs/" - + "job-2010-01-01-01-00/000/oozie.log"); - - InstanceAction action = instanceWithLog.actions[0]; - Assert.assertEquals(action.action, "mr_Action2"); - Assert.assertEquals(action.status, "SUCCEEDED"); - Assert.assertEquals( - action.logFile, - "http://localhost:50070/data/projects/falcon/staging/falcon/workflows/process/testProcess/logs/" - + "job-2010-01-01-01-00/000/mr_Action2_SUCCEEDED.log"); - - action = instanceWithLog.actions[1]; - Assert.assertEquals(action.action, "mr_Action"); - Assert.assertEquals(action.status, "FAILED"); - Assert.assertEquals( - action.logFile, - "http://localhost:50070/data/projects/falcon/staging/falcon/workflows/process/testProcess/logs/" - + "job-2010-01-01-01-00/000/mr_Action_FAILED.log"); - } - - @Test - public void testLogProviderWithInvalidRunId() throws FalconException { - LogProvider provider = new LogProvider(); - provider.populateLogUrls(testProcess, instance, "x"); - Assert.assertEquals(instance.logFile, - "http://localhost:41000/oozie/wflog"); - } - - @Test - public void testLogProviderWithUnavailableRunId() throws FalconException { - LogProvider provider = new LogProvider(); - instance.logFile = null; - provider.populateLogUrls(testProcess, instance, "7"); - Assert.assertEquals(instance.logFile, "-"); - } - - @Test - public void testLogProviderWithEmptyRunId() throws FalconException { - LogProvider provider = new LogProvider(); - instance.logFile = null; - provider.populateLogUrls(testProcess, instance, null); - Assert.assertEquals( - instance.logFile, - "http://localhost:50070/data/projects/falcon/staging/falcon/workflows/process/testProcess/logs/" - + "job-2010-01-01-01-00/003/oozie.log"); - } -} http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java b/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java index 58ae4ba..1f4e9e8 100644 --- a/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java +++ b/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java @@ -58,8 +58,8 @@ public class PigProcessIT { overlay = context.getUniqueOverlay(); - String filePath = context.overlayParametersOverTemplate(CLUSTER_TEMPLATE, overlay); - context.setCluster(filePath); + String filePath = context.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay); + context.setCluster(overlay.get("cluster")); final Cluster cluster = context.getCluster().getCluster(); final String storageUrl = ClusterHelper.getStorageUrl(cluster); @@ -88,7 +88,7 @@ public class PigProcessIT { public void testSubmitAndSchedulePigProcess() throws Exception { overlay.put("cluster", "primary-cluster"); - String filePath = context.overlayParametersOverTemplate(CLUSTER_TEMPLATE, overlay); + String filePath = context.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay); Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath)); // context.setCluster(filePath); http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/d1642bea/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java 
---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java b/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java index aa059bd..1ceaabf 100644 --- a/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java +++ b/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java @@ -17,10 +17,15 @@ */ package org.apache.falcon.resource; -import java.io.*; import java.text.DateFormat; import java.text.SimpleDateFormat; -import java.util.*; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TimeZone; import java.util.regex.Pattern; import javax.servlet.ServletInputStream; @@ -28,11 +33,16 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.xml.bind.JAXBException; +import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.WebResource; import org.apache.falcon.entity.v0.Entity; import org.apache.falcon.entity.v0.EntityType; import org.apache.falcon.entity.v0.SchemaHelper; -import org.apache.falcon.entity.v0.feed.*; +import org.apache.falcon.entity.v0.feed.Cluster; +import org.apache.falcon.entity.v0.feed.Feed; +import org.apache.falcon.entity.v0.feed.Location; +import org.apache.falcon.entity.v0.feed.LocationType; +import org.apache.falcon.entity.v0.feed.Locations; import org.apache.falcon.entity.v0.process.Input; import org.apache.falcon.entity.v0.process.Process; import org.apache.falcon.entity.v0.process.Property; @@ -44,7 +54,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.Path; import org.apache.oozie.client.BundleJob; -import org.apache.oozie.client.CoordinatorJob; import org.apache.oozie.client.Job; import org.apache.oozie.client.Job.Status; import org.apache.oozie.client.OozieClient; @@ -53,13 +62,17 @@ import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; -import com.sun.jersey.api.client.ClientResponse; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.StringReader; /** * Test class for Entity REST APIs. * * Tests should be enabled only in local environments as they need running instance of the web server. 
*/ +@Test(groups = {"exhaustive"}) public class EntityManagerJerseyIT { private static final int ONE_HR = 2 * 24 * 60 * 60 * 1000; @@ -69,7 +82,7 @@ public class EntityManagerJerseyIT { TestContext.prepare(); } - private void assertLibs(FileSystem fs, Path path) throws IOException { + static void assertLibs(FileSystem fs, Path path) throws IOException { FileStatus[] libs = fs.listStatus(path); Assert.assertNotNull(libs); Assert.assertEquals(libs.length, 1); @@ -102,7 +115,7 @@ public class EntityManagerJerseyIT { String tmpFileName = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Feed feed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(new File(tmpFileName)); Location location = new Location(); - location.setPath("fsext://localhost:41020/falcon/test/input/${YEAR}/${MONTH}/${DAY}/${HOUR}"); + location.setPath("fsext://global:00/falcon/test/input/${YEAR}/${MONTH}/${DAY}/${HOUR}"); location.setType(LocationType.DATA); Cluster cluster = feed.getClusters().getClusters().get(0); cluster.setLocations(new Locations()); @@ -138,7 +151,8 @@ public class EntityManagerJerseyIT { Map overlay = context.getUniqueOverlay(); String tmpFileName = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); Process process = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName)); - updateEndtime(process); + Validity processValidity = process.getClusters().getClusters().get(0).getValidity(); + processValidity.setEnd(new Date(new Date().getTime() + 2 * 24 * 60 * 60 * 1000)); File tmpFile = context.getTempFile(); EntityType.PROCESS.getMarshaller().marshal(process, tmpFile); context.scheduleProcess(tmpFile.getAbsolutePath(), overlay); @@ -148,11 +162,22 @@ public class EntityManagerJerseyIT { Assert.assertEquals(bundles.size(), 1); Assert.assertEquals(bundles.get(0).getUser(), TestContext.REMOTE_USER); - Feed feed = (Feed) getDefinition(context, EntityType.FEED, context.outputFeedName); + ClientResponse response = context.service.path("api/entities/definition/feed/" + + context.outputFeedName).header( + "Remote-User", TestContext.REMOTE_USER) + .accept(MediaType.TEXT_XML).get(ClientResponse.class); + Feed feed = (Feed) EntityType.FEED.getUnmarshaller() + .unmarshal(new StringReader(response.getEntity(String.class))); //change output feed path and update feed as another user feed.getLocations().getLocations().get(0).setPath("/falcon/test/output2/${YEAR}/${MONTH}/${DAY}"); - update(context, feed); + tmpFile = context.getTempFile(); + EntityType.FEED.getMarshaller().marshal(feed, tmpFile); + response = context.service.path("api/entities/update/feed/" + + context.outputFeedName).header("Remote-User", + TestContext.REMOTE_USER).accept(MediaType.TEXT_XML) + .post(ClientResponse.class, context.getServletInputStream(tmpFile.getAbsolutePath())); + context.assertSuccessful(response); bundles = context.getBundles(); Assert.assertEquals(bundles.size(), 2); @@ -176,7 +201,6 @@ public class EntityManagerJerseyIT { contexts.remove(); } - @Test(enabled = false) public void testOptionalInput() throws Exception { TestContext context = newContext(); Map overlay = context.getUniqueOverlay(); @@ -200,7 +224,6 @@ public class EntityManagerJerseyIT { context.waitForWorkflowStart(context.processName); } - @Test public void testProcessDeleteAndSchedule() throws Exception { //Submit process with invalid property so that coord submit fails and bundle goes to failed state TestContext context = newContext(); @@ -214,7 +237,7 @@ public class EntityManagerJerseyIT { 
File tmpFile = context.getTempFile(); EntityType.PROCESS.getMarshaller().marshal(process, tmpFile); context.scheduleProcess(tmpFile.getAbsolutePath(), overlay); - context.waitForBundleStart(Status.FAILED); + context.waitForBundleStart(Status.FAILED, Status.KILLED); //Delete and re-submit the process with correct workflow ClientResponse clientRepsonse = context.service.path("api/entities/delete/process/" @@ -267,12 +290,18 @@ public class EntityManagerJerseyIT { OozieClient ozClient = context.getOozieClient(); String coordId = ozClient.getBundleJobInfo(bundles.get(0).getId()).getCoordinators().get(0).getId(); - Process process = (Process) getDefinition(context, EntityType.PROCESS, context.processName); + ClientResponse response = context.service.path("api/entities/definition/process/" + + context.processName).header( + "Remote-User", TestContext.REMOTE_USER) + .accept(MediaType.TEXT_XML).get(ClientResponse.class); + Process process = (Process) EntityType.PROCESS.getUnmarshaller() + .unmarshal(new StringReader(response.getEntity(String.class))); + String feed3 = "f3" + System.currentTimeMillis(); Map overlay = new HashMap(); overlay.put("inputFeedName", feed3); overlay.put("cluster", context.clusterName); - ClientResponse response = context.submitToFalcon(TestContext.FEED_TEMPLATE1, overlay, EntityType.FEED); + response = context.submitToFalcon(TestContext.FEED_TEMPLATE1, overlay, EntityType.FEED); context.assertSuccessful(response); Input input = new Input(); @@ -282,34 +311,48 @@ public class EntityManagerJerseyIT { input.setEnd("today(20,20)"); process.getInputs().getInputs().add(input); - Date endTime = getEndTime(); - updateEndtime(process); - update(context, process, endTime); + Validity processValidity = process.getClusters().getClusters().get(0).getValidity(); + processValidity.setEnd(new Date(new Date().getTime() + 2 * 24 * 60 * 60 * 1000)); + File tmpFile = context.getTempFile(); + EntityType.PROCESS.getMarshaller().marshal(process, tmpFile); + response = context.service.path("api/entities/update/process/" + + context.processName).header("Remote-User", + TestContext.REMOTE_USER).accept(MediaType.TEXT_XML) + .post(ClientResponse.class, context.getServletInputStream(tmpFile.getAbsolutePath())); + context.assertSuccessful(response); //Assert that update creates new bundle and old coord is running bundles = context.getBundles(); Assert.assertEquals(bundles.size(), 2); - CoordinatorJob coord = ozClient.getCoordJobInfo(coordId); - Assert.assertEquals(coord.getStatus(), Status.RUNNING); - Assert.assertEquals(coord.getEndTime(), endTime); + Assert.assertEquals(ozClient.getCoordJobInfo(coordId).getStatus(), Status.RUNNING); } - @Test public void testProcessEndtimeUpdate() throws Exception { TestContext context = newContext(); context.scheduleProcess(); context.waitForBundleStart(Job.Status.RUNNING); - Process process = (Process) getDefinition(context, EntityType.PROCESS, context.processName); - updateEndtime(process); - update(context, process); + ClientResponse response = context.service.path("api/entities/definition/process/" + + context.processName).header( + "Remote-User", TestContext.REMOTE_USER) + .accept(MediaType.TEXT_XML).get(ClientResponse.class); + Process process = (Process) EntityType.PROCESS.getUnmarshaller() + .unmarshal(new StringReader(response.getEntity(String.class))); + + Validity processValidity = process.getClusters().getClusters().get(0).getValidity(); + processValidity.setEnd(new Date(new Date().getTime() + 60 * 60 * 1000)); + File tmpFile = context.getTempFile(); + 
EntityType.PROCESS.getMarshaller().marshal(process, tmpFile); + response = context.service.path("api/entities/update/process/" + context.processName).header("Remote-User", + TestContext.REMOTE_USER).accept(MediaType.TEXT_XML) + .post(ClientResponse.class, context.getServletInputStream(tmpFile.getAbsolutePath())); + context.assertSuccessful(response); //Assert that update does not create new bundle List bundles = context.getBundles(); Assert.assertEquals(bundles.size(), 1); } - @Test public void testStatus() throws Exception { TestContext context = newContext(); ClientResponse response; @@ -332,7 +375,6 @@ public class EntityManagerJerseyIT { } - @Test public void testIdempotentSubmit() throws Exception { TestContext context = newContext(); ClientResponse response; @@ -345,7 +387,6 @@ public class EntityManagerJerseyIT { context.assertSuccessful(response); } - @Test public void testNotFoundStatus() { TestContext context = newContext(); ClientResponse response; @@ -358,7 +399,6 @@ public class EntityManagerJerseyIT { Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode()); } - @Test public void testVersion() { TestContext context = newContext(); ClientResponse response; @@ -379,7 +419,6 @@ public class EntityManagerJerseyIT { "No deploy.mode found in /api/admin/version"); } - @Test public void testValidate() { TestContext context = newContext(); ServletInputStream stream = context.getServletInputStream(getClass(). @@ -394,7 +433,6 @@ public class EntityManagerJerseyIT { context.assertFailure(clientRepsonse); } - @Test public void testClusterValidate() throws Exception { TestContext context = newContext(); ClientResponse clientRepsonse; @@ -410,7 +448,6 @@ public class EntityManagerJerseyIT { context.assertSuccessful(clientRepsonse); } - @Test public void testClusterSubmitScheduleSuspendResumeDelete() throws Exception { TestContext context = newContext(); ClientResponse clientRepsonse; @@ -448,7 +485,6 @@ public class EntityManagerJerseyIT { context.assertSuccessful(clientRepsonse); } - @Test public void testSubmit() throws Exception { TestContext context = newContext(); ClientResponse response; @@ -467,7 +503,6 @@ public class EntityManagerJerseyIT { context.assertSuccessful(response); } - @Test public void testGetEntityDefinition() throws Exception { TestContext context = newContext(); ClientResponse response; @@ -494,7 +529,6 @@ public class EntityManagerJerseyIT { } } - @Test public void testInvalidGetEntityDefinition() { TestContext context = newContext(); ClientResponse clientRepsonse = context.service @@ -504,7 +538,6 @@ public class EntityManagerJerseyIT { context.assertFailure(clientRepsonse); } - @Test public void testScheduleSuspendResume() throws Exception { TestContext context = newContext(); context.scheduleProcess(); @@ -522,7 +555,6 @@ public class EntityManagerJerseyIT { context.assertSuccessful(clientRepsonse); } - @Test(enabled = true) public void testFeedSchedule() throws Exception { TestContext context = newContext(); ClientResponse response; @@ -543,7 +575,7 @@ public class EntityManagerJerseyIT { context.assertSuccessful(clientRepsonse); } - private List createTestData(TestContext context) throws Exception { + static List createTestData(TestContext context) throws Exception { List list = new ArrayList(); FileSystem fs = context.cluster.getFileSystem(); fs.mkdirs(new Path("/user/guest")); @@ -593,7 +625,6 @@ public class EntityManagerJerseyIT { return list; } - @Test public void testDeleteDataSet() throws Exception { TestContext context = 
newContext(); ClientResponse response; @@ -612,7 +643,6 @@ public class EntityManagerJerseyIT { context.assertSuccessful(response); } - @Test public void testDelete() throws Exception { TestContext context = newContext(); ClientResponse response;