falcon-commits mailing list archives

From: samar...@apache.org
Subject: git commit: Revert: FALCON-633 RetryTests and Retentions tests should stop using root dir
Date: Thu, 28 Aug 2014 08:04:56 GMT
Repository: incubator-falcon
Updated Branches:
  refs/heads/master ffe18b0ce -> b1c13df45


Revert: FALCON-633 RetryTests and Retentions tests should stop using root dir


Project: http://git-wip-us.apache.org/repos/asf/incubator-falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-falcon/commit/b1c13df4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-falcon/tree/b1c13df4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-falcon/diff/b1c13df4

Branch: refs/heads/master
Commit: b1c13df454a46c3df0f6b1041147694721b770d7
Parents: ffe18b0
Author: Samarth Gupta <samarth.gupta@inmobi.com>
Authored: Thu Aug 28 13:34:04 2014 +0530
Committer: Samarth Gupta <samarth.gupta@inmobi.com>
Committed: Thu Aug 28 13:34:04 2014 +0530

----------------------------------------------------------------------
 falcon-regression/CHANGES.txt                   |  6 +-
 .../falcon/regression/core/util/BundleUtil.java |  9 ++-
 .../apache/falcon/regression/NewRetryTest.java  | 79 +++++++++-----------
 .../falcon/regression/prism/RetentionTest.java  |  2 +-
 4 files changed, 45 insertions(+), 51 deletions(-)
----------------------------------------------------------------------
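In short, this revert restores the zero-argument bundle readers in BundleUtil and points
NewRetryTest back at a single shared late-data directory. As a rough sketch of how the test
setup reads once the revert is applied (the surrounding fields such as bundles and cluster
come from the test classes shown in the hunks below; this snippet is illustrative only and
is not part of the commit):

    // NewRetryTest.setUp(): no appPath/testName arguments any more; the bundle
    // is read straight from the shared "RetryTests" template folder.
    bundles[0] = new Bundle(BundleUtil.readRetryBundle(), cluster);

    // RetentionTest.testName(): same pattern against the "RetentionBundles" folder.
    Bundle bundle = BundleUtil.readRetentionBundle();
    bundles[0] = new Bundle(bundle, cluster);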


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b1c13df4/falcon-regression/CHANGES.txt
----------------------------------------------------------------------
diff --git a/falcon-regression/CHANGES.txt b/falcon-regression/CHANGES.txt
index 7a3b84f..52e697e 100644
--- a/falcon-regression/CHANGES.txt
+++ b/falcon-regression/CHANGES.txt
@@ -7,11 +7,9 @@ Trunk (Unreleased)
   NEW FEATURES
    FALCON-589 Add test cases for various feed operations on Hcat feeds (Karishma G 
    via Samarth Gupta)
-  IMPROVEMENTS
-
-   FALCON-633 RetryTests and Retentions tests should stop using root dir
-   (Raghav Kumar Gautam via Samarth Gupta)
 
+  IMPROVEMENTS
+   
    FALCON-632 Refactoring, documentation stuff (Paul Isaychuk via Samarth Gupta)
 
    FALCON-609 UpdateAtSpecificTimeTest, InstanceSummaryTest tagged, fixed, refactored

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b1c13df4/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/BundleUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/BundleUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/BundleUtil.java
index d5790c4..1f73523 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/BundleUtil.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/BundleUtil.java
@@ -48,13 +48,14 @@ public final class BundleUtil {
         return readBundleFromFolder("LateDataBundles");
     }
 
-    public static Bundle readRetryBundle(String appPath, String testName) throws IOException {
-        return generateBundleFromTemplate("RetryTests", appPath, testName);
+    public static Bundle readRetryBundle() throws IOException {
+        return readBundleFromFolder("RetryTests");
     }
 
-    public static Bundle readRetentionBundle(String appPath, String testName) throws IOException {
-        return generateBundleFromTemplate("RetentionBundles", appPath, testName);
+    public static Bundle readRetentionBundle() throws IOException {
+        return readBundleFromFolder("RetentionBundles");
     }
+
     public static Bundle readELBundle() throws IOException {
         return readBundleFromFolder("ELbundle");
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b1c13df4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/NewRetryTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/NewRetryTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/NewRetryTest.java
index 8bcc797..5ab3dfe 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/NewRetryTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/NewRetryTest.java
@@ -74,11 +74,8 @@ public class NewRetryTest extends BaseTestClass {
     DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy/MM/dd/HH/mm");
     final private String baseTestDir = baseHDFSDir + "/NewRetryTest";
     final private String aggregateWorkflowDir = baseTestDir + "/aggregator";
-    final private String lateInputDir = baseTestDir + "/lateDataTest/inputFolders/";
-    final private String lateInputPath = lateInputDir + "${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}";
-    final private String lateOutputDir = baseTestDir + "/lateDataTest/outputFolders/";
-    final private String lateOutputPath = lateOutputDir
-        + "${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}";
+    final private String lateDir = baseTestDir + "/lateDataTest/testFolders/";
+    final private String latePath = lateDir + "${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}";
     private DateTime startDate;
     private DateTime endDate;
 
@@ -89,17 +86,15 @@ public class NewRetryTest extends BaseTestClass {
 
     @BeforeMethod(alwaysRun = true)
     public void setUp(Method method) throws Exception {
-        bundles[0] = new Bundle(
-            BundleUtil.readRetryBundle(baseAppHDFSDir, this.getClass().getSimpleName()), cluster);
+        bundles[0] = new Bundle(BundleUtil.readRetryBundle(), cluster);
         bundles[0].generateUniqueBundle();
         bundles[0].setProcessWorkflow(aggregateWorkflowDir);
         startDate = new DateTime(DateTimeZone.UTC).plusMinutes(1);
         endDate = new DateTime(DateTimeZone.UTC).plusMinutes(2);
         bundles[0].setProcessValidity(startDate, endDate);
 
-        bundles[0].setOutputFeedLocationData(lateOutputPath);
         String feed =
-            Util.setFeedPathValue(bundles[0].getInputFeedFromBundle(), lateInputPath);
+            Util.setFeedPathValue(bundles[0].getInputFeedFromBundle(), latePath);
         feed = Util.insertLateFeedValue(feed, new Frequency("minutes(8)"));
         bundles[0].getDataSets().remove(bundles[0].getInputFeedFromBundle());
         bundles[0].getDataSets().add(feed);
@@ -131,8 +126,8 @@ public class NewRetryTest extends BaseTestClass {
         } else {
             AssertUtil.assertSucceeded(response);
             // lets create data now:
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
 
             //schedule process
             AssertUtil.assertSucceeded(
@@ -182,8 +177,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             //now wait till the process is over
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
@@ -240,8 +235,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
 
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
@@ -293,8 +288,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
 
@@ -349,8 +344,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
             //now wait till the process is over
@@ -397,8 +392,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
             //now wait till the process is over
@@ -449,8 +444,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
             //now wait till the process is over
@@ -503,8 +498,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
             //now wait till the process is over
@@ -555,8 +550,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
             //now wait till the process is over
@@ -594,8 +589,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
 
@@ -648,8 +643,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
             //now wait till the process is over
@@ -685,7 +680,7 @@ public class NewRetryTest extends BaseTestClass {
    public void testRetryInSuspendedAndResumeCaseWithLateData(Retry retry) throws Exception {
 
         String feed =
-            Util.setFeedPathValue(bundles[0].getInputFeedFromBundle(), lateInputPath);
+            Util.setFeedPathValue(bundles[0].getInputFeedFromBundle(), latePath);
         feed = Util.insertLateFeedValue(feed, new Frequency("minutes(10)"));
         bundles[0].getDataSets().remove(bundles[0].getInputFeedFromBundle());
         bundles[0].getDataSets().add(feed);
@@ -703,8 +698,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
             String bundleId = OozieUtil.getBundles(clusterOC,
@@ -779,7 +774,7 @@ public class NewRetryTest extends BaseTestClass {
     public void testRetryInLateDataCase(Retry retry) throws Exception {
 
         String feed =
-            Util.setFeedPathValue(bundles[0].getInputFeedFromBundle(), lateInputPath);
+            Util.setFeedPathValue(bundles[0].getInputFeedFromBundle(), latePath);
 
         feed = Util.insertLateFeedValue(feed, getFrequency(retry));
 
@@ -800,11 +795,11 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             List<String> initialData =
                 Util.getHadoopDataFromDir(clusterFS, bundles[0].getInputFeedFromBundle(),
-                    lateInputDir);
+                    lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
             String bundleId = OozieUtil.getBundles(clusterOC,
@@ -842,7 +837,7 @@ public class NewRetryTest extends BaseTestClass {
             String insertionFolder =
                 Util.findFolderBetweenGivenTimeStamps(now, now.plusMinutes(5), initialData);
             logger.info("inserting data in folder " + insertionFolder + " at " + DateTime.now());
-            HadoopUtil.injectMoreData(clusterFS, lateInputDir + insertionFolder,
+            HadoopUtil.injectMoreData(clusterFS, lateDir + insertionFolder,
                     OSUtil.OOZIE_EXAMPLE_INPUT_DATA + "lateData");
             //now to validate all failed instances to check if they were retried or not.
             validateRetry(clusterOC, bundleId,
@@ -859,7 +854,7 @@ public class NewRetryTest extends BaseTestClass {
     public void testRetryInDeleteAfterPartialRetryCase(Retry retry) throws Exception {
 
         String feed =
-            Util.setFeedPathValue(bundles[0].getInputFeedFromBundle(), lateInputPath);
+            Util.setFeedPathValue(bundles[0].getInputFeedFromBundle(), latePath);
         feed = Util.insertLateFeedValue(feed, new Frequency("minutes(1)"));
         bundles[0].getDataSets().remove(bundles[0].getInputFeedFromBundle());
         bundles[0].getDataSets().add(feed);
@@ -878,8 +873,8 @@ public class NewRetryTest extends BaseTestClass {
             AssertUtil.assertFailed(response);
         } else {
             AssertUtil.assertSucceeded(response);
-            HadoopUtil.deleteDirIfExists(lateInputDir, clusterFS);
-            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateInputDir);
+            HadoopUtil.deleteDirIfExists(lateDir, clusterFS);
+            HadoopUtil.lateDataReplenish(clusterFS, 20, 0, lateDir);
             AssertUtil.assertSucceeded(
                 prism.getProcessHelper().schedule(URLS.SCHEDULE_URL, bundles[0].getProcessData()));
             //now wait till the process is over

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b1c13df4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/RetentionTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/RetentionTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/RetentionTest.java
index b288b77..1d900d9 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/RetentionTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/RetentionTest.java
@@ -72,7 +72,7 @@ public class RetentionTest extends BaseTestClass {
     @BeforeMethod(alwaysRun = true)
     public void testName(Method method) throws Exception {
         logger.info("test name: " + method.getName());
-        Bundle bundle = BundleUtil.readRetentionBundle(baseAppHDFSDir, this.getClass().getSimpleName());
+        Bundle bundle = BundleUtil.readRetentionBundle();
         bundles[0] = new Bundle(bundle, cluster);
         bundles[0].setInputFeedDataPath(testHDFSDir);
         bundles[0].generateUniqueBundle();

