falcon-commits mailing list archives

From rostafiyc...@apache.org
Subject [8/8] incubator-falcon git commit: FALCON-698 Fix checkstyle bugs in test files in falcon-regression. Contributed by Ruslan Ostafiychuk and Raghav Kumar Gautam
Date Fri, 28 Nov 2014 13:10:52 GMT
FALCON-698 Fix checkstyle bugs in test files in falcon-regression. Contributed by Ruslan Ostafiychuk and Raghav Kumar Gautam


Project: http://git-wip-us.apache.org/repos/asf/incubator-falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-falcon/commit/387604d1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-falcon/tree/387604d1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-falcon/diff/387604d1

Branch: refs/heads/master
Commit: 387604d18047acf6923b111bf75f661d94f0f960
Parents: 587f085
Author: Ruslan Ostafiychuk <rostafiychuk@apache.org>
Authored: Fri Nov 28 15:01:48 2014 +0200
Committer: Ruslan Ostafiychuk <rostafiychuk@apache.org>
Committed: Fri Nov 28 15:09:22 2014 +0200

----------------------------------------------------------------------
 falcon-regression/CHANGES.txt                   |   3 +
 .../falcon/regression/Entities/FeedMerlin.java  | 116 ++++--
 .../regression/Entities/ProcessMerlin.java      |  98 +++--
 .../falcon/regression/core/util/MathUtil.java   |  79 ----
 .../falcon/regression/core/util/MatrixUtil.java |  79 ++++
 .../falcon/regression/AuthorizationTest.java    |  28 +-
 .../regression/ELExpFutureAndLatestTest.java    | 122 ++++++
 .../regression/ELExp_FutureAndLatestTest.java   | 122 ------
 .../falcon/regression/ELValidationsTest.java    |  90 ++--
 .../regression/EmbeddedPigScriptTest.java       |  24 +-
 .../falcon/regression/ExternalFSTest.java       |  28 +-
 .../regression/FeedClusterUpdateTest.java       |  57 ++-
 .../regression/FeedInstanceStatusTest.java      |  32 +-
 .../falcon/regression/FeedLateRerunTest.java    | 146 +++----
 .../falcon/regression/FeedReplicationTest.java  |   4 +-
 .../regression/FeedSubmitAndScheduleTest.java   |   4 +-
 .../falcon/regression/InstanceParamTest.java    |  15 +-
 .../falcon/regression/InstanceSummaryTest.java  |  29 +-
 .../apache/falcon/regression/NewRetryTest.java  | 157 +++----
 .../falcon/regression/NoOutputProcessTest.java  |  20 +-
 .../falcon/regression/ProcessFrequencyTest.java |  36 +-
 .../ProcessInstanceColoMixedTest.java           | 188 ++++-----
 .../regression/ProcessInstanceStatusTest.java   |  10 +-
 .../falcon/regression/ProcessLateRerunTest.java | 109 ++---
 .../falcon/regression/ProcessLibPathTest.java   |  28 +-
 .../falcon/regression/TestngListener.java       |  21 +-
 .../regression/entity/ListEntitiesTest.java     |  16 +-
 .../regression/hcat/HCatFeedOperationsTest.java |  23 +-
 .../falcon/regression/hcat/HCatProcessTest.java | 287 ++++++-------
 .../regression/hcat/HCatReplicationTest.java    |  66 +--
 .../regression/hcat/HCatRetentionTest.java      | 105 ++---
 .../regression/lineage/EntitySummaryTest.java   |  10 +-
 .../lineage/LineageApiProcessInstanceTest.java  |  84 ++--
 .../regression/lineage/LineageApiTest.java      | 150 +++----
 .../lineage/ListFeedInstancesTest.java          |  20 +-
 .../regression/lineage/ProcessPipelineTest.java |  16 +-
 .../regression/prism/EntityDryRunTest.java      |  18 +-
 .../prism/FeedDelayParallelTimeoutTest.java     |  19 +-
 .../regression/prism/OptionalInputTest.java     | 127 +++---
 .../prism/PrismClusterDeleteTest.java           |  13 +-
 .../prism/PrismConcurrentRequestTest.java       |  45 +-
 .../regression/prism/PrismFeedDeleteTest.java   |  67 +--
 .../prism/PrismFeedLateReplicationTest.java     | 330 +++++++--------
 .../PrismFeedReplicationPartitionExpTest.java   | 126 +++---
 .../prism/PrismFeedReplicationUpdateTest.java   |  39 +-
 .../regression/prism/PrismFeedResumeTest.java   |  17 +-
 .../regression/prism/PrismFeedScheduleTest.java |  17 +-
 .../regression/prism/PrismFeedSnSTest.java      |  30 +-
 .../regression/prism/PrismFeedSuspendTest.java  |  17 +-
 .../regression/prism/PrismFeedUpdateTest.java   |  45 +-
 .../prism/PrismProcessDeleteTest.java           |  25 +-
 .../prism/PrismProcessResumeTest.java           |  17 +-
 .../prism/PrismProcessScheduleTest.java         | 125 +++---
 .../regression/prism/PrismProcessSnSTest.java   |  27 +-
 .../prism/PrismProcessSuspendTest.java          |  19 +-
 .../regression/prism/PrismSubmitTest.java       |  61 +--
 .../prism/ProcessPartitionExpVariableTest.java  |  33 +-
 .../prism/RescheduleKilledProcessTest.java      |  13 +-
 .../RescheduleProcessInFinalStatesTest.java     |  25 +-
 .../falcon/regression/prism/RetentionTest.java  |  51 +--
 .../prism/UpdateAtSpecificTimeTest.java         | 329 +++++++--------
 .../regression/security/AclValidationTest.java  |  29 +-
 .../regression/security/ClusterAclTest.java     |  49 ++-
 .../falcon/regression/security/EntityOp.java    |  18 +-
 .../regression/security/FalconClientTest.java   |  41 +-
 .../falcon/regression/security/FeedAclTest.java | 100 ++---
 .../regression/security/ProcessAclTest.java     | 109 ++---
 .../falcon/regression/ui/LineageGraphTest.java  |  54 +--
 .../falcon/regression/ui/ProcessUITest.java     |  29 +-
 .../2ndLateData/Configuration.java              |  73 ----
 .../2ndLateData/Configuration.java.ignore       |  73 ++++
 .../2ndLateData/DateValidator.java              |  87 ----
 .../2ndLateData/DateValidator.java.ignore       |  87 ++++
 .../2ndLateData/TimeUnit.java                   |  36 --
 .../2ndLateData/TimeUnit.java.ignore            |  36 ++
 .../EntityInstanceMessage.java.ignore           | 412 +++++++++++++++++++
 .../EntityInstanceMessage.java.ignore.java      | 412 -------------------
 .../EntityInstanceMessageCreator.java.ignore    |  61 +++
 ...ntityInstanceMessageCreator.java.ignore.java |  61 ---
 .../normalInput/MessageProducer.java.ignore     | 139 +++++++
 .../MessageProducer.java.ignore.java            | 139 -------
 81 files changed, 3119 insertions(+), 2983 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/CHANGES.txt
----------------------------------------------------------------------
diff --git a/falcon-regression/CHANGES.txt b/falcon-regression/CHANGES.txt
index 69da668..16b839d 100644
--- a/falcon-regression/CHANGES.txt
+++ b/falcon-regression/CHANGES.txt
@@ -33,6 +33,9 @@ Trunk (Unreleased)
    via Samarth Gupta)
 
   IMPROVEMENTS
+   FALCON-698 Fix checkstyle bugs in test files in falcon-regression (Ruslan Ostafiychuk and
+   Raghav Kumar Gautam via Ruslan Ostafiychuk)
+
    FALCON-684 Fix problems found by checkstyle in non-test files in falcon-regression (Ruslan
    Ostafiychuk)
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/FeedMerlin.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/FeedMerlin.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/FeedMerlin.java
index 02f572e..cec7684 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/FeedMerlin.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/FeedMerlin.java
@@ -25,19 +25,18 @@ import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.Frequency;
 import org.apache.falcon.entity.v0.feed.ACL;
 import org.apache.falcon.entity.v0.feed.ActionType;
+import org.apache.falcon.entity.v0.feed.CatalogTable;
 import org.apache.falcon.entity.v0.feed.Cluster;
-import org.apache.falcon.entity.v0.feed.Clusters;
+import org.apache.falcon.entity.v0.feed.ClusterType;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.feed.Location;
 import org.apache.falcon.entity.v0.feed.LocationType;
 import org.apache.falcon.entity.v0.feed.Locations;
 import org.apache.falcon.entity.v0.feed.Property;
 import org.apache.falcon.entity.v0.feed.Retention;
-import org.apache.falcon.entity.v0.feed.RetentionType;
 import org.apache.falcon.entity.v0.feed.Validity;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.log4j.Logger;
 import org.testng.Assert;
 
 import javax.xml.bind.JAXBException;
@@ -51,8 +50,6 @@ import java.util.Map;
 /** Class for representing a feed xml. */
 public class FeedMerlin extends Feed {
 
-    private static final Logger LOGGER = Logger.getLogger(FeedMerlin.class);
-
     public FeedMerlin(String feedData) {
         this((Feed) TestEntityUtil.fromString(EntityType.FEED, feedData));
     }
@@ -60,6 +57,7 @@ public class FeedMerlin extends Feed {
     public FeedMerlin(final Feed feed) {
         try {
             PropertyUtils.copyProperties(this, feed);
+            this.setACL(feed.getACL());
         } catch (IllegalAccessException e) {
             Assert.fail("Can't create ClusterMerlin: " + ExceptionUtils.getStackTrace(e));
         } catch (InvocationTargetException e) {
@@ -72,11 +70,86 @@ public class FeedMerlin extends Feed {
     public static List<FeedMerlin> fromString(List<String> feedStrings) {
         List<FeedMerlin> feeds = new ArrayList<FeedMerlin>();
         for (String feedString : feedStrings) {
-            feeds.add(new FeedMerlin(feedString));
+            feeds.add(fromString(feedString));
         }
         return feeds;
     }
 
+    public static FeedMerlin fromString(String feedString) {
+        return new FeedMerlin(feedString);
+    }
+
+    /** clear clusters of this feed. */
+    public FeedMerlin clearFeedClusters() {
+        getClusters().getClusters().clear();
+        return this;
+    }
+
+    /** add a feed cluster to this feed. */
+    public FeedMerlin addFeedCluster(Cluster cluster) {
+        getClusters().getClusters().add(cluster);
+        return this;
+    }
+
+    /** Fluent builder wrapper for cluster fragment of feed entity . */
+    public static class FeedClusterBuilder {
+        private Cluster cluster = new Cluster();
+
+        public FeedClusterBuilder(String clusterName) {
+            cluster.setName(clusterName);
+        }
+
+        public Cluster build() {
+            Cluster retVal = cluster;
+            cluster = null;
+            return retVal;
+        }
+
+        public FeedClusterBuilder withRetention(String limit, ActionType action) {
+            Retention r = new Retention();
+            r.setLimit(new Frequency(limit));
+            r.setAction(action);
+            cluster.setRetention(r);
+            return this;
+        }
+
+        public FeedClusterBuilder withValidity(String startTime, String endTime) {
+            Validity v = new Validity();
+            v.setStart(TimeUtil.oozieDateToDate(startTime).toDate());
+            v.setEnd(TimeUtil.oozieDateToDate(endTime).toDate());
+            cluster.setValidity(v);
+            return this;
+        }
+
+        public FeedClusterBuilder withClusterType(ClusterType type) {
+            cluster.setType(type);
+            return this;
+        }
+
+        public FeedClusterBuilder withPartition(String partition) {
+            cluster.setPartition(partition);
+            return this;
+        }
+
+        public FeedClusterBuilder withTableUri(String tableUri) {
+            CatalogTable catalogTable = new CatalogTable();
+            catalogTable.setUri(tableUri);
+            cluster.setTable(catalogTable);
+            return this;
+        }
+
+        public FeedClusterBuilder withDataLocation(String dataLocation) {
+            Location oneLocation = new Location();
+            oneLocation.setPath(dataLocation);
+            oneLocation.setType(LocationType.DATA);
+
+            Locations feedLocations = new Locations();
+            feedLocations.getLocations().add(oneLocation);
+            cluster.setLocations(feedLocations);
+            return this;
+        }
+    }
+
     /**
      * Method sets a number of clusters to feed definition.
      *
@@ -87,33 +160,18 @@ public class FeedMerlin extends Feed {
      */
     public void setFeedClusters(List<String> newClusters, String location, String startTime,
                                 String endTime) {
-        Clusters cs = new Clusters();
+        clearFeedClusters();
         setFrequency(new Frequency("" + 5, Frequency.TimeUnit.minutes));
 
         for (String newCluster : newClusters) {
-            Cluster c = new Cluster();
-            c.setName(new ClusterMerlin(newCluster).getName());
-            Location l = new Location();
-            l.setType(LocationType.DATA);
-            l.setPath(location + "/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}");
-            Locations ls = new Locations();
-            ls.getLocations().add(l);
-            c.setLocations(ls);
-            Validity v = new Validity();
-            startTime = TimeUtil.addMinsToTime(startTime, -180);
-            endTime = TimeUtil.addMinsToTime(endTime, 180);
-            v.setStart(TimeUtil.oozieDateToDate(startTime).toDate());
-            v.setEnd(TimeUtil.oozieDateToDate(endTime).toDate());
-            c.setValidity(v);
-            Retention r = new Retention();
-            r.setAction(ActionType.DELETE);
-            Frequency f1 = new Frequency("" + 20, Frequency.TimeUnit.hours);
-            r.setLimit(f1);
-            r.setType(RetentionType.INSTANCE);
-            c.setRetention(r);
-            cs.getClusters().add(c);
+            Cluster feedCluster = new FeedClusterBuilder(new ClusterMerlin(newCluster).getName())
+                .withDataLocation(location + "/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}")
+                .withValidity(TimeUtil.addMinsToTime(startTime, -180),
+                    TimeUtil.addMinsToTime(endTime, 180))
+                .withRetention("hours(20)", ActionType.DELETE)
+                .build();
+            addFeedCluster(feedCluster);
         }
-        setClusters(cs);
     }
 
     public void setRetentionValue(String retentionValue) {

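The refactored setFeedClusters() above is representative of how the new fluent FeedClusterBuilder is meant to be used. A minimal usage sketch, not part of this patch (the feed XML, cluster name, data path and validity window are illustrative; imports are the same as in FeedMerlin.java above):

    FeedMerlin feed = FeedMerlin.fromString(feedXml);    // feedXml holds an existing feed definition
    feed.clearFeedClusters();                            // drop the clusters inherited from the template
    feed.addFeedCluster(
        new FeedMerlin.FeedClusterBuilder("my-cluster")  // hypothetical cluster name
            .withDataLocation("/falcon/test/input/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}")
            .withValidity("2014-11-28T00:00Z", "2014-11-29T00:00Z")
            .withRetention("hours(20)", ActionType.DELETE)
            .build());

Because build() hands back a plain Cluster and addFeedCluster() returns the feed itself, several clusters can be attached in a chain.
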
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/ProcessMerlin.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/ProcessMerlin.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/ProcessMerlin.java
index d81f577..9ffc15d 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/ProcessMerlin.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/ProcessMerlin.java
@@ -25,7 +25,6 @@ import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.process.ACL;
 import org.apache.falcon.entity.v0.process.Cluster;
-import org.apache.falcon.entity.v0.process.Clusters;
 import org.apache.falcon.entity.v0.process.Input;
 import org.apache.falcon.entity.v0.process.Inputs;
 import org.apache.falcon.entity.v0.process.Output;
@@ -33,18 +32,14 @@ import org.apache.falcon.entity.v0.process.Outputs;
 import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.entity.v0.process.Properties;
 import org.apache.falcon.entity.v0.process.Property;
-import org.apache.falcon.regression.core.bundle.Bundle;
+import org.apache.falcon.entity.v0.process.Validity;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.hadoop.fs.FileSystem;
 import org.testng.Assert;
 
 import javax.xml.bind.JAXBException;
 import java.io.StringWriter;
 import java.lang.reflect.InvocationTargetException;
-import java.text.Format;
-import java.text.SimpleDateFormat;
-import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -66,6 +61,45 @@ public class ProcessMerlin extends Process {
         }
     }
 
+    public static ProcessMerlin fromString(String processString) {
+        return new ProcessMerlin(processString);
+    }
+
+
+    public ProcessMerlin clearProcessCluster() {
+        getClusters().getClusters().clear();
+        return this;
+    }
+
+    public ProcessMerlin addProcessCluster(Cluster cluster) {
+        getClusters().getClusters().add(cluster);
+        return this;
+    }
+
+    /** Fluent builder wrapper for cluster fragment of process entity . */
+    public static class ProcessClusterBuilder {
+        private Cluster cluster = new Cluster();
+
+        public ProcessClusterBuilder(String clusterName) {
+            cluster.setName(clusterName);
+        }
+
+        public Cluster build() {
+            Cluster retVal = cluster;
+            cluster = null;
+            return retVal;
+        }
+
+        public ProcessClusterBuilder withValidity(String startTime, String endTime) {
+            Validity v = new Validity();
+            v.setStart(TimeUtil.oozieDateToDate(startTime).toDate());
+            v.setEnd(TimeUtil.oozieDateToDate(endTime).toDate());
+            cluster.setValidity(v);
+            return this;
+        }
+
+    }
+
     /**
      * Method sets a number of clusters to process definition.
      *
@@ -75,50 +109,14 @@ public class ProcessMerlin extends Process {
      * @param endTime end of process validity on every cluster
      */
     public void setProcessClusters(List<String> newClusters, String startTime, String endTime) {
-        Clusters cs =  new Clusters();
+        clearProcessCluster();
         for (String newCluster : newClusters) {
-            Cluster c = new Cluster();
-            c.setName(new ClusterMerlin(newCluster).getName());
-            org.apache.falcon.entity.v0.process.Validity v =
-                new org.apache.falcon.entity.v0.process.Validity();
-            v.setStart(TimeUtil.oozieDateToDate(startTime).toDate());
-            v.setEnd(TimeUtil.oozieDateToDate(endTime).toDate());
-            c.setValidity(v);
-            cs.getClusters().add(c);
-        }
-        setClusters(cs);
-    }
-
-    public Bundle setFeedsToGenerateData(FileSystem fs, Bundle b) {
-        Date start = getClusters().getClusters().get(0).getValidity().getStart();
-        Format formatter = new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm'Z'");
-        String startDate = formatter.format(start);
-        Date end = getClusters().getClusters().get(0).getValidity().getEnd();
-        String endDate = formatter.format(end);
-
-        Map<String, FeedMerlin> inpFeeds = getInputFeeds(b);
-        for (FeedMerlin feedElement : inpFeeds.values()) {
-            feedElement.getClusters().getClusters().get(0).getValidity()
-                .setStart(TimeUtil.oozieDateToDate(startDate).toDate());
-            feedElement.getClusters().getClusters().get(0).getValidity()
-                .setEnd(TimeUtil.oozieDateToDate(endDate).toDate());
-            b.writeFeedElement(feedElement, feedElement.getName());
-        }
-        return b;
-    }
-
-    public Map<String, FeedMerlin> getInputFeeds(Bundle b) {
-        Map<String, FeedMerlin> inpFeeds = new HashMap<String, FeedMerlin>();
-        for (Input input : getInputs().getInputs()) {
-            for (String feed : b.getDataSets()) {
-                if (Util.readEntityName(feed).equalsIgnoreCase(input.getFeed())) {
-                    FeedMerlin feedO = new FeedMerlin(feed);
-                    inpFeeds.put(Util.readEntityName(feed), feedO);
-                    break;
-                }
-            }
+            final Cluster processCluster = new ProcessClusterBuilder(
+                new ClusterMerlin(newCluster).getName())
+                .withValidity(startTime, endTime)
+                .build();
+            addProcessCluster(processCluster);
         }
-        return inpFeeds;
     }
 
     public final void setProperty(String name, String value) {
@@ -198,8 +196,8 @@ public class ProcessMerlin extends Process {
      * @param numberOfOutputs number of outputs
      */
     public void setProcessFeeds(List<String> newDataSets,
-                                  int numberOfInputs, int numberOfOptionalInput,
-                                  int numberOfOutputs) {
+                                int numberOfInputs, int numberOfOptionalInput,
+                                int numberOfOutputs) {
         int numberOfOptionalSet = 0;
         boolean isFirst = true;
 

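ProcessMerlin gains the same pattern: ProcessClusterBuilder mirrors FeedClusterBuilder but currently only exposes withValidity(). A usage sketch under the same assumptions (processXml and the cluster name are illustrative):

    ProcessMerlin process = ProcessMerlin.fromString(processXml);  // processXml holds an existing process definition
    process.clearProcessCluster();                                 // start from an empty cluster list
    process.addProcessCluster(
        new ProcessMerlin.ProcessClusterBuilder("my-cluster")      // hypothetical cluster name
            .withValidity("2014-11-28T00:00Z", "2014-11-29T00:00Z")
            .build());
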
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/MathUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/MathUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/MathUtil.java
deleted file mode 100644
index f090356..0000000
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/MathUtil.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.core.util;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.testng.Assert;
-
-import java.util.Arrays;
-
-/**
- * Util class for math operations.
- */
-public final class MathUtil {
-    private MathUtil() {
-        throw new AssertionError("Instantiating utility class...");
-    }
-
-    /**
-     * Cross product many arrays.
-     * @param firstArray first array that you want to cross product
-     * @param otherArrays other arrays that you want to cross product
-     * @return cross product
-     */
-    public static Object[][] crossProduct(Object[] firstArray, Object[]... otherArrays) {
-        if (otherArrays == null || otherArrays.length == 0) {
-            Object[][] result = new Object[firstArray.length][1];
-            for (int i = 0; i < firstArray.length; ++i) {
-                result[i][0] = firstArray[i];
-            }
-            return result;
-        }
-        // computing cross product for the rest of the arrays
-        Object[][] restArray = new Object[otherArrays.length-1][];
-        System.arraycopy(otherArrays, 1, restArray, 0, otherArrays.length - 1);
-        Object[][] restCrossProduct = crossProduct(otherArrays[0], restArray);
-        //creating and initializing result array
-        Object[][] result = new Object[firstArray.length * restCrossProduct.length][];
-        for(int i = 0; i < result.length; ++i) {
-            result[i] = new Object[otherArrays.length + 1];
-        }
-        //doing the final cross product
-        for (int i = 0; i < firstArray.length; ++i) {
-            for (int j = 0; j < restCrossProduct.length; ++j) {
-                //computing one row of result
-                final int rowIdx = i * restCrossProduct.length + j;
-                result[rowIdx][0] = firstArray[i];
-                System.arraycopy(restCrossProduct[j], 0, result[rowIdx], 1, otherArrays.length);
-            }
-        }
-        return result;
-    }
-
-    public static Object[][] append(Object[][] arr1, Object[][] arr2) {
-        Assert.assertFalse(ArrayUtils.isEmpty(arr1), "arr1 can't be empty:"
-            + Arrays.deepToString(arr1));
-        Assert.assertFalse(ArrayUtils.isEmpty(arr2), "arr2 can't be empty:"
-            + Arrays.deepToString(arr2));
-        Assert.assertEquals(arr1[0].length, arr2[0].length, "Array rows are not compatible. "
-            + "row of first array: " + Arrays.deepToString(arr1[0])
-            + "row of second array: " + Arrays.deepToString(arr2[0]));
-        return (Object[][]) ArrayUtils.addAll(arr1, arr2);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/MatrixUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/MatrixUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/MatrixUtil.java
new file mode 100644
index 0000000..c68dd3c
--- /dev/null
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/MatrixUtil.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression.core.util;
+
+import org.apache.commons.lang.ArrayUtils;
+import org.testng.Assert;
+
+import java.util.Arrays;
+
+/**
+ * Util class for matrix operations.
+ */
+public final class MatrixUtil {
+    private MatrixUtil() {
+        throw new AssertionError("Instantiating utility class...");
+    }
+
+    /**
+     * Cross product many arrays.
+     * @param firstArray first array that you want to cross product
+     * @param otherArrays other arrays that you want to cross product
+     * @return cross product
+     */
+    public static Object[][] crossProduct(Object[] firstArray, Object[]... otherArrays) {
+        if (otherArrays == null || otherArrays.length == 0) {
+            Object[][] result = new Object[firstArray.length][1];
+            for (int i = 0; i < firstArray.length; ++i) {
+                result[i][0] = firstArray[i];
+            }
+            return result;
+        }
+        // computing cross product for the rest of the arrays
+        Object[][] restArray = new Object[otherArrays.length-1][];
+        System.arraycopy(otherArrays, 1, restArray, 0, otherArrays.length - 1);
+        Object[][] restCrossProduct = crossProduct(otherArrays[0], restArray);
+        //creating and initializing result array
+        Object[][] result = new Object[firstArray.length * restCrossProduct.length][];
+        for(int i = 0; i < result.length; ++i) {
+            result[i] = new Object[otherArrays.length + 1];
+        }
+        //doing the final cross product
+        for (int i = 0; i < firstArray.length; ++i) {
+            for (int j = 0; j < restCrossProduct.length; ++j) {
+                //computing one row of result
+                final int rowIdx = i * restCrossProduct.length + j;
+                result[rowIdx][0] = firstArray[i];
+                System.arraycopy(restCrossProduct[j], 0, result[rowIdx], 1, otherArrays.length);
+            }
+        }
+        return result;
+    }
+
+    public static Object[][] append(Object[][] arr1, Object[][] arr2) {
+        Assert.assertFalse(ArrayUtils.isEmpty(arr1), "arr1 can't be empty:"
+            + Arrays.deepToString(arr1));
+        Assert.assertFalse(ArrayUtils.isEmpty(arr2), "arr2 can't be empty:"
+            + Arrays.deepToString(arr2));
+        Assert.assertEquals(arr1[0].length, arr2[0].length, "Array rows are not compatible. "
+            + "row of first array: " + Arrays.deepToString(arr1[0])
+            + "row of second array: " + Arrays.deepToString(arr2[0]));
+        return (Object[][]) ArrayUtils.addAll(arr1, arr2);
+    }
+}

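MathUtil is simply renamed to MatrixUtil; the cross-product and append helpers are unchanged, and their Object[][] return type lends itself to building TestNG data-provider tables. A small self-contained sketch, assuming MatrixUtil is on the classpath (the demo class and values below are illustrative, not from the patch):

    import org.apache.falcon.regression.core.util.MatrixUtil;
    import java.util.Arrays;

    public final class MatrixUtilExample {                  // hypothetical demo class
        public static void main(String[] args) {
            // 2 x 2 cross product -> rows [a, 1], [a, 2], [b, 1], [b, 2]
            Object[][] crossed = MatrixUtil.crossProduct(
                new Object[]{"a", "b"}, new Object[]{1, 2});
            // append() concatenates two matrices whose rows have the same width
            Object[][] all = MatrixUtil.append(crossed, new Object[][]{{"c", 3}});
            System.out.println(Arrays.deepToString(all));
            // prints [[a, 1], [a, 2], [b, 1], [b, 2], [c, 3]]
        }
    }
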
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/AuthorizationTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/AuthorizationTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/AuthorizationTest.java
index f4f97b6..2a33ac1 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/AuthorizationTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/AuthorizationTest.java
@@ -437,10 +437,8 @@ public class AuthorizationTest extends BaseTestClass {
     // .org/jira/browse/FALCON-388
     @Test(enabled = false)
     public void u1KillSomeU2RerunAllProcessInstances()
-            throws IOException, JAXBException,
-
-            AuthenticationException, URISyntaxException, OozieClientException,
-            InterruptedException {
+        throws IOException, JAXBException, AuthenticationException, URISyntaxException,
+        OozieClientException, InterruptedException {
         String startTime = TimeUtil
             .getTimeWrtSystemTime(0);
         String endTime = TimeUtil.addMinsToTime(startTime, 5);
@@ -503,8 +501,8 @@ public class AuthorizationTest extends BaseTestClass {
     // .org/jira/browse/FALCON-388
     @Test(enabled = false)
     public void u1SubmitU2UpdateFeed()
-            throws URISyntaxException, IOException, AuthenticationException, JAXBException,
-            InterruptedException {
+        throws URISyntaxException, IOException, AuthenticationException, JAXBException,
+        InterruptedException {
         String feed = bundles[0].getInputFeedFromBundle();
         //submit feed
         bundles[0].submitClusters(prism);
@@ -514,8 +512,8 @@ public class AuthorizationTest extends BaseTestClass {
                 .readEntityName(feed)) && !definition.contains("(feed) not found"),
             "Feed should be already submitted");
         //update feed definition
-        String newFeed = Util.setFeedPathValue(feed, baseHDFSDir + "/randomPath" +
-            MINUTE_DATE_PATTERN);
+        String newFeed = Util.setFeedPathValue(feed, baseHDFSDir + "/randomPath"
+            + MINUTE_DATE_PATTERN);
         //try to update feed by U2
         final ServiceResponse serviceResponse = prism.getFeedHelper().update(feed, newFeed,
             TimeUtil.getTimeWrtSystemTime(0),
@@ -534,8 +532,8 @@ public class AuthorizationTest extends BaseTestClass {
         AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed));
         AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.RUNNING);
         //update feed definition
-        String newFeed = Util.setFeedPathValue(feed, baseHDFSDir + "/randomPath" +
-            MINUTE_DATE_PATTERN);
+        String newFeed = Util.setFeedPathValue(feed, baseHDFSDir + "/randomPath"
+            + MINUTE_DATE_PATTERN);
         //try to update feed by U2
         final ServiceResponse serviceResponse = prism.getFeedHelper().update(feed, newFeed,
             TimeUtil.getTimeWrtSystemTime(0),
@@ -619,8 +617,8 @@ public class AuthorizationTest extends BaseTestClass {
             .getLatestBundleID(cluster, Util.readEntityName(feed), EntityType.FEED);
 
         //update feed definition
-        String newFeed = Util.setFeedPathValue(feed, baseHDFSDir + "/randomPath" +
-            MINUTE_DATE_PATTERN);
+        String newFeed = Util.setFeedPathValue(feed, baseHDFSDir + "/randomPath"
+            + MINUTE_DATE_PATTERN);
 
         //update feed by U1
         serviceResponse = prism.getFeedHelper().update(feed, newFeed,
@@ -657,12 +655,12 @@ public class AuthorizationTest extends BaseTestClass {
         AssertUtil.checkStatus(clusterOC, EntityType.PROCESS, process, Job.Status.RUNNING);
 
         //update feed definition
-        String newFeed = Util.setFeedPathValue(feed,
-        baseHDFSDir + "/randomPath/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}/");
+        String newFeed = Util.setFeedPathValue(feed, baseHDFSDir + "/randomPath"
+            + MINUTE_DATE_PATTERN);
 
         //update feed by U2
         serviceResponse = prism.getFeedHelper().update(feed, newFeed,
-        TimeUtil.getTimeWrtSystemTime(0), MerlinConstants.USER2_NAME);
+            TimeUtil.getTimeWrtSystemTime(0), MerlinConstants.USER2_NAME);
         AssertUtil.assertFailedWithStatus(serviceResponse, HttpStatus.SC_BAD_REQUEST,
                  "Feed scheduled by first user should not be updated by second user");
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELExpFutureAndLatestTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELExpFutureAndLatestTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELExpFutureAndLatestTest.java
new file mode 100644
index 0000000..978d522
--- /dev/null
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELExpFutureAndLatestTest.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression;
+
+import org.apache.falcon.regression.core.bundle.Bundle;
+import org.apache.falcon.entity.v0.EntityType;
+import org.apache.falcon.entity.v0.Frequency.TimeUnit;
+import org.apache.falcon.regression.core.helpers.ColoHelper;
+import org.apache.falcon.regression.core.util.BundleUtil;
+import org.apache.falcon.regression.core.util.HadoopUtil;
+import org.apache.falcon.regression.core.util.InstanceUtil;
+import org.apache.falcon.regression.core.util.OSUtil;
+import org.apache.falcon.regression.core.util.TimeUtil;
+import org.apache.falcon.regression.testHelper.BaseTestClass;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.log4j.Logger;
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.client.OozieClient;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.List;
+
+/**
+ * EL Expression test.
+ */
+@Test(groups = "embedded")
+public class ELExpFutureAndLatestTest extends BaseTestClass {
+
+    private ColoHelper cluster = servers.get(0);
+    private FileSystem clusterFS = serverFS.get(0);
+    private OozieClient clusterOC = serverOC.get(0);
+    private String baseTestDir = baseHDFSDir + "/ELExp_FutureAndLatest";
+    private String aggregateWorkflowDir = baseTestDir + "/aggregator";
+    private static final Logger LOGGER = Logger.getLogger(ELExpFutureAndLatestTest.class);
+
+    @BeforeClass(alwaysRun = true)
+    public void createTestData() throws Exception {
+        LOGGER.info("in @BeforeClass");
+        uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
+
+        Bundle b = BundleUtil.readELBundle();
+        b.generateUniqueBundle();
+        b = new Bundle(b, cluster);
+
+        String startDate = TimeUtil.getTimeWrtSystemTime(-20);
+        String endDate = TimeUtil.getTimeWrtSystemTime(70);
+
+        b.setInputFeedDataPath(baseTestDir + "/ELExp_latest/testData" + MINUTE_DATE_PATTERN);
+        b.setProcessWorkflow(aggregateWorkflowDir);
+
+        List<String> dataDates = TimeUtil.getMinuteDatesOnEitherSide(startDate, endDate, 1);
+
+        HadoopUtil.flattenAndPutDataInFolder(clusterFS, OSUtil.NORMAL_INPUT,
+            b.getFeedDataPathPrefix(), dataDates);
+    }
+
+    @BeforeMethod(alwaysRun = true)
+    public void setUp(Method method) throws Exception {
+        LOGGER.info("test name: " + method.getName());
+        bundles[0] = BundleUtil.readELBundle();
+        bundles[0] = new Bundle(bundles[0], cluster);
+        bundles[0].generateUniqueBundle();
+        bundles[0].setInputFeedDataPath(baseTestDir + "/ELExp_latest/testData"
+            + MINUTE_DATE_PATTERN);
+        bundles[0].setInputFeedPeriodicity(5, TimeUnit.minutes);
+        bundles[0].setInputFeedValidity("2010-04-01T00:00Z", "2015-04-01T00:00Z");
+        String processStart = TimeUtil.getTimeWrtSystemTime(-3);
+        String processEnd = TimeUtil.getTimeWrtSystemTime(8);
+        LOGGER.info("processStart: " + processStart + " processEnd: " + processEnd);
+        bundles[0].setProcessValidity(processStart, processEnd);
+        bundles[0].setProcessPeriodicity(5, TimeUnit.minutes);
+        bundles[0].setProcessWorkflow(aggregateWorkflowDir);
+    }
+
+    @AfterMethod(alwaysRun = true)
+    public void tearDown() {
+        removeBundles();
+    }
+
+    @Test(groups = {"singleCluster"})
+    public void latestTest() throws Exception {
+        bundles[0].setDatasetInstances("latest(-3)", "latest(0)");
+        bundles[0].submitFeedsScheduleProcess(prism);
+        InstanceUtil.waitTillInstanceReachState(clusterOC, bundles[0].getProcessName(), 3,
+            CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
+    }
+
+    @Test(groups = {"singleCluster"})
+    public void futureTest() throws Exception {
+        bundles[0].setDatasetInstances("future(0,10)", "future(3,10)");
+        bundles[0].submitFeedsScheduleProcess(prism);
+        InstanceUtil.waitTillInstanceReachState(clusterOC, bundles[0].getProcessName(), 3,
+            CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
+    }
+
+    @AfterClass(alwaysRun = true)
+    public void tearDownClass() throws IOException {
+        cleanTestDirs();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELExp_FutureAndLatestTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELExp_FutureAndLatestTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELExp_FutureAndLatestTest.java
deleted file mode 100644
index 25e2dfe..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELExp_FutureAndLatestTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression;
-
-import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.entity.v0.EntityType;
-import org.apache.falcon.entity.v0.Frequency.TimeUnit;
-import org.apache.falcon.regression.core.helpers.ColoHelper;
-import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.HadoopUtil;
-import org.apache.falcon.regression.core.util.InstanceUtil;
-import org.apache.falcon.regression.core.util.OSUtil;
-import org.apache.falcon.regression.core.util.TimeUtil;
-import org.apache.falcon.regression.testHelper.BaseTestClass;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.log4j.Logger;
-import org.apache.oozie.client.CoordinatorAction;
-import org.apache.oozie.client.OozieClient;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.io.IOException;
-import java.lang.reflect.Method;
-import java.util.List;
-
-/**
- * EL Expression test.
- */
-@Test(groups = "embedded")
-public class ELExp_FutureAndLatestTest extends BaseTestClass {
-
-    ColoHelper cluster = servers.get(0);
-    FileSystem clusterFS = serverFS.get(0);
-    OozieClient clusterOC = serverOC.get(0);
-    private String baseTestDir = baseHDFSDir + "/ELExp_FutureAndLatest";
-    private String aggregateWorkflowDir = baseTestDir + "/aggregator";
-    private static final Logger logger = Logger.getLogger(ELExp_FutureAndLatestTest.class);
-
-    @BeforeClass(alwaysRun = true)
-    public void createTestData() throws Exception {
-        logger.info("in @BeforeClass");
-        uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
-
-        Bundle b = BundleUtil.readELBundle();
-        b.generateUniqueBundle();
-        b = new Bundle(b, cluster);
-
-        String startDate = TimeUtil.getTimeWrtSystemTime(-20);
-        String endDate = TimeUtil.getTimeWrtSystemTime(70);
-
-        b.setInputFeedDataPath(baseTestDir + "/ELExp_latest/testData" + MINUTE_DATE_PATTERN);
-        b.setProcessWorkflow(aggregateWorkflowDir);
-
-        List<String> dataDates = TimeUtil.getMinuteDatesOnEitherSide(startDate, endDate, 1);
-
-        HadoopUtil.flattenAndPutDataInFolder(clusterFS, OSUtil.NORMAL_INPUT,
-            b.getFeedDataPathPrefix(), dataDates);
-    }
-
-    @BeforeMethod(alwaysRun = true)
-    public void setUp(Method method) throws Exception {
-        logger.info("test name: " + method.getName());
-        bundles[0] = BundleUtil.readELBundle();
-        bundles[0] = new Bundle(bundles[0], cluster);
-        bundles[0].generateUniqueBundle();
-        bundles[0].setInputFeedDataPath(baseTestDir + "/ELExp_latest/testData" +
-            MINUTE_DATE_PATTERN);
-        bundles[0].setInputFeedPeriodicity(5, TimeUnit.minutes);
-        bundles[0].setInputFeedValidity("2010-04-01T00:00Z", "2015-04-01T00:00Z");
-        String processStart = TimeUtil.getTimeWrtSystemTime(-3);
-        String processEnd = TimeUtil.getTimeWrtSystemTime(8);
-        logger.info("processStart: " + processStart + " processEnd: " + processEnd);
-        bundles[0].setProcessValidity(processStart, processEnd);
-        bundles[0].setProcessPeriodicity(5, TimeUnit.minutes);
-        bundles[0].setProcessWorkflow(aggregateWorkflowDir);
-    }
-
-    @AfterMethod(alwaysRun = true)
-    public void tearDown() {
-        removeBundles();
-    }
-
-    @Test(groups = {"singleCluster"})
-    public void latestTest() throws Exception {
-        bundles[0].setDatasetInstances("latest(-3)", "latest(0)");
-        bundles[0].submitFeedsScheduleProcess(prism);
-        InstanceUtil.waitTillInstanceReachState(clusterOC, bundles[0].getProcessName(), 3,
-            CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-    }
-
-    @Test(groups = {"singleCluster"})
-    public void futureTest() throws Exception {
-        bundles[0].setDatasetInstances("future(0,10)", "future(3,10)");
-        bundles[0].submitFeedsScheduleProcess(prism);
-        InstanceUtil.waitTillInstanceReachState(clusterOC, bundles[0].getProcessName(), 3,
-            CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-    }
-
-    @AfterClass(alwaysRun = true)
-    public void tearDownClass() throws IOException {
-        cleanTestDirs();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELValidationsTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELValidationsTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELValidationsTest.java
index 7555c0c..8614a9c 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELValidationsTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ELValidationsTest.java
@@ -58,41 +58,41 @@ import java.util.TimeZone;
 @Test(groups = "embedded")
 public class ELValidationsTest extends BaseTestClass {
 
-    ColoHelper cluster = servers.get(0);
-    private static final Logger logger = Logger.getLogger(ELValidationsTest.class);
-    String aggregateWorkflowDir = baseHDFSDir + "/ELTest/aggregator";
+    private ColoHelper cluster = servers.get(0);
+    private static final Logger LOGGER = Logger.getLogger(ELValidationsTest.class);
+    private String aggregateWorkflowDir = baseHDFSDir + "/ELTest/aggregator";
 
     //test for instance when process time line is subset of feed time
     @BeforeMethod(alwaysRun = true)
     public void testName(Method method) {
-        logger.info("test name: " + method.getName());
+        LOGGER.info("test name: " + method.getName());
     }
 
     @Test(groups = {"0.1", "0.2"})
-    public void startInstBeforeFeedStart_today02() throws Exception {
+    public void startInstBeforeFeedStartToday02() throws Exception {
         String response =
-            testWith(prism, cluster, "2009-02-02T20:00Z", "2011-12-31T00:00Z", "2009-02-02T20:00Z",
+            testWith("2009-02-02T20:00Z", "2011-12-31T00:00Z", "2009-02-02T20:00Z",
                 "2011-12-31T00:00Z", "now(-40,0)", "currentYear(20,30,24,20)", false);
         validate(response);
     }
 
     @Test(groups = {"singleCluster"})
     public void startInstAfterFeedEnd() throws Exception {
-        String response = testWith(prism, cluster, null, null, null, null,
+        String response = testWith(null, null, null, null,
             "currentYear(10,0,22,0)", "now(4,20)", false);
         validate(response);
     }
 
     @Test(groups = {"singleCluster"})
     public void bothInstReverse() throws Exception {
-        String response = testWith(prism, cluster, null, null, null, null,
+        String response = testWith(null, null, null, null,
             "now(0,0)", "now(-100,0)", false);
         validate(response);
     }
 
     @Test(groups = {"singleCluster"}, dataProvider = "EL-DP")
-    public void ExpressionLanguageTest(String startInstance, String endInstance) throws Exception {
-        testWith(prism, cluster, null, null, null, null, startInstance, endInstance, true);
+    public void expressionLanguageTest(String startInstance, String endInstance) throws Exception {
+        testWith(null, null, null, null, startInstance, endInstance, true);
     }
 
     @DataProvider(name = "EL-DP")
@@ -105,39 +105,37 @@ public class ELValidationsTest extends BaseTestClass {
             {"currentYear(0,0,22,0)", "currentYear(1,1,22,0)"},
             {"currentMonth(0,22,0)", "currentMonth(1,22,20)"},
             {"lastMonth(30,22,0)", "lastMonth(60,2,40)"},
-            {"lastYear(12,0,22,0)", "lastYear(13,1,22,0)"}
+            {"lastYear(12,0,22,0)", "lastYear(13,1,22,0)"},
         };
     }
 
     private void validate(String response) {
         if ((response.contains("End instance ") || response.contains("Start instance"))
             && (response.contains("for feed") || response.contains("of feed"))
-            && (response.contains("is before the start of feed") ||
-            response.contains("is after the end of feed"))) {
+            && (response.contains("is before the start of feed")
+            || response.contains("is after the end of feed"))) {
             return;
         }
-        if (response.contains("End instance") &&
-            response.contains("is before the start instance")) {
+        if (response.contains("End instance")
+            && response.contains("is before the start instance")) {
             return;
         }
         Assert.fail("Response is not valid");
     }
 
-    private String testWith(ColoHelper prismHelper, ColoHelper server, String feedStart,
+    private String testWith(String feedStart,
                             String feedEnd, String processStart,
                             String processEnd,
                             String startInstance, String endInstance, boolean isMatch)
         throws IOException, JAXBException, ParseException, URISyntaxException {
-        HadoopUtil.uploadDir(server.getClusterHelper().getHadoopFS(),
+        HadoopUtil.uploadDir(cluster.getClusterHelper().getHadoopFS(),
             aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
         Bundle bundle = BundleUtil.readELBundle();
-        bundle = new Bundle(bundle, server.getPrefix());
+        bundle = new Bundle(bundle, cluster.getPrefix());
         bundle.generateUniqueBundle();
         bundle.setProcessWorkflow(aggregateWorkflowDir);
         if (feedStart != null && feedEnd != null) {
-            bundle.setFeedValidity(feedStart, feedEnd,
-                bundle.getInputFeedNameFromBundle
-                        ());
+            bundle.setFeedValidity(feedStart, feedEnd, bundle.getInputFeedNameFromBundle());
         }
         if (processStart != null && processEnd != null) {
             bundle.setProcessValidity(processStart, processEnd);
@@ -145,18 +143,19 @@ public class ELValidationsTest extends BaseTestClass {
         try {
             bundle.setInvalidData();
             bundle.setDatasetInstances(startInstance, endInstance);
-            String submitResponse = bundle.submitFeedsScheduleProcess(prismHelper);
-            logger.info("processData in try is: " + Util.prettyPrintXml(bundle.getProcessData()));
+            String submitResponse = bundle.submitFeedsScheduleProcess(prism);
+            LOGGER.info("processData in try is: " + Util.prettyPrintXml(bundle.getProcessData()));
             TimeUtil.sleepSeconds(45);
-            if (isMatch)
-                getAndMatchDependencies(server, bundle);
+            if (isMatch) {
+                getAndMatchDependencies(cluster, bundle);
+            }
             return submitResponse;
         } catch (Exception e) {
             e.printStackTrace();
             throw new TestNGException(e);
         } finally {
-            logger.info("deleting entity:");
-            bundle.deleteBundle(prismHelper);
+            LOGGER.info("deleting entity:");
+            bundle.deleteBundle(prism);
         }
     }
 
@@ -173,7 +172,7 @@ public class ELValidationsTest extends BaseTestClass {
             }
             Assert.assertTrue(bundles != null && bundles.size() > 0, "Bundle job not created.");
             String coordID = bundles.get(0);
-            logger.info("coord id: " + coordID);
+            LOGGER.info("coord id: " + coordID);
             List<String> missingDependencies =
                 OozieUtil.getMissingDependencies(prismHelper, coordID);
             for (int i = 0; i < 10 && missingDependencies == null; ++i) {
@@ -182,35 +181,36 @@ public class ELValidationsTest extends BaseTestClass {
             }
             Assert.assertNotNull(missingDependencies, "Missing dependencies not found.");
             for (String dependency : missingDependencies) {
-                logger.info("dependency from job: " + dependency);
+                LOGGER.info("dependency from job: " + dependency);
             }
             Date jobNominalTime = OozieUtil.getNominalTime(prismHelper, coordID);
             Calendar time = Calendar.getInstance();
             time.setTime(jobNominalTime);
-            logger.info("nominalTime:" + jobNominalTime);
+            LOGGER.info("nominalTime:" + jobNominalTime);
             SimpleDateFormat df = new SimpleDateFormat("dd MMM yyyy HH:mm:ss");
-            logger.info(
+            LOGGER.info(
                 "nominalTime in GMT string: " + df.format(jobNominalTime.getTime()) + " GMT");
             TimeZone z = time.getTimeZone();
             int offset = z.getRawOffset();
             int offsetHrs = offset / 1000 / 60 / 60;
             int offsetMins = offset / 1000 / 60 % 60;
 
-            logger.info("offset: " + offsetHrs);
-            logger.info("offset: " + offsetMins);
+            LOGGER.info("offset: " + offsetHrs);
+            LOGGER.info("offset: " + offsetMins);
 
             time.add(Calendar.HOUR_OF_DAY, (-offsetHrs));
             time.add(Calendar.MINUTE, (-offsetMins));
 
-            logger.info("GMT Time: " + time.getTime());
+            LOGGER.info("GMT Time: " + time.getTime());
 
             int frequency = bundle.getInitialDatasetFrequency();
             List<String> qaDependencyList =
                 getQADepedencyList(time, bundle.getStartInstanceProcess(time),
                     bundle.getEndInstanceProcess(time),
                     frequency, bundle);
-            for (String qaDependency : qaDependencyList)
-                logger.info("qa qaDependencyList: " + qaDependency);
+            for (String qaDependency : qaDependencyList) {
+                LOGGER.info("qa qaDependencyList: " + qaDependency);
+            }
 
             Assert.assertTrue(matchDependencies(missingDependencies, qaDependencyList));
         } catch (Exception e) {
@@ -219,12 +219,14 @@ public class ELValidationsTest extends BaseTestClass {
         }
     }
 
-    private boolean matchDependencies(List<String> fromJob, List<String> QAList) {
-        if (fromJob.size() != QAList.size())
+    private boolean matchDependencies(List<String> fromJob, List<String> qaList) {
+        if (fromJob.size() != qaList.size()) {
             return false;
+        }
         for (int index = 0; index < fromJob.size(); index++) {
-            if (!fromJob.get(index).contains(QAList.get(index)))
+            if (!fromJob.get(index).contains(qaList.get(index))) {
                 return false;
+            }
         }
         return true;
     }
@@ -232,8 +234,8 @@ public class ELValidationsTest extends BaseTestClass {
     private List<String> getQADepedencyList(Calendar nominalTime, Date startRef,
                                             Date endRef, int frequency,
                                             Bundle bundle) {
-        logger.info("start ref:" + startRef);
-        logger.info("end ref:" + endRef);
+        LOGGER.info("start ref:" + startRef);
+        LOGGER.info("end ref:" + endRef);
         Calendar initialTime = Calendar.getInstance();
         initialTime.setTime(startRef);
         Calendar finalTime = Calendar.getInstance();
@@ -243,11 +245,11 @@ public class ELValidationsTest extends BaseTestClass {
 
         TimeZone tz = TimeZone.getTimeZone("GMT");
         nominalTime.setTimeZone(tz);
-        logger.info("nominalTime: " + initialTime.getTime());
-        logger.info("finalTime: " + finalTime.getTime());
+        LOGGER.info("nominalTime: " + initialTime.getTime());
+        LOGGER.info("finalTime: " + finalTime.getTime());
         List<String> returnList = new ArrayList<String>();
         while (!initialTime.getTime().equals(finalTime.getTime())) {
-            logger.info("initialTime: " + initialTime.getTime());
+            LOGGER.info("initialTime: " + initialTime.getTime());
             returnList.add(getPath(path, initialTime));
             initialTime.add(Calendar.MINUTE, frequency);
         }
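
For readers following the expected-dependency check above: the list is built by stepping a GMT calendar from the start reference to the end reference in fixed-frequency increments, one path per feed instance. A minimal, self-contained sketch of that idea follows; the class name, the date pattern, and the before()-based loop condition are illustrative assumptions, not the test's own helper.

    import java.text.SimpleDateFormat;
    import java.util.ArrayList;
    import java.util.Calendar;
    import java.util.Date;
    import java.util.List;
    import java.util.TimeZone;

    public final class DependencyListSketch {

        private DependencyListSketch() {
        }

        // Builds expected instance paths between two reference dates, one per frequency step.
        public static List<String> expectedInstances(Date startRef, Date endRef, int frequencyMins) {
            SimpleDateFormat fmt = new SimpleDateFormat("yyyy/MM/dd/HH/mm");
            fmt.setTimeZone(TimeZone.getTimeZone("GMT"));             // render instance paths in GMT
            Calendar current = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
            current.setTime(startRef);
            List<String> instances = new ArrayList<String>();
            while (current.getTime().before(endRef)) {                // step until the end reference
                instances.add(fmt.format(current.getTime()));
                current.add(Calendar.MINUTE, frequencyMins);          // advance one feed instance
            }
            return instances;
        }
    }

The dependencies reported by the job can then be compared against such a list with a per-index containment check, which is what matchDependencies above does.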

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/EmbeddedPigScriptTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/EmbeddedPigScriptTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/EmbeddedPigScriptTest.java
index a5e7ff8..1317ae4 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/EmbeddedPigScriptTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/EmbeddedPigScriptTest.java
@@ -59,21 +59,21 @@ import java.util.List;
 @Test(groups = "embedded")
 public class EmbeddedPigScriptTest extends BaseTestClass {
 
-    ColoHelper cluster = servers.get(0);
-    FileSystem clusterFS = serverFS.get(0);
-    OozieClient clusterOC = serverOC.get(0);
-    String pigTestDir = baseHDFSDir + "/EmbeddedPigScriptTest";
-    String pigScriptDir = pigTestDir + "/EmbeddedPigScriptTest/pig";
-    String pigScriptLocation = pigScriptDir + "/id.pig";
-    String inputPath = pigTestDir + "/input" + MINUTE_DATE_PATTERN;
-    private static final Logger logger = Logger.getLogger(EmbeddedPigScriptTest.class);
+    private ColoHelper cluster = servers.get(0);
+    private FileSystem clusterFS = serverFS.get(0);
+    private OozieClient clusterOC = serverOC.get(0);
+    private String pigTestDir = baseHDFSDir + "/EmbeddedPigScriptTest";
+    private String pigScriptDir = pigTestDir + "/EmbeddedPigScriptTest/pig";
+    private String pigScriptLocation = pigScriptDir + "/id.pig";
+    private String inputPath = pigTestDir + "/input" + MINUTE_DATE_PATTERN;
+    private static final Logger LOGGER = Logger.getLogger(EmbeddedPigScriptTest.class);
     private static final double TIMEOUT = 15;
-    String processName;
-    String process;
+    private String processName;
+    private String process;
 
     @BeforeClass(alwaysRun = true)
     public void createTestData() throws Exception {
-        logger.info("in @BeforeClass");
+        LOGGER.info("in @BeforeClass");
 
         //copy pig script
         HadoopUtil.uploadDir(clusterFS, pigScriptDir, OSUtil.RESOURCES + "pig");
@@ -90,7 +90,7 @@ public class EmbeddedPigScriptTest extends BaseTestClass {
 
     @BeforeMethod(alwaysRun = true)
     public void setUp(Method method) throws Exception {
-        logger.info("test name: " + method.getName());
+        LOGGER.info("test name: " + method.getName());
         bundles[0] = BundleUtil.readELBundle();
         bundles[0] = new Bundle(bundles[0], cluster);
         bundles[0].generateUniqueBundle();
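
The pattern the commit applies throughout these test classes is visible in this hunk: instance fields become private, and the log4j logger becomes a private static final constant written in upper case and referenced as LOGGER. A minimal sketch of that convention, assuming log4j's Logger as the tests above appear to use (class and field names are placeholders):

    import org.apache.log4j.Logger;

    public class CheckstyleCompliantTest {

        private static final Logger LOGGER = Logger.getLogger(CheckstyleCompliantTest.class);

        private String processName;                  // package-private fields become private

        public void logTestName(String testName) {
            LOGGER.info("test name: " + testName);   // constants are referenced in upper case
        }
    }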

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java
index 637f8a2..4359b16 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java
@@ -7,14 +7,13 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *   http://www.apache.org/licenses/LICENSE-2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 
 package org.apache.falcon.regression;
@@ -31,7 +30,7 @@ import org.apache.falcon.regression.core.util.AssertUtil;
 import org.apache.falcon.regression.core.util.BundleUtil;
 import org.apache.falcon.regression.core.util.HadoopUtil;
 import org.apache.falcon.regression.core.util.InstanceUtil;
-import org.apache.falcon.regression.core.util.MathUtil;
+import org.apache.falcon.regression.core.util.MatrixUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
@@ -61,10 +60,13 @@ import java.io.IOException;
 import java.lang.reflect.Method;
 import java.util.List;
 
+/**
+ * Tests for operations with external file systems.
+ */
 @Test(groups = "embedded")
 public class ExternalFSTest extends BaseTestClass{
 
-    public static final String wasbEndPoint =
+    public static final String WASB_END_POINT =
             "wasb://" + MerlinConstants.WASB_CONTAINER + "@" + MerlinConstants.WASB_ACCOUNT;
     private ColoHelper cluster = servers.get(0);
     private ColoHelper cluster2 = servers.get(1);
@@ -83,7 +85,7 @@ public class ExternalFSTest extends BaseTestClass{
     @BeforeClass
     public void setUpClass() throws IOException {
         Configuration conf = new Configuration();
-        conf.set("fs.defaultFS", wasbEndPoint);
+        conf.set("fs.defaultFS", WASB_END_POINT);
         conf.set("fs.azure.account.key." + MerlinConstants.WASB_ACCOUNT,
                 MerlinConstants.WASB_SECRET);
         wasbFS = FileSystem.get(conf);
@@ -138,8 +140,8 @@ public class ExternalFSTest extends BaseTestClass{
         String startTime = TimeUtil.getTimeWrtSystemTime(0);
         String endTime = TimeUtil.addMinsToTime(startTime, 5);
         LOGGER.info("Time range between : " + startTime + " and " + endTime);
-        String datePattern = StringUtils .join(new String[]{
-                "${YEAR}", "${MONTH}", "${DAY}", "${HOUR}", "${MINUTE}"}, separator);
+        String datePattern = StringUtils .join(
+            new String[]{"${YEAR}", "${MONTH}", "${DAY}", "${HOUR}", "${MINUTE}"}, separator);
 
         //configure feed
         String feed = bundles[0].getDataSets().get(0);
@@ -207,7 +209,7 @@ public class ExternalFSTest extends BaseTestClass{
     @DataProvider
     public Object[][] getData() {
         //"-" for single directory, "/" - for dir with subdirs };
-        return MathUtil.crossProduct(new FileSystem[]{wasbFS},
+        return MatrixUtil.crossProduct(new FileSystem[]{wasbFS},
             new String[]{"/", "-"},
             new Boolean[]{true, false});
     }
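
The @DataProvider above relies on MatrixUtil.crossProduct (the renamed MathUtil) to turn independent parameter arrays into one Object[] row per combination, so the test method runs once for every file system, separator, and flag. A generic cross product of that shape can be sketched roughly as follows; this is an illustration under assumed names, not the project's MatrixUtil.

    import java.util.ArrayList;
    import java.util.List;

    public final class CrossProductSketch {

        private CrossProductSketch() {
        }

        // Expands the rows dimension by dimension: each existing row is copied once per value.
        public static Object[][] crossProduct(Object[]... dimensions) {
            List<Object[]> rows = new ArrayList<Object[]>();
            rows.add(new Object[0]);                                  // seed with one empty row
            for (Object[] dimension : dimensions) {
                List<Object[]> expanded = new ArrayList<Object[]>();
                for (Object[] row : rows) {
                    for (Object value : dimension) {
                        Object[] next = new Object[row.length + 1];
                        System.arraycopy(row, 0, next, 0, row.length);
                        next[row.length] = value;                     // append this dimension's value
                        expanded.add(next);
                    }
                }
                rows = expanded;
            }
            return rows.toArray(new Object[rows.size()][]);
        }
    }

With the arrays used in getData() (one file system, two separators, two booleans), such a cross product yields four rows, each of the form {wasbFS, separator, flag}.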

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java
index 229dccd..c2ae1f9 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java
@@ -50,18 +50,18 @@ import java.lang.reflect.Method;
 @Test(groups = "distributed")
 public class FeedClusterUpdateTest extends BaseTestClass {
 
-    String baseTestDir = baseHDFSDir + "/FeedClusterUpdateTest";
-    String aggregateWorkflowDir = baseTestDir + "/aggregator";
-    ColoHelper cluster1 = servers.get(0);
-    ColoHelper cluster2 = servers.get(1);
-    ColoHelper cluster3 = servers.get(2);
-    FileSystem cluster2FS = serverFS.get(1);
-    FileSystem cluster3FS = serverFS.get(2);
+    private String baseTestDir = baseHDFSDir + "/FeedClusterUpdateTest";
+    private String aggregateWorkflowDir = baseTestDir + "/aggregator";
+    private ColoHelper cluster1 = servers.get(0);
+    private ColoHelper cluster2 = servers.get(1);
+    private ColoHelper cluster3 = servers.get(2);
+    private FileSystem cluster2FS = serverFS.get(1);
+    private FileSystem cluster3FS = serverFS.get(2);
     private String feed;
-    String startTime;
-    String feedOriginalSubmit;
-    String feedUpdated;
-    private static final Logger logger = Logger.getLogger(FeedClusterUpdateTest.class);
+    private String startTime;
+    private String feedOriginalSubmit;
+    private String feedUpdated;
+    private static final Logger LOGGER = Logger.getLogger(FeedClusterUpdateTest.class);
 
 
     @BeforeClass(alwaysRun = true)
@@ -87,7 +87,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
 
     @BeforeMethod(alwaysRun = true)
     public void setup(Method method) throws Exception {
-        logger.info("test name: " + method.getName());
+        LOGGER.info("test name: " + method.getName());
 
         Bundle bundle = BundleUtil.readELBundle();
         for (int i = 0; i < 3; i++) {
@@ -123,7 +123,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             XmlUtil.createRetention("hours(10)", ActionType.DELETE),
             Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
 
-        logger.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
+        LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
         ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit);
         TimeUtil.sleepSeconds(10);
@@ -140,16 +140,14 @@ public class FeedClusterUpdateTest extends BaseTestClass {
                 "REPLICATION"), 0);
         Assert.assertEquals(InstanceUtil
             .checkIfFeedCoordExist(cluster2.getFeedHelper(),
-                Util.readEntityName(feedOriginalSubmit), "RETENTION" +
-                    ""), 1);
+                Util.readEntityName(feedOriginalSubmit), "RETENTION"), 1);
         Assert.assertEquals(InstanceUtil
             .checkIfFeedCoordExist(cluster3.getFeedHelper(),
                 Util.readEntityName(feedOriginalSubmit),
                 "REPLICATION"), 0);
         Assert.assertEquals(InstanceUtil
                 .checkIfFeedCoordExist(cluster3.getFeedHelper(),
-                    Util.readEntityName(feedOriginalSubmit), "RETENTION"),
-            0);
+                    Util.readEntityName(feedOriginalSubmit), "RETENTION"), 0);
         Assert.assertEquals(InstanceUtil
             .checkIfFeedCoordExist(cluster1.getFeedHelper(),
                 Util.readEntityName(feedOriginalSubmit),
@@ -219,7 +217,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
             "UK/${cluster.colo}");
 
-        logger.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
+        LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
         ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit);
         TimeUtil.sleepSeconds(10);
@@ -274,7 +272,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
             "UK/${cluster.colo}");
 
-        logger.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
+        LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
 
         response = prism.getFeedHelper().update(feedUpdated, feedUpdated);
         TimeUtil.sleepSeconds(20);
@@ -312,7 +310,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
                 Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
                 null);
 
-        logger.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
+        LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
         ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit);
         TimeUtil.sleepSeconds(10);
@@ -367,7 +365,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
             "UK/${cluster.colo}");
 
-        logger.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
+        LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
 
         response = prism.getFeedHelper().update(feedUpdated, feedUpdated);
         TimeUtil.sleepSeconds(20);
@@ -405,7 +403,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
                 Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
                 null);
 
-        logger.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
+        LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
         ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit);
         TimeUtil.sleepSeconds(10);
@@ -459,7 +457,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             XmlUtil.createRetention("hours(10)", ActionType.DELETE),
             Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.TARGET, null);
 
-        logger.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
+        LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
 
         response = prism.getFeedHelper().update(feedUpdated, feedUpdated);
         TimeUtil.sleepSeconds(20);
@@ -497,7 +495,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
                 Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
                 null);
 
-        logger.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
+        LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
         ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit);
         TimeUtil.sleepSeconds(10);
@@ -552,7 +550,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
             "UK/${cluster.colo}");
 
-        logger.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
+        LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
 
         response = prism.getFeedHelper().update(feedUpdated, feedUpdated);
         TimeUtil.sleepSeconds(20);
@@ -601,7 +599,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
             "UK/${cluster.colo}");
 
-        logger.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
+        LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
         ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit);
         TimeUtil.sleepSeconds(10);
@@ -619,8 +617,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
                 "REPLICATION"), 0);
         Assert.assertEquals(InstanceUtil
             .checkIfFeedCoordExist(cluster2.getFeedHelper(),
-                Util.readEntityName(feedOriginalSubmit), "RETENTION" +
-                    ""), 1);
+                Util.readEntityName(feedOriginalSubmit), "RETENTION"), 1);
         Assert.assertEquals(InstanceUtil
             .checkIfFeedCoordExist(cluster3.getFeedHelper(),
                 Util.readEntityName(feedOriginalSubmit),
@@ -730,7 +727,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             ClusterType.SOURCE,
             "UK/${cluster.colo}");
 
-        logger.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
+        LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
         ServiceResponse response = prism.getFeedHelper().submitEntity(feedOriginalSubmit);
         TimeUtil.sleepSeconds(10);
@@ -781,7 +778,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             ClusterType.SOURCE,
             "UK/${cluster.colo}");
 
-        logger.info("Feed: " + Util.prettyPrintXml(feedUpdated));
+        LOGGER.info("Feed: " + Util.prettyPrintXml(feedUpdated));
 
         response = prism.getFeedHelper().update(feedUpdated, feedUpdated);
         TimeUtil.sleepSeconds(20);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
index 15659d2..650d4c3 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
@@ -54,11 +54,11 @@ public class FeedInstanceStatusTest extends BaseTestClass {
     private String feedInputPath = baseTestDir + MINUTE_DATE_PATTERN;
     private String aggregateWorkflowDir = baseTestDir + "/aggregator";
 
-    ColoHelper cluster2 = servers.get(1);
-    ColoHelper cluster3 = servers.get(2);
-    FileSystem cluster2FS = serverFS.get(1);
-    FileSystem cluster3FS = serverFS.get(2);
-    private static final Logger logger = Logger.getLogger(FeedInstanceStatusTest.class);
+    private ColoHelper cluster2 = servers.get(1);
+    private ColoHelper cluster3 = servers.get(2);
+    private FileSystem cluster2FS = serverFS.get(1);
+    private FileSystem cluster3FS = serverFS.get(2);
+    private static final Logger LOGGER = Logger.getLogger(FeedInstanceStatusTest.class);
 
     @BeforeClass(alwaysRun = true)
     public void uploadWorkflow() throws Exception {
@@ -67,7 +67,7 @@ public class FeedInstanceStatusTest extends BaseTestClass {
 
     @BeforeMethod(alwaysRun = true)
     public void testName(Method method) throws Exception {
-        logger.info("test name: " + method.getName());
+        LOGGER.info("test name: " + method.getName());
         Bundle bundle = BundleUtil.readELBundle();
         for (int i = 0; i < 3; i++) {
             bundles[i] = new Bundle(bundle, servers.get(i));
@@ -87,18 +87,18 @@ public class FeedInstanceStatusTest extends BaseTestClass {
      * -submit, -resume, -kill, -rerun.
      */
     @Test(groups = {"multiCluster"})
-    public void feedInstanceStatus_running() throws Exception {
+    public void feedInstanceStatusRunning() throws Exception {
         bundles[0].setInputFeedDataPath(feedInputPath);
 
-        logger.info("cluster bundle1: " + Util.prettyPrintXml(bundles[0].getClusters().get(0)));
+        LOGGER.info("cluster bundle1: " + Util.prettyPrintXml(bundles[0].getClusters().get(0)));
         AssertUtil.assertSucceeded(prism.getClusterHelper()
             .submitEntity(bundles[0].getClusters().get(0)));
 
-        logger.info("cluster bundle2: " + Util.prettyPrintXml(bundles[1].getClusters().get(0)));
+        LOGGER.info("cluster bundle2: " + Util.prettyPrintXml(bundles[1].getClusters().get(0)));
         AssertUtil.assertSucceeded(prism.getClusterHelper()
             .submitEntity(bundles[1].getClusters().get(0)));
 
-        logger.info("cluster bundle3: " + Util.prettyPrintXml(bundles[2].getClusters().get(0)));
+        LOGGER.info("cluster bundle3: " + Util.prettyPrintXml(bundles[2].getClusters().get(0)));
         AssertUtil.assertSucceeded(prism.getClusterHelper()
             .submitEntity(bundles[2].getClusters().get(0)));
 
@@ -127,7 +127,7 @@ public class FeedInstanceStatusTest extends BaseTestClass {
             Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
             "UK/${cluster.colo}");
 
-        logger.info("feed: " + Util.prettyPrintXml(feed));
+        LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
         //status before submit
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
@@ -147,15 +147,12 @@ public class FeedInstanceStatusTest extends BaseTestClass {
         // single instance at -30
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
             "?start=" + TimeUtil.addMinsToTime(startTime, 20));
-
         //single at -10
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
             "?start=" + TimeUtil.addMinsToTime(startTime, 40));
-
         //single at 10
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
             "?start=" + TimeUtil.addMinsToTime(startTime, 40));
-
         //single at 30
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
             "?start=" + TimeUtil.addMinsToTime(startTime, 40));
@@ -177,20 +174,17 @@ public class FeedInstanceStatusTest extends BaseTestClass {
         // single instance at -30
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
             "?start=" + TimeUtil.addMinsToTime(startTime, 20));
-
         //single at -10
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
             "?start=" + TimeUtil.addMinsToTime(startTime, 40));
-
         //single at 10
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
             "?start=" + TimeUtil.addMinsToTime(startTime, 40));
-
         //single at 30
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
             "?start=" + TimeUtil.addMinsToTime(startTime, 40));
 
-        logger.info("Wait till feed goes into running ");
+        LOGGER.info("Wait till feed goes into running ");
 
         //suspend instances -10
         prism.getFeedHelper().getProcessInstanceSuspend(feedName,
@@ -240,7 +234,7 @@ public class FeedInstanceStatusTest extends BaseTestClass {
         InstancesResult responseInstance = prism.getFeedHelper().getProcessInstanceStatus(feedName,
             "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(startTime, 110));
 
-        logger.info(responseInstance.getMessage());
+        LOGGER.info(responseInstance.getMessage());
     }
 
     @AfterClass(alwaysRun = true)

