falcon-commits mailing list archives

From rostafiyc...@apache.org
Subject [6/8] incubator-falcon git commit: FALCON-698 Fix checkstyle bugs in test files in falcon-regression. Contributed by Ruslan Ostafiychuk and Raghav Kumar Gautam
Date Fri, 28 Nov 2014 13:10:50 GMT
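All of the hunks below apply the same handful of checkstyle conventions: test fields lose their default visibility and become private, the `logger` constant is renamed to `LOGGER` to satisfy the constant-naming rule, commas gain a trailing space, long signatures and wrapped arguments are re-indented, and class and method Javadoc comments end with a period. As an orientation sketch only (hypothetical code, not part of this commit), a class written in the target style looks roughly like this:

    import org.apache.log4j.Logger;

    /**
     * Illustrates the conventions the hunks below enforce.
     */
    public class CheckstyleExample {
        // Constants are static final and UPPER_CASE; plain fields are private.
        private static final Logger LOGGER = Logger.getLogger(CheckstyleExample.class);
        private String workDir = "/tmp/CheckstyleExample";

        public void run(int count) {
            // A space follows keywords and commas: "for (", "if (", "f(a, b)".
            for (int i = 0; i < count; i++) {
                LOGGER.info("iteration: " + i);
            }
            if (count > 0) {
                // Wrapped arguments are indented per the continuation-indent rule.
                describe(workDir, count,
                    "checkstyle");
            }
        }

        private void describe(String dir, int count, String tag) {
            LOGGER.info(dir + ", " + count + ", " + tag);
        }
    }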
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ProcessLateRerunTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ProcessLateRerunTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ProcessLateRerunTest.java
index 3f7258e..488cf74 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ProcessLateRerunTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ProcessLateRerunTest.java
@@ -40,17 +40,17 @@ import java.lang.reflect.Method;
 import java.util.*;
 
 /**
- * Process late data test
+ * Process late data test.
  */
 
 public class ProcessLateRerunTest extends BaseTestClass {
 
 
-    ColoHelper cluster1 = servers.get(0);
-    OozieClient cluster1OC = serverOC.get(0);
-    FileSystem cluster1FS = serverFS.get(0);
-    String aggregateWorkflowDir = baseHDFSDir + "/ProcessLateRerunTest/aggregator";
-    private static final Logger logger = Logger.getLogger(ProcessLateRerunTest.class);
+    private ColoHelper cluster1 = servers.get(0);
+    private OozieClient cluster1OC = serverOC.get(0);
+    private FileSystem cluster1FS = serverFS.get(0);
+    private String aggregateWorkflowDir = baseHDFSDir + "/ProcessLateRerunTest/aggregator";
+    private static final Logger LOGGER = Logger.getLogger(ProcessLateRerunTest.class);
 
     @BeforeClass(alwaysRun = true)
     public void uploadWorkflow() throws Exception {
@@ -59,7 +59,7 @@ public class ProcessLateRerunTest extends BaseTestClass {
 
     @BeforeMethod(alwaysRun = true)
     public void setUp(Method method) throws Exception {
-        logger.info("test name: " + method.getName());
+        LOGGER.info("test name: " + method.getName());
         Bundle bundle = BundleUtil.readLateDataBundle();
         for (int i = 0; i < 1; i++) {
             bundles[i] = new Bundle(bundle, servers.get(i));
@@ -84,7 +84,7 @@ public class ProcessLateRerunTest extends BaseTestClass {
     public void testProcessLateRerunOnEmptyFolder() throws Exception {
         String startTime = TimeUtil.getTimeWrtSystemTime(0);
         String endTime = TimeUtil.addMinsToTime(startTime, 30);
-        logger.info("Time range between : " + startTime + " and " + endTime);
+        LOGGER.info("Time range between : " + startTime + " and " + endTime);
         bundles[0].setProcessValidity(startTime, endTime);
         bundles[0].setProcessPeriodicity(10, Frequency.TimeUnit.minutes);
         bundles[0].setOutputFeedPeriodicity(10, Frequency.TimeUnit.minutes);
@@ -99,20 +99,20 @@ public class ProcessLateRerunTest extends BaseTestClass {
         TimeUtil.sleepSeconds(10);
         InstanceUtil.waitTillInstancesAreCreated(cluster1, bundles[0].getProcessData(), 0);
 
-        getAndCreateDependencies(cluster1,bundles[0],cluster1OC,cluster1FS,false,1);
+        getAndCreateDependencies(cluster1, bundles[0], cluster1OC, cluster1FS, false, 1);
 
         int sleepMins = 6;
-        for(int i=0; i < sleepMins ; i++) {
-            logger.info("Waiting...");
+        for(int i=0; i < sleepMins; i++) {
+            LOGGER.info("Waiting...");
             TimeUtil.sleepSeconds(60);
         }
 
         InstanceUtil.waitTillInstanceReachState(cluster1OC,
-                Util.getProcessName(bundles[0].getProcessData()), 1,
-                CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
+            Util.getProcessName(bundles[0].getProcessData()), 1,
+            CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
 
         List<String> bundleList =  OozieUtil.getBundles(cluster1.getFeedHelper().getOozieClient(),
-                Util.getProcessName(bundles[0].getProcessData()), EntityType.PROCESS);
+            Util.getProcessName(bundles[0].getProcessData()), EntityType.PROCESS);
         String bundleID = bundleList.get(0);
 
         OozieUtil.validateRetryAttempts(cluster1, bundleID, EntityType.PROCESS, 1);
@@ -128,7 +128,7 @@ public class ProcessLateRerunTest extends BaseTestClass {
     public void testProcessLateRerunWithData() throws Exception {
         String startTime = TimeUtil.getTimeWrtSystemTime(0);
         String endTime = TimeUtil.addMinsToTime(startTime, 30);
-        logger.info("Time range between : " + startTime + " and " + endTime);
+        LOGGER.info("Time range between : " + startTime + " and " + endTime);
         bundles[0].setProcessValidity(startTime, endTime);
         bundles[0].setProcessPeriodicity(5, Frequency.TimeUnit.minutes);
         bundles[0].setOutputFeedPeriodicity(5, Frequency.TimeUnit.minutes);
@@ -137,26 +137,26 @@ public class ProcessLateRerunTest extends BaseTestClass {
         ProcessMerlin processMerlin = new ProcessMerlin(bundles[0].getProcessData());
         String inputName = processMerlin.getInputs().getInputs().get(0).getName();
 
-        bundles[0].setProcessLatePolicy(getLateData(4,"minutes","periodic",inputName,aggregateWorkflowDir));
+        bundles[0].setProcessLatePolicy(getLateData(4, "minutes", "periodic", inputName, aggregateWorkflowDir));
         bundles[0].submitAndScheduleProcess();
         AssertUtil.checkStatus(cluster1OC, EntityType.PROCESS, bundles[0], Job.Status.RUNNING);
         TimeUtil.sleepSeconds(10);
         InstanceUtil.waitTillInstancesAreCreated(cluster1, bundles[0].getProcessData(), 0);
 
-        getAndCreateDependencies(cluster1,bundles[0],cluster1OC,cluster1FS,true,1);
+        getAndCreateDependencies(cluster1, bundles[0], cluster1OC, cluster1FS, true, 1);
 
         int sleepMins = 6;
-        for(int i=0; i < sleepMins ; i++) {
-            logger.info("Waiting...");
+        for(int i=0; i < sleepMins; i++) {
+            LOGGER.info("Waiting...");
             TimeUtil.sleepSeconds(60);
         }
 
         InstanceUtil.waitTillInstanceReachState(cluster1OC,
-                Util.getProcessName(bundles[0].getProcessData()), 1,
-                CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
+            Util.getProcessName(bundles[0].getProcessData()), 1,
+            CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
 
         List<String> bundleList =  OozieUtil.getBundles(cluster1.getFeedHelper().getOozieClient(),
-                Util.getProcessName(bundles[0].getProcessData()), EntityType.PROCESS);
+            Util.getProcessName(bundles[0].getProcessData()), EntityType.PROCESS);
         String bundleID = bundleList.get(0);
 
         OozieUtil.validateRetryAttempts(cluster1, bundleID, EntityType.PROCESS, 1);
@@ -173,14 +173,14 @@ public class ProcessLateRerunTest extends BaseTestClass {
         String endTime = TimeUtil.addMinsToTime(startTime, 30);
         String startInstance = "now(0,-5)";
         String endInstance = "now(0,0)";
-        logger.info("Time range between : " + startTime + " and " + endTime);
+        LOGGER.info("Time range between : " + startTime + " and " + endTime);
         bundles[0].setProcessValidity(startTime, endTime);
         bundles[0].setProcessPeriodicity(10, Frequency.TimeUnit.minutes);
         bundles[0].setOutputFeedPeriodicity(10, Frequency.TimeUnit.minutes);
         ProcessMerlin processMerlin = new ProcessMerlin(bundles[0].getProcessData());
         String inputName = processMerlin.getInputs().getInputs().get(0).getName();
 
-        bundles[0].setProcessLatePolicy(getLateData(4,"minutes","periodic",inputName,aggregateWorkflowDir));
+        bundles[0].setProcessLatePolicy(getLateData(4, "minutes", "periodic", inputName, aggregateWorkflowDir));
         bundles[0].setProcessConcurrency(2);
 
         // Increase the window of input for process
@@ -191,20 +191,20 @@ public class ProcessLateRerunTest extends BaseTestClass {
         TimeUtil.sleepSeconds(10);
         InstanceUtil.waitTillInstancesAreCreated(cluster1, bundles[0].getProcessData(), 0);
 
-        getAndCreateDependencies(cluster1,bundles[0],cluster1OC,cluster1FS,false,3);
+        getAndCreateDependencies(cluster1, bundles[0], cluster1OC, cluster1FS, false, 3);
 
         int sleepMins = 6;
-        for(int i=0; i < sleepMins ; i++) {
-            logger.info("Waiting...");
+        for(int i=0; i < sleepMins; i++) {
+            LOGGER.info("Waiting...");
             TimeUtil.sleepSeconds(60);
         }
 
         InstanceUtil.waitTillInstanceReachState(cluster1OC,
-                Util.getProcessName(bundles[0].getProcessData()), 1,
-                CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
+            Util.getProcessName(bundles[0].getProcessData()), 1,
+            CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
 
         List<String> bundleList =  OozieUtil.getBundles(cluster1.getFeedHelper().getOozieClient(),
-                Util.getProcessName(bundles[0].getProcessData()), EntityType.PROCESS);
+            Util.getProcessName(bundles[0].getProcessData()), EntityType.PROCESS);
         String bundleID = bundleList.get(0);
 
         OozieUtil.validateRetryAttempts(cluster1, bundleID, EntityType.PROCESS, 1);
@@ -221,7 +221,7 @@ public class ProcessLateRerunTest extends BaseTestClass {
         String endTime = TimeUtil.addMinsToTime(startTime, 30);
         String startInstance = "now(0,-5)";
         String endInstance = "now(0,0)";
-        logger.info("Time range between : " + startTime + " and " + endTime);
+        LOGGER.info("Time range between : " + startTime + " and " + endTime);
         bundles[0].setProcessValidity(startTime, endTime);
         bundles[0].setProcessPeriodicity(10, Frequency.TimeUnit.minutes);
         bundles[0].setOutputFeedPeriodicity(10, Frequency.TimeUnit.minutes);
@@ -250,20 +250,20 @@ public class ProcessLateRerunTest extends BaseTestClass {
         TimeUtil.sleepSeconds(10);
         InstanceUtil.waitTillInstancesAreCreated(cluster1, bundles[0].getProcessData(), 0);
 
-        getAndCreateDependencies(cluster1,bundles[0],cluster1OC,cluster1FS,false,7);
+        getAndCreateDependencies(cluster1, bundles[0], cluster1OC, cluster1FS, false, 7);
 
         int sleepMins = 6;
-        for(int i=0; i < sleepMins ; i++) {
-            logger.info("Waiting...");
+        for(int i=0; i < sleepMins; i++) {
+            LOGGER.info("Waiting...");
             TimeUtil.sleepSeconds(60);
         }
 
         InstanceUtil.waitTillInstanceReachState(cluster1OC,
-                Util.getProcessName(bundles[0].getProcessData()), 1,
-                CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
+            Util.getProcessName(bundles[0].getProcessData()), 1,
+            CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
 
         List<String> bundleList =  OozieUtil.getBundles(cluster1.getFeedHelper().getOozieClient(),
-                Util.getProcessName(bundles[0].getProcessData()), EntityType.PROCESS);
+            Util.getProcessName(bundles[0].getProcessData()), EntityType.PROCESS);
         String bundleID = bundleList.get(0);
 
         OozieUtil.validateRetryAttempts(cluster1, bundleID, EntityType.PROCESS, 0);
@@ -275,13 +275,14 @@ public class ProcessLateRerunTest extends BaseTestClass {
     dataFolder - denotes the folder where you want to upload data for late rerun
      */
 
-    private void getAndCreateDependencies(ColoHelper prismHelper, Bundle bundle, OozieClient oozieClient, FileSystem clusterFS, boolean dataFlag, int dataFolder) {
-
+    private void getAndCreateDependencies(ColoHelper prismHelper, Bundle bundle,
+                                          OozieClient oozieClient, FileSystem clusterFS,
+                                          boolean dataFlag, int dataFolder) {
         try {
             List<String> bundles = null;
             for (int i = 0; i < 10; ++i) {
                 bundles = OozieUtil.getBundles(prismHelper.getFeedHelper().getOozieClient(),
-                        Util.getProcessName(bundle.getProcessData()), EntityType.PROCESS);
+                    Util.getProcessName(bundle.getProcessData()), EntityType.PROCESS);
                 if (bundles.size() > 0) {
                     break;
                 }
@@ -289,7 +290,7 @@ public class ProcessLateRerunTest extends BaseTestClass {
             }
             Assert.assertTrue(bundles != null && bundles.size() > 0, "Bundle job not created.");
             String bundleID = bundles.get(0);
-            logger.info("bundle id: " + bundleID);
+            LOGGER.info("bundle id: " + bundleID);
             List<String> missingDependencies = OozieUtil.getMissingDependencies(prismHelper, bundleID);
             for (int i = 0; i < 10 && missingDependencies == null; ++i) {
                 TimeUtil.sleepSeconds(30);
@@ -299,19 +300,20 @@ public class ProcessLateRerunTest extends BaseTestClass {
 
             //print missing dependencies
             for (String dependency : missingDependencies) {
-                logger.info("dependency from job: " + dependency);
+                LOGGER.info("dependency from job: " + dependency);
             }
 
             //create missing dependencies
-            logger.info("Creating missing dependencies...");
-            OozieUtil.createMissingDependencies(prismHelper, EntityType.PROCESS, Util.getProcessName(bundle.getProcessData()), 0, 0);
+            LOGGER.info("Creating missing dependencies...");
+            OozieUtil.createMissingDependencies(prismHelper, EntityType.PROCESS,
+                Util.getProcessName(bundle.getProcessData()), 0, 0);
 
             //Adding data to empty folders depending on dataFlag
-            if(dataFlag) {
+            if (dataFlag) {
                 int tempCount = 1;
                 for (String location : missingDependencies) {
-                    if(tempCount==1) {
-                        logger.info("Transferring data to : " + location);
+                    if (tempCount==1) {
+                        LOGGER.info("Transferring data to : " + location);
                         HadoopUtil.copyDataToFolder(clusterFS, location, OSUtil.RESOURCES + "feed-s4Replication.xml");
                         tempCount++;
                     }
@@ -319,18 +321,18 @@ public class ProcessLateRerunTest extends BaseTestClass {
             }
 
             //Process succeeding on empty folders
-            logger.info("Waiting for process to succeed...");
+            LOGGER.info("Waiting for process to succeed...");
             InstanceUtil.waitTillInstanceReachState(oozieClient,
-                    Util.getProcessName(bundle.getProcessData()), 1,
-                    CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
+                Util.getProcessName(bundle.getProcessData()), 1,
+                CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
 
             TimeUtil.sleepSeconds(30);
 
             //Adding data to check late rerun
             int tempCounter = 1;
             for (String dependency : missingDependencies) {
-                if(tempCounter==dataFolder) {
-                    logger.info("Transferring late data to : " + dependency);
+                if (tempCounter==dataFolder) {
+                    LOGGER.info("Transferring late data to : " + dependency);
                     HadoopUtil.copyDataToFolder(clusterFS, dependency, OSUtil.RESOURCES + "log4j.properties");
                 }
                 tempCounter++;
@@ -342,7 +344,8 @@ public class ProcessLateRerunTest extends BaseTestClass {
         }
     }
 
-    private static LateProcess getLateData(int delay, String delayUnits, String retryType, String inputData, String workflowDir) {
+    private static LateProcess getLateData(int delay, String delayUnits, String retryType,
+                                           String inputData, String workflowDir) {
         LateInput lateInput = new LateInput();
         lateInput.setInput(inputData);
         lateInput.setWorkflowPath(workflowDir);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ProcessLibPathTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ProcessLibPathTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ProcessLibPathTest.java
index c2d8c9b..6b1dec3 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ProcessLibPathTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ProcessLibPathTest.java
@@ -44,22 +44,22 @@ import java.lang.reflect.Method;
 import java.util.List;
 
 /**
- * Tests with process lib folder detached from workflow.xml
+ * Tests with process lib folder detached from workflow.xml.
  */
 @Test(groups = "embedded")
 public class ProcessLibPathTest extends BaseTestClass {
 
-    ColoHelper cluster = servers.get(0);
-    FileSystem clusterFS = serverFS.get(0);
-    String testDir = baseHDFSDir + "/ProcessLibPath";
-    String testLibDir = testDir + "/TestLib";
-    private static final Logger logger = Logger.getLogger(ProcessLibPathTest.class);
-    String processName;
-    String process;
+    private ColoHelper cluster = servers.get(0);
+    private FileSystem clusterFS = serverFS.get(0);
+    private String testDir = baseHDFSDir + "/ProcessLibPath";
+    private String testLibDir = testDir + "/TestLib";
+    private static final Logger LOGGER = Logger.getLogger(ProcessLibPathTest.class);
+    private String processName;
+    private String process;
 
     @BeforeClass(alwaysRun = true)
     public void createTestData() throws Exception {
-        logger.info("in @BeforeClass");
+        LOGGER.info("in @BeforeClass");
 
         //common lib for both test cases
         HadoopUtil.uploadDir(clusterFS, testLibDir, OSUtil.RESOURCES_OOZIE + "lib");
@@ -77,7 +77,7 @@ public class ProcessLibPathTest extends BaseTestClass {
 
     @BeforeMethod(alwaysRun = true)
     public void testName(Method method) throws Exception {
-        logger.info("test name: " + method.getName());
+        LOGGER.info("test name: " + method.getName());
         bundles[0] = BundleUtil.readELBundle();
         bundles[0] = new Bundle(bundles[0], cluster);
         bundles[0].generateUniqueBundle();
@@ -98,7 +98,7 @@ public class ProcessLibPathTest extends BaseTestClass {
     }
 
     /**
-     * Test which test a process with no lib folder in workflow location
+     * Test which tests a process with no lib folder in the workflow location.
      *
      * @throws Exception
      */
@@ -108,7 +108,7 @@ public class ProcessLibPathTest extends BaseTestClass {
         HadoopUtil.uploadDir(clusterFS, workflowDir, OSUtil.RESOURCES_OOZIE);
         HadoopUtil.deleteDirIfExists(workflowDir + "/lib", clusterFS);
         bundles[0].setProcessWorkflow(workflowDir);
-        logger.info("processData: " + Util.prettyPrintXml(process));
+        LOGGER.info("processData: " + Util.prettyPrintXml(process));
         bundles[0].submitFeedsScheduleProcess(prism);
         InstanceUtil.waitTillInstancesAreCreated(cluster, process, 0);
         OozieUtil.createMissingDependencies(cluster, EntityType.PROCESS, processName, 0);
@@ -116,7 +116,7 @@ public class ProcessLibPathTest extends BaseTestClass {
     }
 
     /**
-     * Test which test a process with wrong jar in lib folder in workflow location
+     * Test which tests a process with a wrong jar in the lib folder in the workflow location.
      *
      * @throws Exception
      */
@@ -128,7 +128,7 @@ public class ProcessLibPathTest extends BaseTestClass {
         HadoopUtil.copyDataToFolder(clusterFS, workflowDir + "/lib",
             OSUtil.RESOURCES + "ivory-oozie-lib-0.1.jar");
         bundles[0].setProcessWorkflow(workflowDir);
-        logger.info("processData: " + Util.prettyPrintXml(process));
+        LOGGER.info("processData: " + Util.prettyPrintXml(process));
         bundles[0].submitFeedsScheduleProcess(prism);
         InstanceUtil.waitTillInstancesAreCreated(cluster, process, 0);
         OozieUtil.createMissingDependencies(cluster, EntityType.PROCESS, processName, 0);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
index 9de179b..b1a2393 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
@@ -39,25 +39,28 @@ import java.io.File;
 import java.io.IOException;
 import java.util.Arrays;
 
+/**
+ * A TestNG listener that logs the start and end of each test.
+ */
 public class TestngListener implements ITestListener, IExecutionListener {
-    private static final Logger logger = Logger.getLogger(TestngListener.class);
+    private static final Logger LOGGER = Logger.getLogger(TestngListener.class);
     private final String hr = StringUtils.repeat("-", 100);
 
     @Override
     public void onTestStart(ITestResult result) {
-        logger.info(hr);
-        logger.info(
+        LOGGER.info(hr);
+        LOGGER.info(
             String.format("Testing going to start for: %s.%s(%s)", result.getTestClass().getName(),
                 result.getName(), Arrays.toString(result.getParameters())));
         NDC.push(result.getName());
     }
 
     private void logEndOfTest(ITestResult result, String outcome) {
-        logger.info(
+        LOGGER.info(
             String.format("Testing going to end for: %s.%s(%s) %s", result.getTestClass().getName(),
                 result.getName(), Arrays.toString(result.getParameters()), outcome));
         NDC.pop();
-        logger.info(hr);
+        LOGGER.info(hr);
     }
 
     @Override
@@ -76,12 +79,12 @@ public class TestngListener implements ITestListener, IExecutionListener {
                         result.getTestClass().getRealClass().getSimpleName(), result.getName()));
                 FileUtils.writeByteArrayToFile(new File(filename), scrFile);
             } catch (IOException e) {
-                logger.info("Saving screenshot FAILED: " + e.getCause());
+                LOGGER.info("Saving screenshot FAILED: " + e.getCause());
             }
         }
 
-        logger.info(ExceptionUtils.getStackTrace(result.getThrowable()));
-        logger.info(hr);
+        LOGGER.info(ExceptionUtils.getStackTrace(result.getThrowable()));
+        LOGGER.info(hr);
     }
 
     @Override
@@ -109,7 +112,7 @@ public class TestngListener implements ITestListener, IExecutionListener {
     @Override
     public void onExecutionFinish() {
         if (!Config.getBoolean("log.capture.oozie", false)) {
-            logger.info("oozie log capturing is disabled");
+            LOGGER.info("oozie log capturing is disabled");
             return;
         }
         final String logLocation = Config.getProperty("log.capture.location", "./");

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/entity/ListEntitiesTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/entity/ListEntitiesTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/entity/ListEntitiesTest.java
index 51c38cc..1586f76 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/entity/ListEntitiesTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/entity/ListEntitiesTest.java
@@ -72,8 +72,8 @@ public class ListEntitiesTest extends BaseTestClass {
      */
     @BeforeClass(alwaysRun = true)
     public void prepareData()
-            throws IOException, AuthenticationException, JAXBException, URISyntaxException,
-            InterruptedException {
+        throws IOException, AuthenticationException, JAXBException, URISyntaxException,
+        InterruptedException {
         uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
         CleanupUtil.cleanAllEntities(prism);
 
@@ -121,7 +121,7 @@ public class ListEntitiesTest extends BaseTestClass {
      */
     @Test(dataProvider = "getHelpers")
     public void listEntitiesWithOrderBy(IEntityManagerHelper helper)
-            throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
+        throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
 
         EntityElement[] entities =
             helper.listAllEntities("orderBy=name", null).getEntityList().getElements();
@@ -136,7 +136,7 @@ public class ListEntitiesTest extends BaseTestClass {
      */
     @Test(dataProvider = "getHelpers")
     public void listEntitiesWithFilterByStatus(IEntityManagerHelper helper)
-            throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
+        throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
         String[] statuses = helper.getEntityType().equalsIgnoreCase("cluster")
             ? new String[]{"SUBMITTED"} : new String[]{"SUBMITTED", "RUNNING"};
 
@@ -170,7 +170,7 @@ public class ListEntitiesTest extends BaseTestClass {
      */
     @Test(dataProvider = "getHelpers")
     public void listEntitiesWithOffset(IEntityManagerHelper helper)
-            throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
+        throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
 
         EntityElement[] allEntities =
             helper.listAllEntities(null, null).getEntityList().getElements();
@@ -199,7 +199,7 @@ public class ListEntitiesTest extends BaseTestClass {
      */
     @Test(dataProvider = "getHelpers")
     public void listEntitiesWithNumResults(IEntityManagerHelper helper)
-            throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
+        throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
 
         EntityElement[] allEntities =
             helper.listAllEntities(null, null).getEntityList().getElements();
@@ -223,7 +223,7 @@ public class ListEntitiesTest extends BaseTestClass {
      */
     @Test(dataProvider = "getHelpers")
     public void listEntitiesWithTags(IEntityManagerHelper helper)
-            throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
+        throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
 
         EntityElement[] allEntities =
             helper.listAllEntities("fields=tags", null).getEntityList().getElements();
@@ -259,7 +259,7 @@ public class ListEntitiesTest extends BaseTestClass {
      */
     @Test(dataProvider = "getHelpers")
     public void listEntitiesWithCustomFilter(IEntityManagerHelper helper)
-            throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
+        throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
 
         EntityElement[] entities = helper.listEntities(
             "numResults=2&fields=status,tags&filterBy=STATUS:SUBMITTED&orderBy=name&tags=" + tags[2],

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
index f01f30e..2a4a9c1 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
@@ -55,15 +55,18 @@ import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.List;
 
+/**
+ * Tests for operations on an HCat feed.
+ */
 public class HCatFeedOperationsTest extends BaseTestClass {
 
-    ColoHelper cluster = servers.get(0);
-    OozieClient clusterOC = serverOC.get(0);
-    HCatClient clusterHC;
+    private ColoHelper cluster = servers.get(0);
+    private OozieClient clusterOC = serverOC.get(0);
+    private HCatClient clusterHC;
 
-    ColoHelper cluster2 = servers.get(1);
-    OozieClient cluster2OC = serverOC.get(1);
-    HCatClient cluster2HC;
+    private ColoHelper cluster2 = servers.get(1);
+    private OozieClient cluster2OC = serverOC.get(1);
+    private HCatClient cluster2HC;
 
     private String dbName = "default";
     private String tableName = "hcatFeedOperationsTest";
@@ -184,8 +187,9 @@ public class HCatFeedOperationsTest extends BaseTestClass {
     }
 
     /**
-     * Submit Hcat Replication feed when Hcat table mentioned in table uri exists on both source and target. The response is
-     * Psucceeded, and a replication co-rdinator should apear on target oozie. The test however does not ensure that
+     * Submit HCat replication feed when the HCat table mentioned in the table uri exists on both source and target.
+     * The submission succeeds, and a replication coordinator should appear on the target Oozie.
+     * The test, however, does not ensure that
      * replication goes through.
      *
      * @throws Exception
@@ -255,7 +259,8 @@ public class HCatFeedOperationsTest extends BaseTestClass {
     }
 
 
-    public static void createEmptyTable(HCatClient cli, String dbName, String tabName, List<HCatFieldSchema> partitionCols) throws HCatException{
+    public static void createEmptyTable(HCatClient cli, String dbName, String tabName,
+                                        List<HCatFieldSchema> partitionCols) throws HCatException{
 
         ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
         cols.add(HCatUtil.getStringSchema("id", "id comment"));

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatProcessTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatProcessTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatProcessTest.java
index 66372e9..fbf856e 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatProcessTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatProcessTest.java
@@ -62,47 +62,50 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
+/**
+ * Tests Falcon processes that run Hive scripts.
+ */
 @Test(groups = "embedded")
 public class HCatProcessTest extends BaseTestClass {
-    private static final Logger logger = Logger.getLogger(HCatProcessTest.class);
-    ColoHelper cluster = servers.get(0);
-    FileSystem clusterFS = serverFS.get(0);
-    OozieClient clusterOC = serverOC.get(0);
-    HCatClient clusterHC;
-
-    final String testDir = "/HCatProcessTest";
-    final String baseTestHDFSDir = baseHDFSDir + testDir;
-    String hiveScriptDir = baseTestHDFSDir + "/hive";
-    String hiveScriptFile = hiveScriptDir + "/script.hql";
-    String aggregateWorkflowDir = baseTestHDFSDir + "/aggregator";
-    String hiveScriptFileNonHCatInput = hiveScriptDir + "/script_non_hcat_input.hql";
-    String hiveScriptFileNonHCatOutput = hiveScriptDir + "/script_non_hcat_output.hql";
-    String hiveScriptTwoHCatInputOneHCatOutput =
+    private static final Logger LOGGER = Logger.getLogger(HCatProcessTest.class);
+    private final ColoHelper cluster = servers.get(0);
+    private final FileSystem clusterFS = serverFS.get(0);
+    private final OozieClient clusterOC = serverOC.get(0);
+    private HCatClient clusterHC;
+
+    private final String testDir = "/HCatProcessTest";
+    private final String baseTestHDFSDir = baseHDFSDir + testDir;
+    private final String hiveScriptDir = baseTestHDFSDir + "/hive";
+    private final String hiveScriptFile = hiveScriptDir + "/script.hql";
+    private final String aggregateWorkflowDir = baseTestHDFSDir + "/aggregator";
+    private final String hiveScriptFileNonHCatInput = hiveScriptDir + "/script_non_hcat_input.hql";
+    private final String hiveScriptFileNonHCatOutput =
+        hiveScriptDir + "/script_non_hcat_output.hql";
+    private final String hiveScriptTwoHCatInputOneHCatOutput =
         hiveScriptDir + "/script_two_hcat_input_one_hcat_output.hql";
-    String hiveScriptOneHCatInputTwoHCatOutput =
+    private final String hiveScriptOneHCatInputTwoHCatOutput =
         hiveScriptDir + "/script_one_hcat_input_two_hcat_output.hql";
-    String hiveScriptTwoHCatInputTwoHCatOutput =
+    private final String hiveScriptTwoHCatInputTwoHCatOutput =
         hiveScriptDir + "/script_two_hcat_input_two_hcat_output.hql";
-    final String inputHDFSDir = baseTestHDFSDir + "/input";
-    final String inputHDFSDir2 = baseTestHDFSDir + "/input2";
-    final String outputHDFSDir = baseTestHDFSDir + "/output";
-    final String outputHDFSDir2 = baseTestHDFSDir + "/output2";
-
-    final String dbName = "default";
-    final String inputTableName = "hcatprocesstest_input_table";
-    final String inputTableName2 = "hcatprocesstest_input_table2";
-    final String outputTableName = "hcatprocesstest_output_table";
-    final String outputTableName2 = "hcatprocesstest_output_table2";
-    public static final String col1Name = "id";
-    public static final String col2Name = "value";
-    public static final String partitionColumn = "dt";
-
-    private static final String hcatDir = OSUtil.getPath("src", "test", "resources", "hcat");
-    private static final String localHCatData = OSUtil.getPath(hcatDir, "data");
-    private static final String hiveScript = OSUtil.getPath(hcatDir, "hivescript");
-    final String startDate = "2010-01-01T20:00Z";
-    final String endDate = "2010-01-01T21:10Z";
+    private final String inputHDFSDir = baseTestHDFSDir + "/input";
+    private final String inputHDFSDir2 = baseTestHDFSDir + "/input2";
+    private final String outputHDFSDir = baseTestHDFSDir + "/output";
+    private final String outputHDFSDir2 = baseTestHDFSDir + "/output2";
+
+    private final String dbName = "default";
+    private final String inputTableName = "hcatprocesstest_input_table";
+    private final String inputTableName2 = "hcatprocesstest_input_table2";
+    private final String outputTableName = "hcatprocesstest_output_table";
+    private final String outputTableName2 = "hcatprocesstest_output_table2";
+    private final String col1Name = "id";
+    private final String col2Name = "value";
+    private final String partitionColumn = "dt";
+
+    private final String hcatDir = OSUtil.getPath("src", "test", "resources", "hcat");
+    private final String localHCatData = OSUtil.getPath(hcatDir, "data");
+    private final String hiveScript = OSUtil.getPath(hcatDir, "hivescript");
+    private final String startDate = "2010-01-01T20:00Z";
+    private final String endDate = "2010-01-01T21:10Z";
 
     @BeforeMethod(alwaysRun = true)
     public void setUp() throws Exception {
@@ -129,11 +132,11 @@ public class HCatProcessTest extends BaseTestClass {
     public String[][] generateSeparators() {
         //disabling till FALCON-372 is fixed
         //return new String[][] {{"-"}, {"/"}};
-        return new String[][]{{"-"},};
+        return new String[][]{{"-", }, };
     }
 
     @Test(dataProvider = "generateSeparators")
-    public void OneHCatInputOneHCatOutput(String separator) throws Exception {
+    public void oneHCatInputOneHCatOutput(String separator) throws Exception {
         /* upload data and create partition */
         final String datePattern =
             StringUtils.join(new String[]{"yyyy", "MM", "dd", "HH"}, separator);
@@ -149,20 +152,20 @@ public class HCatProcessTest extends BaseTestClass {
 
         partitionCols.add(HCatUtil.getStringSchema(partitionColumn, partitionColumn + " partition"));
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, inputTableName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(inputHDFSDir)
-            .build());
+                .create(dbName, inputTableName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(inputHDFSDir)
+                .build());
 
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, outputTableName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(outputHDFSDir)
-            .build());
+                .create(dbName, outputTableName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(outputHDFSDir)
+                .build());
 
         addPartitionsToTable(dataDates, dataset, "dt", dbName, inputTableName);
 
@@ -193,7 +196,7 @@ public class HCatProcessTest extends BaseTestClass {
     }
 
     @Test(dataProvider = "generateSeparators")
-    public void TwoHCatInputOneHCatOutput(String separator) throws Exception {
+    public void twoHCatInputOneHCatOutput(String separator) throws Exception {
         /* upload data and create partition */
         final String datePattern =
             StringUtils.join(new String[]{"yyyy", "MM", "dd", "HH"}, separator);
@@ -211,28 +214,28 @@ public class HCatProcessTest extends BaseTestClass {
 
         partitionCols.add(HCatUtil.getStringSchema(partitionColumn, partitionColumn + " partition"));
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, inputTableName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(inputHDFSDir)
-            .build());
+                .create(dbName, inputTableName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(inputHDFSDir)
+                .build());
 
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, inputTableName2, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(inputHDFSDir2)
-            .build());
+                .create(dbName, inputTableName2, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(inputHDFSDir2)
+                .build());
 
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, outputTableName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(outputHDFSDir)
-            .build());
+                .create(dbName, outputTableName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(outputHDFSDir)
+                .build());
 
         addPartitionsToTable(dataDates, dataset, "dt", dbName, inputTableName);
         addPartitionsToTable(dataDates, dataset2, "dt", dbName, inputTableName2);
@@ -278,16 +281,16 @@ public class HCatProcessTest extends BaseTestClass {
             clusterFS.getContentSummary(new Path(inputHDFSDir2 + "/" + dataDates.get(0)));
         final ContentSummary outputContentSummary =
             clusterFS.getContentSummary(new Path(outputHDFSDir + "/dt=" + dataDates.get(0)));
-        logger.info("inputContentSummary = " + inputContentSummary.toString(false));
-        logger.info("inputContentSummary2 = " + inputContentSummary2.toString(false));
-        logger.info("outputContentSummary = " + outputContentSummary.toString(false));
+        LOGGER.info("inputContentSummary = " + inputContentSummary.toString(false));
+        LOGGER.info("inputContentSummary2 = " + inputContentSummary2.toString(false));
+        LOGGER.info("outputContentSummary = " + outputContentSummary.toString(false));
         Assert.assertEquals(inputContentSummary.getLength() + inputContentSummary2.getLength(),
             outputContentSummary.getLength(),
             "Unexpected size of the output.");
     }
 
     @Test(dataProvider = "generateSeparators")
-    public void OneHCatInputTwoHCatOutput(String separator) throws Exception {
+    public void oneHCatInputTwoHCatOutput(String separator) throws Exception {
         /* upload data and create partition */
         final String datePattern =
             StringUtils.join(new String[]{"yyyy", "MM", "dd", "HH"}, separator);
@@ -303,28 +306,28 @@ public class HCatProcessTest extends BaseTestClass {
 
         partitionCols.add(HCatUtil.getStringSchema(partitionColumn, partitionColumn + " partition"));
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, inputTableName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(inputHDFSDir)
-            .build());
+                .create(dbName, inputTableName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(inputHDFSDir)
+                .build());
 
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, outputTableName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(outputHDFSDir)
-            .build());
+                .create(dbName, outputTableName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(outputHDFSDir)
+                .build());
 
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, outputTableName2, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(outputHDFSDir2)
-            .build());
+                .create(dbName, outputTableName2, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(outputHDFSDir2)
+                .build());
 
         addPartitionsToTable(dataDates, dataset, "dt", dbName, inputTableName);
 
@@ -367,7 +370,7 @@ public class HCatProcessTest extends BaseTestClass {
 
 
     @Test(dataProvider = "generateSeparators")
-    public void TwoHCatInputTwoHCatOutput(String separator) throws Exception {
+    public void twoHCatInputTwoHCatOutput(String separator) throws Exception {
         /* upload data and create partition */
         final String datePattern =
             StringUtils.join(new String[]{"yyyy", "MM", "dd", "HH"}, separator);
@@ -385,36 +388,36 @@ public class HCatProcessTest extends BaseTestClass {
 
         partitionCols.add(HCatUtil.getStringSchema(partitionColumn, partitionColumn + " partition"));
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, inputTableName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(inputHDFSDir)
-            .build());
+                .create(dbName, inputTableName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(inputHDFSDir)
+                .build());
 
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, inputTableName2, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(inputHDFSDir2)
-            .build());
+                .create(dbName, inputTableName2, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(inputHDFSDir2)
+                .build());
 
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, outputTableName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(outputHDFSDir)
-            .build());
+                .create(dbName, outputTableName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(outputHDFSDir)
+                .build());
 
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, outputTableName2, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(outputHDFSDir2)
-            .build());
+                .create(dbName, outputTableName2, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(outputHDFSDir2)
+                .build());
 
         addPartitionsToTable(dataDates, dataset, "dt", dbName, inputTableName);
         addPartitionsToTable(dataDates, dataset2, "dt", dbName, inputTableName2);
@@ -468,10 +471,10 @@ public class HCatProcessTest extends BaseTestClass {
             clusterFS.getContentSummary(new Path(outputHDFSDir + "/dt=" + dataDates.get(0)));
         final ContentSummary outputContentSummary2 =
             clusterFS.getContentSummary(new Path(outputHDFSDir2 + "/dt=" + dataDates.get(0)));
-        logger.info("inputContentSummary = " + inputContentSummary.toString(false));
-        logger.info("inputContentSummary2 = " + inputContentSummary2.toString(false));
-        logger.info("outputContentSummary = " + outputContentSummary.toString(false));
-        logger.info("outputContentSummary2 = " + outputContentSummary2.toString(false));
+        LOGGER.info("inputContentSummary = " + inputContentSummary.toString(false));
+        LOGGER.info("inputContentSummary2 = " + inputContentSummary2.toString(false));
+        LOGGER.info("outputContentSummary = " + outputContentSummary.toString(false));
+        LOGGER.info("outputContentSummary2 = " + outputContentSummary2.toString(false));
         Assert.assertEquals(inputContentSummary.getLength() + inputContentSummary2.getLength(),
             outputContentSummary.getLength(),
             "Unexpected size of the output.");
@@ -482,7 +485,7 @@ public class HCatProcessTest extends BaseTestClass {
 
 
     @Test(dataProvider = "generateSeparators")
-    public void OneHCatInputOneNonHCatOutput(String separator) throws Exception {
+    public void oneHCatInputOneNonHCatOutput(String separator) throws Exception {
         /* upload data and create partition */
         final String datePattern =
             StringUtils.join(new String[]{"yyyy", "MM", "dd", "HH"}, separator);
@@ -498,12 +501,12 @@ public class HCatProcessTest extends BaseTestClass {
 
         partitionCols.add(HCatUtil.getStringSchema(partitionColumn, partitionColumn + " partition"));
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, inputTableName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(inputHDFSDir)
-            .build());
+                .create(dbName, inputTableName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(inputHDFSDir)
+                .build());
 
         addPartitionsToTable(dataDates, dataset, "dt", dbName, inputTableName);
 
@@ -523,8 +526,8 @@ public class HCatProcessTest extends BaseTestClass {
         Assert.assertEquals(clusterNames.size(), 1, "Expected only one cluster in the bundle.");
         nonHCatFeed = Util.setClusterNameInFeed(nonHCatFeed, clusterNames.get(0), 0);
         bundles[0].writeFeedElement(nonHCatFeed, outputFeedName);
-        bundles[0].setOutputFeedLocationData(outputHDFSDir + "/" +
-            StringUtils.join(new String[]{"${YEAR}", "${MONTH}", "${DAY}", "${HOUR}"}, separator));
+        bundles[0].setOutputFeedLocationData(outputHDFSDir + "/"
+            + StringUtils.join(new String[]{"${YEAR}", "${MONTH}", "${DAY}", "${HOUR}"}, separator));
         bundles[0].setOutputFeedPeriodicity(1, Frequency.TimeUnit.hours);
         bundles[0].setOutputFeedValidity(startDate, endDate);
 
@@ -544,7 +547,7 @@ public class HCatProcessTest extends BaseTestClass {
     }
 
     @Test(dataProvider = "generateSeparators")
-    public void OneNonCatInputOneHCatOutput(String separator) throws Exception {
+    public void oneNonCatInputOneHCatOutput(String separator) throws Exception {
         /* upload data and create partition */
         final String datePattern =
             StringUtils.join(new String[]{"yyyy", "MM", "dd", "HH"}, separator);
@@ -560,12 +563,12 @@ public class HCatProcessTest extends BaseTestClass {
 
         partitionCols.add(HCatUtil.getStringSchema(partitionColumn, partitionColumn + " partition"));
         clusterHC.createTable(HCatCreateTableDesc
-            .create(dbName, outputTableName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(outputHDFSDir)
-            .build());
+                .create(dbName, outputTableName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(outputHDFSDir)
+                .build());
 
         String nonHCatFeed = BundleUtil.readELBundle().getInputFeedFromBundle();
         final String inputFeedName = bundles[0].getInputFeedNameFromBundle();
@@ -574,8 +577,8 @@ public class HCatProcessTest extends BaseTestClass {
         Assert.assertEquals(clusterNames.size(), 1, "Expected only one cluster in the bundle.");
         nonHCatFeed = Util.setClusterNameInFeed(nonHCatFeed, clusterNames.get(0), 0);
         bundles[0].writeFeedElement(nonHCatFeed, inputFeedName);
-        bundles[0].setInputFeedDataPath(inputHDFSDir + "/" +
-            StringUtils.join(new String[]{"${YEAR}", "${MONTH}", "${DAY}", "${HOUR}"}, separator));
+        bundles[0].setInputFeedDataPath(inputHDFSDir + "/"
+            + StringUtils.join(new String[]{"${YEAR}", "${MONTH}", "${DAY}", "${HOUR}"}, separator));
         bundles[0].setInputFeedPeriodicity(1, Frequency.TimeUnit.hours);
         bundles[0].setInputFeedValidity(startDate, endDate);
 
@@ -603,7 +606,7 @@ public class HCatProcessTest extends BaseTestClass {
 
     private void addPartitionsToTable(List<String> partitions, List<String> partitionLocations,
                                       String partitionCol,
-                                      String dbName, String tableName) throws HCatException {
+                                      String databaseName, String tableName) throws HCatException {
         Assert.assertEquals(partitions.size(), partitionLocations.size(),
             "Number of locations is not same as number of partitions.");
         final List<HCatAddPartitionDesc> partitionDesc = new ArrayList<HCatAddPartitionDesc>();
@@ -613,7 +616,7 @@ public class HCatProcessTest extends BaseTestClass {
             onePartition.put(partitionCol, partition);
             final String partitionLoc = partitionLocations.get(i);
             partitionDesc.add(
-                HCatAddPartitionDesc.create(dbName, tableName, partitionLoc, onePartition).build());
+                HCatAddPartitionDesc.create(databaseName, tableName, partitionLoc, onePartition).build());
         }
         clusterHC.addPartitions(partitionDesc);
     }
@@ -623,8 +626,8 @@ public class HCatProcessTest extends BaseTestClass {
         DateTime startDateJoda = new DateTime(TimeUtil.oozieDateToDate(startDate));
         DateTime endDateJoda = new DateTime(TimeUtil.oozieDateToDate(endDate));
         DateTimeFormatter formatter = DateTimeFormat.forPattern(datePattern);
-        logger.info("generating data between " + formatter.print(startDateJoda) + " and " +
-            formatter.print(endDateJoda));
+        LOGGER.info("generating data between " + formatter.print(startDateJoda) + " and "
+            + formatter.print(endDateJoda));
         List<String> dates = new ArrayList<String>();
         dates.add(formatter.print(startDateJoda));
         while (!startDateJoda.isAfter(endDateJoda)) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
index fce0bf5..af1a751 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
@@ -61,28 +61,31 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+/**
+ * Tests for replication with HCat.
+ */
 @Test(groups = "embedded")
 public class HCatReplicationTest extends BaseTestClass {
 
-    private static final Logger logger = Logger.getLogger(HCatReplicationTest.class);
-    ColoHelper cluster = servers.get(0);
-    FileSystem clusterFS = serverFS.get(0);
-    HCatClient clusterHC;
+    private static final Logger LOGGER = Logger.getLogger(HCatReplicationTest.class);
+    private ColoHelper cluster = servers.get(0);
+    private FileSystem clusterFS = serverFS.get(0);
+    private HCatClient clusterHC;
 
-    ColoHelper cluster2 = servers.get(1);
-    FileSystem cluster2FS = serverFS.get(1);
-    OozieClient cluster2OC = serverOC.get(1);
-    HCatClient cluster2HC;
+    private ColoHelper cluster2 = servers.get(1);
+    private FileSystem cluster2FS = serverFS.get(1);
+    private OozieClient cluster2OC = serverOC.get(1);
+    private HCatClient cluster2HC;
 
-    ColoHelper cluster3 = servers.get(2);
-    FileSystem cluster3FS = serverFS.get(2);
-    OozieClient cluster3OC = serverOC.get(2);
-    HCatClient cluster3HC;
+    private ColoHelper cluster3 = servers.get(2);
+    private FileSystem cluster3FS = serverFS.get(2);
+    private OozieClient cluster3OC = serverOC.get(2);
+    private HCatClient cluster3HC;
 
-    final String baseTestHDFSDir = baseHDFSDir + "/HCatReplicationTest";
+    private final String baseTestHDFSDir = baseHDFSDir + "/HCatReplicationTest";
 
-    final String dbName = "default";
-    private static final String localHCatData = OSUtil.getPath(OSUtil.RESOURCES, "hcat", "data");
+    private final String dbName = "default";
+    private final String localHCatData = OSUtil.getPath(OSUtil.RESOURCES, "hcat", "data");
     private static final double TIMEOUT = 15;
 
     @BeforeClass(alwaysRun = true)
@@ -118,7 +121,7 @@ public class HCatReplicationTest extends BaseTestClass {
     public String[][] generateSeparators() {
         //disabling till FALCON-372 is fixed
         //return new String[][] {{"-"}, {"/"}};
-        return new String[][]{{"-"},};
+        return new String[][]{{"-"}};
     }
 
     // make sure oozie changes mentioned FALCON-389 are done on the clusters. Otherwise the test
@@ -197,10 +200,10 @@ public class HCatReplicationTest extends BaseTestClass {
         //check if data was replicated correctly
         List<Path> cluster1ReplicatedData = HadoopUtil
             .getAllFilesRecursivelyHDFS(clusterFS, new Path(testHdfsDir));
-        logger.info("Data on source cluster: " + cluster1ReplicatedData);
+        LOGGER.info("Data on source cluster: " + cluster1ReplicatedData);
         List<Path> cluster2ReplicatedData = HadoopUtil
             .getAllFilesRecursivelyHDFS(cluster2FS, new Path(testHdfsDir));
-        logger.info("Data on target cluster: " + cluster2ReplicatedData);
+        LOGGER.info("Data on target cluster: " + cluster2ReplicatedData);
         AssertUtil.checkForListSizes(cluster1ReplicatedData, cluster2ReplicatedData);
 
     }
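
Both replication checks in this file fetch recursive file listings from the source and target clusters and compare their sizes. HadoopUtil.getAllFilesRecursivelyHDFS itself is not part of this diff; a minimal sketch of such a listing, using only the stock Hadoop FileSystem API:

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public final class HdfsListingSketch {
    private HdfsListingSketch() {
    }

    /** Recursively lists every file under dir, as a stand-in for the helper used above. */
    public static List<Path> listFilesRecursively(FileSystem fs, Path dir) throws IOException {
        List<Path> paths = new ArrayList<Path>();
        RemoteIterator<LocatedFileStatus> files = fs.listFiles(dir, true); // true = recursive
        while (files.hasNext()) {
            paths.add(files.next().getPath());
        }
        return paths;
    }
}

The sizes of the two listings are then compared, which is what the AssertUtil.checkForListSizes calls above do.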
@@ -299,24 +302,21 @@ public class HCatReplicationTest extends BaseTestClass {
         //check if data was replicated correctly
         List<Path> srcData = HadoopUtil
             .getAllFilesRecursivelyHDFS(clusterFS, new Path(testHdfsDir));
-        logger.info("Data on source cluster: " + srcData);
+        LOGGER.info("Data on source cluster: " + srcData);
         List<Path> cluster2TargetData = HadoopUtil
             .getAllFilesRecursivelyHDFS(cluster2FS, new Path(testHdfsDir));
-        logger.info("Data on target cluster: " + cluster2TargetData);
+        LOGGER.info("Data on target cluster: " + cluster2TargetData);
         AssertUtil.checkForListSizes(srcData, cluster2TargetData);
         List<Path> cluster3TargetData = HadoopUtil
             .getAllFilesRecursivelyHDFS(cluster3FS, new Path(testHdfsDir));
-        logger.info("Data on target cluster: " + cluster3TargetData);
+        LOGGER.info("Data on target cluster: " + cluster3TargetData);
         AssertUtil.checkForListSizes(srcData, cluster3TargetData);
     }
 
-    //TODO: More tests need to be added such as
-    // Tests to make sure new partitions that are added are replicated
-    // Tests to make sure partitions that do no match the pattern are not copied
 
     private void addPartitionsToTable(List<String> partitions, List<String> partitionLocations,
                                       String partitionCol,
-                                      String dbName, String tableName, HCatClient hc) throws
+                                      String databaseName, String tableName, HCatClient hc) throws
         HCatException {
         Assert.assertEquals(partitions.size(), partitionLocations.size(),
             "Number of locations is not same as number of partitions.");
@@ -327,10 +327,10 @@ public class HCatReplicationTest extends BaseTestClass {
             onePartition.put(partitionCol, partition);
             final String partitionLoc = partitionLocations.get(i);
             partitionDesc
-                .add(HCatAddPartitionDesc.create(dbName, tableName, partitionLoc, onePartition)
+                .add(HCatAddPartitionDesc.create(databaseName, tableName, partitionLoc, onePartition)
                     .build());
         }
-        logger.info("adding partitions: " + partitionDesc);
+        LOGGER.info("adding partitions: " + partitionDesc);
         hc.addPartitions(partitionDesc);
     }
 
@@ -339,12 +339,12 @@ public class HCatReplicationTest extends BaseTestClass {
                                     String hdfsDir) throws HCatException {
         hcatClient.dropTable(dbName, tblName, true);
         hcatClient.createTable(HCatCreateTableDesc
-            .create(dbName, tblName, cols)
-            .partCols(partitionCols)
-            .ifNotExists(true)
-            .isTableExternal(true)
-            .location(hdfsDir)
-            .build());
+                .create(dbName, tblName, cols)
+                .partCols(partitionCols)
+                .ifNotExists(true)
+                .isTableExternal(true)
+                .location(hdfsDir)
+                .build());
     }
 
     @AfterMethod(alwaysRun = true)
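
createExternalTable and addPartitionsToTable together set up the layout the replication tests rely on: an external, partitioned table whose partitions each point at their own HDFS directory. A hedged end-to-end sketch using the same builder calls that appear in the diff; the column names, partition values, and the org.apache.hive.hcatalog import packaging are assumptions:

import org.apache.hive.hcatalog.api.HCatAddPartitionDesc;
import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.api.HCatCreateTableDesc;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public final class HCatTableSketch {
    private HCatTableSketch() {
    }

    /** Creates an external, partitioned table and registers one partition under it. */
    public static void createTableWithPartition(HCatClient hc, String dbName, String tblName,
                                                String hdfsDir) throws HCatException {
        List<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
        cols.add(new HCatFieldSchema("id", HCatFieldSchema.Type.STRING, "id comment"));
        List<HCatFieldSchema> partCols = new ArrayList<HCatFieldSchema>();
        partCols.add(new HCatFieldSchema("year", HCatFieldSchema.Type.STRING, "year prt"));

        hc.createTable(HCatCreateTableDesc
            .create(dbName, tblName, cols)
            .partCols(partCols)
            .ifNotExists(true)
            .isTableExternal(true)
            .location(hdfsDir)
            .build());

        // Each partition maps to a directory of its own, as addPartitionsToTable does above.
        HCatAddPartitionDesc ptn = HCatAddPartitionDesc
            .create(dbName, tblName, hdfsDir + "/2010", Collections.singletonMap("year", "2010"))
            .build();
        hc.addPartitions(Collections.singletonList(ptn));
    }
}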

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatRetentionTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatRetentionTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatRetentionTest.java
index db40931..c3699e2 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatRetentionTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatRetentionTest.java
@@ -27,7 +27,7 @@ import org.apache.falcon.regression.core.helpers.ColoHelper;
 import org.apache.falcon.regression.core.util.AssertUtil;
 import org.apache.falcon.regression.core.util.BundleUtil;
 import org.apache.falcon.regression.core.util.HCatUtil;
-import org.apache.falcon.regression.core.util.MathUtil;
+import org.apache.falcon.regression.core.util.MatrixUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.OozieUtil;
 import org.apache.falcon.regression.core.util.HadoopUtil;
@@ -60,19 +60,20 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+/** Test feed retention with hcat feeds. */
 public class HCatRetentionTest extends BaseTestClass {
 
-    private static final Logger logger = Logger.getLogger(HCatRetentionTest.class);
+    private static final Logger LOGGER = Logger.getLogger(HCatRetentionTest.class);
 
     private Bundle bundle;
-    public static HCatClient cli;
-    final String testDir = "/HCatRetentionTest/";
-    final String baseTestHDFSDir = baseHDFSDir + testDir;
-    final String dBName = "default";
-    final ColoHelper cluster = servers.get(0);
-    final FileSystem clusterFS = serverFS.get(0);
-    final OozieClient clusterOC = serverOC.get(0);
-    String tableName;
+    private static HCatClient cli;
+    private final String testDir = "/HCatRetentionTest/";
+    private final String baseTestHDFSDir = baseHDFSDir + testDir;
+    private final String dBName = "default";
+    private final ColoHelper cluster = servers.get(0);
+    private final FileSystem clusterFS = serverFS.get(0);
+    private final OozieClient clusterOC = serverOC.get(0);
+    private String tableName;
 
     @BeforeMethod(alwaysRun = true)
     public void setUp() throws Exception {
@@ -115,7 +116,7 @@ public class HCatRetentionTest extends BaseTestClass {
             final List<DateTime> dataDates =
                 TimeUtil.getDatesOnEitherSide(dataStartTime, dataEndTime, freqType);
             final List<String> dataDateStrings = TimeUtil.convertDatesToString(dataDates,
-                    freqType.getFormatter());
+                freqType.getFormatter());
             AssertUtil.checkForListSizes(dataDates, dataDateStrings);
             final List<String> dataFolders = HadoopUtil.flattenAndPutDataInFolder(clusterFS,
                 OSUtil.OOZIE_EXAMPLE_INPUT_LATE_INPUT, baseTestHDFSDir, dataDateStrings);
@@ -133,19 +134,19 @@ public class HCatRetentionTest extends BaseTestClass {
             AssertUtil.assertSucceeded(prism.getFeedHelper().suspend(feedElement.toString()));
 
             List<String> expectedOutput = getExpectedOutput(retentionPeriod, retentionUnit,
-                    freqType, new DateTime(DateTimeZone.UTC), initialData);
+                freqType, new DateTime(DateTimeZone.UTC), initialData);
             List<String> finalData = getHadoopDataFromDir(clusterFS, baseTestHDFSDir, testDir,
-                    freqType);
+                freqType);
             List<HCatPartition> finalPtnList = cli.getPartitions(dBName, tableName);
 
-            logger.info("checking expectedOutput and finalPtnList");
+            LOGGER.info("checking expectedOutput and finalPtnList");
             AssertUtil.checkForListSizes(expectedOutput, finalPtnList);
-            logger.info("checking expectedOutput and finalData");
+            LOGGER.info("checking expectedOutput and finalData");
             AssertUtil.checkForListSizes(expectedOutput, finalData);
-            logger.info("finalData = " + finalData);
-            logger.info("expectedOutput = " + expectedOutput);
+            LOGGER.info("finalData = " + finalData);
+            LOGGER.info("expectedOutput = " + expectedOutput);
             Assert.assertTrue(Arrays.deepEquals(finalData.toArray(new String[finalData.size()]),
-                    expectedOutput.toArray(new String[expectedOutput.size()])),
+                expectedOutput.toArray(new String[expectedOutput.size()])),
                 "expectedOutput and finalData don't match");
         }
     }
@@ -170,7 +171,7 @@ public class HCatRetentionTest extends BaseTestClass {
     }
 
     /**
-     * Get the expected output after retention is applied
+     * Get the expected output after retention is applied.
      *
      * @param retentionPeriod retention period
      * @param retentionUnit   retention unit
@@ -210,22 +211,18 @@ public class HCatRetentionTest extends BaseTestClass {
         cols.add(HCatUtil.getStringSchema("value", "value comment"));
 
         switch (dataType) {
-            case MINUTELY:
-                ptnCols.add(
-                    HCatUtil.getStringSchema("minute", "min prt"));
-            case HOURLY:
-                ptnCols.add(
-                    HCatUtil.getStringSchema("hour", "hour prt"));
-            case DAILY:
-                ptnCols.add(HCatUtil.getStringSchema("day", "day prt"));
-            case MONTHLY:
-                ptnCols.add(
-                    HCatUtil.getStringSchema("month", "month prt"));
-            case YEARLY:
-                ptnCols.add(
-                    HCatUtil.getStringSchema("year", "year prt"));
-            default:
-                break;
+        case MINUTELY:
+            ptnCols.add(HCatUtil.getStringSchema("minute", "min prt"));
+        case HOURLY:
+            ptnCols.add(HCatUtil.getStringSchema("hour", "hour prt"));
+        case DAILY:
+            ptnCols.add(HCatUtil.getStringSchema("day", "day prt"));
+        case MONTHLY:
+            ptnCols.add(HCatUtil.getStringSchema("month", "month prt"));
+        case YEARLY:
+            ptnCols.add(HCatUtil.getStringSchema("year", "year prt"));
+        default:
+            break;
         }
         HCatCreateTableDesc tableDesc = HCatCreateTableDesc
             .create(dbName, tableName, cols)
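
This switch (and its twin in the next hunk) relies on deliberate case fall-through: no case has a break until the end, so a fine frequency cascades through every coarser one. MINUTELY therefore registers minute, hour, day, month and year partition columns, while YEARLY registers only year. A self-contained sketch of the cascade, with FreqType reduced to a local enum:

import java.util.ArrayList;
import java.util.List;

public final class FallThroughSketch {

    enum FreqType { MINUTELY, HOURLY, DAILY, MONTHLY, YEARLY }

    private FallThroughSketch() {
    }

    /** Intentional fall-through: each case adds its column, then drops into the coarser cases. */
    public static List<String> partitionColumns(FreqType freqType) {
        List<String> ptnCols = new ArrayList<String>();
        switch (freqType) {
        case MINUTELY:
            ptnCols.add("minute");
        case HOURLY:
            ptnCols.add("hour");
        case DAILY:
            ptnCols.add("day");
        case MONTHLY:
            ptnCols.add("month");
        case YEARLY:
            ptnCols.add("year");
        default:
            break;
        }
        return ptnCols;
    }
}

partitionColumns(FreqType.DAILY), for example, yields [day, month, year]; the second switch fills in the matching values from a Joda DateTime in the same way.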
@@ -250,19 +247,19 @@ public class HCatRetentionTest extends BaseTestClass {
             final String dataFolder = dataFolders.get(i);
             final DateTime dataDate = dataDates.get(i);
             switch (freqType) {
-                case MINUTELY:
-                    ptn.put("minute", "" + dataDate.getMinuteOfHour());
-                case HOURLY:
-                    ptn.put("hour", "" + dataDate.getHourOfDay());
-                case DAILY:
-                    ptn.put("day", "" + dataDate.getDayOfMonth());
-                case MONTHLY:
-                    ptn.put("month", "" + dataDate.getMonthOfYear());
-                case YEARLY:
-                    ptn.put("year", "" + dataDate.getYear());
-                    break;
-                default:
-                    Assert.fail("Unexpected freqType = " + freqType);
+            case MINUTELY:
+                ptn.put("minute", "" + dataDate.getMinuteOfHour());
+            case HOURLY:
+                ptn.put("hour", "" + dataDate.getHourOfDay());
+            case DAILY:
+                ptn.put("day", "" + dataDate.getDayOfMonth());
+            case MONTHLY:
+                ptn.put("month", "" + dataDate.getMonthOfYear());
+            case YEARLY:
+                ptn.put("year", "" + dataDate.getYear());
+                break;
+            default:
+                Assert.fail("Unexpected freqType = " + freqType);
             }
             //Each HCat partition maps to a directory, not to a file
             HCatAddPartitionDesc addPtn = HCatAddPartitionDesc.create(dbName,
@@ -274,16 +271,20 @@ public class HCatRetentionTest extends BaseTestClass {
 
     @DataProvider(name = "loopBelow")
     public Object[][] getTestData(Method m) {
-        RetentionUnit[] retentionUnits = new RetentionUnit[]{RetentionUnit.HOURS, RetentionUnit.DAYS,
-            RetentionUnit.MONTHS};// "minutes","years",
+        RetentionUnit[] retentionUnits = new RetentionUnit[]{
+            RetentionUnit.HOURS,
+            RetentionUnit.DAYS,
+            RetentionUnit.MONTHS,
+        };
         Integer[] periods = new Integer[]{7, 824, 43}; // a negative value like -4 should be covered
         // in validation scenarios.
         FreqType[] dataTypes =
             new FreqType[]{
                //disabling since falcon supports only a single hcat partition
                 //FreqType.DAILY, FreqType.MINUTELY, FreqType.HOURLY, FreqType.MONTHLY,
-                FreqType.YEARLY};
-        return MathUtil.crossProduct(periods, retentionUnits, dataTypes);
+                FreqType.YEARLY,
+            };
+        return MatrixUtil.crossProduct(periods, retentionUnits, dataTypes);
     }
 
     @AfterClass(alwaysRun = true)
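
The data provider hands MatrixUtil.crossProduct (renamed from MathUtil by this commit) three parameter arrays and receives every combination back, one TestNG invocation per row. MatrixUtil's own source is not in this diff; a hypothetical reimplementation conveys the contract:

public final class CrossProductSketch {
    private CrossProductSketch() {
    }

    /** Every combination of the given parameter arrays, one combination per row. */
    public static Object[][] crossProduct(Object[]... lists) {
        int rows = 1;
        for (Object[] list : lists) {
            rows *= list.length;
        }
        Object[][] result = new Object[rows][lists.length];
        for (int row = 0; row < rows; row++) {
            int rem = row;
            // Treat the row index as a mixed-radix number; each digit picks one element.
            for (int col = lists.length - 1; col >= 0; col--) {
                result[row][col] = lists[col][rem % lists[col].length];
                rem /= lists[col].length;
            }
        }
        return result;
    }
}

With three periods, three retention units and one frequency type, getTestData would thus produce nine invocations.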

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
index 4ba3cd3..63f98f2 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
@@ -184,8 +184,8 @@ public class EntitySummaryTest extends BaseTestClass {
     private List<String> scheduleEntityValidateWaitingInstances(ColoHelper cluster, String entity,
                                                                 EntityType entityType,
                                                                 String clusterName)
-            throws AuthenticationException, IOException, URISyntaxException, JAXBException,
-            OozieClientException, InterruptedException {
+        throws AuthenticationException, IOException, URISyntaxException, JAXBException,
+        OozieClientException, InterruptedException {
         String entityName = Util.readEntityName(entity);
         IEntityManagerHelper helper;
         List<String> names = new ArrayList<String>();
@@ -242,7 +242,7 @@ public class EntitySummaryTest extends BaseTestClass {
      */
     private void validateProgressingInstances(List<String> names, EntityType entityType,
                                               String clusterName)
-            throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
+        throws AuthenticationException, IOException, URISyntaxException, InterruptedException {
         InstancesResult r;
         IEntityManagerHelper helper;
         if (entityType == EntityType.FEED) {
@@ -307,8 +307,8 @@ public class EntitySummaryTest extends BaseTestClass {
      */
     @Test
     public void getSummaryFilterBy()
-            throws URISyntaxException, IOException, AuthenticationException, JAXBException,
-            InterruptedException {
+        throws URISyntaxException, IOException, AuthenticationException, JAXBException,
+        InterruptedException {
         //prepare process template
         bundles[0].setProcessValidity(startTime, endTime);
         bundles[0].submitClusters(prism);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/387604d1/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiProcessInstanceTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiProcessInstanceTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiProcessInstanceTest.java
index 8213cd9..38e1cb2 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiProcessInstanceTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiProcessInstanceTest.java
@@ -50,24 +50,27 @@ import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.List;
 
+/**
+ * Tests for Lineage API with submitted process.
+ */
 @Test(groups = "lineage-rest")
 public class LineageApiProcessInstanceTest extends BaseTestClass {
-    private static final Logger logger = Logger.getLogger(LineageApiProcessInstanceTest.class);
-
-    ColoHelper cluster = servers.get(0);
-    FileSystem clusterFS = serverFS.get(0);
-    LineageHelper lineageHelper;
-    String baseTestHDFSDir = baseHDFSDir + "/LineageApiInstanceTest";
-    String aggregateWorkflowDir = baseTestHDFSDir + "/aggregator";
-    String feedInputPrefix = baseTestHDFSDir + "/input";
-    String feedInputPath = feedInputPrefix + MINUTE_DATE_PATTERN;
-    String feedOutputPath = baseTestHDFSDir + "/output-data" + MINUTE_DATE_PATTERN;
-    String processName;
-    String inputFeedName;
-    String outputFeedName;
-    final String dataStartDate = "2010-01-02T09:00Z";
-    final String processStartDate = "2010-01-02T09:50Z";
-    final String endDate = "2010-01-02T10:00Z";
+    private static final Logger LOGGER = Logger.getLogger(LineageApiProcessInstanceTest.class);
+
+    private ColoHelper cluster = servers.get(0);
+    private FileSystem clusterFS = serverFS.get(0);
+    private LineageHelper lineageHelper;
+    private String baseTestHDFSDir = baseHDFSDir + "/LineageApiInstanceTest";
+    private String aggregateWorkflowDir = baseTestHDFSDir + "/aggregator";
+    private String feedInputPrefix = baseTestHDFSDir + "/input";
+    private String feedInputPath = feedInputPrefix + MINUTE_DATE_PATTERN;
+    private String feedOutputPath = baseTestHDFSDir + "/output-data" + MINUTE_DATE_PATTERN;
+    private String processName;
+    private String inputFeedName;
+    private String outputFeedName;
+    private final String dataStartDate = "2010-01-02T09:00Z";
+    private final String processStartDate = "2010-01-02T09:50Z";
+    private final String endDate = "2010-01-02T10:00Z";
 
 
     @BeforeClass(alwaysRun = true)
@@ -87,7 +90,7 @@ public class LineageApiProcessInstanceTest extends BaseTestClass {
 
         // data set creation
         List<String> dataDates = TimeUtil.getMinuteDatesOnEitherSide(dataStartDate, endDate, 5);
-        logger.info("dataDates = " + dataDates);
+        LOGGER.info("dataDates = " + dataDates);
         HadoopUtil.flattenAndPutDataInFolder(clusterFS, OSUtil.NORMAL_INPUT, feedInputPrefix,
             dataDates);
 
@@ -107,8 +110,9 @@ public class LineageApiProcessInstanceTest extends BaseTestClass {
         for (int i = 0; i < 20; i++) {
             status = InstanceUtil.getDefaultCoordinatorStatus(cluster,
                 Util.getProcessName(bundles[0].getProcessData()), 0);
-            if (status == Job.Status.SUCCEEDED || status == Job.Status.KILLED)
+            if (status == Job.Status.SUCCEEDED || status == Job.Status.KILLED) {
                 break;
+            }
             TimeUtil.sleepSeconds(30);
         }
         Assert.assertNotNull(status);
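
The loop above polls the default coordinator's status up to 20 times with 30-second sleeps, i.e. a timeout of roughly ten minutes. The same wait-for-terminal-state pattern as a small generic helper (a sketch only; the test keeps using its own utilities):

import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;

public final class PollingSketch {
    private PollingSketch() {
    }

    /** Polls until the check passes or the attempts run out; reports whether it ever passed. */
    public static boolean pollUntil(Callable<Boolean> check, int attempts, long sleepSeconds)
        throws Exception {
        for (int i = 0; i < attempts; i++) {
            if (check.call()) {
                return true;
            }
            TimeUnit.SECONDS.sleep(sleepSeconds);
        }
        return false;
    }
}

Here the check would report whether the coordinator reached Job.Status.SUCCEEDED or Job.Status.KILLED.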
@@ -122,7 +126,7 @@ public class LineageApiProcessInstanceTest extends BaseTestClass {
     }
 
     /**
-     * Test navigation from the process vertex to its instances vertices
+     * Test navigation from the process vertex to its instances vertices.
      * @throws Exception
      */
     @Test
@@ -135,17 +139,17 @@ public class LineageApiProcessInstanceTest extends BaseTestClass {
         GraphAssert.assertVertexSanity(processIncoming);
         final List<Vertex> processInstanceVertices =
             processIncoming.filterByType(Vertex.VERTEX_TYPE.PROCESS_INSTANCE);
-        logger.info("process instances = " + processInstanceVertices);
+        LOGGER.info("process instances = " + processInstanceVertices);
         InstancesResult result = prism.getProcessHelper()
-            .getProcessInstanceStatus(processName, "?start=" + processStartDate +
-                "&end=" + endDate);
+            .getProcessInstanceStatus(processName, "?start=" + processStartDate
+                + "&end=" + endDate);
         Assert.assertEquals(processInstanceVertices.size(), result.getInstances().length,
-            "Number of process instances should be same weather it is retrieved from lineage api " +
-                "or falcon rest api");
+            "Number of process instances should be same weather it is retrieved from lineage api "
+                + "or falcon rest api");
     }
 
     /**
-     * Test navigation from the process instance vertex to its input and output feed instances
+     * Test navigation from the process instance vertex to its input and output feed instances.
      * @throws Exception
      */
     @Test
@@ -159,15 +163,15 @@ public class LineageApiProcessInstanceTest extends BaseTestClass {
         // fetching process instance vertex
         final List<Vertex> piVertices =
             processIncoming.filterByType(Vertex.VERTEX_TYPE.PROCESS_INSTANCE);
-        logger.info("process instance vertex = " + piVertices);
+        LOGGER.info("process instance vertex = " + piVertices);
 
         // fetching process instances info
         InstancesResult piResult = prism.getProcessHelper()
-            .getProcessInstanceStatus(processName, "?start=" + processStartDate +
-                "&end=" + endDate);
+            .getProcessInstanceStatus(processName, "?start=" + processStartDate
+                + "&end=" + endDate);
         Assert.assertEquals(piVertices.size(), piResult.getInstances().length,
-            "Number of process instances should be same weather it is retrieved from lineage api " +
-                "or falcon rest api");
+            "Number of process instances should be same weather it is retrieved from lineage api "
+                + "or falcon rest api");
         final List<String> allowedPITimes = new ArrayList<String>();
         for (InstancesResult.Instance processInstance : piResult.getInstances()) {
             allowedPITimes.add(processInstance.getInstance());
@@ -177,10 +181,10 @@ public class LineageApiProcessInstanceTest extends BaseTestClass {
             Assert.assertTrue(piVertex.getName().startsWith(processName),
                 "Process instance names should start with process name: " + piVertex.getName());
             String processInstanceTime = piVertex.getName().substring(processName.length() + 1);
-            logger.info("processInstanceTime = " + processInstanceTime);
+            LOGGER.info("processInstanceTime = " + processInstanceTime);
             Assert.assertTrue(allowedPITimes.remove(processInstanceTime),
-                "Unexpected processInstanceTime: " + processInstanceTime +
-                    "it should have been be in the list " + allowedPITimes);
+                "Unexpected processInstanceTime: " + processInstanceTime
+                    + "it should have been be in the list " + allowedPITimes);
 
             VerticesResult piIncoming = lineageHelper.getVerticesByDirection(piVertex.getId(),
                 Direction.inComingVertices);
@@ -200,10 +204,10 @@ public class LineageApiProcessInstanceTest extends BaseTestClass {
                     "input feed instances should start with input feed name: " + inFeedInstName);
                 final String inFeedInstanceTime = inFeedInstName.substring(
                     inputFeedName.length() + 1);
-                logger.info("inFeedInstanceTime = " + inFeedInstanceTime);
+                LOGGER.info("inFeedInstanceTime = " + inFeedInstanceTime);
                 Assert.assertTrue(allowedInpFeedInstDates.remove(inFeedInstanceTime),
-                    "Unexpected inFeedInstanceTime: " + inFeedInstanceTime  + " it should have " +
-                        "been present in: " + allowedInpFeedInstDates);
+                    "Unexpected inFeedInstanceTime: " + inFeedInstanceTime  + " it should have "
+                        + "been present in: " + allowedInpFeedInstDates);
             }
 
             VerticesResult piOutgoing = lineageHelper.getVerticesByDirection(
@@ -215,10 +219,10 @@ public class LineageApiProcessInstanceTest extends BaseTestClass {
             final Vertex outFeedInst = piOutgoing.filterByType(Vertex.VERTEX_TYPE.FEED_INSTANCE).get(0);
             final String outFeedInstName = outFeedInst.getName();
             Assert.assertTrue(outFeedInstName.startsWith(outputFeedName),
-                "Expecting outFeedInstName: " + outFeedInstName +
-                    " to start with outputFeedName: " + outputFeedName);
-            final String outFeedInstanceTime = outFeedInstName.substring(outputFeedName.length() +
-                1);
+                "Expecting outFeedInstName: " + outFeedInstName
+                    + " to start with outputFeedName: " + outputFeedName);
+            final String outFeedInstanceTime = outFeedInstName.substring(outputFeedName.length()
+                + 1);
             Assert.assertEquals(outFeedInstanceTime, processInstanceTime,
                 "Expecting output feed instance time and process instance time to be same");
         }
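
The assertions above recover an instance's nominal time by stripping the entity name, plus one separator character, off the front of the lineage vertex name. A minimal sketch of that convention, assuming the "<entityName><separator><instanceTime>" layout the substring arithmetic implies:

public final class VertexNameSketch {
    private VertexNameSketch() {
    }

    /** Extracts the nominal time from a vertex name of the form entityName + separator + time. */
    public static String instanceTime(String vertexName, String entityName) {
        if (!vertexName.startsWith(entityName)) {
            throw new IllegalArgumentException(
                "Vertex name " + vertexName + " does not belong to entity " + entityName);
        }
        return vertexName.substring(entityName.length() + 1);
    }
}

For instance, instanceTime("agg-process/2010-01-02T09:50Z", "agg-process") would return "2010-01-02T09:50Z"; the actual separator character is an assumption.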

