falcon-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From rag...@apache.org
Subject [5/5] falcon git commit: FALCON-1319: Contribute HiveDr, Mirror tests and some test fixes contributed by Namit Maheshwari, Paul Isaychuk, Raghav Kumar Gautam & Ruslan Ostafiychuk
Date Wed, 29 Jul 2015 17:09:44 GMT
FALCON-1319: Contribute HiveDr, Mirror tests and some test fixes contributed by Namit Maheshwari, Paul Isaychuk, Raghav Kumar Gautam & Ruslan Ostafiychuk


Project: http://git-wip-us.apache.org/repos/asf/falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/falcon/commit/f9669000
Tree: http://git-wip-us.apache.org/repos/asf/falcon/tree/f9669000
Diff: http://git-wip-us.apache.org/repos/asf/falcon/diff/f9669000

Branch: refs/heads/master
Commit: f966900068693ad3a5c2bb3f33e12cbe700084f5
Parents: 1416f5e
Author: Raghav Kumar Gautam <raghav@apache.org>
Authored: Wed Jul 29 10:08:12 2015 -0700
Committer: Raghav Kumar Gautam <raghav@apache.org>
Committed: Wed Jul 29 10:08:12 2015 -0700

----------------------------------------------------------------------
 falcon-regression/.gitignore                    |   2 +
 falcon-regression/CHANGES.txt                   |   3 +
 falcon-regression/merlin-core/pom.xml           |  10 +
 .../regression/Entities/RecipeMerlin.java       | 358 ++++++++++
 .../falcon/regression/core/bundle/Bundle.java   |   9 +
 .../helpers/entity/AbstractEntityHelper.java    |  27 +-
 .../regression/core/response/lineage/Edge.java  |   7 +
 .../core/supportClasses/NotifyingAssert.java    |  76 ++
 .../falcon/regression/core/util/AssertUtil.java |   4 +-
 .../falcon/regression/core/util/ExecUtil.java   |   5 +-
 .../falcon/regression/core/util/HadoopUtil.java |  42 ++
 .../falcon/regression/core/util/HiveAssert.java | 269 +++++++
 .../falcon/regression/core/util/HiveUtil.java   | 156 +++++
 .../falcon/regression/core/util/LogUtil.java    |   2 +-
 falcon-regression/merlin/pom.xml                |  21 +
 .../regression/testHelper/BaseUITestClass.java  |   3 +-
 .../ui/search/AbstractSearchPage.java           |  40 +-
 .../regression/ui/search/ClusterWizardPage.java |   4 +
 .../regression/ui/search/FeedWizardPage.java    |  20 +-
 .../falcon/regression/ui/search/LoginPage.java  |  16 +-
 .../regression/ui/search/MirrorWizardPage.java  | 510 ++++++++++++++
 .../regression/ui/search/NewMirrorPage.java     |  44 --
 .../falcon/regression/ui/search/PageHeader.java |  36 +-
 .../regression/ui/search/ProcessWizardPage.java |   3 +
 .../falcon/regression/ui/search/SearchPage.java |  11 +-
 .../falcon/regression/InstanceParamTest.java    |   7 +-
 .../falcon/regression/InstanceSummaryTest.java  |   5 +-
 .../falcon/regression/ProcessFrequencyTest.java |   3 -
 .../ProcessInstanceColoMixedTest.java           |  11 +-
 .../regression/ProcessInstanceKillsTest.java    |   4 -
 .../regression/ProcessInstanceRerunTest.java    |  54 +-
 .../regression/ProcessInstanceResumeTest.java   |   3 -
 .../regression/ProcessInstanceRunningTest.java  |   4 -
 .../regression/ProcessInstanceStatusTest.java   |   4 -
 .../entity/EntitiesPatternSearchTest.java       |  20 +-
 .../regression/hcat/HCatFeedOperationsTest.java |   2 +
 .../regression/hcat/HCatReplicationTest.java    |   4 +-
 .../falcon/regression/hive/dr/HiveDRTest.java   | 700 +++++++++++++++++++
 .../falcon/regression/hive/dr/HiveDbDRTest.java | 270 +++++++
 .../regression/hive/dr/HiveObjectCreator.java   | 208 ++++++
 .../lineage/ListFeedInstancesTest.java          |   6 +-
 .../lineage/ListProcessInstancesTest.java       |  17 +-
 .../prism/NewPrismProcessUpdateTest.java        |   2 +-
 .../regression/prism/PrismFeedDeleteTest.java   |   8 +-
 .../regression/prism/PrismSubmitTest.java       |   5 +-
 .../prism/UpdateAtSpecificTimeTest.java         |   5 +-
 .../regression/searchUI/EntitiesTableTest.java  |   5 +-
 .../regression/searchUI/EntityPageTest.java     |   4 +-
 .../regression/searchUI/FeedSetupTest.java      |   4 +
 .../regression/searchUI/HomePageTest.java       |   9 +-
 .../regression/searchUI/InstancePageTest.java   |  22 +-
 .../searchUI/MirrorSourceTargetOptionsTest.java | 204 ++++++
 .../regression/searchUI/MirrorSummaryTest.java  | 207 ++++++
 .../falcon/regression/searchUI/MirrorTest.java  | 350 ++++++++++
 .../regression/security/FalconClientTest.java   |   2 +-
 .../regression/security/ProcessAclTest.java     |   4 +
 .../hive-disaster-recovery-template.xml         |  43 ++
 .../hive-disaster-recovery-workflow.xml         | 120 ++++
 .../hive-disaster-recovery.properties           |  75 ++
 .../hive-disaster-recovery-template.xml         |  44 ++
 .../hive-disaster-recovery-workflow.xml         | 293 ++++++++
 .../hive-disaster-recovery.properties           |  95 +++
 .../hive-disaster-recovery-secure-template.xml  |  44 ++
 .../hive-disaster-recovery-secure-workflow.xml  | 401 +++++++++++
 .../hive-disaster-recovery-secure.properties    | 104 +++
 falcon-regression/pom.xml                       |  46 +-
 66 files changed, 4894 insertions(+), 202 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/.gitignore
----------------------------------------------------------------------
diff --git a/falcon-regression/.gitignore b/falcon-regression/.gitignore
index 67de4a6..0cb27d0 100644
--- a/falcon-regression/.gitignore
+++ b/falcon-regression/.gitignore
@@ -46,6 +46,8 @@ bin/
 
 #hadoop-conf
 merlin/src/test/resources/hadoop-conf
+merlin/src/test/resources/hive-conf
+merlin/src/test/resources/falcon-conf
 
 #prop files
 merlin/src/main/resources/Merlin.properties

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/CHANGES.txt
----------------------------------------------------------------------
diff --git a/falcon-regression/CHANGES.txt b/falcon-regression/CHANGES.txt
index c00e66f..8dc8cb7 100644
--- a/falcon-regression/CHANGES.txt
+++ b/falcon-regression/CHANGES.txt
@@ -5,6 +5,9 @@ Trunk (Unreleased)
   INCOMPATIBLE CHANGES
 
   NEW FEATURES
+   FALCON-1319 Contribute HiveDr, Mirror tests and some test fixes (Namit Maheshwari, Paul Isaychuk,
+   Raghav Kumar Gautam & Ruslan Ostafiychuk via Raghav Kumar Gautam)
+
    FALCON-1254 ClusterSetup UI: edit xml test cases, stabilize 2 tests (Paul Isaychuk via Ruslan Ostafiychuk)
    
    FALCON-1215 Adding new test cases related to rerun feature (Pragya M via Samarth Gupta)

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/pom.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/pom.xml b/falcon-regression/merlin-core/pom.xml
index e0180b4..4b26fde 100644
--- a/falcon-regression/merlin-core/pom.xml
+++ b/falcon-regression/merlin-core/pom.xml
@@ -59,6 +59,16 @@
                     <groupId>org.apache.hive.hcatalog</groupId>
                     <artifactId>hive-hcatalog-core</artifactId>
                 </dependency>
+
+                <dependency>
+                    <groupId>org.apache.hive</groupId>
+                    <artifactId>hive-jdbc</artifactId>
+                </dependency>
+
+                <dependency>
+                    <groupId>org.apache.hive</groupId>
+                    <artifactId>hive-metastore</artifactId>
+                </dependency>
             </dependencies>
         </profile>
     </profiles>

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java
new file mode 100644
index 0000000..40fec08
--- /dev/null
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java
@@ -0,0 +1,358 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression.Entities;
+
+import org.apache.commons.configuration.AbstractFileConfiguration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.filefilter.FalseFileFilter;
+import org.apache.commons.io.filefilter.RegexFileFilter;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.falcon.cli.FalconCLI;
+import org.apache.falcon.entity.v0.Frequency;
+import org.apache.falcon.entity.v0.cluster.Interfacetype;
+import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
+import org.apache.falcon.entity.v0.process.ACL;
+import org.apache.falcon.entity.v0.process.PolicyType;
+import org.apache.falcon.entity.v0.process.Retry;
+import org.apache.falcon.regression.core.util.Config;
+import org.apache.falcon.regression.core.util.OSUtil;
+import org.apache.log4j.Logger;
+import org.testng.Assert;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.UUID;
+
+/** Class for representing a falcon recipe. */
+public final class RecipeMerlin {
+    private static final Logger LOGGER = Logger.getLogger(RecipeMerlin.class);
+    private static final String WORKFLOW_PATH_KEY = "falcon.recipe.workflow.path";
+    private static final String RECIPE_NAME_KEY = "falcon.recipe.name";
+    private static final String WRITE_DIR =
+        Config.getProperty("recipe.location", "/tmp/falcon-recipe");
+
+    private String template;
+    private AbstractFileConfiguration properties;
+    private String workflow;
+    private ClusterMerlin recipeCluster;
+    private ClusterMerlin srcCluster;
+    private ClusterMerlin tgtCluster;
+
+
+    public ClusterMerlin getRecipeCluster() {
+        return recipeCluster;
+    }
+
+    public ClusterMerlin getSrcCluster() {
+        return srcCluster;
+    }
+
+    public ClusterMerlin getTgtCluster() {
+        return tgtCluster;
+    }
+
+    public FalconCLI.RecipeOperation getRecipeOperation() {
+        return recipeOperation;
+    }
+
+    private FalconCLI.RecipeOperation recipeOperation;
+
+    private RecipeMerlin() {
+    }
+
+    public String getName() {
+        return properties.getString(RECIPE_NAME_KEY);
+    }
+
+    public void setUniqueName(String prefix) {
+        properties.setProperty(RECIPE_NAME_KEY, prefix + UUID.randomUUID().toString().split("-")[0]);
+    }
+
+    public String getSourceDir() {
+        return properties.getString("drSourceDir");
+    }
+
+    public RecipeMerlin withSourceDir(final String srcDir) {
+        properties.setProperty("drSourceDir", srcDir);
+        return this;
+    }
+
+    public String getTargetDir() {
+        return properties.getString("drTargetDir");
+    }
+
+    public RecipeMerlin withTargetDir(final String tgtDir) {
+        properties.setProperty("drTargetDir", tgtDir);
+        return this;
+    }
+
+    public String getSourceDb() {
+        return StringUtils.join(properties.getStringArray("sourceDatabase"), ',');
+    }
+
+    public RecipeMerlin withSourceDb(final String srcDatabase) {
+        properties.setProperty("sourceDatabase", srcDatabase);
+        return this;
+    }
+
+    public String getSourceTable() {
+        return StringUtils.join(properties.getStringArray("sourceTable"), ',');
+    }
+
+    public RecipeMerlin withSourceTable(final String tgtTable) {
+        properties.setProperty("sourceTable", tgtTable);
+        return this;
+    }
+
+    public RecipeMerlin withSourceCluster(ClusterMerlin sourceCluster) {
+        this.srcCluster = sourceCluster;
+        properties.setProperty("sourceCluster", sourceCluster.getName());
+        properties.setProperty("sourceMetastoreUri", sourceCluster.getProperty("hive.metastore.uris"));
+        properties.setProperty("sourceHiveServer2Uri", sourceCluster.getProperty("hive.server2.uri"));
+        //properties.setProperty("sourceServicePrincipal",
+        //    sourceCluster.getProperty("hive.metastore.kerberos.principal"));
+        properties.setProperty("sourceStagingPath", sourceCluster.getLocation("staging"));
+        properties.setProperty("sourceNN", sourceCluster.getInterfaceEndpoint(Interfacetype.WRITE));
+        properties.setProperty("sourceRM", sourceCluster.getInterfaceEndpoint(Interfacetype.EXECUTE));
+        return this;
+    }
+
+    public RecipeMerlin withTargetCluster(ClusterMerlin targetCluster) {
+        this.tgtCluster = targetCluster;
+        properties.setProperty("targetCluster", targetCluster.getName());
+        properties.setProperty("targetMetastoreUri", targetCluster.getProperty("hive.metastore.uris"));
+        properties.setProperty("targetHiveServer2Uri", targetCluster.getProperty("hive.server2.uri"));
+        //properties.setProperty("targetServicePrincipal",
+        //    targetCluster.getProperty("hive.metastore.kerberos.principal"));
+        properties.setProperty("targetStagingPath", targetCluster.getLocation("staging"));
+        properties.setProperty("targetNN", targetCluster.getInterfaceEndpoint(Interfacetype.WRITE));
+        properties.setProperty("targetRM", targetCluster.getInterfaceEndpoint(Interfacetype.EXECUTE));
+        return this;
+    }
+
+    public RecipeMerlin withRecipeCluster(ClusterMerlin paramRecipeCluster) {
+        this.recipeCluster = paramRecipeCluster;
+        properties.setProperty("falcon.recipe.cluster.name", paramRecipeCluster.getName());
+        properties.setProperty("falcon.recipe.cluster.hdfs.writeEndPoint",
+            paramRecipeCluster.getInterfaceEndpoint(Interfacetype.WRITE));
+        return this;
+    }
+
+    public RecipeMerlin withValidity(final String start, final String end) {
+        properties.setProperty("falcon.recipe.cluster.validity.start", start);
+        properties.setProperty("falcon.recipe.cluster.validity.end", end);
+        return this;
+    }
+
+    public String getValidityStart() {
+        return properties.getString("falcon.recipe.cluster.validity.start");
+    }
+
+    public String getValidityEnd() {
+        return properties.getString("falcon.recipe.cluster.validity.end");
+    }
+
+    public RecipeMerlin withFrequency(final Frequency frequency) {
+        properties.setProperty("falcon.recipe.process.frequency", frequency.toString());
+        return this;
+    }
+
+    public Frequency getFrequency() {
+        return Frequency.fromString(properties.getString("falcon.recipe.process.frequency"));
+    }
+
+    public String getMaxEvents() {
+        return properties.getString("maxEvents");
+    }
+
+    public String getReplicationMaxMaps() {
+        return properties.getString("replicationMaxMaps");
+    }
+
+    public String getDistCpMaxMaps() {
+        return properties.getString("distcpMaxMaps");
+    }
+
+    public String getMapBandwidth() {
+        return properties.getString("distcpMapBandwidth");
+    }
+
+    public Retry getRetry() {
+        final int retryAttempts = properties.getInt("falcon.recipe.retry.attempts");
+        final String retryDelay = properties.getString("falcon.recipe.retry.delay");
+        final String retryPolicy = properties.getString("falcon.recipe.retry.policy");
+
+        Retry retry = new Retry();
+        retry.setAttempts(retryAttempts);
+        retry.setDelay(Frequency.fromString(retryDelay));
+        retry.setPolicy(PolicyType.fromValue(retryPolicy));
+        return retry;
+    }
+
+    public ACL getAcl() {
+        ACL acl = new ACL();
+        acl.setOwner(properties.getString("falcon.recipe.acl.owner"));
+        acl.setGroup(properties.getString("falcon.recipe.acl.group"));
+        acl.setPermission(properties.getString("falcon.recipe.acl.permission"));
+        return acl;
+    }
+
+
+    /**
+     * Read recipe from a given directory. Expecting that recipe will follow these conventions.
+     * <br> 1. properties file will have .properties extension
+     * <br> 2. template file name will end with -template.xml
+     * <br> 3. workflow file name will end with -workflow.xml
+     * @param readPath the location from where recipe will be read
+     * @param recipeOperation operation of this recipe
+     */
+    public static RecipeMerlin readFromDir(final String readPath,
+                                           FalconCLI.RecipeOperation recipeOperation) {
+        Assert.assertTrue(StringUtils.isNotEmpty(readPath), "readPath for recipe can't be empty");
+        Assert.assertNotNull(recipeOperation, "readPath for recipe can't be empty");
+        RecipeMerlin instance = new RecipeMerlin();
+        instance.recipeOperation = recipeOperation;
+        LOGGER.info("Loading recipe from directory: " + readPath);
+        File directory = null;
+        try {
+            directory = new File(RecipeMerlin.class.getResource("/" + readPath).toURI());
+        } catch (URISyntaxException e) {
+            Assert.fail("could not find dir: " + readPath);
+        }
+        final Collection<File> propertiesFiles = FileUtils.listFiles(directory,
+            new RegexFileFilter(".*\\.properties"), FalseFileFilter.INSTANCE);
+        Assert.assertEquals(propertiesFiles.size(), 1,
+            "Expecting only one property file at: " + readPath +" found: " + propertiesFiles);
+        try {
+            instance.properties =
+                new PropertiesConfiguration(propertiesFiles.iterator().next());
+        } catch (ConfigurationException e) {
+            Assert.fail("Couldn't read recipe's properties file because of exception: "
+                + ExceptionUtils.getStackTrace(e));
+        }
+        instance.properties.setFileName(null); //prevent accidental overwrite of template
+        //removing defaults - specific tests need to supply these values themselves
+        instance.properties.clearProperty("sourceDatabase");
+        instance.properties.clearProperty("sourceTable");
+        instance.properties.clearProperty("targetDatabase");
+        instance.properties.clearProperty("targetTable");
+        instance.properties.setProperty("falcon.recipe.acl.owner", MerlinConstants.CURRENT_USER_NAME);
+        instance.properties.setProperty("falcon.recipe.acl.group", MerlinConstants.CURRENT_USER_GROUP);
+        instance.properties.setProperty("falcon.recipe.acl.permission", "*");
+
+        final Collection<File> templatesFiles = FileUtils.listFiles(directory,
+            new RegexFileFilter(".*-template\\.xml"), FalseFileFilter.INSTANCE);
+        Assert.assertEquals(templatesFiles.size(), 1,
+            "Expecting only one template file at: " + readPath + " found: " + templatesFiles);
+        try {
+            instance.template =
+                FileUtils.readFileToString(templatesFiles.iterator().next());
+        } catch (IOException e) {
+            Assert.fail("Couldn't read recipe's template file because of exception: "
+                + ExceptionUtils.getStackTrace(e));
+        }
+
+        final Collection<File> workflowFiles = FileUtils.listFiles(directory,
+            new RegexFileFilter(".*-workflow\\.xml"), FalseFileFilter.INSTANCE);
+        Assert.assertEquals(workflowFiles.size(), 1,
+            "Expecting only one workflow file at: " + readPath + " found: " + workflowFiles);
+        try {
+            instance.workflow = FileUtils.readFileToString(workflowFiles.iterator().next());
+        } catch (IOException e) {
+            Assert.fail("Couldn't read recipe's workflow file because of exception: "
+                + ExceptionUtils.getStackTrace(e));
+        }
+        return instance;
+    }
+
+    /**
+     * Write recipe.
+     */
+    private void write() {
+        final String templateFileLocation = OSUtil.concat(WRITE_DIR, getName() + "-template.xml");
+        try {
+            Assert.assertNotNull(templateFileLocation,
+                "Write location for template file is unexpectedly null.");
+            FileUtils.writeStringToFile(new File(templateFileLocation), template);
+        } catch (IOException e) {
+            Assert.fail("Couldn't write recipe's template file because of exception: "
+                + ExceptionUtils.getStackTrace(e));
+        }
+
+        final String workflowFileLocation = OSUtil.concat(WRITE_DIR, getName() + "-workflow.xml");
+        try {
+            Assert.assertNotNull(workflowFileLocation,
+                "Write location for workflow file is unexpectedly null.");
+            FileUtils.writeStringToFile(new File(workflowFileLocation), workflow);
+        } catch (IOException e) {
+            Assert.fail("Couldn't write recipe's workflow file because of exception: "
+                + ExceptionUtils.getStackTrace(e));
+        }
+        properties.setProperty(WORKFLOW_PATH_KEY, workflowFileLocation);
+        properties.setProperty("falcon.recipe.workflow.name", getName() + "-workflow");
+
+        final String propFileLocation = OSUtil.concat(WRITE_DIR, getName() + ".properties");
+        try {
+            Assert.assertNotNull(propFileLocation,
+                "Write location for properties file is unexpectedly null.");
+            properties.save(new File(propFileLocation));
+        } catch (ConfigurationException e) {
+            Assert.fail("Couldn't write recipe's process file because of exception: "
+                + ExceptionUtils.getStackTrace(e));
+        }
+    }
+
+    /**
+     * Get submission command.
+     */
+    public List<String> getSubmissionCommand() {
+        write();
+        final List<String> cmd = new ArrayList<>();
+        Collections.addAll(cmd, "recipe", "-name", getName(),
+            "-operation", recipeOperation.toString());
+        return cmd;
+    }
+
+    /**
+     * Get tags of the recipe.
+     */
+    public List<String> getTags() {
+        final String tagsStr = properties.getString("falcon.recipe.tags");
+        if (StringUtils.isEmpty(tagsStr)) {
+            return new ArrayList<>();
+        }
+        return Arrays.asList(tagsStr.split(","));
+    }
+
+    /**
+     * Set tags for recipe.
+     */
+    public void setTags(List<String> tags) {
+        properties.setProperty("falcon.recipe.tags", StringUtils.join(tags, ','));
+    }
+}

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/bundle/Bundle.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/bundle/Bundle.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/bundle/Bundle.java
index f463f91..67d9ee2 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/bundle/Bundle.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/bundle/Bundle.java
@@ -19,6 +19,7 @@
 package org.apache.falcon.regression.core.bundle;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.falcon.cli.FalconCLI;
 import org.apache.falcon.entity.v0.Frequency;
 import org.apache.falcon.entity.v0.Frequency.TimeUnit;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
@@ -915,4 +916,12 @@ public class Bundle {
         processMerlin.setTags(value);
         setProcessData(processMerlin.toString());
     }
+
+
+    public static int runFalconCLI(List<String> args) throws Exception {
+        args.add(1, "-url");
+        args.add(2, prismHelper.getClusterHelper().getHostname());
+        LOGGER.info("Going to run falcon jar with args: " + args);
+        return new FalconCLI().run(args.toArray(new String[]{}));
+    }
 }

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/helpers/entity/AbstractEntityHelper.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/helpers/entity/AbstractEntityHelper.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/helpers/entity/AbstractEntityHelper.java
index 50d7439..7b8d111 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/helpers/entity/AbstractEntityHelper.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/helpers/entity/AbstractEntityHelper.java
@@ -20,6 +20,7 @@ package org.apache.falcon.regression.core.helpers.entity;
 
 import com.jcraft.jsch.JSchException;
 import org.apache.commons.exec.CommandLine;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.falcon.regression.core.helpers.FalconClientBuilder;
 import org.apache.falcon.regression.core.response.ServiceResponse;
@@ -28,6 +29,7 @@ import org.apache.falcon.regression.core.util.Config;
 import org.apache.falcon.regression.core.util.ExecUtil;
 import org.apache.falcon.regression.core.util.FileUtil;
 import org.apache.falcon.regression.core.util.HCatUtil;
+import org.apache.falcon.regression.core.util.HiveUtil;
 import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OozieUtil;
 import org.apache.falcon.regression.core.util.Util;
@@ -38,7 +40,6 @@ import org.apache.falcon.resource.InstancesSummaryResult;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import org.apache.commons.lang.StringUtils;
 import org.apache.hive.hcatalog.api.HCatClient;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.log4j.Logger;
@@ -47,6 +48,8 @@ import org.testng.Assert;
 
 import java.io.IOException;
 import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
 import java.util.List;
 
 /** Abstract class for helper classes. */
@@ -116,6 +119,21 @@ public abstract class AbstractEntityHelper {
         return this.hCatClient;
     }
 
+    protected Connection hiveJdbcConnection;
+
+    public Connection getHiveJdbcConnection() {
+        if (null == hiveJdbcConnection) {
+            try {
+                hiveJdbcConnection =
+                    HiveUtil.getHiveJdbcConnection(hiveJdbcUrl, hiveJdbcUser, hiveJdbcPassword, hiveMetaStorePrincipal);
+            } catch (ClassNotFoundException | SQLException | InterruptedException | IOException e) {
+                Assert.fail("Unable to create hive jdbc connection because of exception:\n"
+                    + ExceptionUtils.getStackTrace(e));
+            }
+        }
+        return hiveJdbcConnection;
+    }
+
     //basic properties
     protected String qaHost;
 
@@ -140,6 +158,9 @@ public abstract class AbstractEntityHelper {
     protected String serviceStopCmd;
     protected String serviceStatusCmd;
     protected String hcatEndpoint = "";
+    protected String hiveJdbcUrl = "";
+    protected String hiveJdbcUser = "";
+    protected String hiveJdbcPassword = "";
 
     public String getNamenodePrincipal() {
         return namenodePrincipal;
@@ -241,6 +262,10 @@ public abstract class AbstractEntityHelper {
         this.namenodePrincipal = Config.getProperty(prefix + "namenode.kerberos.principal", "none");
         this.hiveMetaStorePrincipal = Config.getProperty(
                 prefix + "hive.metastore.kerberos.principal", "none");
+        this.hiveJdbcUrl = Config.getProperty(prefix + "hive.jdbc.url", "none");
+        this.hiveJdbcUser =
+            Config.getProperty(prefix + "hive.jdbc.user", System.getProperty("user.name"));
+        this.hiveJdbcPassword = Config.getProperty(prefix + "hive.jdbc.password", "");
     }
 
     public abstract String getEntityType();

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Edge.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Edge.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Edge.java
index a230db0..c1a7eb8 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Edge.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Edge.java
@@ -50,6 +50,13 @@ public class Edge extends GraphEntity {
         @SerializedName("first")FIRST,
         @SerializedName("second")SECOND,
         @SerializedName("third")THIRD,
+        @SerializedName("fourth")FOURTH,
+        @SerializedName("fifth")FIFTH,
+        @SerializedName("sixth")SIXTH,
+        @SerializedName("seventh")SEVENTH,
+        @SerializedName("eighth")EIGHTH,
+        @SerializedName("ninth")NINTH,
+        @SerializedName("tenth")TENTH,
         @SerializedName("value")VALUE,
     }
     @SerializedName("_id")

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/supportClasses/NotifyingAssert.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/supportClasses/NotifyingAssert.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/supportClasses/NotifyingAssert.java
new file mode 100644
index 0000000..52b4fd3
--- /dev/null
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/supportClasses/NotifyingAssert.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression.core.supportClasses;
+
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.log4j.Logger;
+import org.testng.asserts.IAssert;
+import org.testng.asserts.SoftAssert;
+import org.testng.collections.Maps;
+
+import java.util.Map;
+
+/**
+ * NotifyingAssert: This is the same as the SoftAssert provided by TestNG. Additionally, it adds
+ * an option of printing the stacktrace whenever an assertion fails.
+ */
+public class NotifyingAssert extends SoftAssert {
+    private final boolean printFailures;
+    // LinkedHashMap to preserve the order in which failures occurred
+    private Map<AssertionError, IAssert> mErrors = Maps.newLinkedHashMap();
+    private static final Logger LOGGER = Logger.getLogger(NotifyingAssert.class);
+
+    /**
+     * Same as SoftAssert - just adds an option for logging assertion failure stacktraces.
+     * @param logFailures - switches on printing of stacktrace in logs on failures.
+     */
+    public NotifyingAssert(boolean logFailures) {
+        this.printFailures = logFailures;
+    }
+
+    @Override
+    public void executeAssert(IAssert a) {
+        try {
+            a.doAssert();
+        } catch(AssertionError ex) {
+            onAssertFailure(a, ex);
+            mErrors.put(ex, a);
+            if (printFailures) {
+                LOGGER.info("Assertion failed - exception : " + ex + "\n"
+                    + ExceptionUtils.getStackTrace(ex));
+            }
+        }
+    }
+
+    /** Throws a single AssertionError summarizing all recorded failures, if there were any. */
+    @Override
+    public void assertAll() {
+        if (!mErrors.isEmpty()) {
+            StringBuilder sb = new StringBuilder("The following asserts failed:\n");
+            boolean first = true;
+            for (Map.Entry<AssertionError, IAssert> ae : mErrors.entrySet()) {
+                if (first) {
+                    first = false;
+                } else {
+                    sb.append(", ");
+                }
+                sb.append(ae.getValue().getMessage());
+            }
+            throw new AssertionError(sb.toString());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/AssertUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/AssertUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/AssertUtil.java
index 095e6f4..f984457 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/AssertUtil.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/AssertUtil.java
@@ -217,8 +217,8 @@ public final class AssertUtil {
      */
     public static void assertFailed(ExecResult execResult, String expectedMessage) {
         Assert.assertFalse(execResult.hasSuceeded(),
-                "Unexpectedly succeeded execResult: " + execResult);
-        Assert.assertTrue(execResult.getError().contains(expectedMessage),
+            "Unexpectedly succeeded execResult: " + execResult);
+        Assert.assertTrue((execResult.getError() + execResult.getOutput()).contains(expectedMessage),
             "Expected error: " + expectedMessage + " in execResult: " + execResult);
     }
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/ExecUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/ExecUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/ExecUtil.java
index 0a8e0df..a792f62 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/ExecUtil.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/ExecUtil.java
@@ -150,13 +150,16 @@ public final class ExecUtil {
         final ByteArrayOutputStream errStream = new ByteArrayOutputStream();
         executor.setStreamHandler(new PumpStreamHandler(outStream, errStream));
         int exitVal = 1;
+        String exception = "";
         try {
             exitVal = executor.execute(commandLine);
         } catch (IOException e) {
             LOGGER.warn("Caught exception: " + e);
+            exception = e.toString();
         }
         final String output = outStream.toString();
-        final String errors = errStream.toString();
+        String errors = errStream.toString();
+        errors = errors.isEmpty() ? exception : errors;
 
         LOGGER.info("exitVal: " + exitVal);
         LOGGER.info("output: " + output);

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HadoopUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HadoopUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HadoopUtil.java
index afcb9c5..a3b059e 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HadoopUtil.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HadoopUtil.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.log4j.Logger;
 
 import java.io.File;
@@ -35,6 +36,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.UUID;
 import java.util.regex.Pattern;
 
 /**
@@ -62,6 +64,18 @@ public final class HadoopUtil {
         return path;
     }
 
+    /**
+     * Joins path parts into a single path using "/" as separator. At most one trailing
+     * separator is stripped from basePath, and at most one leading and one trailing
+     * separator is stripped from each of the rest parts, before joining.
+     * @param basePath base path to start from
+     * @param restParts remaining path components to append
+     * @return the joined path string
+     */
+    public static String joinPath(String basePath, String... restParts) {
+        final String separator = "/";
+        List<String> cleanParts = new ArrayList<>();
+        // replaceFirst with a "$"-anchored pattern removes only a single trailing separator
+        String cleanBasePath = basePath.replaceFirst(separator + "$", "");
+        cleanParts.add(cleanBasePath);
+        for (String onePart : restParts) {
+            final String cleanPart = onePart.replaceFirst("^" + separator, "").replaceFirst(separator + "$", "");
+            cleanParts.add(cleanPart);
+        }
+        return StringUtils.join(cleanParts, separator);
+    }
+
     /**
      * Retrieves all file names contained in a given directory.
      * @param fs filesystem
@@ -195,6 +209,34 @@ public final class HadoopUtil {
     }
 
     /**
+     * Copies given data to an hdfs location, staging it through a local temp file.
+     * @param fs target filesystem
+     * @param dstHdfsDir destination dir on hdfs
+     * @param data the data to be written
+     * @param overwrite whether any existing data at the destination should be deleted first
+     * @throws IOException
+     */
+    public static void writeDataForHive(final FileSystem fs, final String dstHdfsDir,
+        final CharSequence data, boolean overwrite) throws IOException {
+        LOGGER.info(String.format("Writing data %s to hdfs location %s", data, dstHdfsDir));
+        // stage the data in a local temp file first, then copy that file up to hdfs
+        // NOTE(review): FileUtils.write without an explicit charset uses the platform default
+        // encoding - confirm this is acceptable for the test data being written
+        final File tempFile = File.createTempFile(UUID.randomUUID().toString().split("-")[0], ".dat");
+        FileUtils.write(tempFile, data);
+        if (overwrite) {
+            HadoopUtil.deleteDirIfExists(dstHdfsDir, fs);
+        }
+        try {
+            fs.mkdirs(new Path(dstHdfsDir));
+        } catch (Exception e) {
+            //ignore - the destination dir may already exist
+        }
+        fs.setPermission(new Path(dstHdfsDir), FsPermission.getDirDefault());
+        HadoopUtil.copyDataToFolder(fs, dstHdfsDir, tempFile.getAbsolutePath());
+        // temp file is only a staging copy; deletion failure is not fatal, just log it
+        if (!tempFile.delete()) {
+            LOGGER.warn("Deletion of " + tempFile + " failed.");
+        }
+    }
+
+    /**
      * Lists names of given directory subfolders.
      * @param fs filesystem
      * @param baseDir given directory

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HiveAssert.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HiveAssert.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HiveAssert.java
new file mode 100644
index 0000000..2a934b5
--- /dev/null
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HiveAssert.java
@@ -0,0 +1,269 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression.core.util;
+
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.falcon.regression.core.helpers.ColoHelper;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hive.hcatalog.api.HCatClient;
+import org.apache.hive.hcatalog.api.HCatDatabase;
+import org.apache.hive.hcatalog.api.HCatPartition;
+import org.apache.hive.hcatalog.api.HCatTable;
+import org.apache.hive.hcatalog.common.HCatException;
+import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.log4j.Logger;
+import org.testng.asserts.SoftAssert;
+
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/** Assertions for Hive objects. */
+public final class HiveAssert {
+    private HiveAssert() {
+        throw new AssertionError("Instantiating utility class...");
+    }
+
+    private static final Logger LOGGER = Logger.getLogger(HiveAssert.class);
+
+    /**
+     * Assertion for column equality - it also covers stuff that is not covered by
+     * HCatFieldSchema.equals().
+     * @param columns1 first column list for comparison (expected values)
+     * @param columns2 second column list for comparison (actual values)
+     * @param softAssert object to use for performing assertion
+     * @return object used for performing assertion
+     */
+    public static SoftAssert assertColumnListEqual(List<HCatFieldSchema> columns1,
+                                                   List<HCatFieldSchema> columns2,
+                                                   SoftAssert softAssert) {
+        softAssert.assertEquals(columns1, columns2, "List of columns for two tables are not same");
+        // iterate only over the common prefix so that a size mismatch (already recorded as a
+        // soft failure above) cannot escape the soft-assert flow via IndexOutOfBoundsException
+        final int commonSize = Math.min(columns1.size(), columns2.size());
+        for (int i = 0; i < commonSize; ++i) {
+            HCatFieldSchema column1 = columns1.get(i);
+            HCatFieldSchema column2 = columns2.get(i);
+            // column comments are not covered by HCatFieldSchema.equals(), compare explicitly
+            softAssert.assertEquals(column2.getComment(), column1.getComment(),
+                "Comments of the columns: " + column1 + " & " + column2 + " is not same");
+        }
+        return softAssert;
+    }
+
+    /**
+     * Assertion for equality of partitions - equality using HCatPartition.equals() is not
+     * satisfactory for our purpose.
+     * @param table1Partitions first list of partitions for comparison
+     * @param table2Partitions second list of partitions for comparison
+     * @param softAssert object to use for performing assertion
+     * @return object used for performing assertion
+     */
+    public static SoftAssert assertPartitionListEqual(List<HCatPartition> table1Partitions,
+        List<HCatPartition> table2Partitions, SoftAssert softAssert) {
+        softAssert.assertEquals(table1Partitions.size(), table2Partitions.size(),
+            "Number of partitions are not same");
+        try {
+            for (int i = 0; i < table1Partitions.size(); i++) {
+                final HCatPartition table1Partition = table1Partitions.get(i);
+                final HCatPartition table2Partition = table2Partitions.get(i);
+                softAssert.assertEquals(table2Partition.getValues(), table1Partition.getValues(),
+                    "Partitions don't have same values");
+            }
+        } catch (Exception e) {
+            // a size mismatch or retrieval problem is reported as a soft failure, not thrown
+            softAssert.fail("Couldn't do partition equality.", e);
+        }
+        return softAssert;
+    }
+
+    /**
+     * Assertion for equality of two tables (including table properties and table type).
+     * @param cluster1 the ColoHelper of first cluster
+     * @param table1 the first table
+     * @param cluster2 the ColoHelper of second cluster
+     * @param table2 the second table
+     * @param softAssert object used for performing assertion
+     * @return object used for performing assertion
+     * @throws java.io.IOException
+     */
+    public static SoftAssert assertTableEqual(ColoHelper cluster1, HCatTable table1,
+                                              ColoHelper cluster2, HCatTable table2,
+                                              SoftAssert softAssert) throws IOException {
+        return assertTableEqual(cluster1, table1, cluster2, table2, softAssert, true);
+    }
+
+    /**
+     * Assertion for equality of two tables.
+     * @param cluster1 the ColoHelper of first cluster
+     * @param table1 the first table (expected values)
+     * @param cluster2 the ColoHelper of second cluster
+     * @param table2 the second table (actual values)
+     * @param softAssert object used for performing assertion
+     * @param notIgnoreTblTypeAndProps when true also compare table type, properties and size
+     * @return object used for performing assertion
+     * @throws java.io.IOException
+     */
+    public static SoftAssert assertTableEqual(ColoHelper cluster1, HCatTable table1,
+                                              ColoHelper cluster2, HCatTable table2,
+                                              SoftAssert softAssert,
+                                              boolean notIgnoreTblTypeAndProps) throws IOException {
+        FileSystem cluster1FS = cluster1.getClusterHelper().getHadoopFS();
+        FileSystem cluster2FS = cluster2.getClusterHelper().getHadoopFS();
+        final String table1FullName = table1.getDbName() + "." + table1.getTableName();
+        final String table2FullName = table2.getDbName() + "." + table2.getTableName();
+        LOGGER.info("Checking equality of table : " + table1FullName + " & " + table2FullName);
+        //table metadata equality
+        softAssert.assertEquals(table2.comment(), table1.comment(),
+            "Table " + table1FullName + " has different comment from " + table2FullName);
+        softAssert.assertEquals(table2.getBucketCols(), table1.getBucketCols(),
+            "Table " + table1FullName + " has different bucket columns from " + table2FullName);
+        assertColumnListEqual(table1.getCols(), table2.getCols(), softAssert);
+        softAssert.assertEquals(table2.getNumBuckets(), table1.getNumBuckets(),
+            "Table " + table1FullName + " has different number of buckets from " + table2FullName);
+        assertColumnListEqual(table1.getPartCols(), table2.getPartCols(), softAssert);
+        softAssert.assertEquals(table2.getSerdeParams(), table1.getSerdeParams(),
+            "Table " + table1FullName + " has different serde params from " + table2FullName);
+        softAssert.assertEquals(table2.getSortCols(), table1.getSortCols(),
+            "Table " + table1FullName + " has different sort columns from " + table2FullName);
+        softAssert.assertEquals(table2.getStorageHandler(), table1.getStorageHandler(),
+            "Table " + table1FullName + " has different storage handler from " + table2FullName);
+        if (notIgnoreTblTypeAndProps) {
+            softAssert.assertEquals(table2.getTabletype(), table1.getTabletype(),
+                "Table " + table1FullName + " has different Tabletype from " + table2FullName);
+        }
+        // drop properties that legitimately differ between a table and its replica
+        final Map<String, String> tbl1Props = table1.getTblProps();
+        final Map<String, String> tbl2Props = table2.getTblProps();
+        final String[] ignoreTblProps = {"transient_lastDdlTime", "repl.last.id",
+            "last_modified_by", "last_modified_time", "COLUMN_STATS_ACCURATE", };
+        for (String ignoreTblProp : ignoreTblProps) {
+            tbl1Props.remove(ignoreTblProp);
+            tbl2Props.remove(ignoreTblProp);
+        }
+        // "-1" means the statistic was never computed - treat it the same as absent
+        final String[] ignoreDefaultProps = {"numRows", "rawDataSize"};
+        for (String ignoreProp : ignoreDefaultProps) {
+            if ("-1".equals(tbl1Props.get(ignoreProp))) {
+                tbl1Props.remove(ignoreProp);
+            }
+            if ("-1".equals(tbl2Props.get(ignoreProp))) {
+                tbl2Props.remove(ignoreProp);
+            }
+        }
+
+        if (notIgnoreTblTypeAndProps) {
+            softAssert.assertEquals(tbl2Props, tbl1Props,
+                "Table " + table1FullName + " has different TblProps from " + table2FullName);
+        }
+        LOGGER.info("Checking equality of table partitions");
+        HCatClient hcatClient1 = cluster1.getClusterHelper().getHCatClient();
+        HCatClient hcatClient2 = cluster2.getClusterHelper().getHCatClient();
+        final List<HCatPartition> table1Partitions =
+            hcatClient1.getPartitions(table1.getDbName(), table1.getTableName());
+        final List<HCatPartition> table2Partitions =
+            hcatClient2.getPartitions(table2.getDbName(), table2.getTableName());
+        assertPartitionListEqual(table1Partitions, table2Partitions, softAssert);
+        if (notIgnoreTblTypeAndProps) {
+            softAssert.assertEquals(
+                cluster2FS.getContentSummary(new Path(table2.getLocation())).getLength(),
+                cluster1FS.getContentSummary(new Path(table1.getLocation())).getLength(),
+                "Size of content for table1 and table2 are different");
+        }
+
+        //table content equality
+        LOGGER.info("Checking equality of table contents");
+        Statement jdbcStmt1 = null, jdbcStmt2 = null;
+        try {
+            final boolean execute1;
+            final boolean execute2;
+            jdbcStmt1 = cluster1.getClusterHelper().getHiveJdbcConnection().createStatement();
+            jdbcStmt2 = cluster2.getClusterHelper().getHiveJdbcConnection().createStatement();
+            execute1 = jdbcStmt1.execute("select * from " + table1FullName);
+            execute2 = jdbcStmt2.execute("select * from " + table2FullName);
+            softAssert.assertEquals(execute2, execute1,
+                "Table " + table1FullName + " has different result of select * from " + table2FullName);
+            if (execute1 && execute2) {
+                final ResultSet resultSet1 = jdbcStmt1.getResultSet();
+                final ResultSet resultSet2 = jdbcStmt2.getResultSet();
+                final List<String> rows1 = HiveUtil.fetchRows(resultSet1);
+                final List<String> rows2 = HiveUtil.fetchRows(resultSet2);
+                softAssert.assertEquals(rows2, rows1,
+                    "Table " + table1FullName + " has different content from " + table2FullName);
+            }
+        } catch (SQLException e) {
+            softAssert.fail("Comparison of content of table " + table1FullName
+                + " with content of table " + table2FullName + " failed because of exception\n"
+                + ExceptionUtils.getFullStackTrace(e));
+        } finally {
+            if (jdbcStmt1 != null) {
+                try {
+                    jdbcStmt1.close();
+                } catch (SQLException e) {
+                    LOGGER.warn("Closing of jdbcStmt1 failed: " + ExceptionUtils.getFullStackTrace(e));
+                }
+            }
+            if (jdbcStmt2 != null) {
+                try {
+                    jdbcStmt2.close();
+                } catch (SQLException e) {
+                    LOGGER.warn("Closing of jdbcStmt2 failed: " + ExceptionUtils.getFullStackTrace(e));
+                }
+            }
+        }
+        return softAssert;
+    }
+
+    /**
+     * Assertion for equality of two dbs.
+     * @param cluster1 the ColoHelper of first cluster
+     * @param db1 first database for comparison (expected values)
+     * @param cluster2 the ColoHelper of second cluster
+     * @param db2 second database for comparison (actual values)
+     * @param softAssert object used for performing assertion
+     * @return object used for performing assertion
+     * @throws java.io.IOException
+     */
+    public static SoftAssert assertDbEqual(ColoHelper cluster1, HCatDatabase db1,
+                                           ColoHelper cluster2, HCatDatabase db2,
+                                           SoftAssert softAssert) throws IOException {
+        HCatClient hcatClient1 = cluster1.getClusterHelper().getHCatClient();
+        HCatClient hcatClient2 = cluster2.getClusterHelper().getHCatClient();
+        //check database name equality
+        final String db1Name = db1.getName();
+        final String db2Name = db2.getName();
+        softAssert.assertEquals(db2.getComment(), db1.getComment(), "Comment differ for the dbs");
+        //check database properties equality
+        softAssert.assertEquals(db2.getProperties(), db1.getProperties(),
+            "Database " + db1Name + " has different properties from " + db2Name);
+        //checking table equality
+        final List<String> db1tableNames = hcatClient1.listTableNamesByPattern(db1Name, ".*");
+        final List<String> db2tableNames = hcatClient2.listTableNamesByPattern(db2Name, ".*");
+        Collections.sort(db1tableNames);
+        Collections.sort(db2tableNames);
+        // db1 holds the expected values, db2 the actual ones - label the message accordingly
+        softAssert.assertEquals(db2tableNames, db1tableNames,
+            "Table names are not same. Actual: " + db2tableNames + " Expected: " + db1tableNames);
+        for (String tableName : db1tableNames) {
+            try {
+                assertTableEqual(cluster1, hcatClient1.getTable(db1Name, tableName),
+                    cluster2, hcatClient2.getTable(db2Name, tableName), softAssert);
+            } catch (HCatException e) {
+                softAssert.fail("Table equality check threw exception.", e);
+            }
+        }
+        return softAssert;
+    }
+}

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HiveUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HiveUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HiveUtil.java
new file mode 100644
index 0000000..293a210
--- /dev/null
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/HiveUtil.java
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression.core.util;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Utility class for stuff related to hive. All the methods in this class assume that they are
+ * dealing with a small dataset.
+ */
+public final class HiveUtil {
+
+    private HiveUtil() {
+        throw new AssertionError("Instantiating utility class...");
+    }
+    private static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";
+
+    private static final Logger LOGGER = Logger.getLogger(HiveUtil.class);
+
+    /**
+     * Opens a hive jdbc connection for the given user, supporting http transport mode and
+     * kerberos-secured clusters.
+     * @param jdbcUrl jdbc url of the hive server
+     * @param user user to run the connection attempt as
+     * @param password password for the connection
+     * @param hivePrincipal kerberos principal of the hive server (used only on secure clusters)
+     * @return an open jdbc connection
+     * @throws ClassNotFoundException if the hive jdbc driver is not on the classpath
+     * @throws SQLException if the connection cannot be established
+     * @throws IOException
+     * @throws InterruptedException
+     */
+    public static Connection getHiveJdbcConnection(final String jdbcUrl, final String user, final String password,
+                                                   final String hivePrincipal)
+        throws ClassNotFoundException, SQLException, IOException, InterruptedException {
+        // NOTE(review): transport mode is read from the *local* hive configuration - this
+        // assumes it matches the mode the remote hive server is actually running in; verify
+        final String transportMode = new HiveConf().get("hive.server2.transport.mode", "binary");
+        String connectionStringSuffix = "";
+        if (transportMode.equalsIgnoreCase("http")) {
+            connectionStringSuffix += "transportMode=http;httpPath=cliservice;";
+        }
+        if (MerlinConstants.IS_SECURE) {
+            connectionStringSuffix += String.format("principal=%s;kerberosAuthType=fromSubject;", hivePrincipal);
+        }
+        final String connectionStringSuffix2 = connectionStringSuffix;
+        // connect inside a doAs so the kerberos credentials of the requested user are used
+        final UserGroupInformation ugi = KerberosHelper.getUGI(user);
+        final Connection conn = ugi.doAs(new PrivilegedExceptionAction<Connection>() {
+            @Override
+            public Connection run() throws Exception {
+                Class.forName(DRIVER_NAME);
+                return DriverManager.getConnection(jdbcUrl + "/;" + connectionStringSuffix2, ugi.getShortUserName(),
+                    password);
+            }
+        });
+
+        return conn;
+    }
+
+    /**
+     * Fetch rows from a given ResultSet and convert them to a list of strings, each string being
+     * comma separated quoted column values. The output also has a header with column names and a
+     * footer with the number of rows returned. Note that the header line is sorted together with
+     * the data rows (see the sort below), so it is not necessarily first in the returned list.
+     * @param rs result set
+     * @return list of strings - each string corresponds to the output that you would get on a
+     * sql prompt
+     * @throws SQLException
+     */
+    public static List<String> fetchRows(ResultSet rs) throws SQLException {
+        ResultSetMetaData metaData = rs.getMetaData();
+        List<String> output = new ArrayList<String>();
+
+        // header line: quoted column names, comma separated
+        int numberOfColumns = metaData.getColumnCount();
+        StringBuilder sbCol = new StringBuilder();
+        for (int i = 1; i <= numberOfColumns; i++) {
+            if (i > 1) {
+                sbCol.append(",");
+            }
+            String columnName = metaData.getColumnName(i);
+            // the column name looks like tab1.col1
+            // we want to remove table name else table equality will fail
+            if (columnName.contains(".")) {
+                columnName = columnName.split("\\.")[1];
+            }
+            sbCol.append("'").append(columnName).append("'");
+        }
+        LOGGER.info(sbCol.toString());
+        output.add(sbCol.toString());
+
+        // data lines: quoted column values, nulls rendered as empty strings
+        int numberOfRows = 0;
+        while (rs.next()) {
+            StringBuilder sbVal = new StringBuilder();
+            numberOfRows++;
+            for (int i = 1; i <= numberOfColumns; i++) {
+                if (i > 1) {
+                    sbVal.append(",");
+                }
+                String columnValue = rs.getString(i);
+                sbVal.append("'").append(columnValue != null ? columnValue : "").append("'");
+            }
+            LOGGER.info(sbVal.toString());
+            output.add(sbVal.toString());
+        }
+        Collections.sort(output); //sorting to ensure stable results across different runs
+        String rowStr = (numberOfRows > 0 ? numberOfRows : "No")
+            + (numberOfRows == 1 ? " row" : " rows") + " selected";
+        LOGGER.info(rowStr);
+        output.add(rowStr);
+        return output;
+    }
+
+    /**
+     * Run a sql using given connection.
+     * @param connection The connection to be used for running sql
+     * @param sql the sql to be run
+     * @throws SQLException
+     * @return output of the query as a List of strings, empty if the statement produced no
+     * result set
+     */
+    public static List<String> runSql(Connection connection, String sql) throws SQLException {
+        Statement stmt = null;
+        try {
+            stmt = connection.createStatement();
+            LOGGER.info("Executing: " + sql);
+            stmt.execute(sql);
+            // getResultSet() is null for statements such as DDL that return no rows
+            final ResultSet resultSet = stmt.getResultSet();
+            if (resultSet != null) {
+                final List<String> output = fetchRows(resultSet);
+                LOGGER.info("Results are:\n" + StringUtils.join(output, "\n"));
+                return output;
+            }
+            LOGGER.info("Query executed.");
+        } finally {
+            if (stmt != null) {
+                stmt.close();
+            }
+        }
+        return new ArrayList<>();
+    }
+}

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/LogUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/LogUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/LogUtil.java
index e4ce19d..e587704 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/LogUtil.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/LogUtil.java
@@ -324,7 +324,7 @@ public final class LogUtil {
     public static void writeOozieLogs(ColoHelper coloHelper, String logLocation) {
         final OozieClient oozieClient = coloHelper.getFeedHelper().getOozieClient();
         final String hostname = coloHelper.getClusterHelper().getQaHost();
-        final String oozieLogLocation = OSUtil.concat(logLocation + "oozie_logs", hostname);
+        final String oozieLogLocation = OSUtil.concat(logLocation, "oozie_logs", hostname);
         assert oozieLogLocation != null;
         final File directory = new File(oozieLogLocation);
         if (!directory.exists()) {

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin/pom.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/pom.xml b/falcon-regression/merlin/pom.xml
index 14b43ec..f5502d7 100644
--- a/falcon-regression/merlin/pom.xml
+++ b/falcon-regression/merlin/pom.xml
@@ -52,6 +52,27 @@
                     <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-hdfs</artifactId>
                 </dependency>
+
+                <dependency>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-azure</artifactId>
+                </dependency>
+
+                <dependency>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-distcp</artifactId>
+                </dependency>
+
+                <dependency>
+                    <groupId>org.apache.hive</groupId>
+                    <artifactId>hive-jdbc</artifactId>
+                </dependency>
+                <dependency>
+                    <groupId>org.apache.hive</groupId>
+                    <artifactId>hive-metastore</artifactId>
+                </dependency>
+
+
             </dependencies>
         </profile>
     </profiles>

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/testHelper/BaseUITestClass.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/testHelper/BaseUITestClass.java b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/testHelper/BaseUITestClass.java
index ba66851..09b8265 100644
--- a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/testHelper/BaseUITestClass.java
+++ b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/testHelper/BaseUITestClass.java
@@ -18,6 +18,7 @@
 
 package org.apache.falcon.regression.testHelper;
 
+import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
 import org.apache.falcon.regression.core.util.Config;
 import org.openqa.selenium.Dimension;
 import org.openqa.selenium.Point;
@@ -41,7 +42,7 @@ public class BaseUITestClass extends BaseTestClass{
     protected static void openBrowser() {
 
         FirefoxProfile profile = new FirefoxProfile();
-        profile.setPreference("network.negotiate-auth.trusted-uris", "http://, https://");
+        profile.setPreference("network.negotiate-auth.trusted-uris", MerlinConstants.PRISM_URL);
 
         driver = new FirefoxDriver(profile);
         driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS);

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/AbstractSearchPage.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/AbstractSearchPage.java b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/AbstractSearchPage.java
index e7d1ca2..1a94800 100644
--- a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/AbstractSearchPage.java
+++ b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/AbstractSearchPage.java
@@ -20,6 +20,7 @@ package org.apache.falcon.regression.ui.search;
 
 import com.google.common.util.concurrent.SimpleTimeLimiter;
 import com.google.common.util.concurrent.TimeLimiter;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
 import org.apache.falcon.regression.core.util.TimeUtil;
@@ -27,20 +28,22 @@ import org.apache.falcon.regression.ui.pages.Page;
 import org.apache.log4j.Logger;
 import org.openqa.selenium.By;
 import org.openqa.selenium.JavascriptExecutor;
+import org.openqa.selenium.TimeoutException;
 import org.openqa.selenium.WebDriver;
 import org.openqa.selenium.WebElement;
 import org.openqa.selenium.support.FindBy;
 import org.openqa.selenium.support.PageFactory;
+import org.openqa.selenium.support.ui.ExpectedCondition;
 import org.openqa.selenium.support.ui.Select;
+import org.openqa.selenium.support.ui.WebDriverWait;
 import org.testng.Assert;
 
+import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
 
-
-
 /** Parent page object for all the search ui pages. */
 public abstract class AbstractSearchPage extends Page {
 
@@ -158,16 +161,37 @@ public abstract class AbstractSearchPage extends Page {
     }
 
     public String getActiveAlertText() {
-        WebElement alertsBlock = driver.findElement(By.xpath("//div[@class='messages notifs']"));
-        if (alertsBlock.getAttribute("style").contains("opacity")) {
-            return alertsBlock.findElement(By.xpath("./div[last()]")).getText();
+        if (waitForAlert()) {
+            waitForAngularToFinish();
+            return driver.findElement(By.xpath("//div[@class='messages notifs']/div[last()]")).getText();
         } else {
             return null;
         }
     }
 
-    protected void waitForAlert() {
-        driver.findElements(
-            By.xpath("//div[@class='messages notifs' and contains(@style,'opacity')]"));
+    /**
+     * Wait for active alert.
+     * @return true if alert is present
+     */
+    protected boolean waitForAlert() {
+        final WebElement alertsBlock = driver.findElement(By.xpath("//div[@class='messages notifs']"));
+        try {
+            new WebDriverWait(driver, 5).until(new ExpectedCondition<Boolean>() {
+                @Nullable
+                @Override
+                public Boolean apply(WebDriver webDriver) {
+                    String style = alertsBlock.getAttribute("style");
+                    if (style.contains("opacity") && !style.contains("opacity: 1;")) {
+                        String alert = alertsBlock.findElement(By.xpath("./div[last()]")).getText();
+                        return StringUtils.isNotEmpty(alert);
+                    } else {
+                        return false;
+                    }
+                }
+            });
+            return true;
+        } catch (TimeoutException e) {
+            return false;
+        }
     }
 }

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/ClusterWizardPage.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/ClusterWizardPage.java b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/ClusterWizardPage.java
index 0d0abe6..dc419a7 100644
--- a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/ClusterWizardPage.java
+++ b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/ClusterWizardPage.java
@@ -88,6 +88,7 @@ public class ClusterWizardPage extends AbstractSearchPage {
             addProperty(property.getName(), property.getValue());
         }
         setLocations(cluster.getLocations().getLocations());
+        waitForAngularToFinish();
     }
 
     /**
@@ -415,6 +416,7 @@ public class ClusterWizardPage extends AbstractSearchPage {
      */
     public void clickNext() {
         next.click();
+        waitForAngularToFinish();
         Assert.assertTrue(summaryBox.isDisplayed(), "Summary box should be displayed.");
     }
 
@@ -431,11 +433,13 @@ public class ClusterWizardPage extends AbstractSearchPage {
      */
     public void clickPrevious() {
         previous.click();
+        waitForAngularToFinish();
         UIAssert.assertDisplayed(clusterBox, "Cluster box");
     }
 
     public void checkRegistry() {
         clusterBox.findElement(By.xpath("//input[@type='checkbox']")).click();
+        waitForAngularToFinish();
     }
 
     public String getInterfaceEndpoint(Interfacetype interfacetype) {

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/FeedWizardPage.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/FeedWizardPage.java b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/FeedWizardPage.java
index eb5ff87..f3a107c 100644
--- a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/FeedWizardPage.java
+++ b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/FeedWizardPage.java
@@ -316,6 +316,7 @@ public class FeedWizardPage extends AbstractSearchPage {
 
     public void clickNext(){
         nextButton.click();
+        waitForAngularToFinish();
     }
 
     public void clickPrevious(){
@@ -327,11 +328,13 @@ public class FeedWizardPage extends AbstractSearchPage {
     }
 
     public void clickEditXml(){
+        waitForAngularToFinish();
         editXmlButton.click();
     }
 
     public void clickCatalogStorageButton(){
         catalogStorageButton.click();
+        waitForAngularToFinish();
     }
 
     public void setFeedName(String name){
@@ -416,6 +419,7 @@ public class FeedWizardPage extends AbstractSearchPage {
     }
 
     public void addProperty(){
+        waitForAngularToFinish();
         addPropertyButton.click();
     }
 
@@ -579,6 +583,7 @@ public class FeedWizardPage extends AbstractSearchPage {
         setFeedACLPermissions(feed.getACL().getPermission());
         setFeedSchemaLocation(feed.getSchema().getLocation());
         setFeedSchemaProvider(feed.getSchema().getProvider());
+        waitForAngularToFinish();
     }
 
     // Enter feed info on Page 2 of FeedSetup Wizard
@@ -592,9 +597,11 @@ public class FeedWizardPage extends AbstractSearchPage {
         setFeedTimeZone();
         setFeedPropertyKey(0, feed.getProperties().getProperties().get(0).getName());
         setFeedPropertyValue(0, feed.getProperties().getProperties().get(0).getValue());
-        addPropertyButton.click();
+        addProperty();
+        waitForAngularToFinish();
         setFeedPropertyKey(1, feed.getProperties().getProperties().get(1).getName());
         setFeedPropertyValue(1, feed.getProperties().getProperties().get(1).getValue());
+        waitForAngularToFinish();
     }
 
     // Enter feed info on Page 3 of FeedSetup Wizard
@@ -602,7 +609,7 @@ public class FeedWizardPage extends AbstractSearchPage {
         setFeedPath(0, feed.getLocations().getLocations().get(0).getPath());
         setFeedPath(1, feed.getLocations().getLocations().get(1).getPath());
         setFeedPath(2, feed.getLocations().getLocations().get(2).getPath());
-
+        waitForAngularToFinish();
     }
 
     // Enter feed info on Page 4 of FeedSetup Wizard
@@ -628,18 +635,19 @@ public class FeedWizardPage extends AbstractSearchPage {
         setFeedClusterRetentionLimit("99");
         setFeedClusterRetentionUnit(feed.getClusters().getClusters().get(0)
             .getRetention().getLimit().getTimeUnit().name());
+        waitForAngularToFinish();
     }
 
     // setFeed method runs the default feed setup wizard, entering data on each page
     public void setFeed(FeedMerlin feed){
         setFeedGeneralInfo(feed);
-        nextButton.click();
+        clickNext();
         setFeedPropertiesInfo(feed);
-        nextButton.click();
+        clickNext();
         setFeedLocationInfo(feed);
-        nextButton.click();
+        clickNext();
         setFeedClustersInfo(feed);
-        nextButton.click();
+        clickNext();
         saveFeedButton.click();
         waitForAlert();
     }

http://git-wip-us.apache.org/repos/asf/falcon/blob/f9669000/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/LoginPage.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/LoginPage.java b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/LoginPage.java
index eb41e23..3193d21 100644
--- a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/LoginPage.java
+++ b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/LoginPage.java
@@ -78,16 +78,20 @@ public class LoginPage extends AbstractSearchPage {
 
     /** Login successfully and take to the next page i.e. search page. */
     public SearchPage doDefaultLogin() {
-        getUserTextBox().clear();
-        appendToUserName(UI_DEFAULT_USER);
-        tryLogin();
+        if (!MerlinConstants.IS_SECURE) {
+            getUserTextBox().clear();
+            appendToUserName(UI_DEFAULT_USER);
+            tryLogin();
+        }
         LOGGER.info("Search page should have opened.");
         final SearchPage searchPage = PageFactory.initElements(driver, SearchPage.class);
         searchPage.checkPage();
         final PageHeader searchHeader = searchPage.getPageHeader();
-        searchHeader.checkLoggedIn();
-        Assert.assertEquals(searchHeader.getLoggedInUser(), LoginPage.UI_DEFAULT_USER,
-            "Unexpected user is displayed");
+        if (!MerlinConstants.IS_SECURE) {
+            searchHeader.checkLoggedIn();
+            Assert.assertEquals(searchHeader.getLoggedInUser(), LoginPage.UI_DEFAULT_USER,
+                "Unexpected user is displayed");
+        }
         return searchPage;
     }
 


Mime
View raw message