falcon-commits mailing list archives

From samar...@apache.org
Subject [17/27] adding falcon-regression
Date Mon, 04 Aug 2014 10:04:16 GMT
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/bdcf001f/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedStatusTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedStatusTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedStatusTest.java
new file mode 100644
index 0000000..96af174
--- /dev/null
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedStatusTest.java
@@ -0,0 +1,179 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression;
+
+
+import org.apache.falcon.entity.v0.EntityType;
+import org.apache.falcon.regression.core.bundle.Bundle;
+import org.apache.falcon.regression.core.helpers.ColoHelper;
+import org.apache.falcon.regression.core.response.ServiceResponse;
+import org.apache.falcon.regression.core.util.AssertUtil;
+import org.apache.falcon.regression.core.util.BundleUtil;
+import org.apache.falcon.regression.core.util.OSUtil;
+import org.apache.falcon.regression.core.util.Util;
+import org.apache.falcon.regression.core.util.Util.URLS;
+import org.apache.falcon.regression.testHelper.BaseTestClass;
+import org.apache.log4j.Logger;
+import org.apache.oozie.client.Job;
+import org.apache.oozie.client.OozieClient;
+import org.testng.Assert;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.lang.reflect.Method;
+
+/**
+ * Feed status tests. Checks getStatus functionality.
+ */
+@Test(groups = "embedded")
+public class FeedStatusTest extends BaseTestClass {
+
+    private ColoHelper cluster = servers.get(0);
+    private OozieClient clusterOC = serverOC.get(0);
+    private String feed;
+    private String aggregateWorkflowDir = baseHDFSDir + "/FeedStatusTest/aggregator";
+    private static final Logger LOGGER = Logger.getLogger(FeedStatusTest.class);
+
+    @BeforeClass(alwaysRun = true)
+    public void uploadWorkflow() throws Exception {
+        uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
+    }
+
+    @BeforeMethod(alwaysRun = true)
+    public void setUp(Method method) throws Exception {
+        LOGGER.info("test name: " + method.getName());
+        bundles[0] = BundleUtil.readELBundle();
+        bundles[0].generateUniqueBundle();
+        bundles[0] = new Bundle(bundles[0], cluster);
+        bundles[0].setProcessWorkflow(aggregateWorkflowDir);
+
+        //submit the cluster
+        ServiceResponse response =
+            prism.getClusterHelper().submitEntity(URLS.SUBMIT_URL, bundles[0].getClusters().get(0));
+        AssertUtil.assertSucceeded(response);
+        feed = bundles[0].getInputFeedFromBundle();
+    }
+
+    @AfterMethod(alwaysRun = true)
+    public void tearDown() {
+        removeBundles();
+    }
+
+    /**
+     * Schedules a feed. Queries the feed status and checks that the response
+     * is correct and the reported status matches the expected one.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void getStatusForScheduledFeed() throws Exception {
+        ServiceResponse response =
+            prism.getFeedHelper().submitAndSchedule(URLS.SUBMIT_AND_SCHEDULE_URL, feed);
+        LOGGER.info("Feed: " + Util.prettyPrintXml(feed));
+        AssertUtil.assertSucceeded(response);
+
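+        //query the feed status through the prism helper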
+        response = prism.getFeedHelper().getStatus(URLS.STATUS_URL, feed);
+
+        AssertUtil.assertSucceeded(response);
+
+        String colo = prism.getFeedHelper().getColo();
+        Assert.assertTrue(response.getMessage().contains(colo + "/RUNNING"));
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.RUNNING);
+    }
+
+    /**
+     * Schedules and suspends a feed. Queries the feed status and checks that
+     * the response is correct and the reported status matches the expected one.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void getStatusForSuspendedFeed() throws Exception {
+        ServiceResponse response =
+            prism.getFeedHelper().submitAndSchedule(URLS.SUBMIT_AND_SCHEDULE_URL, feed);
+
+        AssertUtil.assertSucceeded(response);
+
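+        //suspend the scheduled feed before checking its status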
+        response = prism.getFeedHelper().suspend(URLS.SUSPEND_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().getStatus(URLS.STATUS_URL, feed);
+
+        AssertUtil.assertSucceeded(response);
+        String colo = prism.getFeedHelper().getColo();
+        Assert.assertTrue(response.getMessage().contains(colo + "/SUSPENDED"));
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.SUSPENDED);
+    }
+
+    /**
+     * Submits a feed. Queries the feed status and checks that the response
+     * is correct and the reported status matches the expected one.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void getStatusForSubmittedFeed() throws Exception {
+        ServiceResponse response = prism.getFeedHelper().submitEntity(URLS.SUBMIT_URL, feed);
+
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().getStatus(URLS.STATUS_URL, feed);
+
+        AssertUtil.assertSucceeded(response);
+        String colo = prism.getFeedHelper().getColo();
+        Assert.assertTrue(response.getMessage().contains(colo + "/SUBMITTED"));
+        AssertUtil.checkNotStatus(clusterOC, EntityType.FEED, feed, Job.Status.RUNNING);
+    }
+
+    /**
+     * Submits and removes a feed. Queries the feed status and checks that the
+     * request fails with a proper message.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void getStatusForDeletedFeed() throws Exception {
+        ServiceResponse response =
+            prism.getFeedHelper().submitEntity(URLS.SUBMIT_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
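+        //delete the feed; subsequent status queries should fail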
+        response = prism.getFeedHelper().delete(URLS.DELETE_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().getStatus(URLS.STATUS_URL, feed);
+        AssertUtil.assertFailed(response);
+
+        Assert.assertTrue(
+            response.getMessage().contains(Util.readEntityName(feed) + " (FEED) not found"));
+        AssertUtil.checkNotStatus(clusterOC, EntityType.FEED, feed, Job.Status.KILLED);
+    }
+
+    /**
+     * Queries the status of a feed which was never submitted and checks that the request fails.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void getStatusForNonExistentFeed() throws Exception {
+        ServiceResponse response = prism.getFeedHelper().getStatus(URLS.STATUS_URL, feed);
+        AssertUtil.assertFailed(response);
+        Assert.assertTrue(
+            response.getMessage().contains(Util.readEntityName(feed) + " (FEED) not found"));
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/bdcf001f/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitAndScheduleTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitAndScheduleTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitAndScheduleTest.java
new file mode 100644
index 0000000..ab13dd4
--- /dev/null
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitAndScheduleTest.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression;
+
+import org.apache.falcon.entity.v0.EntityType;
+import org.apache.falcon.regression.core.bundle.Bundle;
+import org.apache.falcon.regression.core.helpers.ColoHelper;
+import org.apache.falcon.regression.core.response.ServiceResponse;
+import org.apache.falcon.regression.core.util.AssertUtil;
+import org.apache.falcon.regression.core.util.BundleUtil;
+import org.apache.falcon.regression.core.util.InstanceUtil;
+import org.apache.falcon.regression.core.util.OSUtil;
+import org.apache.falcon.regression.core.util.OozieUtil;
+import org.apache.falcon.regression.core.util.Util;
+import org.apache.falcon.regression.core.util.Util.URLS;
+import org.apache.falcon.regression.testHelper.BaseTestClass;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.log4j.Logger;
+import org.apache.oozie.client.Job;
+import org.apache.oozie.client.OozieClient;
+import org.testng.Assert;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import javax.xml.bind.JAXBException;
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.net.URISyntaxException;
+
+/**
+ * Feed submit and schedule tests.
+ */
+@Test(groups = "embedded")
+public class FeedSubmitAndScheduleTest extends BaseTestClass {
+
+    private ColoHelper cluster = servers.get(0);
+    private OozieClient clusterOC = serverOC.get(0);
+    private String aggregateWorkflowDir = baseHDFSDir + "/FeedSubmitAndScheduleTest/aggregator";
+    private static final Logger LOGGER = Logger.getLogger(FeedSubmitAndScheduleTest.class);
+
+    @BeforeMethod(alwaysRun = true)
+    public void uploadWorkflow() throws Exception {
+        uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
+    }
+
+    @BeforeMethod(alwaysRun = true)
+    public void setUp(Method method) throws Exception {
+        LOGGER.info("test name: " + method.getName());
+        bundles[0] = BundleUtil.readELBundle();
+        bundles[0] = new Bundle(bundles[0], cluster);
+        bundles[0].generateUniqueBundle();
+        bundles[0].setProcessWorkflow(aggregateWorkflowDir);
+    }
+
+    @AfterMethod(alwaysRun = true)
+    public void tearDown() {
+        removeBundles();
+    }
+
+    @Test(groups = {"singleCluster"})
+    public void snsNewFeed() throws Exception {
+        submitFirstClusterScheduleFirstFeed();
+    }
+
+    /**
+     * Submits the cluster a feed depends on, then submits and schedules the feed.
+     *
+     * @throws JAXBException
+     * @throws IOException
+     * @throws URISyntaxException
+     * @throws AuthenticationException
+     */
+    private void submitFirstClusterScheduleFirstFeed()
+        throws JAXBException, IOException, URISyntaxException, AuthenticationException {
+        Assert.assertEquals(Util.parseResponse(prism.getClusterHelper()
+            .submitEntity(URLS.SUBMIT_URL, bundles[0].getClusters().get(0))).getStatusCode(), 200);
+        ServiceResponse response = prism.getFeedHelper()
+            .submitAndSchedule(URLS.SUBMIT_AND_SCHEDULE_URL, bundles[0].getDataSets().get(0));
+        AssertUtil.assertSucceeded(response);
+    }
+
+    /**
+     * Submits and schedules a feed, then tries to submit and schedule it again.
+     * Checks that the status hasn't changed and the response is successful.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void snsExistingFeed() throws Exception {
+        submitFirstClusterScheduleFirstFeed();
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, bundles[0], Job.Status.RUNNING);
+
+        //get created bundle id
+        String bundleId = InstanceUtil
+            .getLatestBundleID(cluster, Util.readEntityName(bundles[0].getDataSets().get(0)),
+                EntityType.FEED);
+
+        //try to submit and schedule the same feed again
+        ServiceResponse response = prism.getFeedHelper()
+            .submitAndSchedule(URLS.SUBMIT_AND_SCHEDULE_URL, bundles[0].getDataSets().get(0));
+        AssertUtil.assertSucceeded(response);
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, bundles[0], Job.Status.RUNNING);
+
+        //check that new bundle wasn't created
+        OozieUtil.verifyNewBundleCreation(cluster, bundleId, null,
+            bundles[0].getDataSets().get(0), false, false);
+    }
+
+    /**
+     * Tries to submit and schedule a feed without submitting the cluster it
+     * depends on. The request should fail.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void snsFeedWithoutCluster() throws Exception {
+        ServiceResponse response = prism.getFeedHelper()
+            .submitAndSchedule(URLS.SUBMIT_AND_SCHEDULE_URL, bundles[0].getDataSets().get(0));
+        AssertUtil.assertFailed(response);
+    }
+
+    /**
+     * Submits and schedules a feed, removes it, then submits and schedules the
+     * removed feed again. Checks the response and the feed status.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void snsDeletedFeed() throws Exception {
+        submitFirstClusterScheduleFirstFeed();
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, bundles[0], Job.Status.RUNNING);
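+        //delete the running feed; its Oozie bundle job should be KILLED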
+        Assert.assertEquals(
+            Util.parseResponse(prism.getFeedHelper()
+                .delete(URLS.DELETE_URL, bundles[0].getDataSets().get(0)))
+                .getStatusCode(), 200);
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, bundles[0], Job.Status.KILLED);
+
+        ServiceResponse response = prism.getFeedHelper()
+            .submitAndSchedule(URLS.SUBMIT_AND_SCHEDULE_URL, bundles[0].getDataSets().get(0));
+        AssertUtil.assertSucceeded(response);
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, bundles[0], Job.Status.RUNNING);
+    }
+
+    /**
+     * Schedules and suspends a feed, then submits and schedules it again. Checks
+     * that the response is successful and the feed status hasn't changed.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void snsSuspendedFeed() throws Exception {
+        submitFirstClusterScheduleFirstFeed();
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, bundles[0], Job.Status.RUNNING);
+        Assert.assertEquals(Util.parseResponse(
+                prism.getFeedHelper()
+                    .suspend(URLS.SUSPEND_URL, bundles[0].getDataSets().get(0)))
+                .getStatusCode(),
+            200);
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, bundles[0], Job.Status.SUSPENDED);
+        ServiceResponse response = prism.getFeedHelper()
+            .submitAndSchedule(URLS.SUBMIT_AND_SCHEDULE_URL, bundles[0].getDataSets().get(0));
+
+        AssertUtil.assertSucceeded(response);
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, bundles[0], Job.Status.SUSPENDED);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/bdcf001f/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitTest.java
new file mode 100644
index 0000000..a50c32a
--- /dev/null
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitTest.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression;
+
+
+import org.apache.falcon.regression.core.bundle.Bundle;
+import org.apache.falcon.regression.core.helpers.ColoHelper;
+import org.apache.falcon.regression.core.response.ServiceResponse;
+import org.apache.falcon.regression.core.util.AssertUtil;
+import org.apache.falcon.regression.core.util.BundleUtil;
+import org.apache.falcon.regression.core.util.OSUtil;
+import org.apache.falcon.regression.core.util.Util.URLS;
+import org.apache.falcon.regression.testHelper.BaseTestClass;
+import org.apache.log4j.Logger;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.lang.reflect.Method;
+
+/**
+ * Feed submission tests.
+ */
+@Test(groups = "embedded")
+public class FeedSubmitTest extends BaseTestClass {
+
+    private ColoHelper cluster = servers.get(0);
+    private String feed;
+    private String aggregateWorkflowDir = baseHDFSDir + "/FeedSubmitTest/aggregator";
+    private static final Logger LOGGER = Logger.getLogger(FeedSubmitTest.class);
+
+    @BeforeClass(alwaysRun = true)
+    public void uploadWorkflow() throws Exception {
+        uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
+    }
+
+    @BeforeMethod(alwaysRun = true)
+    public void setUp(Method method) throws Exception {
+        LOGGER.info("test name: " + method.getName());
+        bundles[0] = BundleUtil.readELBundle();
+        bundles[0].generateUniqueBundle();
+        bundles[0] = new Bundle(bundles[0], cluster);
+        bundles[0].setProcessWorkflow(aggregateWorkflowDir);
+
+        //submit the cluster
+        ServiceResponse response =
+            prism.getClusterHelper().submitEntity(URLS.SUBMIT_URL, bundles[0].getClusters().get(0));
+        AssertUtil.assertSucceeded(response);
+        feed = bundles[0].getInputFeedFromBundle();
+    }
+
+    @AfterMethod(alwaysRun = true)
+    public void tearDown() {
+        removeBundles();
+    }
+
+    /**
+     * Submits a valid feed. The response should reflect success.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void submitValidFeed() throws Exception {
+        ServiceResponse response = prism.getFeedHelper().submitEntity(URLS.SUBMIT_URL, feed);
+        AssertUtil.assertSucceeded(response);
+    }
+
+    /**
+     * Submits and removes a feed, then submits it again. The response should reflect success.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void submitValidFeedPostDeletion() throws Exception {
+        ServiceResponse response = prism.getFeedHelper().submitEntity(URLS.SUBMIT_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().delete(URLS.DELETE_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().submitEntity(URLS.SUBMIT_URL, feed);
+        AssertUtil.assertSucceeded(response);
+    }
+
+    /**
+     * Submits a feed and gets its definition, then submits it again. All requests should succeed.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void submitValidFeedPostGet() throws Exception {
+        ServiceResponse response = prism.getFeedHelper().submitEntity(URLS.SUBMIT_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().getEntityDefinition(URLS.GET_ENTITY_DEFINITION, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().submitEntity(URLS.SUBMIT_URL, feed);
+        AssertUtil.assertSucceeded(response);
+    }
+
+    /**
+     * Submits a valid feed twice. Both requests should succeed.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void submitValidFeedTwice() throws Exception {
+        ServiceResponse response = prism.getFeedHelper().submitEntity(URLS.SUBMIT_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().submitEntity(URLS.SUBMIT_URL, feed);
+        AssertUtil.assertSucceeded(response);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/bdcf001f/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSuspendTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSuspendTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSuspendTest.java
new file mode 100644
index 0000000..10bdddb
--- /dev/null
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSuspendTest.java
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression;
+
+import org.apache.falcon.entity.v0.EntityType;
+import org.apache.falcon.regression.core.bundle.Bundle;
+import org.apache.falcon.regression.core.helpers.ColoHelper;
+import org.apache.falcon.regression.core.response.ServiceResponse;
+import org.apache.falcon.regression.core.util.AssertUtil;
+import org.apache.falcon.regression.core.util.BundleUtil;
+import org.apache.falcon.regression.core.util.OSUtil;
+import org.apache.falcon.regression.core.util.Util.URLS;
+import org.apache.falcon.regression.testHelper.BaseTestClass;
+import org.apache.log4j.Logger;
+import org.apache.oozie.client.Job;
+import org.apache.oozie.client.OozieClient;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.lang.reflect.Method;
+
+/**
+ * Feed suspend tests.
+ */
+@Test(groups = "embedded")
+public class FeedSuspendTest extends BaseTestClass {
+
+    private ColoHelper cluster = servers.get(0);
+    private OozieClient clusterOC = serverOC.get(0);
+    private String feed;
+    private String aggregateWorkflowDir = baseHDFSDir + "/FeedSuspendTest/aggregator";
+    private static final Logger LOGGER = Logger.getLogger(FeedSuspendTest.class);
+
+    @BeforeClass(alwaysRun = true)
+    public void uploadWorkflow() throws Exception {
+        uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
+    }
+
+    @BeforeMethod(alwaysRun = true)
+    public void setUp(Method method) throws Exception {
+        LOGGER.info("test name: " + method.getName());
+        bundles[0] = BundleUtil.readELBundle();
+        bundles[0].generateUniqueBundle();
+        bundles[0] = new Bundle(bundles[0], cluster);
+        bundles[0].setProcessWorkflow(aggregateWorkflowDir);
+
+        //submit the cluster
+        ServiceResponse response =
+            prism.getClusterHelper().submitEntity(URLS.SUBMIT_URL, bundles[0].getClusters().get(0));
+        AssertUtil.assertSucceeded(response);
+
+        feed = bundles[0].getInputFeedFromBundle();
+    }
+
+    @AfterMethod(alwaysRun = true)
+    public void tearDown() {
+        removeBundles();
+    }
+
+    /**
+     * Schedules a feed and suspends it. Checks that the response reflects
+     * success and the feed status is SUSPENDED.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void suspendScheduledFeed() throws Exception {
+        ServiceResponse response =
+            prism.getFeedHelper().submitAndSchedule(URLS.SUBMIT_AND_SCHEDULE_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().suspend(URLS.SUSPEND_URL, feed);
+        AssertUtil.assertSucceeded(response);
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.SUSPENDED);
+    }
+
+    /**
+     * Suspends a running feed, then suspends it again. Both responses should
+     * reflect success and the feed status should remain SUSPENDED.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void suspendAlreadySuspendedFeed() throws Exception {
+        ServiceResponse response =
+            prism.getFeedHelper().submitAndSchedule(URLS.SUBMIT_AND_SCHEDULE_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().suspend(URLS.SUSPEND_URL, feed);
+        AssertUtil.assertSucceeded(response);
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.SUSPENDED);
+        response = prism.getFeedHelper().suspend(URLS.SUSPEND_URL, feed);
+
+        AssertUtil.assertSucceeded(response);
+        AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.SUSPENDED);
+    }
+
+    /**
+     * Removes a feed. An attempt to suspend it should then fail.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void suspendDeletedFeed() throws Exception {
+        ServiceResponse response =
+            prism.getFeedHelper().submitAndSchedule(URLS.SUBMIT_AND_SCHEDULE_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().delete(URLS.DELETE_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().suspend(URLS.SUSPEND_URL, feed);
+        AssertUtil.assertFailed(response);
+    }
+
+    /**
+     * An attempt to suspend a non-existent feed should fail.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void suspendNonExistentFeed() throws Exception {
+        ServiceResponse response = prism.getFeedHelper().suspend(URLS.SUSPEND_URL, feed);
+        AssertUtil.assertFailed(response);
+    }
+
+    /**
+     * An attempt to suspend a feed that was submitted but not scheduled should fail.
+     *
+     * @throws Exception
+     */
+    @Test(groups = {"singleCluster"})
+    public void suspendSubmittedFeed() throws Exception {
+        ServiceResponse response = prism.getFeedHelper().submitEntity(URLS.SUBMIT_URL, feed);
+        AssertUtil.assertSucceeded(response);
+
+        response = prism.getFeedHelper().suspend(URLS.SUSPEND_URL, feed);
+        AssertUtil.assertFailed(response);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/bdcf001f/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceParamTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceParamTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceParamTest.java
new file mode 100644
index 0000000..ad2e832
--- /dev/null
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceParamTest.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression;
+
+import org.apache.falcon.entity.v0.EntityType;
+import org.apache.falcon.entity.v0.feed.ClusterType;
+import org.apache.falcon.regression.core.bundle.Bundle;
+import org.apache.falcon.regression.core.helpers.ColoHelper;
+import org.apache.falcon.regression.core.response.InstancesResult;
+import org.apache.falcon.regression.core.util.BundleUtil;
+import org.apache.falcon.regression.core.util.HadoopUtil;
+import org.apache.falcon.regression.core.util.InstanceUtil;
+import org.apache.falcon.regression.core.util.OSUtil;
+import org.apache.falcon.regression.core.util.OozieUtil;
+import org.apache.falcon.regression.core.util.TimeUtil;
+import org.apache.falcon.regression.core.util.Util;
+import org.apache.falcon.regression.testHelper.BaseTestClass;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.log4j.Logger;
+import org.apache.oozie.client.CoordinatorAction;
+import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.OozieClientException;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import javax.xml.bind.JAXBException;
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.net.URISyntaxException;
+
+/**
+ * Tests for instance option params.
+ */
+public class InstanceParamTest extends BaseTestClass {
+
+    /**
+     * Test cases for https://issues.apache.org/jira/browse/FALCON-263.
+     */
+
+    private String baseTestHDFSDir = baseHDFSDir + "/InstanceParamTest";
+    private String feedInputPath = baseTestHDFSDir
+        + "/testInputData/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}";
+    private String aggregateWorkflowDir = baseTestHDFSDir + "/aggregator";
+    private String startTime;
+    private String endTime;
+
+    private ColoHelper cluster1 = servers.get(0);
+    private OozieClient oC1 = serverOC.get(0);
+    private Bundle processBundle;
+    private static final Logger LOGGER = Logger.getLogger(InstanceParamTest.class);
+
+    @BeforeClass(alwaysRun = true)
+    public void createTestData() throws Exception {
+        uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
+        startTime = TimeUtil.get20roundedTime(TimeUtil.getTimeWrtSystemTime(-20));
+        endTime = TimeUtil.getTimeWrtSystemTime(60);
+    }
+
+    @BeforeMethod(alwaysRun = true)
+    public void setup(Method method) throws Exception {
+        LOGGER.info("test name: " + method.getName());
+        processBundle = BundleUtil.readELBundle();
+        processBundle = new Bundle(processBundle, cluster1);
+        processBundle.generateUniqueBundle();
+        processBundle.setInputFeedDataPath(feedInputPath);
+        processBundle.setProcessWorkflow(aggregateWorkflowDir);
+        for (int i = 0; i < 3; i++) {
+            bundles[i] = new Bundle(processBundle, servers.get(i));
+            bundles[i].generateUniqueBundle();
+            bundles[i].setProcessWorkflow(aggregateWorkflowDir);
+        }
+    }
+
+    @Test(timeOut = 1200000, enabled = false)
+    public void getParamsValidRequestInstanceWaiting()
+        throws URISyntaxException, JAXBException, AuthenticationException, IOException,
+        OozieClientException {
+        processBundle.setProcessValidity(startTime, endTime);
+        processBundle.addClusterToBundle(bundles[1].getClusters().get(0),
+            ClusterType.SOURCE, null, null);
+        processBundle.addClusterToBundle(bundles[2].getClusters().get(0),
+            ClusterType.SOURCE, null, null);
+        processBundle.submitFeedsScheduleProcess(prism);
+        InstanceUtil.waitTillInstancesAreCreated(cluster1, processBundle.getProcessData(), 0);
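+        //query instance params while the instances are still waiting for their dependencies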
+        InstancesResult r = prism.getProcessHelper()
+            .getInstanceParams(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime);
+        LOGGER.info(r.getMessage());
+    }
+
+    @Test(timeOut = 1200000, enabled = true)
+    public void getParamsValidRequestInstanceSucceeded()
+        throws URISyntaxException, JAXBException, AuthenticationException, IOException,
+        OozieClientException {
+        processBundle.setProcessValidity(startTime, endTime);
+        processBundle.addClusterToBundle(bundles[1].getClusters().get(0),
+            ClusterType.SOURCE, null, null);
+        processBundle.addClusterToBundle(bundles[2].getClusters().get(0),
+            ClusterType.SOURCE, null, null);
+        processBundle.submitFeedsScheduleProcess(prism);
+        InstanceUtil.waitTillInstancesAreCreated(cluster1, processBundle.getProcessData(), 0);
+        OozieUtil.createMissingDependencies(cluster1, EntityType.PROCESS,
+            processBundle.getProcessName(), 0);
+        InstanceUtil.waitTillInstanceReachState(oC1, processBundle.getProcessName(), 1,
+            CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS, 10);
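+        //params of the succeeded instance should now be available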
+        InstancesResult r = prism.getProcessHelper()
+            .getInstanceParams(Util.readEntityName(processBundle.getProcessData()),
+                "?start="+startTime);
+        LOGGER.info(r.getMessage());
+    }
+
+    @Test(timeOut = 1200000, enabled = false)
+    public void getParamsValidRequestInstanceKilled()
+        throws URISyntaxException, JAXBException, AuthenticationException, IOException,
+        OozieClientException {
+        processBundle.setProcessValidity(startTime, endTime);
+        processBundle.addClusterToBundle(bundles[1].getClusters().get(0),
+            ClusterType.SOURCE, null, null);
+        processBundle.addClusterToBundle(bundles[2].getClusters().get(0),
+            ClusterType.SOURCE, null, null);
+        processBundle.submitFeedsScheduleProcess(prism);
+        InstanceUtil.waitTillInstancesAreCreated(cluster1, processBundle.getProcessData(), 0);
+        OozieUtil.createMissingDependencies(cluster1, EntityType.PROCESS,
+            processBundle.getProcessName(), 0);
+        InstanceUtil.waitTillInstanceReachState(oC1, processBundle.getProcessName(), 0,
+            CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
+        InstancesResult r = prism.getProcessHelper()
+            .getInstanceParams(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime);
+        LOGGER.info(r.getMessage());
+    }
+
+    @AfterMethod(alwaysRun = true)
+    public void tearDown() throws IOException {
+        processBundle.deleteBundle(prism);
+        removeBundles();
+        for (FileSystem fs : serverFS) {
+            HadoopUtil.deleteDirIfExists(Util.getPathPrefix(feedInputPath), fs);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/bdcf001f/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
new file mode 100644
index 0000000..c541620
--- /dev/null
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
@@ -0,0 +1,311 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.regression;
+
+import org.apache.falcon.regression.core.bundle.Bundle;
+import org.apache.falcon.entity.v0.EntityType;
+import org.apache.falcon.entity.v0.feed.ActionType;
+import org.apache.falcon.entity.v0.feed.ClusterType;
+import org.apache.falcon.regression.core.helpers.ColoHelper;
+import org.apache.falcon.regression.core.response.InstancesSummaryResult;
+import org.apache.falcon.regression.core.util.BundleUtil;
+import org.apache.falcon.regression.core.util.HadoopUtil;
+import org.apache.falcon.regression.core.util.InstanceUtil;
+import org.apache.falcon.regression.core.util.OSUtil;
+import org.apache.falcon.regression.core.util.TimeUtil;
+import org.apache.falcon.regression.core.util.Util;
+import org.apache.falcon.regression.core.util.XmlUtil;
+import org.apache.falcon.regression.testHelper.BaseTestClass;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.log4j.Logger;
+import org.apache.oozie.client.CoordinatorAction.Status;
+import org.apache.oozie.client.OozieClientException;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import javax.xml.bind.JAXBException;
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.net.URISyntaxException;
+import java.text.ParseException;
+import java.util.List;
+
+/**
+ * This test currently provides minimal verification. More detailed tests
+ * should be added.
+ */
+public class InstanceSummaryTest extends BaseTestClass {
+
+    //1. process: test summary on a single cluster with a few instances, some future, some past
+    //2. process: test multiple clusters - full past on one cluster,
+    // full future on another, half future/half past on the third
+
+    //3. feed: same as test 1, for a feed
+    //4. feed: same as test 2, for a feed
+
+    private String baseTestHDFSDir = baseHDFSDir + "/InstanceSummaryTest";
+    private String feedInputPath = baseTestHDFSDir
+        + "/testInputData/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}";
+    private String aggregateWorkflowDir = baseTestHDFSDir + "/aggregator";
+    private String startTime;
+    private String endTime;
+
+    private ColoHelper cluster3 = servers.get(2);
+
+    private Bundle processBundle;
+    private static final Logger LOGGER = Logger.getLogger(InstanceSummaryTest.class);
+
+    @BeforeClass(alwaysRun = true)
+    public void createTestData() throws Exception {
+        uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
+        startTime = TimeUtil.get20roundedTime(TimeUtil.getTimeWrtSystemTime(-20));
+        endTime = TimeUtil.getTimeWrtSystemTime(60);
+        String startTimeData = TimeUtil.addMinsToTime(startTime, -100);
+        List<String> dataDates = TimeUtil.getMinuteDatesOnEitherSide(startTimeData, endTime, 20);
+
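+        //upload input data covering the whole validity range to every cluster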
+        for (FileSystem fs : serverFS) {
+            HadoopUtil.deleteDirIfExists(Util.getPathPrefix(feedInputPath), fs);
+            HadoopUtil.flattenAndPutDataInFolder(fs, OSUtil.NORMAL_INPUT,
+                Util.getPathPrefix(feedInputPath), dataDates);
+        }
+    }
+
+    @BeforeMethod(alwaysRun = true)
+    public void setup(Method method) throws Exception {
+        LOGGER.info("test name: " + method.getName());
+        processBundle = BundleUtil.readELBundle();
+        processBundle = new Bundle(processBundle, cluster3);
+        processBundle.generateUniqueBundle();
+        processBundle.setInputFeedDataPath(feedInputPath);
+        processBundle.setProcessWorkflow(aggregateWorkflowDir);
+
+        for (int i = 0; i < 3; i++) {
+            bundles[i] = new Bundle(processBundle, servers.get(i));
+            bundles[i].generateUniqueBundle();
+            bundles[i].setProcessWorkflow(aggregateWorkflowDir);
+        }
+    }
+
+    @Test(enabled = true, timeOut = 1200000)
+    public void testSummarySingleClusterProcess()
+        throws URISyntaxException, JAXBException, IOException, ParseException,
+        OozieClientException, AuthenticationException {
+        processBundle.setProcessValidity(startTime, endTime);
+        processBundle.submitFeedsScheduleProcess(prism);
+        InstanceUtil.waitTillInstancesAreCreated(cluster3,
+            processBundle.getProcessData(), 0);
+
+        // start only at start time
+        InstancesSummaryResult r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime);
+
+        InstanceUtil.waitTillInstanceReachState(serverOC.get(2),
+            Util.readEntityName(processBundle.getProcessData()), 2,
+            Status.SUCCEEDED, EntityType.PROCESS);
+
+        //AssertUtil.assertSucceeded(r);
+
+        //start only before process start
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + TimeUtil.addMinsToTime(startTime, -100));
+        //AssertUtil.assertFailed(r,"response should have failed");
+
+        //start only after process end
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + TimeUtil.addMinsToTime(startTime, 120));
+
+        //start at a specific instance in the middle
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + TimeUtil.addMinsToTime(startTime, 10));
+
+        //start in between two instances
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + TimeUtil.addMinsToTime(startTime, 7));
+
+        //start and end at start and end
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime + "&end=" + endTime);
+
+        //start in between and end at end
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + TimeUtil.addMinsToTime(startTime, 14) + "&end=" + endTime);
+
+        //start at start and end between
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(endTime, -20));
+
+        // start and end in between
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + TimeUtil.addMinsToTime(startTime, 20)
+                    + "&end=" + TimeUtil.addMinsToTime(endTime, -13));
+
+        //start before start with end in between
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + TimeUtil.addMinsToTime(startTime, -100)
+                    + "&end=" + TimeUtil.addMinsToTime(endTime, -37));
+
+        //start in between and end after end
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + TimeUtil.addMinsToTime(startTime, 60)
+                    + "&end=" + TimeUtil.addMinsToTime(endTime, 100));
+
+        // both start and end out of range
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + TimeUtil.addMinsToTime(startTime, -100)
+                    + "&end=" + TimeUtil.addMinsToTime(endTime, 100));
+
+        // end only
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?end=" + TimeUtil.addMinsToTime(endTime, -30));
+    }
+
+    @Test(enabled = true, timeOut = 1200000)
+    public void testSummaryMultiClusterProcess() throws JAXBException,
+        ParseException, IOException, URISyntaxException, AuthenticationException {
+        processBundle.setProcessValidity(startTime, endTime);
+        processBundle.addClusterToBundle(bundles[1].getClusters().get(0),
+            ClusterType.SOURCE, null, null);
+        processBundle.addClusterToBundle(bundles[2].getClusters().get(0),
+            ClusterType.SOURCE, null, null);
+        processBundle.submitFeedsScheduleProcess(prism);
+        InstancesSummaryResult r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime);
+
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime + "&end=" + endTime);
+
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime + "&end=" + endTime);
+
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime + "&end=" + endTime);
+
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime + "&end=" + endTime);
+
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime + "&end=" + endTime);
+
+        r = prism.getProcessHelper()
+            .getInstanceSummary(Util.readEntityName(processBundle.getProcessData()),
+                "?start=" + startTime + "&end=" + endTime);
+    }
+
+    @Test(enabled = true, timeOut = 1200000)
+    public void testSummaryMultiClusterFeed() throws JAXBException, ParseException, IOException,
+        URISyntaxException, OozieClientException, AuthenticationException {
+
+        //create desired feed
+        String feed = bundles[0].getDataSets().get(0);
+
+        //cluster_1 is target, cluster_2 is source and cluster_3 is neutral
+
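+        //the first setFeedCluster call, passing a null cluster name, is expected
+        //to clear the feed's default clusters before the real ones are added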
+        feed = InstanceUtil.setFeedCluster(feed,
+            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
+            XmlUtil.createRtention("days(100000)", ActionType.DELETE), null,
+            ClusterType.SOURCE, null);
+
+        feed = InstanceUtil
+            .setFeedCluster(feed, XmlUtil.createValidity(startTime, "2099-10-01T12:10Z"),
+                XmlUtil.createRtention("days(100000)", ActionType.DELETE),
+                Util.readEntityName(bundles[2].getClusters().get(0)), null, null);
+
+        feed = InstanceUtil
+            .setFeedCluster(feed, XmlUtil.createValidity(startTime, "2099-10-01T12:25Z"),
+                XmlUtil.createRtention("days(100000)", ActionType.DELETE),
+                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET,
+                null,
+                feedInputPath);
+
+        feed = InstanceUtil
+            .setFeedCluster(feed, XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
+                XmlUtil.createRtention("days(100000)", ActionType.DELETE),
+                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
+                null, feedInputPath);
+
+        //submit clusters
+        Bundle.submitCluster(bundles[0], bundles[1], bundles[2]);
+
+        //create test data on cluster_2
+      /*InstanceUtil.createDataWithinDatesAndPrefix(cluster2,
+        InstanceUtil.oozieDateToDate(startTime),
+        InstanceUtil.oozieDateToDate(InstanceUtil.getTimeWrtSystemTime(60)),
+        feedInputPath, 1);*/
+
+        //submit and schedule feed
+        prism.getFeedHelper().submitAndSchedule(Util.URLS.SUBMIT_AND_SCHEDULE_URL, feed);
+
+        InstancesSummaryResult r = prism.getFeedHelper()
+            .getInstanceSummary(Util.readEntityName(feed),
+                "?start=" + startTime);
+
+        r = prism.getFeedHelper()
+            .getInstanceSummary(Util.readEntityName(feed),
+                "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(endTime,
+                    -20));
+    }
+
+    @AfterMethod(alwaysRun = true)
+    public void tearDown() throws IOException {
+        processBundle.deleteBundle(prism);
+        removeBundles();
+        for (FileSystem fs : serverFS) {
+            HadoopUtil.deleteDirIfExists(Util.getPathPrefix(feedInputPath), fs);
+        }
+    }
+}

