falcon-commits mailing list archives

From srik...@apache.org
Subject [05/12] FALCON-85 Hive (HCatalog) integration. Contributed by Venkatesh Seetharam. FALCON-163 Merge FALCON-85 branch into main line. Contributed by Venkatesh Seetharam
Date Tue, 12 Nov 2013 11:05:22 GMT
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEventFactory.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEventFactory.java b/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEventFactory.java
index 54bbecf..03230f9 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEventFactory.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEventFactory.java
@@ -56,7 +56,6 @@ public class RerunEventFactory<T extends RerunEvent> {
                 map.get("entityType"), map.get("entityName"), map.get("instance"),
                 Integer.parseInt(map.get("runId")), Integer.parseInt(map.get("attempts")),
                 Integer.parseInt(map.get("failRetryCount")));
-
     }
 
     private Map<String, String> getMap(String message) {
@@ -68,5 +67,4 @@ public class RerunEventFactory<T extends RerunEvent> {
         }
         return map;
     }
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/rerun/src/main/java/org/apache/falcon/rerun/event/RetryEvent.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/event/RetryEvent.java b/rerun/src/main/java/org/apache/falcon/rerun/event/RetryEvent.java
index 44bf96e..1396f19 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/event/RetryEvent.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/event/RetryEvent.java
@@ -25,7 +25,7 @@ public class RetryEvent extends RerunEvent {
     private int attempts;
     private int failRetryCount;
 
-    //SUSPEND CHECKSTYLE CHECK VisibilityModifierCheck
+    //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
     public RetryEvent(String clusterName, String wfId, long msgInsertTime,
                       long delay, String entityType, String entityName, String instance,
                       int runId, int attempts, int failRetryCount) {
@@ -34,7 +34,7 @@ public class RetryEvent extends RerunEvent {
         this.attempts = attempts;
         this.failRetryCount = failRetryCount;
     }
-    //RESUME CHECKSTYLE CHECK VisibilityModifierCheck
+    //RESUME CHECKSTYLE CHECK ParameterNumberCheck
 
     public int getAttempts() {
         return attempts;
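
The fix above corrects a mismatched suppression tag: the ten-argument constructor is what trips checkstyle's ParameterNumberCheck, so the paired SUSPEND/RESUME comments must name that check for the suppression to take effect. A minimal sketch of the pairing, assuming the project's SuppressionCommentFilter is configured to recognize this SUSPEND/RESUME comment format:

    //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
    public RetryEvent(String clusterName, String wfId, long msgInsertTime,
                      long delay, String entityType, String entityName, String instance,
                      int runId, int attempts, int failRetryCount) {
        // constructor body as in the diff above
    }
    //RESUME CHECKSTYLE CHECK ParameterNumberCheck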

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
index 8a41bf8..ab7f472 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
@@ -46,9 +46,8 @@ public abstract class AbstractRerunHandler<T extends RerunEvent, M extends Delay
         this.delayQueue.init();
     }
 
-    public abstract void handleRerun(String cluster, String entityType,
-                                     String entityName, String nominalTime, String runId, String wfId,
-                                     long msgReceivedTime);
+    public abstract void handleRerun(String cluster, String entityType, String entityName,
+                                     String nominalTime, String runId, String wfId, long msgReceivedTime);
 
     public AbstractWorkflowEngine getWfEngine() {
         return wfEngine;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
index 4088a59..fffd5cd 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
@@ -25,6 +25,7 @@ import org.apache.falcon.entity.v0.process.LateInput;
 import org.apache.falcon.latedata.LateDataHandler;
 import org.apache.falcon.rerun.event.LaterunEvent;
 import org.apache.falcon.rerun.queue.DelayedQueue;
+import org.apache.falcon.workflow.engine.AbstractWorkflowEngine;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -71,11 +72,9 @@ public class LateRerunConsumer<T extends LateRerunHandler<DelayedQueue<LaterunEv
                 return;
             }
 
-            LOG.info("Late changes detected in the following feeds: "
-                    + detectLate);
+            LOG.info("Late changes detected in the following feeds: " + detectLate);
 
-            handler.getWfEngine().reRun(message.getClusterName(),
-                    message.getWfId(), null);
+            handler.getWfEngine().reRun(message.getClusterName(), message.getWfId(), null);
             LOG.info("Scheduled late rerun for wf-id: " + message.getWfId()
                     + " on cluster: " + message.getClusterName());
         } catch (Exception e) {
@@ -95,54 +94,48 @@ public class LateRerunConsumer<T extends LateRerunHandler<DelayedQueue<LaterunEv
 
     public String detectLate(LaterunEvent message) throws Exception {
         LateDataHandler late = new LateDataHandler();
-        String falconInputFeeds = handler.getWfEngine().getWorkflowProperty(
-                message.getClusterName(), message.getWfId(), "falconInputFeeds");
-        String logDir = handler.getWfEngine().getWorkflowProperty(
-                message.getClusterName(), message.getWfId(), "logDir");
-        String falconInPaths = handler.getWfEngine().getWorkflowProperty(
-                message.getClusterName(), message.getWfId(), "falconInPaths");
-        String nominalTime = handler.getWfEngine().getWorkflowProperty(
-                message.getClusterName(), message.getWfId(), "nominalTime");
-        String srcClusterName = handler.getWfEngine().getWorkflowProperty(
-                message.getClusterName(), message.getWfId(), "srcClusterName");
-
-        Configuration conf = handler.getConfiguration(message.getClusterName(),
-                message.getWfId());
-        Path lateLogPath = handler.getLateLogPath(logDir, nominalTime,
-                srcClusterName);
+        Properties properties = handler.getWfEngine().getWorkflowProperties(
+                message.getClusterName(), message.getWfId());
+        String falconInputFeeds = properties.getProperty("falconInputFeeds");
+        String falconInPaths = properties.getProperty("falconInPaths");
+        String falconInputFeedStorageTypes = properties.getProperty("falconInputFeedStorageTypes");
+        String logDir = properties.getProperty("logDir");
+        String nominalTime = properties.getProperty("nominalTime");
+        String srcClusterName = properties.getProperty("srcClusterName");
+        Path lateLogPath = handler.getLateLogPath(logDir, nominalTime, srcClusterName);
+
+        final String storageEndpoint = properties.getProperty(AbstractWorkflowEngine.NAME_NODE);
+        Configuration conf = LateRerunHandler.getConfiguration(storageEndpoint);
         FileSystem fs = FileSystem.get(conf);
         if (!fs.exists(lateLogPath)) {
             LOG.warn("Late log file:" + lateLogPath + " not found:");
             return "";
         }
-        Map<String, Long> feedSizes = new LinkedHashMap<String, Long>();
+
         String[] pathGroups = falconInPaths.split("#");
         String[] inputFeeds = falconInputFeeds.split("#");
-        Entity entity = EntityUtil.getEntity(message.getEntityType(),
-                message.getEntityName());
+        String[] inputFeedStorageTypes = falconInputFeedStorageTypes.split("#");
 
-        List<String> lateFeed = new ArrayList<String>();
+        Map<String, Long> computedMetrics = new LinkedHashMap<String, Long>();
+        Entity entity = EntityUtil.getEntity(message.getEntityType(), message.getEntityName());
         if (EntityUtil.getLateProcess(entity) != null) {
-            for (LateInput li : EntityUtil.getLateProcess(entity)
-                    .getLateInputs()) {
+            List<String> lateFeed = new ArrayList<String>();
+            for (LateInput li : EntityUtil.getLateProcess(entity).getLateInputs()) {
                 lateFeed.add(li.getInput());
             }
+
             for (int index = 0; index < pathGroups.length; index++) {
                 if (lateFeed.contains(inputFeeds[index])) {
-                    long usage = 0;
-                    for (String pathElement : pathGroups[index].split(",")) {
-                        Path inPath = new Path(pathElement);
-                        usage += late.usage(inPath, conf);
-                    }
-                    feedSizes.put(inputFeeds[index], usage);
+                    long computedMetric = late.computeStorageMetric(
+                            pathGroups[index], inputFeedStorageTypes[index], conf);
+                    computedMetrics.put(inputFeeds[index], computedMetric);
                 }
             }
         } else {
             LOG.warn("Late process is not configured for entity: "
-                    + message.getEntityType() + "(" + message.getEntityName()
-                    + ")");
+                    + message.getEntityType() + "(" + message.getEntityName() + ")");
         }
 
-        return late.detectChanges(lateLogPath, feedSizes, conf);
+        return late.detectChanges(lateLogPath, computedMetrics, conf);
     }
 }
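
The consumer change above replaces five separate getWorkflowProperty calls with a single getWorkflowProperties lookup, and swaps the inline usage() loop for LateDataHandler.computeStorageMetric, keyed by the per-feed storage types now passed in falconInputFeedStorageTypes. For filesystem-backed feeds such a metric reduces to the loop this hunk removes; a minimal sketch under that assumption (computeFileSystemMetric and the ContentSummary-based accounting are illustrative, not the actual LateDataHandler internals):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    // Illustrative only: sums the space consumed under each comma-separated
    // path in a path group, mirroring the removed usage() loop.
    long computeFileSystemMetric(String pathGroup, Configuration conf) throws IOException {
        long usage = 0;
        for (String pathElement : pathGroup.split(",")) {
            Path inPath = new Path(pathElement);
            usage += inPath.getFileSystem(conf).getContentSummary(inPath).getSpaceConsumed();
        }
        return usage;
    }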

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
index 1e4bd25..ee12332 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import java.util.Date;
+import java.util.*;
 
 /**
  * An implementation of handler for late reruns.
@@ -49,10 +49,8 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
         AbstractRerunHandler<LaterunEvent, M> {
 
     @Override
-    public void handleRerun(String cluster, String entityType,
-                            String entityName, String nominalTime, String runId, String wfId,
-                            long msgReceivedTime) {
-
+    public void handleRerun(String cluster, String entityType, String entityName,
+                            String nominalTime, String runId, String wfId, long msgReceivedTime) {
         try {
             Entity entity = EntityUtil.getEntity(entityType, entityName);
             try {
@@ -72,15 +70,17 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
             Long wait = getEventDelay(entity, nominalTime);
             if (wait == -1) {
                 LOG.info("Late rerun expired for entity: " + entityType + "(" + entityName + ")");
-                String logDir = this.getWfEngine().getWorkflowProperty(cluster,
-                        wfId, "logDir");
-                String srcClusterName = this.getWfEngine().getWorkflowProperty(
-                        cluster, wfId, "srcClusterName");
+
+                java.util.Properties properties =
+                        this.getWfEngine().getWorkflowProperties(cluster, wfId);
+                String logDir = properties.getProperty("logDir");
+                String srcClusterName = properties.getProperty("srcClusterName");
                 Path lateLogPath = this.getLateLogPath(logDir,
                         EntityUtil.fromUTCtoURIDate(nominalTime), srcClusterName);
+
                 LOG.info("Going to delete path:" + lateLogPath);
-                FileSystem fs = FileSystem.get(getConfiguration(cluster,
-                        wfId));
+                final String storageEndpoint = properties.getProperty(AbstractWorkflowEngine.NAME_NODE);
+                FileSystem fs = FileSystem.get(getConfiguration(storageEndpoint));
                 if (fs.exists(lateLogPath)) {
                     boolean deleted = fs.delete(lateLogPath, true);
                     if (deleted) {
@@ -93,9 +93,8 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
             LOG.debug("Scheduling the late rerun for entity instance : "
                     + entityType + "(" + entityName + ")" + ":" + nominalTime
                     + " And WorkflowId: " + wfId);
-            LaterunEvent event = new LaterunEvent(cluster, wfId,
-                    msgInsertTime.getTime(), wait, entityType, entityName,
-                    nominalTime, intRunId);
+            LaterunEvent event = new LaterunEvent(cluster, wfId, msgInsertTime.getTime(),
+                    wait, entityType, entityName, nominalTime, intRunId);
             offerToQueue(event);
         } catch (Exception e) {
             LOG.error("Unable to schedule late rerun for entity instance : "
@@ -216,12 +215,9 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
 
     }
 
-    public Configuration getConfiguration(String cluster, String wfId) throws FalconException {
+    public static Configuration getConfiguration(String storageEndpoint) throws FalconException {
         Configuration conf = new Configuration();
-        conf.set(
-                CommonConfigurationKeys.FS_DEFAULT_NAME_KEY,
-                this.getWfEngine().getWorkflowProperty(cluster, wfId,
-                        AbstractWorkflowEngine.NAME_NODE));
+        conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, storageEndpoint);
         return conf;
     }
 }
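
With getConfiguration now static and parameterized on the storage endpoint, LateRerunConsumer can build the Hadoop Configuration from the same workflow-properties lookup, with no per-property engine round trips. The resulting call pattern, as it appears in the hunks above:

    Properties properties = getWfEngine().getWorkflowProperties(cluster, wfId);
    final String storageEndpoint = properties.getProperty(AbstractWorkflowEngine.NAME_NODE);
    Configuration conf = LateRerunHandler.getConfiguration(storageEndpoint);
    FileSystem fs = FileSystem.get(conf);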

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/retention/src/main/java/org/apache/falcon/retention/FeedEvictor.java
----------------------------------------------------------------------
diff --git a/retention/src/main/java/org/apache/falcon/retention/FeedEvictor.java b/retention/src/main/java/org/apache/falcon/retention/FeedEvictor.java
index a0c7bb7..372e2d3 100644
--- a/retention/src/main/java/org/apache/falcon/retention/FeedEvictor.java
+++ b/retention/src/main/java/org/apache/falcon/retention/FeedEvictor.java
@@ -30,6 +30,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.TimeZone;
@@ -46,9 +47,17 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.el.ExpressionEvaluatorImpl;
+import org.apache.falcon.FalconException;
 import org.apache.falcon.Pair;
+import org.apache.falcon.catalog.CatalogPartition;
+import org.apache.falcon.catalog.CatalogServiceFactory;
+import org.apache.falcon.entity.CatalogStorage;
+import org.apache.falcon.entity.FeedHelper;
+import org.apache.falcon.entity.FileSystemStorage;
+import org.apache.falcon.entity.Storage;
 import org.apache.falcon.entity.common.FeedDataPath;
 import org.apache.falcon.entity.common.FeedDataPath.VARS;
+import org.apache.falcon.entity.v0.feed.Location;
 import org.apache.falcon.expression.ExpressionHelper;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -72,7 +81,6 @@ public class FeedEvictor extends Configured implements Tool {
     private static final ExpressionHelper RESOLVER = ExpressionHelper.get();
 
     public static final AtomicReference<PrintStream> OUT = new AtomicReference<PrintStream>(System.out);
-//    static PrintStream stream = System.out;
 
     private static final String FORMAT = "yyyyMMddHHmm";
 
@@ -88,7 +96,6 @@ public class FeedEvictor extends Configured implements Tool {
         }
     }
 
-    private FileSystem fs;
     private final Map<VARS, String> map = new TreeMap<VARS, String>();
     private final StringBuffer instancePaths = new StringBuffer("instancePaths=");
     private final StringBuffer buffer = new StringBuffer();
@@ -97,47 +104,73 @@ public class FeedEvictor extends Configured implements Tool {
     public int run(String[] args) throws Exception {
 
         CommandLine cmd = getCommand(args);
-        String feedBasePath = cmd.getOptionValue("feedBasePath").replaceAll("\\?\\{", "\\$\\{");
+        String feedBasePath = cmd.getOptionValue("feedBasePath")
+                .replaceAll(Storage.QUESTION_EXPR_START_REGEX, Storage.DOLLAR_EXPR_START_REGEX);
         String retentionType = cmd.getOptionValue("retentionType");
         String retentionLimit = cmd.getOptionValue("retentionLimit");
         String timeZone = cmd.getOptionValue("timeZone");
         String frequency = cmd.getOptionValue("frequency"); //to write out smart path filters
         String logFile = cmd.getOptionValue("logFile");
+        String feedStorageType = cmd.getOptionValue("falconFeedStorageType");
 
-        String[] feedLocs = feedBasePath.split("#");
-        for (String path : feedLocs) {
-            evictor(path, retentionType, retentionLimit, timeZone, frequency);
-        }
+        LOG.info("Applying retention on " + feedBasePath + " type: " + retentionType
+                + ", Limit: " + retentionLimit + ", timezone: " + timeZone
+                + ", frequency: " + frequency + ", storage: " + feedStorageType);
+
+        Storage storage = FeedHelper.createStorage(feedStorageType, feedBasePath);
+        evict(storage, retentionLimit, timeZone);
 
         logInstancePaths(new Path(logFile));
+
         int len = buffer.length();
         if (len > 0) {
             OUT.get().println("instances=" + buffer.substring(0, len - 1));
         } else {
             OUT.get().println("instances=NULL");
         }
+
         return 0;
     }
 
-    private void evictor(String feedBasePath, String retentionType,
-                         String retentionLimit, String timeZone, String frequency) throws IOException, ELException {
+    private void evict(Storage storage, String retentionLimit, String timeZone)
+        throws Exception {
+
+        if (storage.getType() == Storage.TYPE.FILESYSTEM) {
+            evictFS((FileSystemStorage) storage, retentionLimit, timeZone);
+        } else if (storage.getType() == Storage.TYPE.TABLE) {
+            evictTable((CatalogStorage) storage, retentionLimit, timeZone);
+        }
+    }
+
+    private void evictFS(FileSystemStorage storage, String retentionLimit, String timeZone)
+        throws Exception {
+
+        for (Location location : storage.getLocations()) {
+            fileSystemEvictor(storage.getUriTemplate(location.getType()), retentionLimit, timeZone);
+        }
+    }
+
+    private void fileSystemEvictor(String feedBasePath, String retentionLimit, String timeZone)
+        throws IOException, ELException {
+
         Path normalizedPath = new Path(feedBasePath);
-        fs = normalizedPath.getFileSystem(getConf());
+        FileSystem fs = normalizedPath.getFileSystem(getConf());
         feedBasePath = normalizedPath.toUri().getPath();
         LOG.info("Normalized path : " + feedBasePath);
+
         Pair<Date, Date> range = getDateRange(retentionLimit);
         String dateMask = getDateFormatInPath(feedBasePath);
-        List<Path> toBeDeleted = discoverInstanceToDelete(feedBasePath,
-                timeZone, dateMask, range.first);
 
-        LOG.info("Applying retention on " + feedBasePath + " type: "
-                + retentionType + ", Limit: " + retentionLimit + ", timezone: "
-                + timeZone + ", frequency: " + frequency);
+        List<Path> toBeDeleted = discoverInstanceToDelete(feedBasePath, timeZone, dateMask, range.first, fs);
+        if (toBeDeleted.isEmpty()) {
+            LOG.info("No instances to delete.");
+            return;
+        }
 
         DateFormat dateFormat = new SimpleDateFormat(FORMAT);
         dateFormat.setTimeZone(TimeZone.getTimeZone(timeZone));
         for (Path path : toBeDeleted) {
-            if (deleteInstance(path)) {
+            if (deleteInstance(fs, path)) {
                 LOG.info("Deleted instance " + path);
                 Date date = getDate(path, feedBasePath, dateMask, timeZone);
                 buffer.append(dateFormat.format(date)).append(',');
@@ -165,10 +198,10 @@ public class FeedEvictor extends Configured implements Tool {
         return Pair.of(start, end);
     }
 
-    private List<Path> discoverInstanceToDelete(String inPath, String timeZone,
-                                                String dateMask, Date start) throws IOException {
+    private List<Path> discoverInstanceToDelete(String inPath, String timeZone, String dateMask,
+                                                Date start, FileSystem fs) throws IOException {
 
-        FileStatus[] files = findFilesForFeed(inPath);
+        FileStatus[] files = findFilesForFeed(fs, inPath);
         if (files == null || files.length == 0) {
             return Collections.emptyList();
         }
@@ -196,7 +229,7 @@ public class FeedEvictor extends Configured implements Tool {
                 .replaceAll(VARS.MINUTE.regex(), "mm");
     }
 
-    private FileStatus[] findFilesForFeed(String feedBasePath) throws IOException {
+    private FileStatus[] findFilesForFeed(FileSystem fs, String feedBasePath) throws IOException {
 
         Matcher matcher = FeedDataPath.PATTERN.matcher(feedBasePath);
         while (matcher.find()) {
@@ -272,13 +305,13 @@ public class FeedEvictor extends Configured implements Tool {
         return date.compareTo(start) >= 0;
     }
 
-    private boolean deleteInstance(Path path) throws IOException {
+    private boolean deleteInstance(FileSystem fs, Path path) throws IOException {
         return fs.delete(path, true);
     }
 
-    private void debug(FileSystem myfs, Path outPath) throws IOException {
+    private void debug(FileSystem fs, Path outPath) throws IOException {
         ByteArrayOutputStream writer = new ByteArrayOutputStream();
-        InputStream instance = myfs.open(outPath);
+        InputStream instance = fs.open(outPath);
         IOUtils.copyBytes(instance, writer, 4096, true);
         LOG.debug("Instance Paths copied to " + outPath);
         LOG.debug("Written " + writer);
@@ -286,29 +319,128 @@ public class FeedEvictor extends Configured implements Tool {
 
     private CommandLine getCommand(String[] args) throws org.apache.commons.cli.ParseException {
         Options options = new Options();
-        Option opt;
-        opt = new Option("feedBasePath", true,
+
+        Option opt = new Option("feedBasePath", true,
                 "base path for feed, ex /data/feed/${YEAR}-${MONTH}");
         opt.setRequired(true);
         options.addOption(opt);
+
+        opt = new Option("falconFeedStorageType", true, "feed storage type, FileSystem or Table");
+        opt.setRequired(true);
+        options.addOption(opt);
+
         opt = new Option("retentionType", true,
                 "type of retention policy like delete, archive etc");
         opt.setRequired(true);
         options.addOption(opt);
+
         opt = new Option("retentionLimit", true,
                 "time limit for retention, ex hours(5), months(2), days(90)");
         opt.setRequired(true);
         options.addOption(opt);
+
         opt = new Option("timeZone", true, "timezone for feed, ex UTC");
         opt.setRequired(true);
         options.addOption(opt);
+
         opt = new Option("frequency", true,
                 "frequency of feed,  ex hourly, daily, monthly, minute, weekly, yearly");
         opt.setRequired(true);
         options.addOption(opt);
+
         opt = new Option("logFile", true, "log file for capturing size of feed");
         opt.setRequired(true);
         options.addOption(opt);
+
         return new GnuParser().parse(options, args);
     }
+
+    private void evictTable(CatalogStorage storage, String retentionLimit, String timeZone)
+        throws Exception {
+
+        LOG.info("Applying retention on " + storage.getTable()
+                + ", Limit: " + retentionLimit + ", timezone: " + timeZone);
+
+        String datedPartitionKey = storage.getDatedPartitionKey();
+        String datePattern = storage.getPartitionValue(datedPartitionKey);
+        String dateMask = datePattern.replaceAll(VARS.YEAR.regex(), "yyyy")
+                .replaceAll(VARS.MONTH.regex(), "MM")
+                .replaceAll(VARS.DAY.regex(), "dd")
+                .replaceAll(VARS.HOUR.regex(), "HH")
+                .replaceAll(VARS.MINUTE.regex(), "mm");
+
+        List<CatalogPartition> toBeDeleted = discoverPartitionsToDelete(
+                storage, retentionLimit, timeZone, dateMask);
+        if (toBeDeleted.isEmpty()) {
+            LOG.info("No partitions to delete.");
+            return;
+        }
+
+        final boolean isTableExternal = CatalogServiceFactory.getCatalogService().isTableExternal(
+                storage.getCatalogUrl(), storage.getDatabase(), storage.getTable());
+
+        dropPartitions(storage, toBeDeleted, isTableExternal);
+    }
+
+    private List<CatalogPartition> discoverPartitionsToDelete(CatalogStorage storage, String retentionLimit,
+                                                           String timeZone, String dateMask)
+        throws FalconException, ELException {
+
+        final String filter = createFilter(storage, retentionLimit, timeZone, dateMask);
+        return CatalogServiceFactory.getCatalogService().listPartitionsByFilter(
+                storage.getCatalogUrl(), storage.getDatabase(), storage.getTable(), filter);
+    }
+
+    private String createFilter(CatalogStorage storage, String retentionLimit,
+                                String timeZone, String dateMask) throws ELException {
+
+        Pair<Date, Date> range = getDateRange(retentionLimit);
+        DateFormat dateFormat = new SimpleDateFormat(dateMask);
+        dateFormat.setTimeZone(TimeZone.getTimeZone(timeZone));
+        String beforeDate = dateFormat.format(range.first);
+
+        String datedPartitionKey = storage.getDatedPartitionKey();
+
+        StringBuilder filterBuffer = new StringBuilder();
+        filterBuffer.append(datedPartitionKey)
+                .append(" < ")
+                .append("'")
+                .append(beforeDate)
+                .append("'");
+
+        return filterBuffer.toString();
+    }
+
+    private void dropPartitions(CatalogStorage storage, List<CatalogPartition> partitionsToDelete,
+                                boolean isTableExternal) throws FalconException, IOException {
+
+        for (CatalogPartition partitionToDrop : partitionsToDelete) {
+            if (dropPartition(storage, partitionToDrop, isTableExternal)) {
+                LOG.info("Deleted partition: " + partitionToDrop.getValues());
+                buffer.append(partitionToDrop.getValues().get(0)).append(',');
+                instancePaths.append(partitionToDrop.getValues()).append(",");
+            }
+        }
+    }
+
+    private boolean dropPartition(CatalogStorage storage, CatalogPartition partitionToDrop,
+                                  boolean isTableExternal) throws FalconException, IOException {
+
+        String datedPartitionKey = storage.getDatedPartitionKey();
+
+        Map<String, String> partitions = new HashMap<String, String>();
+        partitions.put(datedPartitionKey, partitionToDrop.getValues().get(0));
+
+        boolean dropped = CatalogServiceFactory.getCatalogService().dropPartitions(
+                storage.getCatalogUrl(), storage.getDatabase(), storage.getTable(), partitions);
+
+        boolean deleted = true;
+        if (isTableExternal) { // nuke the dirs if an external table
+            final String location = partitionToDrop.getLocation();
+            final Path path = new Path(location);
+            deleted = path.getFileSystem(new Configuration()).delete(path, true);
+        }
+
+        return dropped && deleted;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
----------------------------------------------------------------------
diff --git a/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java b/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
index 5377c48..529418b 100644
--- a/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
+++ b/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
@@ -18,6 +18,19 @@
 
 package org.apache.falcon.retention;
 
+import org.apache.falcon.Pair;
+import org.apache.falcon.cluster.util.EmbeddedCluster;
+import org.apache.falcon.entity.Storage;
+import org.apache.falcon.entity.v0.feed.LocationType;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -33,17 +46,6 @@ import java.util.Map;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.falcon.Pair;
-import org.apache.falcon.cluster.util.EmbeddedCluster;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
 /**
  * Test for FeedEvictor.
  */
@@ -100,20 +102,28 @@ public class FeedEvictorTest {
             fs.delete(new Path("/"), true);
             stream.clear();
 
-            Pair<List<String>, List<String>> pair;
-            pair = createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/data");
-            String dataPath = "/data/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}";
+            Pair<List<String>, List<String>> pair =
+                    createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/data");
+            final String storageUrl = cluster.getConf().get("fs.default.name");
+            String dataPath = LocationType.DATA.name() + "="
+                    + storageUrl + "/data/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}";
             String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-01-00.csv";
 
             FeedEvictor.main(new String[]{
-                "-feedBasePath", cluster.getConf().get("fs.default.name") + dataPath,
-                "-retentionType", "instance", "-retentionLimit", "days(10)", "-timeZone", "UTC", "-frequency",
-                "daily", "-logFile", logFile, });
+                "-feedBasePath", dataPath,
+                "-retentionType", "instance",
+                "-retentionLimit", "days(10)",
+                "-timeZone", "UTC",
+                "-frequency", "daily",
+                "-logFile", logFile,
+                "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
+            });
 
             assertFailures(fs, pair);
             compare(map.get("feed1"), stream.getBuffer());
 
-            Assert.assertEquals(readLogFile(new Path(logFile)), getExpectedInstancePaths(dataPath));
+            Assert.assertEquals(readLogFile(new Path(logFile)),
+                    getExpectedInstancePaths(dataPath.replaceAll(storageUrl, "")));
 
         } catch (Exception e) {
             Assert.fail("Unknown exception", e);
@@ -121,7 +131,7 @@ public class FeedEvictorTest {
     }
 
     private String getExpectedInstancePaths(String dataPath) {
-        StringBuffer newBuffer = new StringBuffer("instancePaths=");
+        StringBuilder newBuffer = new StringBuilder("instancePaths=");
         DateFormat format = new SimpleDateFormat("yyyyMMddHHmm");
         format.setTimeZone(TimeZone.getTimeZone("UTC"));
         String[] locs = dataPath.split("#");
@@ -132,7 +142,9 @@ public class FeedEvictorTest {
 
         for (int i = 0; i < locs.length; i++) {
             for (int j = 0, k = i * instances.length / locs.length; j < instances.length / locs.length; j++) {
-                String instancePath = locs[i].replaceAll("\\?\\{YEAR\\}", instances[j + k].substring(0, 4));
+                String[] paths = locs[i].split("=");
+                String path = paths[1];
+                String instancePath = path.replaceAll("\\?\\{YEAR\\}", instances[j + k].substring(0, 4));
                 instancePath = instancePath.replaceAll("\\?\\{MONTH\\}", instances[j + k].substring(4, 6));
                 instancePath = instancePath.replaceAll("\\?\\{DAY\\}", instances[j + k].substring(6, 8));
                 instancePath = instancePath.replaceAll("\\?\\{HOUR\\}", instances[j + k].substring(8, 10));
@@ -182,19 +194,27 @@ public class FeedEvictorTest {
             fs.delete(new Path("/"), true);
             stream.clear();
 
-            Pair<List<String>, List<String>> pair;
-            pair = createTestData("feed2", "yyyyMMddHH/'more'/yyyy", 5, TimeUnit.HOURS, "/data");
-            String dataPath = "/data/YYYY/feed2/mmHH/dd/MM/?{YEAR}?{MONTH}?{DAY}?{HOUR}/more/?{YEAR}";
+            Pair<List<String>, List<String>> pair =
+                    createTestData("feed2", "yyyyMMddHH/'more'/yyyy", 5, TimeUnit.HOURS, "/data");
+            final String storageUrl = cluster.getConf().get("fs.default.name");
+            String dataPath = LocationType.DATA.name() + "="
+                    + storageUrl + "/data/YYYY/feed2/mmHH/dd/MM/?{YEAR}?{MONTH}?{DAY}?{HOUR}/more/?{YEAR}";
             String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-02-00.csv";
             FeedEvictor.main(new String[]{
-                "-feedBasePath", cluster.getConf().get("fs.default.name") + dataPath,
-                "-retentionType", "instance", "-retentionLimit", "hours(5)", "-timeZone", "UTC", "-frequency",
-                "hourly", "-logFile", logFile, });
+                "-feedBasePath", dataPath,
+                "-retentionType", "instance",
+                "-retentionLimit", "hours(5)",
+                "-timeZone", "UTC",
+                "-frequency", "hourly",
+                "-logFile", logFile,
+                "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
+            });
             assertFailures(fs, pair);
 
             compare(map.get("feed2"), stream.getBuffer());
 
-            Assert.assertEquals(readLogFile(new Path(logFile)), getExpectedInstancePaths(dataPath));
+            Assert.assertEquals(readLogFile(new Path(logFile)),
+                    getExpectedInstancePaths(dataPath.replaceAll(storageUrl, "")));
 
         } catch (Exception e) {
             Assert.fail("Unknown exception", e);
@@ -210,21 +230,31 @@ public class FeedEvictorTest {
             fs.delete(new Path("/"), true);
             stream.clear();
 
-            Pair<List<String>, List<String>> pair;
-            pair = createTestData("/data");
+            Pair<List<String>, List<String>> pair = createTestData("/data");
             FeedEvictor.main(new String[] {
-                "-feedBasePath",
-                cluster.getConf().get("fs.default.name") + "/data/YYYY/feed3/dd/MM/?{MONTH}/more/?{HOUR}",
-                "-retentionType", "instance", "-retentionLimit", "months(5)", "-timeZone",
-                "UTC", "-frequency", "hourly", "-logFile", hdfsUrl + "/falcon/staging/feed/2012-01-01-04-00", });
+                "-feedBasePath", LocationType.DATA.name() + "="
+                    + cluster.getConf().get("fs.default.name") + "/data/YYYY/feed3/dd/MM/?{MONTH}/more/?{HOUR}",
+                "-retentionType", "instance",
+                "-retentionLimit", "months(5)",
+                "-timeZone", "UTC",
+                "-frequency", "hourly",
+                "-logFile", conf.get("fs.default.name") + "/falcon/staging/feed/2012-01-01-04-00",
+                "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
+            });
             Assert.assertEquals("instances=NULL", stream.getBuffer());
 
             stream.clear();
             String dataPath = "/data/YYYY/feed4/dd/MM/02/more/hello";
             String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-02-00.csv";
-            FeedEvictor.main(new String[] {"-feedBasePath",
-                cluster.getConf().get("fs.default.name") + dataPath, "-retentionType", "instance",
-                "-retentionLimit", "hours(5)", "-timeZone", "UTC", "-frequency", "hourly", "-logFile", logFile, });
+            FeedEvictor.main(new String[] {
+                "-feedBasePath", LocationType.DATA.name() + "=" + cluster.getConf().get("fs.default.name") + dataPath,
+                "-retentionType", "instance",
+                "-retentionLimit", "hours(5)",
+                "-timeZone", "UTC",
+                "-frequency", "hourly",
+                "-logFile", logFile,
+                "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
+            });
             Assert.assertEquals("instances=NULL", stream.getBuffer());
 
             Assert.assertEquals(readLogFile(new Path(logFile)), getExpectedInstancePaths(dataPath));
@@ -243,28 +273,38 @@ public class FeedEvictorTest {
             fs.delete(new Path("/"), true);
             stream.clear();
 
-            Pair<List<String>, List<String>> pair, statsPair, metaPair, tmpPair;
-            pair = createTestData("/data");
-            statsPair = createTestData("/stats");
-            metaPair = createTestData("/meta");
-            tmpPair = createTestData("/tmp");
-            FeedEvictor.main(new String[] {"-feedBasePath",
-                getFeedBasePath("/data") + "#"
-                    + getFeedBasePath("/stats") + "#"
-                    + getFeedBasePath("/meta") + "#"
-                    + getFeedBasePath("/tmp"),
-                "-retentionType", "instance", "-retentionLimit", "months(5)", "-timeZone",
-                "UTC", "-frequency", "hourly", "-logFile", hdfsUrl + "/falcon/staging/feed/2012-01-01-04-00", });
+            Pair<List<String>, List<String>> pair = createTestData("/data");
+            createTestData("/stats");
+            createTestData("/meta");
+            createTestData("/tmp");
+            final String storageUrl = cluster.getConf().get("fs.default.name");
+            FeedEvictor.main(new String[] {
+                "-feedBasePath", getFeedBasePath(LocationType.DATA, storageUrl)
+                + "#" + getFeedBasePath(LocationType.STATS, storageUrl)
+                    + "#" + getFeedBasePath(LocationType.META, storageUrl)
+                    + "#" + getFeedBasePath(LocationType.TMP, storageUrl),
+                "-retentionType", "instance",
+                "-retentionLimit", "months(5)",
+                "-timeZone", "UTC",
+                "-frequency", "hourly",
+                "-logFile", conf.get("fs.default.name") + "/falcon/staging/feed/2012-01-01-04-00",
+                "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
+            });
             Assert.assertEquals("instances=NULL", stream.getBuffer());
 
             stream.clear();
-            String dataPath = "/data/YYYY/feed4/dd/MM/02/more/hello";
+            String dataPath = LocationType.DATA.name() + "="
+                    + cluster.getConf().get("fs.default.name") + "/data/YYYY/feed4/dd/MM/02/more/hello";
             String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-02-00.csv";
-            FeedEvictor.main(new String[]{"-feedBasePath",
-                                          cluster.getConf().get("fs.default.name") + dataPath,
-                                          "-retentionType", "instance", "-retentionLimit",
-                                          "hours(5)", "-timeZone", "UTC", "-frequency", "hourly",
-                                          "-logFile", logFile, });
+            FeedEvictor.main(new String[]{
+                "-feedBasePath", dataPath,
+                "-retentionType", "instance",
+                "-retentionLimit", "hours(5)",
+                "-timeZone", "UTC",
+                "-frequency", "hourly",
+                "-logFile", logFile,
+                "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
+            });
             Assert.assertEquals("instances=NULL", stream.getBuffer());
 
             Assert.assertEquals(readLogFile(new Path(logFile)), getExpectedInstancePaths(dataPath));
@@ -283,29 +323,32 @@ public class FeedEvictorTest {
             fs.delete(new Path("/"), true);
             stream.clear();
 
-            Pair<List<String>, List<String>> pair, statsPair, metaPair;
-            pair = createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/data");
-            statsPair = createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/stats");
-            metaPair = createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/meta");
-            String dataPath = cluster.getConf().get("fs.default.name")
-                    + "/data/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}"
-                    + "#"
-                    + cluster.getConf().get("fs.default.name")
-                    + "/stats/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}"
-                    + "#"
-                    + cluster.getConf().get("fs.default.name")
-                    + "/meta/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}";
+            Pair<List<String>, List<String>> pair =
+                    createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/data");
+            createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/stats");
+            createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/meta");
+
+            final String storageUrl = cluster.getConf().get("fs.default.name");
+            String dataPath =
+                    "DATA=" + storageUrl + "/data/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}"
+                    + "#STATS=" + storageUrl + "/stats/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}"
+                    + "#META=" + storageUrl + "/meta/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}";
             String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-01-00.csv";
 
-            FeedEvictor.main(new String[] {"-feedBasePath", dataPath, "-retentionType", "instance",
-                "-retentionLimit", "days(10)", "-timeZone", "UTC", "-frequency", "daily", "-logFile", logFile, });
+            FeedEvictor.main(new String[] {
+                "-feedBasePath", dataPath,
+                "-retentionType", "instance",
+                "-retentionLimit", "days(10)",
+                "-timeZone", "UTC",
+                "-frequency", "daily",
+                "-logFile", logFile,
+                "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
+            });
 
             assertFailures(fs, pair);
 
             Assert.assertEquals(readLogFile(new Path(logFile)),
-                    getExpectedInstancePaths(dataPath.replaceAll(cluster
-                            .getConf().get("fs.default.name"), "")));
-
+                    getExpectedInstancePaths(dataPath.replaceAll(storageUrl, "")));
 
         } catch (Exception e) {
             Assert.fail("Unknown exception", e);
@@ -375,10 +418,10 @@ public class FeedEvictorTest {
                 outOfRange.add(path);
             } else {
                 inRange.add(path);
-                buffer.append((displayFormat.format(date) + "0000").
-                        substring(0, 12)).append(',');
+                buffer.append((displayFormat.format(date) + "0000").substring(0, 12)).append(',');
             }
         }
+
         map.put(feed, "instances=" + buffer.substring(0, buffer.length() - 1));
         return Pair.of(inRange, outOfRange);
     }
@@ -387,10 +430,9 @@ public class FeedEvictorTest {
         fs.create(new Path(path)).close();
     }
 
-    private String getFeedBasePath(String locationType) {
-        return cluster.getConf().get("fs.default.name")
-                + "/data/YYYY/feed3/dd/MM/"
-                + "?{MONTH}/more/?{HOUR}";
+    private String getFeedBasePath(LocationType locationType, String storageUrl) {
+        return locationType.name() + "=" + storageUrl
+                + "/" + locationType.name().toLowerCase() + "/data/YYYY/feed3/dd/MM/?{MONTH}/more/?{HOUR}";
     }
 
     private static class InMemoryWriter extends PrintStream {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/bin/falcon
----------------------------------------------------------------------
diff --git a/src/bin/falcon b/src/bin/falcon
index 696f606..08478d7 100755
--- a/src/bin/falcon
+++ b/src/bin/falcon
@@ -28,27 +28,11 @@ done
 
 BASEDIR=`dirname ${PRG}`
 BASEDIR=`cd ${BASEDIR}/..;pwd`
-
-FALCONCPPATH="$FALCON_CONF:${BASEDIR}/conf:"
-for i in "${BASEDIR}/client/lib/"*.jar; do
-  FALCONCPPATH="${FALCONCPPATH}:$i"
-done
-for i in "${BASEDIR}/server/webapp/"*/WEB-INF/lib/*.jar; do
-  FALCONCPPATH="${FALCONCPPATH}:$i"
-done
-
-
-if test -z ${JAVA_HOME}
-then
-    JAVA_BIN=java
-else
-    JAVA_BIN=${JAVA_HOME}/bin/java
-fi
+. ${BASEDIR}/bin/falcon-config.sh 'client'
 
 JAVA_PROPERTIES="$FALCON_OPTS"
 while [[ ${1} =~ ^\-D ]]; do
   JAVA_PROPERTIES="${JAVA_PROPERTIES} ${1}"
   shift
 done
-${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${FALCONCPPATH} org.apache.falcon.cli.FalconCLI "${@}"
-
+${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${FALCONCPPATH} org.apache.falcon.cli.FalconCLI "${@}"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/bin/falcon-config.sh
----------------------------------------------------------------------
diff --git a/src/bin/falcon-config.sh b/src/bin/falcon-config.sh
new file mode 100644
index 0000000..3f4616a
--- /dev/null
+++ b/src/bin/falcon-config.sh
@@ -0,0 +1,120 @@
+#!/bin/bash
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License. See accompanying LICENSE file.
+#
+
+# resolve links - $0 may be a softlink
+PRG="${0}"
+
+while [ -h "${PRG}" ]; do
+  ls=`ls -ld "${PRG}"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '/.*' > /dev/null; then
+    PRG="$link"
+  else
+    PRG=`dirname "${PRG}"`/"$link"
+  fi
+done
+
+BASEDIR=`dirname ${PRG}`
+BASEDIR=`cd ${BASEDIR}/..;pwd`
+
+if [ -z "$FALCON_CONF" ]; then
+  FALCON_CONF=${BASEDIR}/conf
+fi
+export FALCON_CONF
+
+if [ -f "${FALCON_CONF}/falcon-env.sh" ]; then
+  . "${FALCON_CONF}/falcon-env.sh"
+fi
+
+if test -z ${JAVA_HOME}
+then
+    JAVA_BIN=java
+    JAR_BIN=jar
+else
+    JAVA_BIN=${JAVA_HOME}/bin/java
+    JAR_BIN=${JAVA_HOME}/bin/jar
+fi
+export JAVA_BIN
+
+# default the heap size to 1GB
+DEFAULT_JAVA_HEAP_MAX=-Xmx1024m
+FALCON_OPTS="$DEFAULT_JAVA_HEAP_MAX $FALCON_OPTS"
+
+type="$1"
+shift
+case $type in
+  client)
+    # set the client class path
+    FALCONCPPATH="$FALCON_CONF:${BASEDIR}/client/lib/*"
+    for i in `ls ${BASEDIR}/server/webapp`; do
+      FALCONCPPATH="${FALCONCPPATH}:${i}/WEB-INF/lib/*"
+    done
+    FALCON_OPTS="$FALCON_OPTS $FALCON_CLIENT_OPTS $FALCON_CLIENT_HEAP"
+  ;;
+  server)
+    app="$1"
+    if [ 'prism' == "$app" ]; then
+      FALCON_OPTS="$FALCON_OPTS $FALCON_PRISM_OPTS $FALCON_PRISM_HEAP"
+    elif [ 'falcon' == "$app" ]; then
+      FALCON_OPTS="$FALCON_OPTS $FALCON_SERVER_OPTS $FALCON_SERVER_HEAP"
+    else
+      echo "Invalid option for app: ${app}. Valid choices are falcon and prism"
+      exit 1
+    fi
+    FALCON_EXPANDED_WEBAPP_DIR=${FALCON_EXPANDED_WEBAPP_DIR:-${BASEDIR}/server/webapp}
+    export FALCON_EXPANDED_WEBAPP_DIR
+    # set the server classpath
+    if [ ! -d ${FALCON_EXPANDED_WEBAPP_DIR}/$app/WEB-INF ]; then
+      mkdir -p ${FALCON_EXPANDED_WEBAPP_DIR}/$app
+      cd ${FALCON_EXPANDED_WEBAPP_DIR}/$app
+      $JAR_BIN -xf ${BASEDIR}/server/webapp/$app.war
+      cd -
+    fi
+    
+    FALCONCPPATH="$FALCON_CONF:${FALCON_EXPANDED_WEBAPP_DIR}/$app/WEB-INF/classes"
+    FALCONCPPATH="${FALCONCPPATH}:${FALCON_EXPANDED_WEBAPP_DIR}/$app/WEB-INF/lib/*:${BASEDIR}/libext/*"
+    
+    HADOOPDIR=`which hadoop`
+    if [ "$HADOOPDIR" != "" ]; then
+      echo "Hadoop is installed, adding hadoop classpath to falcon classpath"
+      FALCONCPPATH="${FALCONCPPATH}:`hadoop classpath`"
+    elif [ "$HADOOP_HOME" != "" ]; then
+      echo "Hadoop home is set, adding ${HADOOP_HOME}/lib/* into falcon classpath"
+      FALCONCPPATH="${FALCONCPPATH}:${HADOOP_HOME}/lib/*"
+    else
+      echo "Could not find installed hadoop and HADOOP_HOME is not set."
+      echo "Using the default jars bundled in ${BASEDIR}/hadooplibs/"
+      FALCONCPPATH="${FALCONCPPATH}:${BASEDIR}/hadooplibs/*"
+    fi
+    # log and pid dirs for applications
+    FALCON_LOG_DIR="${FALCON_LOG_DIR:-$BASEDIR/logs}"
+    export FALCON_LOG_DIR
+    FALCON_PID_DIR="${FALCON_PID_DIR:-$BASEDIR/logs}"
+    # create the pid dir if its not there
+    [ -w "$FALCON_PID_DIR" ] ||  mkdir -p "$FALCON_PID_DIR"
+    export FALCON_PID_DIR
+    FALCON_PID_FILE=${FALCON_PID_DIR}/${app}.pid
+    export FALCON_PID_FILE
+    FALCON_DATA_DIR=${FALCON_DATA_DIR:-${BASEDIR}/logs/data}
+    FALCON_HOME_DIR="${FALCON_HOME_DIR:-$BASEDIR}"
+    export FALCON_HOME_DIR
+  ;;
+  *)
+    echo "Invalid option for type: $type"
+    exit 1
+  ;;
+esac
+export FALCONCPPATH
+export FALCON_OPTS

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/bin/package.sh
----------------------------------------------------------------------
diff --git a/src/bin/package.sh b/src/bin/package.sh
index b685aff..417e7e6 100755
--- a/src/bin/package.sh
+++ b/src/bin/package.sh
@@ -48,18 +48,18 @@ popd
 
 mkdir -p ${PACKAGE_HOME}
 pushd ${PACKAGE_HOME}
-rm -rf oozie-3.2.0-incubating*
-echo "Getting oozie release tar ball of version 3.2.0-incubating ..."
-curl "http://www.gtlib.gatech.edu/pub/apache/oozie/3.2.0-incubating/oozie-3.2.0-incubating.tar.gz" -o oozie-3.2.0-incubating.tgz
-tar -xzvf oozie-3.2.0-incubating.tgz 2> /dev/null
-rm oozie-3.2.0-incubating.tgz
-cd oozie-3.2.0-incubating
-echo "Patching oozie with falcon extensions and marking version as 3.2.2 (custom) ..."
-patch -p0 < ${FALCON_SRC}/oozie-3.2.0-incubating-el.patch
-patch -p0 < ${FALCON_SRC}/oozie-bundle-el-extension.patch
+rm -rf oozie-*
+echo "Getting oozie release tar ball of version 4.0.0 ..."
+curl "http://www.apache.org/dist/oozie/4.0.0/oozie-4.0.0.tar.gz" -o oozie-4.0.0.tgz
+tar -xzvf oozie-4.0.0.tgz 2> /dev/null
+rm oozie-4.0.0.tgz
+cd oozie-4.0.0
+echo "Patching oozie with falcon extensions and marking version as 4.0.0 ..."
+patch -p0 < ${FALCON_SRC}/build-tools/src/patch/oozie-4.0.0-falcon.patch
+patch -p0 < ${FALCON_SRC}/build-tools/src/patch/oozie-bundle-el-extension.patch
 echo "Building oozie & creating tar ball ..."
 bin/mkdistro.sh -DskipTests > /dev/null
 
 echo "Falcon pacakge is available in ${FALCON_SRC}/target/falcon-<<version>>/falcon-<<version>>.tar.gz"
-echo "Oozie pacakge is available in ${FALCON_SRC}/target/package/oozie-3.2.0-incubating/distro/target/oozie-3.2.2-distro.tar.gz"
+echo "Oozie pacakge is available in ${FALCON_SRC}/target/package/oozie-4.0.0/distro/target/oozie-4.0.0-distro.tar.gz"
 popd

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/bin/service-start.sh
----------------------------------------------------------------------
diff --git a/src/bin/service-start.sh b/src/bin/service-start.sh
index f72e723..d4950d5 100755
--- a/src/bin/service-start.sh
+++ b/src/bin/service-start.sh
@@ -28,60 +28,22 @@ done
 
 BASEDIR=`dirname ${PRG}`
 BASEDIR=`cd ${BASEDIR}/..;pwd`
-
-
-mkdir -p ${BASEDIR}/logs
-
-if test -z ${JAVA_HOME}
-then
-    JAVA_BIN=java
-else
-    JAVA_BIN=${JAVA_HOME}/bin/java
-fi
-
-pushd ${BASEDIR} > /dev/null
-
 APP_TYPE=$1
-if [ ! -d ${BASEDIR}/server/webapp/$APP_TYPE/WEB-INF ]; then
-  mkdir -p ${BASEDIR}/server/webapp/$APP_TYPE
-  cd ${BASEDIR}/server/webapp/$APP_TYPE
-  jar -xf ../$APP_TYPE.war
-  cd -
-fi
+. ${BASEDIR}/bin/falcon-config.sh 'server' "$APP_TYPE"
 
-FALCONCPPATH="$FALCON_CONF:${BASEDIR}/conf:${BASEDIR}/server/webapp/$APP_TYPE/WEB-INF/classes:"
-for i in "${BASEDIR}/server/webapp/$APP_TYPE/WEB-INF/lib/"*.jar; do
-  FALCONCPPATH="${FALCONCPPATH}:$i"
-done
+# make sure the process is not running
+if [ -f $FALCON_PID_FILE ]; then
+  if kill -0 `cat $FALCON_PID_FILE` > /dev/null 2>&1; then
+    echo $APP_TYPE running as process `cat $FALCON_PID_FILE`.  Stop it first.
+    exit 1
+  fi
+fi
 
-for i in "${BASEDIR}/libext/"*.jar; do
-  FALCONCPPATH="${FALCONCPPATH}:$i"
-done
+mkdir -p $FALCON_LOG_DIR
 
-HADOOPDIR=`which hadoop`
-if [ "$HADOOPDIR" != "" ]; then
-  echo "Hadoop is installed, adding hadoop classpath to falcon classpath"
-  FALCONCPPATH="${FALCONCPPATH}:`hadoop classpath`"
-elif [ "$HADOOP_HOME" != "" ]; then
-  echo "Hadoop home is set, adding ${HADOOP_HOME}/lib/* into falcon classpath"
-  for i in "${HADOOP_HOME}/lib/"*.jar; do
-    FALCONCPPATH="${FALCONCPPATH}:$i"
-  done
-else
-  echo "Could not find installed hadoop and HADOOP_HOME is not set."
-  echo "Using the default jars bundled in ${BASEDIR}/hadooplibs/"
-  for i in "${BASEDIR}/hadooplibs/"*.jar; do
-    FALCONCPPATH="${FALCONCPPATH}:$i"
-  done
-fi
+pushd ${BASEDIR} > /dev/null
 
-if [ -z "$FALCON_CONF" ]; then
-  CONF_PATH=${BASEDIR}/conf
-else
-  CONF_PATH=$FALCON_CONF
-fi
- 
-JAVA_PROPERTIES="$FALCON_OPTS $FALCON_PROPERTIES -Dfalcon.embeddedmq.data=${BASEDIR}/logs/data -Dfalcon.home=${BASEDIR} -Dconfig.location=$CONF_PATH"
+JAVA_PROPERTIES="$FALCON_OPTS $FALCON_PROPERTIES -Dfalcon.log.dir=$FALCON_LOG_DIR -Dfalcon.embeddedmq.data=$FALCON_DATA_DIR -Dfalcon.home=${FALCON_HOME_DIR} -Dconfig.location=$FALCON_CONF -Dfalcon.app.type=$APP_TYPE"
 shift
 
 while [[ ${1} =~ ^\-D ]]; do
@@ -90,9 +52,8 @@ while [[ ${1} =~ ^\-D ]]; do
 done
 TIME=`date +%Y%m%d%H%M%s`
 
-
-nohup ${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${FALCONCPPATH} org.apache.falcon.Main -app ${BASEDIR}/server/webapp/*.war $* 2> ${BASEDIR}/logs/$APP_TYPE.out.$TIME &
-echo $! > ${BASEDIR}/logs/$APP_TYPE.pid
+nohup ${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${FALCONCPPATH} org.apache.falcon.Main -app ${BASEDIR}/server/webapp/${APP_TYPE}.war $* > "${FALCON_LOG_DIR}/$APP_TYPE.out.$TIME" 2>&1 < /dev/null &
+echo $! > $FALCON_PID_FILE
 popd > /dev/null
 
-echo "Falcon started using hadoop version: " `${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${FALCONCPPATH} org.apache.hadoop.util.VersionInfo | head -1`
+echo "$APP_TYPE started using hadoop version: " `${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${FALCONCPPATH} org.apache.hadoop.util.VersionInfo | head -1`

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/bin/service-stop.sh
----------------------------------------------------------------------
diff --git a/src/bin/service-stop.sh b/src/bin/service-stop.sh
index 4dc62a6..5c08c29 100755
--- a/src/bin/service-stop.sh
+++ b/src/bin/service-stop.sh
@@ -30,11 +30,12 @@ BASEDIR=`dirname ${PRG}`
 BASEDIR=`cd ${BASEDIR}/..;pwd`
 
 APP_TYPE=$1
-PID_FILE=${BASEDIR}/logs/$APP_TYPE.pid
-if [ -f $PID_FILE ]
+. ${BASEDIR}/bin/falcon-config.sh 'server' "$APP_TYPE"
+
+if [ -f $FALCON_PID_FILE ]
 then
-   kill -15 `cat $PID_FILE`
+   kill -15 `cat $FALCON_PID_FILE`
 else
-   echo "pid file $PID_FILE not present"
+   echo "pid file $FALCON_PID_FILE not present"
 fi
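The stop script sends SIGTERM and returns immediately. A caller that needs to block until shutdown completes could poll with kill -0, along these lines (this wrapper is illustrative and not part of the commit):

    # Send SIGTERM, then wait up to 30 seconds for the process to exit.
    PID=`cat $FALCON_PID_FILE`
    kill -15 "$PID"
    for i in `seq 1 30`; do
        kill -0 "$PID" > /dev/null 2>&1 || break
        sleep 1
    done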
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/conf/falcon-env.sh
----------------------------------------------------------------------
diff --git a/src/conf/falcon-env.sh b/src/conf/falcon-env.sh
new file mode 100644
index 0000000..7d4a55e
--- /dev/null
+++ b/src/conf/falcon-env.sh
@@ -0,0 +1,55 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# The java implementation to use. If JAVA_HOME is not set, java and jar are expected to be on the PATH
+#export JAVA_HOME=
+
+# any additional java opts you want to set. This will apply to both client and server operations
+#export FALCON_OPTS=
+
+# any additional java opts that you want to set for client only
+#export FALCON_CLIENT_OPTS=
+
+# java heap size we want to set for the client. Default is 1024MB
+#export FALCON_CLIENT_HEAP=
+
+# any additional opts you want to set for the prism service.
+#export FALCON_PRISM_OPTS=
+
+# java heap size we want to set for the prism service. Default is 1024MB
+#export FALCON_PRISM_HEAP=
+
+# any additional opts you want to set for the falcon service.
+#export FALCON_SERVER_OPTS=
+
+# java heap size we want to set for the falcon server. Default is 1024MB
+#export FALCON_SERVER_HEAP=
+
+# What is considered the falcon home dir. Default is the base location of the installed software
+#export FALCON_HOME_DIR=
+
+# Where log files are stored. Default is the logs directory under the base install location
+#export FALCON_LOG_DIR=
+
+# Where pid files are stored. Default is the logs directory under the base install location
+#export FALCON_PID_DIR=
+
+# Where the falcon ActiveMQ data is stored. Default is the logs/data directory under the base install location
+#export FALCON_DATA_DIR=
+
+# Where to expand the war file. Default is the server/webapp directory under the base install location.
+#export FALCON_EXPANDED_WEBAPP_DIR=
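All of these variables are optional overrides; the defaults are computed at startup. A minimal sketch of how such an env file is typically consumed, assuming ${VAR:-default} expansion (the real logic lives in falcon-config.sh, which is not part of this excerpt):

    # Source user overrides if present, then fill in defaults.
    [ -f "${BASEDIR}/conf/falcon-env.sh" ] && . "${BASEDIR}/conf/falcon-env.sh"
    FALCON_LOG_DIR=${FALCON_LOG_DIR:-"${BASEDIR}/logs"}
    FALCON_PID_DIR=${FALCON_PID_DIR:-"$FALCON_LOG_DIR"}
    FALCON_DATA_DIR=${FALCON_DATA_DIR:-"$FALCON_LOG_DIR/data"}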

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/conf/log4j.xml
----------------------------------------------------------------------
diff --git a/src/conf/log4j.xml b/src/conf/log4j.xml
index 53b9a96..0b28ddd 100644
--- a/src/conf/log4j.xml
+++ b/src/conf/log4j.xml
@@ -24,9 +24,8 @@
 <!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
 
 <log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
-
     <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
-        <param name="File" value="${user.dir}/logs/application.log"/>
+        <param name="File" value="${falcon.log.dir}/${falcon.app.type}.application.log"/>
         <param name="Append" value="true"/>
         <param name="Threshold" value="debug"/>
         <layout class="org.apache.log4j.PatternLayout">
@@ -35,7 +34,7 @@
     </appender>
 
     <appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender">
-        <param name="File" value="${user.dir}/logs/audit.log"/>
+        <param name="File" value="${falcon.log.dir}/${falcon.app.type}.audit.log"/>
         <param name="Append" value="true"/>
         <param name="Threshold" value="debug"/>
         <layout class="org.apache.log4j.PatternLayout">
@@ -44,7 +43,7 @@
     </appender>
 
     <appender name="TRANSACTIONLOG" class="org.apache.log4j.DailyRollingFileAppender">
-        <param name="File" value="${user.dir}/logs/tranlog.log"/>
+        <param name="File" value="${falcon.log.dir}/${falcon.app.type}.tranlog.log"/>
         <param name="Append" value="true"/>
         <param name="Threshold" value="debug"/>
         <layout class="org.apache.log4j.PatternLayout">
@@ -53,7 +52,7 @@
     </appender>
 
     <appender name="METRIC" class="org.apache.log4j.DailyRollingFileAppender">
-        <param name="File" value="${user.dir}/logs/metric.log"/>
+        <param name="File" value="${falcon.log.dir}/${falcon.app.type}.metric.log"/>
         <param name="Append" value="true"/>
         <param name="Threshold" value="debug"/>
         <layout class="org.apache.log4j.PatternLayout">
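The ${falcon.log.dir} and ${falcon.app.type} placeholders in these File params are JVM system properties, so log4j resolves them only if the launcher passes matching -D flags, as service-start.sh now does. An illustrative invocation (values are examples only):

    java -Dfalcon.log.dir=/var/log/falcon -Dfalcon.app.type=prism \
         -cp "$FALCONCPPATH" org.apache.falcon.Main -app server/webapp/prism.war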

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/conf/runtime.properties
----------------------------------------------------------------------
diff --git a/src/conf/runtime.properties b/src/conf/runtime.properties
index 2192126..677e5e3 100644
--- a/src/conf/runtime.properties
+++ b/src/conf/runtime.properties
@@ -20,7 +20,7 @@
 ####    This is used for falcon packaging only. ####
 ####################################################
 
-*.domain=prism
+*.domain=${falcon.app.type}
 
 *.log.cleanup.frequency.minutes.retention=hours(6)
 *.log.cleanup.frequency.hours.retention=minutes(1)

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/conf/startup.properties
----------------------------------------------------------------------
diff --git a/src/conf/startup.properties b/src/conf/startup.properties
index 6b8617e..3d55aee 100644
--- a/src/conf/startup.properties
+++ b/src/conf/startup.properties
@@ -20,7 +20,7 @@
 ####    This is used for falcon packaging only. ####
 ####################################################
 
-*.domain=server
+*.domain=${falcon.app.type}
 
 ######### Implementation classes #########
 ## DONT MODIFY UNLESS SURE ABOUT CHANGE ##
@@ -31,6 +31,8 @@
 *.SchedulableEntityManager.impl=org.apache.falcon.resource.SchedulableEntityManager
 *.ConfigSyncService.impl=org.apache.falcon.resource.ConfigSyncService
 *.ProcessInstanceManager.impl=org.apache.falcon.resource.InstanceManager
+*.catalog.service.impl=org.apache.falcon.catalog.HiveCatalogService
+
 *.application.services=org.apache.falcon.entity.store.ConfigurationStore,\
                         org.apache.falcon.service.ProcessSubscriberService,\
                         org.apache.falcon.rerun.service.RetryService,\
@@ -54,7 +56,7 @@ prism.configstore.listeners=org.apache.falcon.entity.v0.EntityGraph,\
 *.system.lib.location=${falcon.home}/server/webapp/falcon/WEB-INF/lib
 prism.system.lib.location=${falcon.home}/server/webapp/prism/WEB-INF/lib
 *.broker.url=tcp://localhost:61616
-*.retry.recorder.path=${falcon.home}/logs/retry
+*.retry.recorder.path=${falcon.log.dir}/retry
 
 *.falcon.cleanup.service.frequency=days(1)
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/main/assemblies/bin-package.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/bin-package.xml b/src/main/assemblies/bin-package.xml
deleted file mode 100644
index 061ad19..0000000
--- a/src/main/assemblies/bin-package.xml
+++ /dev/null
@@ -1,106 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
-    <formats>
-        <format>tar.gz</format>
-    </formats>
-    <id>bin</id>
-    <baseDirectory>falcon-server-${project.version}</baseDirectory>
-    <fileSets>
-        <fileSet>
-            <directory>src/conf/</directory>
-            <outputDirectory>conf</outputDirectory>
-            <excludes>
-                <exclude>prism-client.properties</exclude>
-            </excludes>
-        </fileSet>
-
-        <fileSet>
-            <directory>client/target/dependency</directory>
-            <outputDirectory>client/lib</outputDirectory>
-        </fileSet>
-
-        <fileSet>
-            <directory>docs/target/site</directory>
-            <outputDirectory>docs</outputDirectory>
-        </fileSet>
-
-        <fileSet>
-            <directory>webapp/target/apidocs</directory>
-            <outputDirectory>apidocs</outputDirectory>
-        </fileSet>
-
-        <fileSet>
-            <directory>src/bin</directory>
-            <outputDirectory>bin</outputDirectory>
-            <fileMode>0755</fileMode>
-            <directoryMode>0755</directoryMode>
-            <includes>
-                <include>falcon</include>
-                <include>falcon-start</include>
-                <include>falcon-stop</include>
-                <include>service-start.sh</include>
-                <include>service-stop.sh</include>
-            </includes>
-        </fileSet>
-
-        <fileSet>
-            <directory>logs</directory>
-            <outputDirectory>logs</outputDirectory>
-            <directoryMode>0777</directoryMode>
-            <excludes>
-                <exclude>*</exclude>
-                <exclude>**/**</exclude>
-            </excludes>
-        </fileSet>
-
-        <fileSet>
-            <directory>./</directory>
-            <outputDirectory>/</outputDirectory>
-            <includes>
-                <include>DISCLAIMER.txt</include>
-                <include>NOTICE.txt</include>
-                <include>LICENSE.txt</include>
-                <include>CHANGES.txt</include>
-                <include>README</include>
-            </includes>
-        </fileSet>
-
-        <fileSet>
-            <directory>hadoop-dependencies/target/dependency</directory>
-            <outputDirectory>hadooplibs</outputDirectory>
-        </fileSet>
-    </fileSets>
-
-    <files>
-        <file>
-            <source>client/target/falcon-client-${project.version}.jar</source>
-            <outputDirectory>client/lib</outputDirectory>
-        </file>
-
-        <file>
-            <source>webapp/target/falcon-webapp-${project.version}.war</source>
-            <outputDirectory>server/webapp</outputDirectory>
-            <destName>falcon.war</destName>
-        </file>
-    </files>
-</assembly>
-    

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/main/assemblies/client-package.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/client-package.xml b/src/main/assemblies/client-package.xml
deleted file mode 100644
index e0c75f5..0000000
--- a/src/main/assemblies/client-package.xml
+++ /dev/null
@@ -1,73 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
-    <formats>
-        <format>tar.gz</format>
-    </formats>
-    <id>client</id>
-    <baseDirectory>falcon-client-${project.version}</baseDirectory>
-    <fileSets>
-        <fileSet>
-            <directory>client/target/dependency</directory>
-            <outputDirectory>client/lib</outputDirectory>
-        </fileSet>
-
-        <fileSet>
-            <directory>docs/target/site</directory>
-            <outputDirectory>docs</outputDirectory>
-        </fileSet>
-
-        <fileSet>
-            <directory>webapp/target/apidocs</directory>
-            <outputDirectory>apidocs</outputDirectory>
-        </fileSet>
-
-        <fileSet>
-            <directory>./</directory>
-            <outputDirectory>/</outputDirectory>
-            <includes>
-                <include>DISCLAIMER.txt</include>
-                <include>NOTICE.txt</include>
-                <include>LICENSE.txt</include>
-                <include>CHANGES.txt</include>
-                <include>README</include>
-            </includes>
-        </fileSet>
-    </fileSets>
-
-    <files>
-        <file>
-            <source>client/target/falcon-client-${project.version}.jar</source>
-            <outputDirectory>client/lib</outputDirectory>
-        </file>
-
-        <file>
-            <source>src/bin/falcon</source>
-            <outputDirectory>bin</outputDirectory>
-            <fileMode>0755</fileMode>
-        </file>
-
-        <file>
-            <source>src/conf/client.properties</source>
-            <outputDirectory>conf</outputDirectory>
-        </file>
-    </files>
-</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/main/assemblies/distributed-package.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distributed-package.xml b/src/main/assemblies/distributed-package.xml
new file mode 100644
index 0000000..42704cb
--- /dev/null
+++ b/src/main/assemblies/distributed-package.xml
@@ -0,0 +1,112 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+    <formats>
+        <format>tar.gz</format>
+    </formats>
+    <id>server</id>
+    <baseDirectory>falcon-distributed-${project.version}</baseDirectory>
+    <fileSets>
+        <fileSet>
+            <directory>src/conf/</directory>
+            <outputDirectory>conf</outputDirectory>
+            <excludes>
+                <exclude>client.properties</exclude>
+                <exclude>prism-client.properties</exclude>
+            </excludes>
+        </fileSet>
+
+        <fileSet>
+            <directory>client/target/dependency</directory>
+            <outputDirectory>client/lib</outputDirectory>
+        </fileSet>
+
+        <fileSet>
+            <directory>docs/target/site</directory>
+            <outputDirectory>docs</outputDirectory>
+        </fileSet>
+
+        <fileSet>
+            <directory>webapp/target/apidocs</directory>
+            <outputDirectory>apidocs</outputDirectory>
+        </fileSet>
+
+        <fileSet>
+            <directory>src/bin</directory>
+            <outputDirectory>bin</outputDirectory>
+            <fileMode>0755</fileMode>
+            <directoryMode>0755</directoryMode>
+        </fileSet>
+
+        <fileSet>
+            <directory>logs</directory>
+            <outputDirectory>logs</outputDirectory>
+            <directoryMode>0777</directoryMode>
+            <excludes>
+                <exclude>*</exclude>
+                <exclude>**/**</exclude>
+            </excludes>
+        </fileSet>
+
+        <fileSet>
+            <directory>./</directory>
+            <outputDirectory>/</outputDirectory>
+            <includes>
+                <include>DISCLAIMER.txt</include>
+                <include>NOTICE.txt</include>
+                <include>LICENSE.txt</include>
+                <include>CHANGES.txt</include>
+                <include>README</include>
+            </includes>
+        </fileSet>
+
+        <fileSet>
+            <directory>hadoop-dependencies/target/dependency</directory>
+            <outputDirectory>hadooplibs</outputDirectory>
+        </fileSet>
+    </fileSets>
+
+    <files>
+        <file>
+            <source>src/conf/prism-client.properties</source>
+            <outputDirectory>conf</outputDirectory>
+            <destName>client.properties</destName>
+        </file>
+
+        <file>
+            <source>webapp/target/falcon-webapp-${project.version}.war</source>
+            <outputDirectory>server/webapp</outputDirectory>
+            <destName>falcon.war</destName>
+        </file>
+
+        <file>
+            <source>prism/target/falcon-prism-${project.version}.war</source>
+            <outputDirectory>server/webapp</outputDirectory>
+            <destName>prism.war</destName>
+        </file>
+
+        <file>
+            <source>client/target/falcon-client-${project.version}.jar</source>
+            <outputDirectory>client/lib</outputDirectory>
+        </file>
+    </files>
+</assembly>
+    

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/main/assemblies/prism-package.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/prism-package.xml b/src/main/assemblies/prism-package.xml
deleted file mode 100644
index 434d8bb..0000000
--- a/src/main/assemblies/prism-package.xml
+++ /dev/null
@@ -1,102 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
-    <formats>
-        <format>tar.gz</format>
-    </formats>
-    <id>prism</id>
-    <baseDirectory>falcon-prism-${project.version}</baseDirectory>
-    <fileSets>
-        <fileSet>
-            <directory>src/conf/</directory>
-            <outputDirectory>conf</outputDirectory>
-            <excludes>
-                <exclude>client.properties</exclude>
-                <exclude>prism-client.properties</exclude>
-            </excludes>
-        </fileSet>
-
-        <fileSet>
-            <directory>docs/target/site</directory>
-            <outputDirectory>docs</outputDirectory>
-        </fileSet>
-
-        <fileSet>
-            <directory>webapp/target/apidocs</directory>
-            <outputDirectory>apidocs</outputDirectory>
-        </fileSet>
-
-        <fileSet>
-            <directory>src/bin</directory>
-            <outputDirectory>bin</outputDirectory>
-            <fileMode>0755</fileMode>
-            <directoryMode>0755</directoryMode>
-            <includes>
-                <include>falcon</include>
-                <include>prism-start</include>
-                <include>prism-stop</include>
-                <include>service-start.sh</include>
-                <include>service-stop.sh</include>
-            </includes>
-        </fileSet>
-
-        <fileSet>
-            <directory>logs</directory>
-            <outputDirectory>logs</outputDirectory>
-            <directoryMode>0777</directoryMode>
-            <excludes>
-                <exclude>*</exclude>
-                <exclude>**/**</exclude>
-            </excludes>
-        </fileSet>
-
-        <fileSet>
-            <directory>./</directory>
-            <outputDirectory>/</outputDirectory>
-            <includes>
-                <include>DISCLAIMER.txt</include>
-                <include>NOTICE.txt</include>
-                <include>LICENSE.txt</include>
-                <include>CHANGES.txt</include>
-                <include>README</include>
-            </includes>
-        </fileSet>
-
-        <fileSet>
-            <directory>hadoop-dependencies/target/dependency</directory>
-            <outputDirectory>hadooplibs</outputDirectory>
-        </fileSet>
-    </fileSets>
-
-    <files>
-        <file>
-            <source>src/conf/prism-client.properties</source>
-            <outputDirectory>conf</outputDirectory>
-            <destName>client.properties</destName>
-        </file>
-        
-        <file>
-            <source>prism/target/falcon-prism-${project.version}.war</source>
-            <outputDirectory>server/webapp</outputDirectory>
-            <destName>prism.war</destName>
-        </file>
-    </files>
-</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/main/assemblies/server-package.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/server-package.xml b/src/main/assemblies/server-package.xml
deleted file mode 100644
index c92ad17..0000000
--- a/src/main/assemblies/server-package.xml
+++ /dev/null
@@ -1,96 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
-    <formats>
-        <format>tar.gz</format>
-    </formats>
-    <id>server</id>
-    <baseDirectory>falcon-server-${project.version}</baseDirectory>
-    <fileSets>
-        <fileSet>
-            <directory>src/conf/</directory>
-            <outputDirectory>conf</outputDirectory>
-            <excludes>
-                <exclude>prism-client.properties</exclude>
-            </excludes>
-        </fileSet>
-
-        <fileSet>
-            <directory>docs/target/site</directory>
-            <outputDirectory>docs</outputDirectory>
-        </fileSet>
-
-        <fileSet>
-            <directory>webapp/target/apidocs</directory>
-            <outputDirectory>apidocs</outputDirectory>
-        </fileSet>
-
-        <fileSet>
-            <directory>src/bin</directory>
-            <outputDirectory>bin</outputDirectory>
-            <fileMode>0755</fileMode>
-            <directoryMode>0755</directoryMode>
-            <includes>
-                <include>falcon</include>
-                <include>falcon-start</include>
-                <include>falcon-stop</include>
-                <include>service-start.sh</include>
-                <include>service-stop.sh</include>
-            </includes>
-        </fileSet>
-
-        <fileSet>
-            <directory>logs</directory>
-            <outputDirectory>logs</outputDirectory>
-            <directoryMode>0777</directoryMode>
-            <excludes>
-                <exclude>*</exclude>
-                <exclude>**/**</exclude>
-            </excludes>
-        </fileSet>
-
-        <fileSet>
-            <directory>./</directory>
-            <outputDirectory>/</outputDirectory>
-            <includes>
-                <include>DISCLAIMER.txt</include>
-                <include>NOTICE.txt</include>
-                <include>LICENSE.txt</include>
-                <include>CHANGES.txt</include>
-                <include>README</include>
-            </includes>
-        </fileSet>
-
-        <fileSet>
-            <directory>hadoop-dependencies/target/dependency</directory>
-            <outputDirectory>hadooplibs</outputDirectory>
-        </fileSet>
-    </fileSets>
-
-    <files>
-        <file>
-            <source>webapp/target/falcon-webapp-${project.version}.war</source>
-            <outputDirectory>server/webapp</outputDirectory>
-            <destName>falcon.war</destName>
-        </file>
-    </files>
-</assembly>
-    

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/src/main/assemblies/standalone-package.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/standalone-package.xml b/src/main/assemblies/standalone-package.xml
new file mode 100644
index 0000000..1f307e9
--- /dev/null
+++ b/src/main/assemblies/standalone-package.xml
@@ -0,0 +1,106 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+    <formats>
+        <format>tar.gz</format>
+    </formats>
+    <id>bin</id>
+    <baseDirectory>falcon-${project.version}</baseDirectory>
+    <fileSets>
+        <fileSet>
+            <directory>src/conf/</directory>
+            <outputDirectory>conf</outputDirectory>
+            <excludes>
+                <exclude>prism-client.properties</exclude>
+            </excludes>
+        </fileSet>
+
+        <fileSet>
+            <directory>client/target/dependency</directory>
+            <outputDirectory>client/lib</outputDirectory>
+        </fileSet>
+
+        <fileSet>
+            <directory>docs/target/site</directory>
+            <outputDirectory>docs</outputDirectory>
+        </fileSet>
+
+        <fileSet>
+            <directory>webapp/target/apidocs</directory>
+            <outputDirectory>apidocs</outputDirectory>
+        </fileSet>
+
+        <fileSet>
+            <directory>src/bin</directory>
+            <outputDirectory>bin</outputDirectory>
+            <fileMode>0755</fileMode>
+            <directoryMode>0755</directoryMode>
+            <includes>
+                <include>falcon</include>
+                <include>falcon-config.sh</include>
+                <include>falcon-start</include>
+                <include>falcon-stop</include>
+                <include>service-start.sh</include>
+                <include>service-stop.sh</include>
+            </includes>
+        </fileSet>
+
+        <fileSet>
+            <directory>logs</directory>
+            <outputDirectory>logs</outputDirectory>
+            <directoryMode>0777</directoryMode>
+            <excludes>
+                <exclude>*</exclude>
+                <exclude>**/**</exclude>
+            </excludes>
+        </fileSet>
+
+        <fileSet>
+            <directory>./</directory>
+            <outputDirectory>/</outputDirectory>
+            <includes>
+                <include>DISCLAIMER.txt</include>
+                <include>NOTICE.txt</include>
+                <include>LICENSE.txt</include>
+                <include>CHANGES.txt</include>
+                <include>README</include>
+            </includes>
+        </fileSet>
+
+        <fileSet>
+            <directory>hadoop-dependencies/target/dependency</directory>
+            <outputDirectory>hadooplibs</outputDirectory>
+        </fileSet>
+    </fileSets>
+
+    <files>
+        <file>
+            <source>client/target/falcon-client-${project.version}.jar</source>
+            <outputDirectory>client/lib</outputDirectory>
+        </file>
+
+        <file>
+            <source>webapp/target/falcon-webapp-${project.version}.war</source>
+            <outputDirectory>server/webapp</outputDirectory>
+            <destName>falcon.war</destName>
+        </file>
+    </files>
+</assembly>
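Assuming these descriptors are bound to the maven-assembly-plugin in the build, the resulting tarball can be sanity-checked without unpacking it, for example by confirming the newly packaged falcon-config.sh made it into bin/:

    tar -tzf target/falcon-*-bin.tar.gz | grep 'bin/falcon-config.sh'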

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/17f901a6/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
----------------------------------------------------------------------
diff --git a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
index 36365c2..c443e05 100644
--- a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
+++ b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
@@ -111,22 +111,21 @@ public class EmbeddedCluster {
         interfaces.getInterfaces().add(newInterface(Interfacetype.WRITE, fsUrl, "0.1"));
         interfaces.getInterfaces().add(newInterface(Interfacetype.EXECUTE,
                 conf.get("mapred.job.tracker"), "0.1"));
-        interfaces
-                .getInterfaces()
-                .add(newInterface(
-                        Interfacetype.MESSAGING,
-                        "vm://localhost",
-                        "0.1"));
+        interfaces.getInterfaces().add(
+                newInterface(Interfacetype.REGISTRY, "thrift://localhost:49083", "0.1"));
+        interfaces.getInterfaces().add(
+                newInterface(Interfacetype.MESSAGING, "vm://localhost", "0.1"));
+
         clusterEntity.setInterfaces(interfaces);
 
         Location location = new Location();
         location.setName("staging");
-        location.setPath("/workflow/staging");
+        location.setPath("/projects/falcon/staging");
         Locations locs = new Locations();
         locs.getLocations().add(location);
         location = new Location();
         location.setName("working");
-        location.setPath("/workflow/work");
+        location.setPath("/projects/falcon/working");
         locs.getLocations().add(location);
         clusterEntity.setLocations(locs);
     }
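The new REGISTRY interface points the test cluster at an HCatalog metastore on thrift://localhost:49083. A hypothetical smoke check that the endpoint is listening before tests run (not part of the commit):

    nc -z localhost 49083 && echo "registry endpoint up" || echo "registry endpoint down"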

