falcon-commits mailing list archives

From b...@apache.org
Subject falcon git commit: FALCON-1645 Ability to export to database. Contributed by Venkatesan Ramachandran.
Date Tue, 22 Dec 2015 02:49:16 GMT
Repository: falcon
Updated Branches:
  refs/heads/master 4ba652f54 -> 88bbe14ca


  FALCON-1645 Ability to export to database. Contributed by Venkatesan Ramachandran.


Project: http://git-wip-us.apache.org/repos/asf/falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/falcon/commit/88bbe14c
Tree: http://git-wip-us.apache.org/repos/asf/falcon/tree/88bbe14c
Diff: http://git-wip-us.apache.org/repos/asf/falcon/diff/88bbe14c

Branch: refs/heads/master
Commit: 88bbe14cae68c725ffe74cbb6d3550f82bd4c891
Parents: 4ba652f
Author: bvellanki <bvellanki@hortonworks.com>
Authored: Mon Dec 21 18:49:07 2015 -0800
Committer: bvellanki <bvellanki@hortonworks.com>
Committed: Mon Dec 21 18:49:07 2015 -0800

----------------------------------------------------------------------
 CHANGES.txt                                     |   2 +
 .../main/java/org/apache/falcon/LifeCycle.java  |   3 +-
 client/src/main/java/org/apache/falcon/Tag.java |   3 +-
 client/src/main/resources/feed-0.1.xsd          |  42 +++-
 .../apache/falcon/entity/DatasourceHelper.java  |  15 +-
 .../org/apache/falcon/entity/EntityUtil.java    |  14 ++
 .../org/apache/falcon/entity/FeedHelper.java    | 149 ++++++++++++--
 .../falcon/entity/parser/FeedEntityParser.java  |  44 ++++-
 .../workflow/WorkflowExecutionContext.java      |   2 +-
 .../apache/falcon/entity/FeedHelperTest.java    |  10 +-
 .../entity/parser/FeedEntityParserTest.java     |  27 +++
 .../falcon/entity/v0/EntityGraphTest.java       |   3 +-
 .../resources/config/feed/feed-export-0.1.xml   |  66 +++++++
 .../feed/feed-export-exclude-fields-0.1.xml     |  66 +++++++
 .../resources/config/feed/feed-import-0.1.xml   |   5 -
 .../oozie/DatabaseExportWorkflowBuilder.java    | 186 ++++++++++++++++++
 .../oozie/DatabaseImportWorkflowBuilder.java    |   7 +-
 .../falcon/oozie/ExportWorkflowBuilder.java     |  85 ++++++++
 .../oozie/FeedExportCoordinatorBuilder.java     | 193 +++++++++++++++++++
 .../oozie/FeedImportCoordinatorBuilder.java     |   1 +
 .../falcon/oozie/OozieCoordinatorBuilder.java   |   3 +
 .../OozieOrchestrationWorkflowBuilder.java      |  11 ++
 .../falcon/oozie/feed/FeedBundleBuilder.java    |   5 +
 .../feed/export-sqoop-database-action.xml       |  47 +++++
 24 files changed, 937 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 2673669..3ab92d4 100755
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -9,6 +9,8 @@ Trunk (Unreleased)
   INCOMPATIBLE CHANGES
 
   NEW FEATURES
+    FALCON-1645 Ability to export to database (Venkat Ramachandran via Balu Vellanki)
+
     FALCON-1639 Implement update feature for native scheduler (Pallavi Rao)
 
     FALCON-1636 Add Rerun API In Falcon Native Scheduler(Pavan Kumar Kolamuri via Ajay Yadava)

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/client/src/main/java/org/apache/falcon/LifeCycle.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/LifeCycle.java b/client/src/main/java/org/apache/falcon/LifeCycle.java
index d4d39e8..0ecddd1 100644
--- a/client/src/main/java/org/apache/falcon/LifeCycle.java
+++ b/client/src/main/java/org/apache/falcon/LifeCycle.java
@@ -26,7 +26,8 @@ public enum LifeCycle {
     EXECUTION(Tag.DEFAULT),
     EVICTION(Tag.RETENTION),
     REPLICATION(Tag.REPLICATION),
-    IMPORT(Tag.IMPORT);
+    IMPORT(Tag.IMPORT),
+    EXPORT(Tag.EXPORT);
 
     private final Tag tag;
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/client/src/main/java/org/apache/falcon/Tag.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/Tag.java b/client/src/main/java/org/apache/falcon/Tag.java
index 5027ac0..a8d60b6 100644
--- a/client/src/main/java/org/apache/falcon/Tag.java
+++ b/client/src/main/java/org/apache/falcon/Tag.java
@@ -24,7 +24,8 @@ import org.apache.falcon.entity.v0.EntityType;
  * Tag to include in the entity type.
  */
 public enum Tag {
-    DEFAULT(EntityType.PROCESS), RETENTION(EntityType.FEED), REPLICATION(EntityType.FEED), IMPORT(EntityType.FEED);
+    DEFAULT(EntityType.PROCESS), RETENTION(EntityType.FEED), REPLICATION(EntityType.FEED),
+        IMPORT(EntityType.FEED), EXPORT(EntityType.FEED);
 
     private final EntityType entityType;
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/client/src/main/resources/feed-0.1.xsd
----------------------------------------------------------------------
diff --git a/client/src/main/resources/feed-0.1.xsd b/client/src/main/resources/feed-0.1.xsd
index 2974dd6..92a3efc 100644
--- a/client/src/main/resources/feed-0.1.xsd
+++ b/client/src/main/resources/feed-0.1.xsd
@@ -157,6 +157,7 @@
             <xs:element type="retention" name="retention"/>
             <xs:element type="sla" name="sla" minOccurs="0" maxOccurs="1"/>
             <xs:element type="import" name="import" minOccurs="0" maxOccurs="1"/>
+            <xs:element type="export" name="export" minOccurs="0" maxOccurs="1"/>
             <xs:choice minOccurs="0" maxOccurs="1">
                 <xs:element type="locations" name="locations" minOccurs="0"/>
                 <xs:element type="catalog-table" name="table"/>
@@ -450,27 +451,44 @@
     </xs:simpleType>
     <xs:complexType name="import">
        <xs:sequence>
-            <xs:element type="source" name="source"/>
+            <xs:element type="datasource" name="source"/>
             <xs:element type="arguments" name="arguments" minOccurs="0"/>
         </xs:sequence>
     </xs:complexType>
-    <xs:complexType name="source">
+    <xs:complexType name="export">
+        <xs:sequence>
+            <xs:element type="datasource" name="target"/>
+            <xs:element type="arguments" name="arguments" minOccurs="0"/>
+        </xs:sequence>
+    </xs:complexType>
+    <xs:complexType name="datasource">
         <xs:annotation>
             <xs:documentation>
-                Specifies the source entity name from which data will be imported.
-                This can be Database or other data source types in the future.
-                Table name specifies the table to import.
+                Specifies the datasource entity name from which data is imported,
+                or to which data is exported. This can be a Database or other data
+                source types in the future. The connection and authentication details
+                of the data source are defined in the Datasource entity.
+                Table name specifies the table to import or export, depending on the action.
                Extract type specifies an extraction method (full or incremental).
                DeltaColumn specifies the column name on the source database
                to identify the new data since the last extraction.
                Merge type specifies how the data will be organized on Hadoop.
                The supported types are snapshot (as in a particular time) or append
                (as in timeseries partitions).
+                Load type specifies whether new rows are inserted (load type=allowinsert) into
+                the database table or updated (load type=updateonly). If the updateonly load
+                type is specified, the update columns need to be passed via the arguments.
+                Fields can be specified as an include or an exclude list. If an exclude list
+                is specified, all columns except the ones listed will be imported or exported.
+                If an include list is specified, only the listed columns are imported or exported.
             </xs:documentation>
         </xs:annotation>
        <xs:sequence>
-            <xs:element type="extract" name="extract" minOccurs="1"/>
-            <xs:element type="fields-type" name="fields" minOccurs="0"/>
+           <xs:choice minOccurs="1" maxOccurs="1">
+               <xs:element type="extract" name="extract"/>
+               <xs:element type="load" name="load"/>
+           </xs:choice>
+           <xs:element type="fields-type" name="fields" minOccurs="0" maxOccurs="1"/>
         </xs:sequence>
         <xs:attribute type="non-empty-string" name="name" use="required"/>
         <xs:attribute type="non-empty-string" name="tableName" use="required"/>
@@ -482,12 +500,22 @@
         </xs:sequence>
         <xs:attribute type="extract-method" name="type" use="required"/>
     </xs:complexType>
+    <xs:complexType name="load">
+        <xs:attribute type="load-method" name="type" use="required"/>
+    </xs:complexType>
     <xs:simpleType name="extract-method">
         <xs:restriction base="xs:string">
             <xs:enumeration value="full"/>
             <xs:enumeration value="incremental"/>
         </xs:restriction>
     </xs:simpleType>
+    <xs:simpleType name="load-method">
+        <xs:restriction base="xs:string">
+            <xs:enumeration value="updateonly"/>
+            <xs:enumeration value="allowinsert"/>
+        </xs:restriction>
+    </xs:simpleType>
+
     <xs:simpleType name="merge-type">
         <xs:restriction base="xs:string">
             <xs:enumeration value="snapshot"/>

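For illustration, the new export policy can also be constructed programmatically
through the JAXB-generated feed model classes. This is a minimal sketch, not part
of the commit: the setter names (setLoad, setTarget, setExport) are inferred from
the getters used elsewhere in this patch and ordinary JAXB conventions, the enum
constant ALLOWINSERT is inferred from the "allowinsert" schema value, and
feedCluster stands for an org.apache.falcon.entity.v0.feed.Cluster in scope.

    import org.apache.falcon.entity.v0.feed.Datasource;
    import org.apache.falcon.entity.v0.feed.Export;
    import org.apache.falcon.entity.v0.feed.Load;
    import org.apache.falcon.entity.v0.feed.LoadMethod;

    // Equivalent of:
    //   <export>
    //     <target name="test-hsql-db" tableName="customer">
    //       <load type="updateonly"/>
    //     </target>
    //   </export>
    Load load = new Load();
    load.setType(LoadMethod.UPDATEONLY);     // or LoadMethod.ALLOWINSERT (assumed constant)

    Datasource target = new Datasource();    // the <target> element
    target.setName("test-hsql-db");          // must name an existing Datasource entity
    target.setTableName("customer");
    target.setLoad(load);

    Export export = new Export();
    export.setTarget(target);
    feedCluster.setExport(export);           // attach to the feed's cluster (assumed setter)
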
http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/main/java/org/apache/falcon/entity/DatasourceHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/DatasourceHelper.java b/common/src/main/java/org/apache/falcon/entity/DatasourceHelper.java
index f9b3966..1f1a193 100644
--- a/common/src/main/java/org/apache/falcon/entity/DatasourceHelper.java
+++ b/common/src/main/java/org/apache/falcon/entity/DatasourceHelper.java
@@ -30,7 +30,6 @@ import org.apache.falcon.entity.v0.datasource.DatasourceType;
 import org.apache.falcon.entity.v0.datasource.Interface;
 import org.apache.falcon.entity.v0.datasource.Interfaces;
 import org.apache.falcon.entity.v0.datasource.Interfacetype;
-import org.apache.falcon.entity.v0.feed.Cluster;
 import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -51,18 +50,18 @@ public final class DatasourceHelper {
 
     private static final ConfigurationStore STORE = ConfigurationStore.get();
 
-    public static DatasourceType getImportSourceType(Cluster feedCluster) throws FalconException {
-        Datasource ds = STORE.get(EntityType.DATASOURCE, feedCluster.getImport().getSource().getName());
-        return ds.getType();
+    public static DatasourceType getDatasourceType(String datasourceName) throws FalconException {
+        return getDatasource(datasourceName).getType();
     }
 
     private DatasourceHelper() {}
 
-    public static Datasource getDatasource(Cluster feedCluster) throws FalconException {
-        return STORE.get(EntityType.DATASOURCE, feedCluster.getImport().getSource().getName());
+    public static Datasource getDatasource(String datasourceName) throws FalconException {
+        return STORE.get(EntityType.DATASOURCE, datasourceName);
     }
-    public static String getReadOnlyEndpoint(Datasource db) {
-        return getInterface(db, Interfacetype.READONLY);
+
+    public static String getReadOnlyEndpoint(Datasource datasource) {
+        return getInterface(datasource, Interfacetype.READONLY);
     }
 
     /**

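A short usage sketch of the refactored helper (not part of the commit): both the
import and export code paths can now resolve a Datasource entity by name instead
of reaching through the feed cluster's import policy. The datasource name
"test-hsql-db" below is a hypothetical value taken from a feed's <source> or
<target> element.

    Datasource ds = DatasourceHelper.getDatasource("test-hsql-db");
    DatasourceType type = DatasourceHelper.getDatasourceType("test-hsql-db");
    String jdbcUrl = DatasourceHelper.getReadOnlyEndpoint(ds);  // READONLY interface endpoint
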
http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/main/java/org/apache/falcon/entity/EntityUtil.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/EntityUtil.java b/common/src/main/java/org/apache/falcon/entity/EntityUtil.java
index 66dba6f..f448d70 100644
--- a/common/src/main/java/org/apache/falcon/entity/EntityUtil.java
+++ b/common/src/main/java/org/apache/falcon/entity/EntityUtil.java
@@ -1016,6 +1016,20 @@ public final class EntityUtil {
         return FeedHelper.getImportDatasourceType(cluster, feed);
     }
 
+    /**
+     * Returns Data Source Type given a feed with Export policy.
+     *
+     * @param cluster
+     * @param feed
+     * @return
+     * @throws FalconException
+     */
+
+    public static DatasourceType getExportDatasourceType(
+            Cluster cluster, Feed feed) throws FalconException {
+        return FeedHelper.getExportDatasourceType(cluster, feed);
+    }
+
     public static EntityNotification getEntityNotification(Entity entity) {
         switch (entity.getEntityType()) {
         case FEED:

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/main/java/org/apache/falcon/entity/FeedHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/FeedHelper.java b/common/src/main/java/org/apache/falcon/entity/FeedHelper.java
index 18d5152..150e0bd 100644
--- a/common/src/main/java/org/apache/falcon/entity/FeedHelper.java
+++ b/common/src/main/java/org/apache/falcon/entity/FeedHelper.java
@@ -27,16 +27,17 @@ import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.Frequency;
 import org.apache.falcon.entity.v0.datasource.DatasourceType;
-import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.feed.CatalogTable;
 import org.apache.falcon.entity.v0.feed.Cluster;
 import org.apache.falcon.entity.v0.feed.ClusterType;
 import org.apache.falcon.entity.v0.feed.ExtractMethod;
+import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.feed.FieldIncludeExclude;
 import org.apache.falcon.entity.v0.feed.Lifecycle;
+import org.apache.falcon.entity.v0.feed.Load;
 import org.apache.falcon.entity.v0.feed.Location;
-import org.apache.falcon.entity.v0.feed.Locations;
 import org.apache.falcon.entity.v0.feed.LocationType;
+import org.apache.falcon.entity.v0.feed.Locations;
 import org.apache.falcon.entity.v0.feed.MergeType;
 import org.apache.falcon.entity.v0.feed.Property;
 import org.apache.falcon.entity.v0.feed.RetentionStage;
@@ -817,7 +818,11 @@ public final class FeedHelper {
             org.apache.falcon.entity.v0.cluster.Cluster clusterEntity,
             Feed feed) throws FalconException {
         Cluster feedCluster = getCluster(feed, clusterEntity.getName());
-        return DatasourceHelper.getImportSourceType(feedCluster);
+        if (isImportEnabled(feedCluster)) {
+            return DatasourceHelper.getDatasourceType(getImportDatasourceName(feedCluster));
+        } else {
+            return null;
+        }
     }
 
     /**
@@ -834,6 +839,8 @@ public final class FeedHelper {
         return false;
     }
 
+
+
     /**
      * Returns the data source name associated with the Feed's import policy.
      *
@@ -848,6 +855,8 @@ public final class FeedHelper {
         }
     }
 
+
+
     /**
      * Returns Datasource table name.
      *
@@ -863,6 +872,8 @@ public final class FeedHelper {
         }
     }
 
+
+
     /**
      * Returns the extract method type.
      *
@@ -878,6 +889,8 @@ public final class FeedHelper {
         }
     }
 
+
+
     /**
      * Returns the merge type of the Feed import policy.
      *
@@ -893,22 +906,16 @@ public final class FeedHelper {
     }
 
     /**
-     * Returns the initial instance date for the import data set or coorinator.
-     *
-     * For snapshot merge type, a latest time will be used since the source data is dumped in whole.
-     * For incremental merge type, start date specified in the cluster validity will be used.
+     * Returns the initial instance date for the import data set for the coordinator.
      *
      * @param feedCluster
      * @return Feed cluster validity start date or recent time
      */
     public static Date getImportInitalInstance(org.apache.falcon.entity.v0.feed.Cluster feedCluster) {
-        Date initialInstance = new Date();
-        if (!FeedHelper.isSnapshotMergeType(feedCluster)) {
-            initialInstance = feedCluster.getValidity().getStart();
-        }
-        return initialInstance;
+        return feedCluster.getValidity().getStart();
     }
 
+
     /**
      * Helper method to check if the merge type is snapshot.
      *
@@ -941,6 +948,9 @@ public final class FeedHelper {
         return argsMap;
     }
 
+
+
+
     /**
      * Returns Fields list specified in the Import Policy.
      *
@@ -948,7 +958,7 @@ public final class FeedHelper {
      * @return List of String
      * @throws FalconException
      */
-    public static List<String> getFieldList(org.apache.falcon.entity.v0.feed.Cluster feedCluster)
+    public static List<String> getImportFieldList(org.apache.falcon.entity.v0.feed.Cluster feedCluster)
         throws FalconException {
         if (feedCluster.getImport().getSource().getFields() == null) {
             return null;
@@ -965,15 +975,15 @@ public final class FeedHelper {
     /**
      * Returns true if exclude field lists are used. This is a TBD feature.
      *
-     * @param feedCluster
+     * @param ds Feed Datasource
     * @return true if the exclude field list is used, false otherwise.
      * @throws FalconException
      */
 
-    public static boolean isFieldExcludes(org.apache.falcon.entity.v0.feed.Cluster feedCluster)
+    public static boolean isFieldExcludes(org.apache.falcon.entity.v0.feed.Datasource ds)
         throws FalconException {
-        if (feedCluster.getImport().getSource().getFields() != null) {
-            org.apache.falcon.entity.v0.feed.FieldsType fieldType = feedCluster.getImport().getSource().getFields();
+        if (ds.getFields() != null) {
+            org.apache.falcon.entity.v0.feed.FieldsType fieldType = ds.getFields();
            FieldIncludeExclude excludeFields = fieldType.getExcludes();
            if ((excludeFields != null) && (excludeFields.getFields().size() > 0)) {
                 return true;
@@ -1013,6 +1023,111 @@ public final class FeedHelper {
         return  retentionFrequency;
     }
 
+    /**
+     * Returns the data source type associated with the Feed's export policy.
+     *
+     * @param clusterEntity
+     * @param feed
+     * @return {@link org.apache.falcon.entity.v0.datasource.DatasourceType}
+     * @throws FalconException
+     */
+    public static DatasourceType getExportDatasourceType(
+            org.apache.falcon.entity.v0.cluster.Cluster clusterEntity,
+            Feed feed) throws FalconException {
+        Cluster feedCluster = getCluster(feed, clusterEntity.getName());
+        if (isExportEnabled(feedCluster)) {
+            return DatasourceHelper.getDatasourceType(getExportDatasourceName(feedCluster));
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Return if Export policy is enabled in the Feed definition.
+     *
+     * @param feedCluster
+     * @return true if export policy is enabled else false
+     */
+
+    public static boolean isExportEnabled(org.apache.falcon.entity.v0.feed.Cluster feedCluster) {
+        return (feedCluster.getExport() != null);
+    }
+
+    /**
+     * Returns the data source name associated with the Feed's export policy.
+     *
+     * @param feedCluster
+     * @return DataSource name defined in the Datasource Entity
+     */
+    public static String getExportDatasourceName(org.apache.falcon.entity.v0.feed.Cluster feedCluster) {
+        if (isExportEnabled(feedCluster)) {
+            return feedCluster.getExport().getTarget().getName();
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Returns Datasource table name.
+     *
+     * @param feedCluster
+     * @return Table or Topic name of the Datasource
+     */
+
+    public static String getExportDataSourceTableName(org.apache.falcon.entity.v0.feed.Cluster feedCluster) {
+        if (isExportEnabled(feedCluster)) {
+            return feedCluster.getExport().getTarget().getTableName();
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Returns the export load type.
+     *
+     * @param feedCluster
+     * @return {@link org.apache.falcon.entity.v0.feed.Load}
+     */
+
+    public static Load getExportLoadMethod(org.apache.falcon.entity.v0.feed.Cluster feedCluster) {
+        if (isExportEnabled(feedCluster)) {
+            return feedCluster.getExport().getTarget().getLoad();
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Returns the initial instance date for the export data set for the coordinator.
+     *
+     * @param feedCluster
+     * @return Feed cluster validity start date or recent time
+     */
+    public static Date getExportInitalInstance(org.apache.falcon.entity.v0.feed.Cluster feedCluster) {
+        return feedCluster.getValidity().getStart();
+    }
+
+    /**
+     * Returns extra arguments specified in the Feed export policy.
+     *
+     * @param feedCluster
+     * @return
+     * @throws FalconException
+     */
+    public static Map<String, String> getExportArguments(org.apache.falcon.entity.v0.feed.Cluster feedCluster)
+        throws FalconException {
+
+        Map<String, String> argsMap = new HashMap<String, String>();
+        if (feedCluster.getExport().getArguments() == null) {
+            return argsMap;
+        }
+
+        for(org.apache.falcon.entity.v0.feed.Argument p : feedCluster.getExport().getArguments().getArguments()) {
+            argsMap.put(p.getName().toLowerCase(), p.getValue());
+        }
+        return argsMap;
+    }
+
     public static Frequency getOldRetentionFrequency(Feed feed) {
         Frequency feedFrequency = feed.getFrequency();
         Frequency defaultFrequency = new Frequency("hours(24)");

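Taken together, the new helpers give workflow builders a uniform view of a feed's
export policy. A minimal sketch (not part of the commit; the feed and cluster
names refer to the CustomerFeed test resources added below):

    org.apache.falcon.entity.v0.feed.Cluster feedCluster =
            FeedHelper.getCluster(feed, "testCluster");
    if (FeedHelper.isExportEnabled(feedCluster)) {
        String dsName = FeedHelper.getExportDatasourceName(feedCluster);       // "test-hsql-db"
        String table  = FeedHelper.getExportDataSourceTableName(feedCluster);  // "customer"
        Load load     = FeedHelper.getExportLoadMethod(feedCluster);           // updateonly or allowinsert
        Date start    = FeedHelper.getExportInitalInstance(feedCluster);       // cluster validity start
        Map<String, String> args = FeedHelper.getExportArguments(feedCluster); // e.g. --num-mappers
    }
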
http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
index 0b48e66..981e730 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
@@ -58,9 +58,10 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.HashSet;
 import java.util.Set;
 import java.util.TimeZone;
 
@@ -105,6 +106,11 @@ public class FeedEntityParser extends EntityParser<Feed> {
                 validateFeedImportArgs(cluster);
                 validateFeedImportFieldExcludes(cluster);
             }
+            if (FeedHelper.isExportEnabled(cluster)) {
+                validateEntityExists(EntityType.DATASOURCE, FeedHelper.getExportDatasourceName(cluster));
+                validateFeedExportArgs(cluster);
+                validateFeedExportFieldExcludes(cluster);
+            }
         }
 
         validateFeedStorage(feed);
@@ -597,6 +603,14 @@ public class FeedEntityParser extends EntityParser<Feed> {
      */
     private void validateFeedImportArgs(Cluster feedCluster) throws FalconException {
         Map<String, String> args = FeedHelper.getImportArguments(feedCluster);
+        validateSqoopArgs(args);
+    }
+
+    /**
+     * Validate sqoop arguments.
+     * @param args Map<String, String> arguments
+     */
+    private void validateSqoopArgs(Map<String, String> args) throws FalconException {
         int numMappers = 1;
         if (args.containsKey("--num-mappers")) {
             numMappers = Integer.parseInt(args.get("--num-mappers"));
@@ -608,7 +622,33 @@ public class FeedEntityParser extends EntityParser<Feed> {
     }
 
     private void validateFeedImportFieldExcludes(Cluster feedCluster) throws FalconException {
-        if (FeedHelper.isFieldExcludes(feedCluster)) {
+        if (FeedHelper.isFieldExcludes(feedCluster.getImport().getSource())) {
+            throw new ValidationException(String.format("Field excludes are not supported "
+                    + "currently in Feed import policy"));
+        }
+    }
+
+    /**
+     * Validate export arguments.
+     * @param feedCluster Cluster referenced in the feed
+     */
+    private void validateFeedExportArgs(Cluster feedCluster) throws FalconException {
+        Map<String, String> args = FeedHelper.getExportArguments(feedCluster);
+        Map<String, String> validArgs = new HashMap<String, String>();
+        validArgs.put("--num-mappers", "");
+        validArgs.put("--update-key" , "");
+        validArgs.put("--input-null-string", "");
+        validArgs.put("--input-null-non-string", "");
+
+        for(Map.Entry<String, String> e : args.entrySet()) {
+            if (!validArgs.containsKey(e.getKey())) {
+                throw new ValidationException(String.format("Feed export argument %s is invalid.", e.getKey()));
+            }
+        }
+    }
+
+    private void validateFeedExportFieldExcludes(Cluster feedCluster) throws FalconException {
+        if (FeedHelper.isFieldExcludes(feedCluster.getExport().getTarget())) {
             throw new ValidationException(String.format("Field excludes are not supported "
                     + "currently in Feed import policy"));
         }

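For clarity, validateFeedExportArgs above accepts exactly four sqoop flags:
--num-mappers, --update-key, --input-null-string and --input-null-non-string.
A sketch of what the validator rejects, mirroring the new parser test below
(feed and parser are assumed to be in scope):

    Argument bad = new Argument();
    bad.setName("--split-by");   // not in the valid-argument list above
    bad.setValue("id");
    feed.getClusters().getClusters().get(0)
            .getExport().getArguments().getArguments().add(bad);
    parser.validate(feed);       // throws ValidationException
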
http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/main/java/org/apache/falcon/workflow/WorkflowExecutionContext.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/workflow/WorkflowExecutionContext.java b/common/src/main/java/org/apache/falcon/workflow/WorkflowExecutionContext.java
index f206ff1..e51ad28 100644
--- a/common/src/main/java/org/apache/falcon/workflow/WorkflowExecutionContext.java
+++ b/common/src/main/java/org/apache/falcon/workflow/WorkflowExecutionContext.java
@@ -75,7 +75,7 @@ public class WorkflowExecutionContext {
      * Entity operations supported.
      */
     public enum EntityOperations {
-        GENERATE, DELETE, ARCHIVE, REPLICATE, CHMOD, IMPORT
+        GENERATE, DELETE, ARCHIVE, REPLICATE, CHMOD, IMPORT, EXPORT
     }
 
     public static final WorkflowExecutionArgs[] USER_MESSAGE_ARGS = {

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/test/java/org/apache/falcon/entity/FeedHelperTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/FeedHelperTest.java b/common/src/test/java/org/apache/falcon/entity/FeedHelperTest.java
index 98cdf6b..9841083 100644
--- a/common/src/test/java/org/apache/falcon/entity/FeedHelperTest.java
+++ b/common/src/test/java/org/apache/falcon/entity/FeedHelperTest.java
@@ -44,7 +44,7 @@ import org.apache.falcon.entity.v0.feed.LocationType;
 import org.apache.falcon.entity.v0.feed.Locations;
 import org.apache.falcon.entity.v0.feed.MergeType;
 import org.apache.falcon.entity.v0.feed.RetentionStage;
-import org.apache.falcon.entity.v0.feed.Source;
+import org.apache.falcon.entity.v0.feed.Datasource;
 import org.apache.falcon.entity.v0.feed.Validity;
 import org.apache.falcon.entity.v0.process.Input;
 import org.apache.falcon.entity.v0.process.Inputs;
@@ -906,7 +906,7 @@ public class FeedHelperTest extends AbstractTestBase {
         Assert.assertEquals(getDate("2012-02-07 00:00 UTC"), feedCluster.getValidity().getStart());
         Assert.assertTrue(FeedHelper.isImportEnabled(feedCluster));
         Assert.assertEquals(MergeType.SNAPSHOT, FeedHelper.getImportMergeType(feedCluster));
-        Assert.assertNotEquals(startInstResult, feedCluster.getValidity().getStart());
+        Assert.assertEquals(startInstResult, feedCluster.getValidity().getStart());
     }
 
     @Test
@@ -915,9 +915,9 @@ public class FeedHelperTest extends AbstractTestBase {
         Feed feed = importFeedSnapshot(cluster, "hours(1)", "2012-02-07 00:00 UTC", "2020-02-25 00:00 UTC");
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName());
         Date startInstResult = FeedHelper.getImportInitalInstance(feedCluster);
-        List<String> fieldList = FeedHelper.getFieldList(feedCluster);
+        List<String> fieldList = FeedHelper.getImportFieldList(feedCluster);
         Assert.assertEquals(2, fieldList.size());
-        Assert.assertFalse(FeedHelper.isFieldExcludes(feedCluster));
+        Assert.assertFalse(FeedHelper.isFieldExcludes(feedCluster.getImport().getSource()));
     }
 
     @Test
@@ -1028,7 +1028,7 @@ public class FeedHelperTest extends AbstractTestBase {
         FieldsType fields = new FieldsType();
         fields.setIncludes(fieldInclude);
 
-        Source source = new Source();
+        Datasource source = new Datasource();
         source.setName("test-db");
         source.setTableName("test-table");
         source.setExtract(extract);

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/test/java/org/apache/falcon/entity/parser/FeedEntityParserTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/parser/FeedEntityParserTest.java b/common/src/test/java/org/apache/falcon/entity/parser/FeedEntityParserTest.java
index a583f52..83e2550 100644
--- a/common/src/test/java/org/apache/falcon/entity/parser/FeedEntityParserTest.java
+++ b/common/src/test/java/org/apache/falcon/entity/parser/FeedEntityParserTest.java
@@ -1208,4 +1208,31 @@ public class FeedEntityParserTest extends AbstractTestBase {
         feed.getClusters().getClusters().get(0).getValidity().setEnd(null);
         parser.validate(feed);
     }
+
+    @Test (expectedExceptions = ValidationException.class)
+    public void testExportFeedSqoopExcludeFields() throws Exception {
+
+        storeEntity(EntityType.CLUSTER, "testCluster");
+        InputStream feedStream = this.getClass().getResourceAsStream("/config/feed/feed-export-exclude-fields-0.1.xml");
+        Feed feed = parser.parseAndValidate(feedStream);
+        Assert.fail("An exception should have been thrown: Feed Export policy not yet implement Field exclusion.");
+    }
+
+    @Test (expectedExceptions = ValidationException.class)
+    public void testExportFeedSqoopArgsNumMapper() throws Exception {
+        final InputStream inputStream = this.getClass().getResourceAsStream("/config/feed/feed-export-0.1.xml");
+        Feed exportFeed = parser.parse(inputStream);
+
+        org.apache.falcon.entity.v0.feed.Arguments args =
+                exportFeed.getClusters().getClusters().get(0).getExport().getArguments();
+        Argument splitByArg = new Argument();
+        splitByArg.setName("--split-by");
+        splitByArg.setValue("id");
+
+        args.getArguments().clear();
+        args.getArguments().add(splitByArg);
+
+        parser.validate(exportFeed);
+        Assert.fail("An exception should have been thrown: Feed export should specify --split-by");
+    }
 }

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/test/java/org/apache/falcon/entity/v0/EntityGraphTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/v0/EntityGraphTest.java b/common/src/test/java/org/apache/falcon/entity/v0/EntityGraphTest.java
index f49362f..23f69d7 100644
--- a/common/src/test/java/org/apache/falcon/entity/v0/EntityGraphTest.java
+++ b/common/src/test/java/org/apache/falcon/entity/v0/EntityGraphTest.java
@@ -31,7 +31,6 @@ import org.apache.falcon.entity.v0.feed.FieldsType;
 import org.apache.falcon.entity.v0.feed.FieldIncludeExclude;
 import org.apache.falcon.entity.v0.feed.Import;
 import org.apache.falcon.entity.v0.feed.MergeType;
-import org.apache.falcon.entity.v0.feed.Source;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.datasource.Datasource;
 import org.apache.falcon.entity.v0.process.Input;
@@ -162,7 +161,7 @@ public class EntityGraphTest extends AbstractTestBase {
         fieldInclude.getFields().add("name");
         fields.setIncludes(fieldInclude);
 
-        Source source = new Source();
+        org.apache.falcon.entity.v0.feed.Datasource source = new org.apache.falcon.entity.v0.feed.Datasource();
         source.setName(ds.getName());
         source.setTableName("test-table");
         source.setExtract(extract);

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/test/resources/config/feed/feed-export-0.1.xml
----------------------------------------------------------------------
diff --git a/common/src/test/resources/config/feed/feed-export-0.1.xml b/common/src/test/resources/config/feed/feed-export-0.1.xml
new file mode 100644
index 0000000..d92ee17
--- /dev/null
+++ b/common/src/test/resources/config/feed/feed-export-0.1.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+<feed description="Customer data" name="CustomerFeed" xmlns="uri:falcon:feed:0.1">
+    <tags>consumer=consumer@xyz.com, owner=producer@xyz.com, _department_type=forecasting</tags>
+    <partitions>
+        <partition name="fraud"/>
+        <partition name="good"/>
+    </partitions>
+
+    <groups>online,bi</groups>
+    <availabilityFlag>_SUCCESS</availabilityFlag>
+
+    <frequency>hours(1)</frequency>
+    <sla slaLow="hours(2)" slaHigh="hours(3)"/>
+    <timezone>UTC</timezone>
+
+    <late-arrival cut-off="hours(6)"/>
+
+    <clusters>
+        <cluster name="testCluster" type="source">
+            <validity start="2011-11-01T00:00Z" end="2011-12-31T00:00Z"/>
+            <retention limit="hours(48)" action="delete"/>
+            <!-- Limit can be in Time or Instances 100, Action ENUM DELETE,ARCHIVE -->
+            <sla slaLow="hours(3)" slaHigh="hours(4)"/>
+            <export>
+                <target name="test-hsql-db" tableName="customer">
+                    <load type="updateonly"/>
+                    <fields>
+                        <excludes>
+                            <field>id</field>
+                            <field>name</field>
+                        </excludes>
+                    </fields>
+                </target>
+                <arguments>
+                    <argument name="--num-mappers" value="2"/>
+                </arguments>
+            </export>
+        </cluster>
+    </clusters>
+
+    <locations>
+        <location type="data" path="/projects/falcon/clicks"/>
+        <location type="stats" path="/projects/falcon/clicksStats"/>
+        <location type="meta" path="/projects/falcon/clicksMetaData"/>
+    </locations>
+
+    <ACL owner="testuser" group="group" permission="0x755"/>
+    <schema location="/schema/clicks" provider="protobuf"/>
+</feed>

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/test/resources/config/feed/feed-export-exclude-fields-0.1.xml
----------------------------------------------------------------------
diff --git a/common/src/test/resources/config/feed/feed-export-exclude-fields-0.1.xml b/common/src/test/resources/config/feed/feed-export-exclude-fields-0.1.xml
new file mode 100644
index 0000000..6753a00
--- /dev/null
+++ b/common/src/test/resources/config/feed/feed-export-exclude-fields-0.1.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+<feed description="Customer data" name="CustomerFeed" xmlns="uri:falcon:feed:0.1">
+    <tags>consumer=consumer@xyz.com, owner=producer@xyz.com, _department_type=forecasting</tags>
+    <partitions>
+        <partition name="fraud"/>
+        <partition name="good"/>
+    </partitions>
+
+    <groups>online,bi</groups>
+    <availabilityFlag>_SUCCESS</availabilityFlag>
+
+    <frequency>hours(1)</frequency>
+    <sla slaLow="hours(2)" slaHigh="hours(3)"/>
+    <timezone>UTC</timezone>
+
+    <late-arrival cut-off="hours(6)"/>
+
+    <clusters>
+        <cluster name="testCluster" type="source">
+            <validity start="2011-11-01T00:00Z" end="2011-12-31T00:00Z"/>
+            <retention limit="hours(48)" action="delete"/>
+            <!-- Limit can be in Time or Instances 100, Action ENUM DELETE,ARCHIVE -->
+            <sla slaLow="hours(3)" slaHigh="hours(4)"/>
+            <export>
+                <target name="test-hsql-db" tableName="customer">
+                    <load type="updateonly"/>
+                    <fields>
+                        <excludes>
+                            <field>id</field>
+                            <field>name</field>
+                        </excludes>
+                    </fields>
+                </target>
+                <arguments>
+                    <argument name="--update-key" value="id"/>
+                </arguments>
+            </export>
+        </cluster>
+    </clusters>
+
+    <locations>
+        <location type="data" path="/projects/falcon/clicks"/>
+        <location type="stats" path="/projects/falcon/clicksStats"/>
+        <location type="meta" path="/projects/falcon/clicksMetaData"/>
+    </locations>
+
+    <ACL owner="testuser" group="group" permission="0x755"/>
+    <schema location="/schema/clicks" provider="protobuf"/>
+</feed>

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/common/src/test/resources/config/feed/feed-import-0.1.xml
----------------------------------------------------------------------
diff --git a/common/src/test/resources/config/feed/feed-import-0.1.xml b/common/src/test/resources/config/feed/feed-import-0.1.xml
index 798d6b0..69f7ede 100644
--- a/common/src/test/resources/config/feed/feed-import-0.1.xml
+++ b/common/src/test/resources/config/feed/feed-import-0.1.xml
@@ -55,11 +55,6 @@
                     <argument name="--num-mappers" value="2"/>
                 </arguments>
             </import>
-            <locations>
-                <location type="data" path="/projects/falcon/clicks"/>
-                <location type="stats" path="/projects/falcon/clicksStats"/>
-                <location type="meta" path="/projects/falcon/clicksMetaData"/>
-            </locations>
         </cluster>
     </clusters>
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/oozie/src/main/java/org/apache/falcon/oozie/DatabaseExportWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/DatabaseExportWorkflowBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/DatabaseExportWorkflowBuilder.java
new file mode 100644
index 0000000..f1fb337
--- /dev/null
+++ b/oozie/src/main/java/org/apache/falcon/oozie/DatabaseExportWorkflowBuilder.java
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.oozie;
+
+import org.apache.falcon.FalconException;
+import org.apache.falcon.Pair;
+import org.apache.falcon.Tag;
+import org.apache.falcon.entity.DatasourceHelper;
+import org.apache.falcon.entity.FeedHelper;
+import org.apache.falcon.entity.v0.cluster.Cluster;
+import org.apache.falcon.entity.v0.datasource.Credentialtype;
+import org.apache.falcon.entity.v0.datasource.Datasource;
+import org.apache.falcon.entity.v0.feed.Feed;
+import org.apache.falcon.entity.v0.feed.LoadMethod;
+import org.apache.falcon.oozie.workflow.ACTION;
+import org.apache.falcon.oozie.workflow.WORKFLOWAPP;
+import org.apache.falcon.workflow.WorkflowExecutionContext;
+
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Builds Datasource export workflow for Oozie.
+ */
+
+public class DatabaseExportWorkflowBuilder extends ExportWorkflowBuilder {
+    protected static final String EXPORT_SQOOP_ACTION_TEMPLATE = "/action/feed/export-sqoop-database-action.xml";
+    protected static final String EXPORT_ACTION_NAME = "db-export-sqoop";
+
+    private static final String ARG_SEPARATOR = " ";
+
+    public DatabaseExportWorkflowBuilder(Feed entity) { super(entity); }
+
+    @Override
+    protected WorkflowExecutionContext.EntityOperations getOperation() {
+        return WorkflowExecutionContext.EntityOperations.EXPORT;
+    }
+
+    @Override
+    protected Properties getWorkflow(Cluster cluster, WORKFLOWAPP workflow) throws FalconException {
+
+        addLibExtensionsToWorkflow(cluster, workflow, Tag.EXPORT);
+
+        ACTION sqoopExport = unmarshalAction(EXPORT_SQOOP_ACTION_TEMPLATE);
+        addTransition(sqoopExport, SUCCESS_POSTPROCESS_ACTION_NAME, FAIL_POSTPROCESS_ACTION_NAME);
+        workflow.getDecisionOrForkOrJoin().add(sqoopExport);
+
+        //Add post-processing actions
+        ACTION success = getSuccessPostProcessAction();
+        // delete addHDFSServersConfig(success, src, target);
+        addTransition(success, OK_ACTION_NAME, FAIL_ACTION_NAME);
+        workflow.getDecisionOrForkOrJoin().add(success);
+
+        ACTION fail = getFailPostProcessAction();
+        // delete addHDFSServersConfig(fail, src, target);
+        addTransition(fail, FAIL_ACTION_NAME, FAIL_ACTION_NAME);
+        workflow.getDecisionOrForkOrJoin().add(fail);
+
+        decorateWorkflow(workflow, workflow.getName(), EXPORT_ACTION_NAME);
+
+        // build the sqoop command and put it in the properties
+        String sqoopCmd = buildSqoopCommand(cluster, entity);
+        LOG.info("SQOOP EXPORT COMMAND : " + sqoopCmd);
+        Properties props = new Properties();
+        props.put("sqoopCommand", sqoopCmd);
+        return props;
+    }
+
+    private String buildSqoopCommand(Cluster cluster, Feed feed) throws FalconException {
+        Map<String, String> extraArgs = getArguments(cluster);
+        StringBuilder sqoopCmd = new StringBuilder();
+        sqoopCmd.append("export").append(ARG_SEPARATOR);
+        //buildDriverArgs(sqoopCmd, cluster).append(ARG_SEPARATOR);
+        buildConnectArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
+        buildTableArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
+        buildUserPasswordArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
+        buildNumMappers(sqoopCmd, extraArgs).append(ARG_SEPARATOR);
+        buildArguments(sqoopCmd, extraArgs).append(ARG_SEPARATOR);
+        buildLoadType(sqoopCmd, cluster).append(ARG_SEPARATOR);
+        buildExportDirArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
+        return sqoopCmd.toString();
+    }
+
+    private StringBuilder buildDriverArgs(StringBuilder builder, Cluster cluster) throws FalconException {
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
+        Datasource db = DatasourceHelper.getDatasource(FeedHelper.getExportDatasourceName(feedCluster));
+        if ((db.getDriver() != null) && (db.getDriver().getClazz() != null)) {
+            builder.append("--driver").append(ARG_SEPARATOR).append(db.getDriver().getClazz());
+        }
+        return builder;
+    }
+
+    private StringBuilder buildConnectArg(StringBuilder builder, Cluster cluster) throws FalconException {
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
+        return builder.append("--connect").append(ARG_SEPARATOR)
+                .append(DatasourceHelper.getReadOnlyEndpoint(
+                        DatasourceHelper.getDatasource(FeedHelper.getExportDatasourceName(feedCluster))));
+    }
+
+    private StringBuilder buildTableArg(StringBuilder builder, Cluster cluster) throws FalconException {
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
+        return builder.append("--table").append(ARG_SEPARATOR)
+                .append(FeedHelper.getExportDataSourceTableName(feedCluster));
+    }
+
+    private StringBuilder buildUserPasswordArg(StringBuilder builder, Cluster cluster) throws FalconException {
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
+        Datasource db = DatasourceHelper.getDatasource(FeedHelper.getExportDatasourceName(feedCluster));
+        Pair<String, String> userPasswdInfo = DatasourceHelper.getReadPasswordInfo(db);
+        builder.append("--username").append(ARG_SEPARATOR)
+                .append(userPasswdInfo.first)
+                .append(ARG_SEPARATOR);
+        if (DatasourceHelper.getReadPasswordType(db) == Credentialtype.PASSWORD_FILE) {
+            builder.append("--password-file");
+        } else {
+            builder.append("--password");
+        }
+        builder.append(ARG_SEPARATOR).append(userPasswdInfo.second);
+        return builder;
+    }
+
+    private StringBuilder buildLoadType(StringBuilder builder, Cluster cluster)
+        throws FalconException {
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
+        builder.append("--update-mode").append(ARG_SEPARATOR);
+        String modeType = LoadMethod.UPDATEONLY.value();
+        if (FeedHelper.getExportLoadMethod(feedCluster).getType() != null) {
+            modeType = FeedHelper.getExportLoadMethod(feedCluster).getType().value();
+        }
+        return builder.append(modeType);
+    }
+
+    private StringBuilder buildExportDirArg(StringBuilder builder, Cluster cluster)
+        throws FalconException {
+        return builder.append("--export-dir").append(ARG_SEPARATOR)
+                .append(String.format("${coord:dataIn('%s')}",
+                        FeedExportCoordinatorBuilder.EXPORT_DATAIN_NAME));
+    }
+
+    private StringBuilder buildArguments(StringBuilder builder, Map<String, String> extraArgs)
+        throws FalconException {
+        for(Map.Entry<String, String> e : extraArgs.entrySet()) {
+            builder.append(e.getKey()).append(ARG_SEPARATOR).append(e.getValue()).append(ARG_SEPARATOR);
+        }
+        return builder;
+    }
+
+    /**
+     * Appends a default --num-mappers argument when the user has not supplied one.
+     *
+     * Feed validation checks that a --split-by column is supplied when --num-mappers > 1;
+     * if --num-mappers is not specified, it is set to 1.
+     *
+     * @param builder command builder
+     * @param extraArgs map of extra arguments
+     * @return the command builder
+     */
+
+    private StringBuilder buildNumMappers(StringBuilder builder, Map<String, String> extraArgs) {
+        if (!extraArgs.containsKey("--num-mappers")) {
+            builder.append("--num-mappers").append(ARG_SEPARATOR).append(1);
+        }
+        return builder;
+    }
+
+    private Map<String, String> getArguments(Cluster cluster) throws FalconException {
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
+        return FeedHelper.getExportArguments(feedCluster);
+    }
+}

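For a feed like the CustomerFeed sample above, buildSqoopCommand() assembles a
single space-separated sqoop invocation. The sketch of the resulting string below
is illustrative only: the JDBC URL, username and password file are assumptions
standing in for a hypothetical test-hsql-db Datasource entity.

    String sqoopCmd = "export "
            + "--connect jdbc:hsqldb:hsql://localhost/db1 "     // READONLY endpoint (assumed)
            + "--table customer "
            + "--username sa --password-file /falcon/passwd "   // per Credentialtype (assumed)
            + "--num-mappers 2 "                                // from the feed <arguments>
            + "--update-mode updateonly "                       // from <load type="updateonly"/>
            + "--export-dir ${coord:dataIn('export-input')}";   // EXPORT_DATAIN_NAME
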
http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/oozie/src/main/java/org/apache/falcon/oozie/DatabaseImportWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/DatabaseImportWorkflowBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/DatabaseImportWorkflowBuilder.java
index 45f46d7..19fa931 100644
--- a/oozie/src/main/java/org/apache/falcon/oozie/DatabaseImportWorkflowBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/oozie/DatabaseImportWorkflowBuilder.java
@@ -99,7 +99,7 @@ public class DatabaseImportWorkflowBuilder extends ImportWorkflowBuilder {
 
     private StringBuilder buildDriverArgs(StringBuilder builder, Cluster cluster) throws FalconException {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
-        Datasource db = DatasourceHelper.getDatasource(feedCluster);
+        Datasource db = DatasourceHelper.getDatasource(FeedHelper.getImportDatasourceName(feedCluster));
         if ((db.getDriver() != null) && (db.getDriver().getClazz() != null)) {
             builder.append("--driver").append(ARG_SEPARATOR).append(db.getDriver().getClazz());
         }
@@ -109,7 +109,8 @@ public class DatabaseImportWorkflowBuilder extends ImportWorkflowBuilder {
     private StringBuilder buildConnectArg(StringBuilder builder, Cluster cluster) throws FalconException {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
         return builder.append("--connect").append(ARG_SEPARATOR)
-                .append(DatasourceHelper.getReadOnlyEndpoint(DatasourceHelper.getDatasource(feedCluster)));
+                .append(DatasourceHelper.getReadOnlyEndpoint(
+                        DatasourceHelper.getDatasource(FeedHelper.getImportDatasourceName(feedCluster))));
     }
 
     private StringBuilder buildTableArg(StringBuilder builder, Cluster cluster) throws FalconException {
@@ -120,7 +121,7 @@ public class DatabaseImportWorkflowBuilder extends ImportWorkflowBuilder {
 
     private StringBuilder buildUserPasswordArg(StringBuilder builder, Cluster cluster) throws FalconException {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
-        Datasource db = DatasourceHelper.getDatasource(feedCluster);
+        Datasource db = DatasourceHelper.getDatasource(FeedHelper.getImportDatasourceName(feedCluster));
         Pair<String, String> userPasswdInfo = DatasourceHelper.getReadPasswordInfo(db);
         builder.append("--username").append(ARG_SEPARATOR)
                 .append(userPasswdInfo.first)

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/oozie/src/main/java/org/apache/falcon/oozie/ExportWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/ExportWorkflowBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/ExportWorkflowBuilder.java
new file mode 100644
index 0000000..a55656c
--- /dev/null
+++ b/oozie/src/main/java/org/apache/falcon/oozie/ExportWorkflowBuilder.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.oozie;
+
+import org.apache.falcon.FalconException;
+import org.apache.falcon.LifeCycle;
+import org.apache.falcon.Tag;
+import org.apache.falcon.entity.EntityUtil;
+import org.apache.falcon.entity.FeedHelper;
+import org.apache.falcon.entity.v0.cluster.Cluster;
+import org.apache.falcon.entity.v0.feed.Feed;
+import org.apache.falcon.oozie.workflow.WORKFLOWAPP;
+import org.apache.falcon.workflow.WorkflowExecutionArgs;
+import org.apache.hadoop.fs.Path;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Properties;
+
+/**
+ * Builds oozie workflow for Datasource export.
+ */
+
+public abstract class ExportWorkflowBuilder extends OozieOrchestrationWorkflowBuilder<Feed> {
+
+    public ExportWorkflowBuilder(Feed feed) {
+        super(feed, LifeCycle.EXPORT);
+    }
+
+    @Override public Properties build(Cluster cluster, Path buildPath) throws FalconException {
+
+        WORKFLOWAPP workflow = new WORKFLOWAPP();
+        String wfName = EntityUtil.getWorkflowName(Tag.EXPORT, entity).toString();
+        workflow.setName(wfName);
+        Properties p = getWorkflow(cluster, workflow);
+        marshal(cluster, workflow, buildPath);
+
+        Properties props = FeedHelper.getFeedProperties(entity);
+        if (props == null) {
+            props = new Properties();
+        }
+        props.putAll(getProperties(buildPath, wfName));
+        if (createDefaultConfiguration(cluster) != null) {
+            props.putAll(createDefaultConfiguration(cluster));
+        }
+        if (FeedHelper.getUserWorkflowProperties(getLifecycle()) != null) {
+            props.putAll(FeedHelper.getUserWorkflowProperties(getLifecycle()));
+        }
+        props.put(WorkflowExecutionArgs.OUTPUT_FEED_NAMES.getName(), NONE);
+        props.put(WorkflowExecutionArgs.OUTPUT_FEED_PATHS.getName(), NONE);
+
+        props.put(WorkflowExecutionArgs.INPUT_FEED_NAMES.getName(), entity.getName());
+        props.put(WorkflowExecutionArgs.INPUT_FEED_PATHS.getName(),
+                String.format("${coord:dataIn('%s')}", FeedExportCoordinatorBuilder.EXPORT_DATAIN_NAME));
+        props.setProperty("srcClusterName", "NA");
+        props.put(WorkflowExecutionArgs.CLUSTER_NAME.getName(), cluster.getName());
+
+        if (StringUtils.isEmpty(FeedHelper.getExportDatasourceName(
+                FeedHelper.getCluster(entity, cluster.getName())))) {
+            throw new FalconException("Datasource name is null or empty");
+        }
+
+        props.put(WorkflowExecutionArgs.DATASOURCE_NAME.getName(),
+                FeedHelper.getExportDatasourceName(FeedHelper.getCluster(entity, cluster.getName())));
+        props.putAll(p);
+        return props;
+    }
+
+    protected abstract Properties getWorkflow(Cluster cluster, WORKFLOWAPP workflow) throws FalconException;
+}

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/oozie/src/main/java/org/apache/falcon/oozie/FeedExportCoordinatorBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/FeedExportCoordinatorBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/FeedExportCoordinatorBuilder.java
new file mode 100644
index 0000000..1bfacc2
--- /dev/null
+++ b/oozie/src/main/java/org/apache/falcon/oozie/FeedExportCoordinatorBuilder.java
@@ -0,0 +1,193 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.oozie;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.falcon.FalconException;
+import org.apache.falcon.LifeCycle;
+import org.apache.falcon.Tag;
+import org.apache.falcon.entity.FeedHelper;
+import org.apache.falcon.entity.Storage;
+import org.apache.falcon.entity.v0.SchemaHelper;
+import org.apache.falcon.entity.v0.cluster.Cluster;
+import org.apache.falcon.entity.v0.feed.Feed;
+import org.apache.falcon.entity.v0.feed.LocationType;
+import org.apache.falcon.oozie.coordinator.ACTION;
+import org.apache.falcon.oozie.coordinator.COORDINATORAPP;
+import org.apache.falcon.oozie.coordinator.DATAIN;
+import org.apache.falcon.oozie.coordinator.DATASETS;
+import org.apache.falcon.oozie.coordinator.INPUTEVENTS;
+import org.apache.falcon.oozie.coordinator.SYNCDATASET;
+import org.apache.falcon.oozie.coordinator.WORKFLOW;
+import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.Properties;
+
+/**
+ * Builds the Oozie coordinator for database export.
+ */
+public class FeedExportCoordinatorBuilder extends OozieCoordinatorBuilder<Feed> {
+
+    public static final String EXPORT_DATASET_NAME = "export-dataset";
+
+    public static final String EXPORT_DATAIN_NAME = "export-input";
+
+    private static final Logger LOG = LoggerFactory.getLogger(FeedExportCoordinatorBuilder.class);
+
+    public FeedExportCoordinatorBuilder(Feed entity) {
+        super(entity, LifeCycle.EXPORT);
+    }
+
+    @Override
+    public List<Properties> buildCoords(Cluster cluster, Path buildPath) throws FalconException {
+
+        LOG.info("Generating Feed EXPORT coordinator.");
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster((Feed) entity, cluster.getName());
+        if (!FeedHelper.isExportEnabled(feedCluster)) {
+            return null;
+        }
+
+        if (feedCluster.getValidity().getEnd().before(new Date())) {
+            LOG.warn("Feed IMPORT is not applicable as Feed's end time for cluster {} is not in the future",
+                    cluster.getName());
+            return null;
+        }
+
+        COORDINATORAPP coord = new COORDINATORAPP();
+        initializeCoordAttributes(coord, (Feed) entity, cluster);
+        Properties props = createCoordDefaultConfiguration(getEntityName());
+        initializeInputPath(coord, cluster, props);
+
+        Properties userWorkflowProps = FeedHelper.getUserWorkflowProperties(getLifecycle());
+        if (userWorkflowProps != null) {
+            props.putAll(userWorkflowProps);
+        }
+
+        WORKFLOW workflow = new WORKFLOW();
+        Path coordPath = getBuildPath(buildPath);
+        Properties wfProp = OozieOrchestrationWorkflowBuilder.get(entity, cluster, Tag.EXPORT).build(cluster,
+                coordPath);
+        workflow.setAppPath(getStoragePath(wfProp.getProperty(OozieEntityBuilder.ENTITY_PATH)));
+        props.putAll(wfProp);
+        workflow.setConfiguration(getConfig(props));
+        ACTION action = new ACTION();
+        action.setWorkflow(workflow);
+
+        coord.setAction(action);
+
+        Path marshalPath = marshal(cluster, coord, coordPath);
+        return Arrays.asList(getProperties(marshalPath, getEntityName()));
+    }
+
+    private void initializeInputPath(COORDINATORAPP coord, Cluster cluster, Properties props)
+        throws FalconException {
+
+        if (coord.getDatasets() == null) {
+            coord.setDatasets(new DATASETS());
+        }
+
+        if (coord.getInputEvents() == null) {
+            coord.setInputEvents(new INPUTEVENTS());
+        }
+
+        Storage storage = FeedHelper.createStorage(cluster, (Feed) entity);
+        SYNCDATASET syncdataset = createDataSet((Feed) entity, cluster, storage,
+                EXPORT_DATASET_NAME, LocationType.DATA);
+
+        if (syncdataset == null) {
+            return;
+        }
+        coord.getDatasets().getDatasetOrAsyncDataset().add(syncdataset);
+        DATAIN datain = createDataIn(entity, cluster);
+        coord.getInputEvents().getDataIn().add(datain);
+    }
+
+    private DATAIN createDataIn(Feed feed, Cluster cluster) {
+        DATAIN datain = new DATAIN();
+        datain.setName(EXPORT_DATAIN_NAME);
+        datain.setDataset(EXPORT_DATASET_NAME);
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName());
+        datain.getInstance().add(SchemaHelper.formatDateUTC(feedCluster.getValidity().getStart()));
+        return datain;
+    }
+
+    /**
+     * Creates the export dataset. The initial instance is set to the current date if the
+     * merge type is snapshot; otherwise, the feed cluster's validity start date is used.
+     *
+     * @param feed feed entity being exported
+     * @param cluster cluster on which the coordinator runs
+     * @param storage storage backing the feed
+     * @param datasetName name to give the coordinator dataset
+     * @param locationType feed location type whose URI template backs the dataset
+     * @return the dataset, or null if the storage has no URI template for the location type
+     * @throws FalconException
+     */
+    private SYNCDATASET createDataSet(Feed feed, Cluster cluster, Storage storage,
+                                      String datasetName, LocationType locationType) throws FalconException {
+        SYNCDATASET syncdataset = new SYNCDATASET();
+        syncdataset.setName(datasetName);
+        syncdataset.setFrequency("${coord:" + feed.getFrequency().toString() + "}");
+
+        String uriTemplate = storage.getUriTemplate(locationType);
+        if (StringUtils.isBlank(uriTemplate)) {
+            return null;
+        }
+        if (storage.getType() == Storage.TYPE.TABLE) {
+            // Oozie requires the 'hcat' scheme (rather than 'thrift') in catalog table URIs.
+            uriTemplate = uriTemplate.replace("thrift", "hcat");
+        }
+        syncdataset.setUriTemplate(uriTemplate);
+
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName());
+        Date initialInstance = FeedHelper.getImportInitalInstance(feedCluster);
+        syncdataset.setInitialInstance(SchemaHelper.formatDateUTC(initialInstance));
+        syncdataset.setTimezone(feed.getTimezone().getID());
+
+        if (StringUtils.isNotBlank(feed.getAvailabilityFlag())) {
+            syncdataset.setDoneFlag(feed.getAvailabilityFlag());
+        } else {
+            syncdataset.setDoneFlag("");
+        }
+
+        return syncdataset;
+    }
+
+    /**
+     * Initializes the coordinator attributes. The start date is the current date if the
+     * merge type is snapshot; otherwise, the feed cluster's validity start is used.
+     *
+     * @param coord coordinator app to initialize
+     * @param feed feed entity being exported
+     * @param cluster cluster on which the coordinator runs
+     */
+    private void initializeCoordAttributes(COORDINATORAPP coord, Feed feed, Cluster cluster) {
+        coord.setName(getEntityName());
+        // for feeds with snapshot layout, the start date will be the time of scheduling since it dumps whole table
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName());
+        Date initialInstance = FeedHelper.getImportInitalInstance(feedCluster);
+        coord.setStart(SchemaHelper.formatDateUTC(initialInstance));
+        coord.setEnd(SchemaHelper.formatDateUTC(feedCluster.getValidity().getEnd()));
+        coord.setTimezone(entity.getTimezone().getID());
+        coord.setFrequency("${coord:" + entity.getFrequency().toString() + "}");
+    }
+}
+
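
For reference, the coordinator assembled above binds the export dataset to a single
data-in event pinned to the feed cluster's validity start. A trimmed, illustrative sketch
of the resulting coordinator XML (the dataset and data-in names come from the constants in
this class; the frequency, dates, and paths are placeholders, not what the builder emits):

    <coordinator-app name="..." frequency="${coord:hours(1)}"
                     start="2015-01-01T00:00Z" end="2016-01-01T00:00Z" timezone="UTC">
        <datasets>
            <dataset name="export-dataset" frequency="${coord:hours(1)}"
                     initial-instance="2015-01-01T00:00Z" timezone="UTC">
                <uri-template>hdfs://namenode/falcon/feeds/sales/${YEAR}-${MONTH}-${DAY}</uri-template>
            </dataset>
        </datasets>
        <input-events>
            <data-in name="export-input" dataset="export-dataset">
                <instance>2015-01-01T00:00Z</instance>
            </data-in>
        </input-events>
        <action>
            <workflow>
                <app-path>${wfAppPath}</app-path>
            </workflow>
        </action>
    </coordinator-app>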

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/oozie/src/main/java/org/apache/falcon/oozie/FeedImportCoordinatorBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/FeedImportCoordinatorBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/FeedImportCoordinatorBuilder.java
index 70289d0..f391cc7 100644
--- a/oozie/src/main/java/org/apache/falcon/oozie/FeedImportCoordinatorBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/oozie/FeedImportCoordinatorBuilder.java
@@ -61,6 +61,7 @@ public class FeedImportCoordinatorBuilder extends OozieCoordinatorBuilder<Feed>
 
     @Override
     public List<Properties> buildCoords(Cluster cluster, Path buildPath) throws FalconException {
+        LOG.info("Generating Feed IMPORT coordinator.");
 
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster((Feed) entity, cluster.getName());
         if (!FeedHelper.isImportEnabled(feedCluster)) {

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/oozie/src/main/java/org/apache/falcon/oozie/OozieCoordinatorBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/OozieCoordinatorBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/OozieCoordinatorBuilder.java
index b1db186..f555b64 100644
--- a/oozie/src/main/java/org/apache/falcon/oozie/OozieCoordinatorBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/oozie/OozieCoordinatorBuilder.java
@@ -78,6 +78,9 @@ public abstract class OozieCoordinatorBuilder<T extends Entity> extends OozieEnt
             case IMPORT:
                 return new FeedImportCoordinatorBuilder((Feed)entity);
 
+            case EXPORT:
+                return new FeedExportCoordinatorBuilder((Feed)entity);
+
             default:
                 throw new IllegalArgumentException("Unhandled type " + entity.getEntityType() + ", lifecycle " + tag);
             }

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/oozie/src/main/java/org/apache/falcon/oozie/OozieOrchestrationWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/OozieOrchestrationWorkflowBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/OozieOrchestrationWorkflowBuilder.java
index 026f79f..e137e11 100644
--- a/oozie/src/main/java/org/apache/falcon/oozie/OozieOrchestrationWorkflowBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/oozie/OozieOrchestrationWorkflowBuilder.java
@@ -143,6 +143,17 @@ public abstract class OozieOrchestrationWorkflowBuilder<T extends Entity> extend
                 }
                 break;
 
+            case EXPORT:
+                dsType = EntityUtil.getExportDatasourceType(cluster, feed);
+                if ((dsType == DatasourceType.MYSQL)
+                        || (dsType == DatasourceType.ORACLE)
+                        || (dsType == DatasourceType.HSQL)) {
+                    return new DatabaseExportWorkflowBuilder(feed);
+                } else {
+                    LOG.info("Export policy not implemented for DataSourceType : " + dsType);
+                }
+                break;
+
             default:
                 throw new IllegalArgumentException("Unhandled type " + entity.getEntityType()
                        + ", lifecycle " + lifecycle);

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/oozie/src/main/java/org/apache/falcon/oozie/feed/FeedBundleBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/feed/FeedBundleBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/feed/FeedBundleBuilder.java
index 9fbc5b2..1205d91 100644
--- a/oozie/src/main/java/org/apache/falcon/oozie/feed/FeedBundleBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/oozie/feed/FeedBundleBuilder.java
@@ -77,6 +77,11 @@ public class FeedBundleBuilder extends OozieBundleBuilder<Feed> {
             props.addAll(importProps);
         }
 
+        List<Properties> exportProps = OozieCoordinatorBuilder.get(entity, Tag.EXPORT).buildCoords(cluster, buildPath);
+        if (exportProps != null) {
+            props.addAll(exportProps);
+        }
+
         if (!props.isEmpty()) {
             copySharedLibs(cluster, new Path(getLibPath(buildPath)));
         }

http://git-wip-us.apache.org/repos/asf/falcon/blob/88bbe14c/oozie/src/main/resources/action/feed/export-sqoop-database-action.xml
----------------------------------------------------------------------
diff --git a/oozie/src/main/resources/action/feed/export-sqoop-database-action.xml b/oozie/src/main/resources/action/feed/export-sqoop-database-action.xml
new file mode 100644
index 0000000..0367edc
--- /dev/null
+++ b/oozie/src/main/resources/action/feed/export-sqoop-database-action.xml
@@ -0,0 +1,47 @@
+<!--
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * -->
+
+<action name="db-export-sqoop" xmlns='uri:oozie:workflow:0.3'>
+    <sqoop xmlns="uri:oozie:sqoop-action:0.3">
+        <job-tracker>${jobTracker}</job-tracker>
+        <name-node>${nameNode}</name-node>
+        <configuration>
+            <property>
+                <name>mapred.job.queue.name</name>
+                <value>${queueName}</value>
+            </property>
+            <property>
+                <name>oozie.launcher.mapred.job.priority</name>
+                <value>${jobPriority}</value>
+            </property>
+            <property>
+                <name>mapred.compress.map.output</name>
+                <value>true</value>
+            </property>
+            <!-- Assuming the connectors are in oozie share lib -->
+            <property>
+            <!-- Will enable using sharelib -->
+                <name>oozie.use.system.libpath</name>
+                <value>true</value>
+            </property>
+        </configuration>
+        <command>${sqoopCommand}</command>
+    </sqoop>
+    <ok to="end"/>
+    <error to="fail"/>
+</action>
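
The ${sqoopCommand} parameter is assembled by the workflow builder at build time. An
illustrative value for a MySQL export (the connection string, credentials, table, and
export directory below are placeholders, not what the builder actually emits):

    <command>export --connect jdbc:mysql://db.example.com:3306/salesdb
        --username falcon --password-file /user/falcon/.password
        --table sales_summary --export-dir /falcon/feeds/sales/2015-12-21-00
        --num-mappers 4</command>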

