falcon-commits mailing list archives

From: b...@apache.org
Subject: [2/2] falcon git commit: FALCON-1729 Database ingest to support password alias via keystore file (Contributed by Venkatesan Ramachandran)
Date: Thu, 04 Feb 2016 21:16:38 GMT
 FALCON-1729 Database ingest to support password alias via keystore file (Contributed by Venkatesan Ramachandran)


Project: http://git-wip-us.apache.org/repos/asf/falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/falcon/commit/5abb1557
Tree: http://git-wip-us.apache.org/repos/asf/falcon/tree/5abb1557
Diff: http://git-wip-us.apache.org/repos/asf/falcon/diff/5abb1557

Branch: refs/heads/master
Commit: 5abb155754af391b57e328dc16f0ca8ee5b9a07a
Parents: 1840784
Author: bvellanki <bvellanki@hortonworks.com>
Authored: Thu Feb 4 13:16:29 2016 -0800
Committer: bvellanki <bvellanki@hortonworks.com>
Committed: Thu Feb 4 13:16:29 2016 -0800

----------------------------------------------------------------------
 CHANGES.txt                                     |   2 +
 client/src/main/resources/datasource-0.1.xsd    |  26 +++-
 .../apache/falcon/entity/DatasourceHelper.java  | 142 +++++++++++------
 .../entity/parser/DatasourceEntityParser.java   |  25 ++-
 .../security/CredentialProviderHelper.java      |  89 +++++++++++
 .../org/apache/falcon/util/HdfsClassLoader.java |   6 +-
 .../parser/DatasourceEntityParserTest.java      |  18 +++
 .../config/datasource/datasource-file-0.1.xml   |   6 +-
 .../config/datasource/datasource-file-0.2.xml   |  48 ++++++
 .../oozie/DatabaseExportWorkflowBuilder.java    |  62 +++-----
 .../oozie/DatabaseImportWorkflowBuilder.java    |  60 +++-----
 .../apache/falcon/oozie/ImportExportCommon.java |  73 +++++++++
 .../falcon/oozie/ImportWorkflowBuilder.java     |   1 +
 pom.xml                                         |  10 +-
 webapp/pom.xml                                  |   1 +
 .../apache/falcon/lifecycle/FeedImportIT.java   | 153 +++++++++++++++++--
 .../org/apache/falcon/resource/TestContext.java |   5 +-
 .../org/apache/falcon/util/HsqldbTestUtils.java |  81 +++++++---
 .../test/resources/credential_provider.jceks    | Bin 0 -> 504 bytes
 .../src/test/resources/datasource-template.xml  |  46 ------
 .../src/test/resources/datasource-template1.xml |  46 ++++++
 .../src/test/resources/datasource-template2.xml |  46 ++++++
 .../src/test/resources/datasource-template3.xml |  49 ++++++
 .../src/test/resources/datasource-template4.xml |  36 +++++
 webapp/src/test/resources/feed-template3.xml    |   2 +-
 webapp/src/test/resources/feed-template4.xml    |  59 +++++++
 26 files changed, 871 insertions(+), 221 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index d1456a4..afefbc8 100755
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -11,6 +11,8 @@ Trunk
     FALCON-1230 Data based notification Service to notify execution instances when data becomes available(Pavan Kumar Kolamuri via Ajay Yadava)
 
   IMPROVEMENTS
+    FALCON-1729 Database ingest to support password alias via keystore file (Venkatesan Ramachandran via Balu Vellanki)
+
     FALCON-1751 Support assembly:single mojo(Ruoyu Wang via Ajay Yadava)
 
     FALCON-763 Support feed listing for CatalogStorage (Balu Vellanki)

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/client/src/main/resources/datasource-0.1.xsd
----------------------------------------------------------------------
diff --git a/client/src/main/resources/datasource-0.1.xsd b/client/src/main/resources/datasource-0.1.xsd
index beb82cc..1202ba1 100644
--- a/client/src/main/resources/datasource-0.1.xsd
+++ b/client/src/main/resources/datasource-0.1.xsd
@@ -176,19 +176,38 @@
                         </xs:documentation>
                     </xs:annotation>
                 </xs:element>
-
                 <xs:element name="passwordText" type="xs:string">
-                <xs:annotation>
+                    <xs:annotation>
                         <xs:documentation>
                             Plain text password.
                         </xs:documentation>
                     </xs:annotation>
                 </xs:element>
+                <xs:element name="passwordAlias" type="passwordAliasType">
+                    <xs:annotation>
+                        <xs:documentation>
+                            Password alias using hadoop credential store.
+                        </xs:documentation>
+                    </xs:annotation>
+                </xs:element>
             </xs:choice>
         </xs:sequence>
         <xs:attribute name="type" type="credentialtype" use="required"/>
     </xs:complexType>
-
+    <xs:complexType name="passwordAliasType">
+        <xs:sequence minOccurs="1" maxOccurs="1">
+            <xs:element name="alias" type="xs:string">
+                <xs:annotation>
+                    <xs:documentation> Provide password alias. </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+            <xs:element name="providerPath" type="xs:string">
+                <xs:annotation>
+                    <xs:documentation>jceks provider HDFS file path </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+        </xs:sequence>
+    </xs:complexType>
     <xs:simpleType name="credentialtype">
         <xs:annotation>
             <xs:documentation>
@@ -198,6 +217,7 @@
         <xs:restriction base="xs:string">
             <xs:enumeration value="password-file" />
             <xs:enumeration value="password-text" />
+            <xs:enumeration value="password-alias" />
         </xs:restriction>
     </xs:simpleType>
 

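With the schema addition above, a datasource credential can now reference a keystore alias instead of carrying the password inline. A minimal sketch of such a credential element, assuming the new password-alias type defined here (the alias name and provider path are placeholders, not values from this commit):

    <credential type="password-alias">
        <userName>SA</userName>
        <passwordAlias>
            <alias>db1.password.alias</alias>
            <providerPath>hdfs://namenode:8020/user/falcon/credential_provider.jceks</providerPath>
        </passwordAlias>
    </credential>

The referenced keystore is an ordinary jceks file, typically created beforehand with the hadoop credential create command that ships with Hadoop 2.6.0 and later.
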
http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/common/src/main/java/org/apache/falcon/entity/DatasourceHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/DatasourceHelper.java b/common/src/main/java/org/apache/falcon/entity/DatasourceHelper.java
index 1f1a193..51ce898 100644
--- a/common/src/main/java/org/apache/falcon/entity/DatasourceHelper.java
+++ b/common/src/main/java/org/apache/falcon/entity/DatasourceHelper.java
@@ -19,8 +19,8 @@
 package org.apache.falcon.entity;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.falcon.FalconException;
-import org.apache.falcon.Pair;
 import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.datasource.Credential;
@@ -30,15 +30,22 @@ import org.apache.falcon.entity.v0.datasource.DatasourceType;
 import org.apache.falcon.entity.v0.datasource.Interface;
 import org.apache.falcon.entity.v0.datasource.Interfaces;
 import org.apache.falcon.entity.v0.datasource.Interfacetype;
+import org.apache.falcon.entity.v0.datasource.PasswordAliasType;
+import org.apache.falcon.security.CurrentUser;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.falcon.hadoop.HadoopClientFactory;
+import org.apache.falcon.security.CredentialProviderHelper;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.StringWriter;
+import java.net.URI;
+import java.security.PrivilegedExceptionAction;
 
 /**
  * DataSource entity helper methods.
@@ -46,6 +53,8 @@ import java.io.StringWriter;
 
 public final class DatasourceHelper {
 
+    public static final String HADOOP_CREDENTIAL_PROVIDER_FILEPATH = "hadoop.security.credential.provider.path";
+
     private static final Logger LOG = LoggerFactory.getLogger(DatasourceHelper.class);
 
     private static final ConfigurationStore STORE = ConfigurationStore.get();
@@ -64,18 +73,32 @@ public final class DatasourceHelper {
         return getInterface(datasource, Interfacetype.READONLY);
     }
 
+    public static String getWriteEndpoint(Datasource datasource) {
+        return getInterface(datasource, Interfacetype.WRITE);
+    }
+
     /**
      * Returns user name and password pair as it is specified in the XML. If the credential type is
      * password-file, the path name is returned.
      *
      * @param db
-     * @return user name and password pair
+     * @return Credential
      * @throws FalconException
      */
-    public static Pair<String, String> getReadPasswordInfo(Datasource db) throws FalconException {
+
+    public static Credential getReadPasswordInfo(Datasource db) throws FalconException {
         for (Interface ifs : db.getInterfaces().getInterfaces()) {
             if ((ifs.getType() == Interfacetype.READONLY) && (ifs.getCredential() != null)) {
-                return getPasswordInfo(ifs.getCredential());
+                return ifs.getCredential();
+            }
+        }
+        return getDefaultPasswordInfo(db.getInterfaces());
+    }
+
+    public static Credential getWritePasswordInfo(Datasource db) throws FalconException {
+        for (Interface ifs : db.getInterfaces().getInterfaces()) {
+            if ((ifs.getType() == Interfacetype.WRITE) && (ifs.getCredential() != null)) {
+                return ifs.getCredential();
             }
         }
         return getDefaultPasswordInfo(db.getInterfaces());
@@ -91,32 +114,37 @@ public final class DatasourceHelper {
      * @throws FalconException
      */
     public static java.util.Properties fetchReadPasswordInfo(Datasource db) throws FalconException {
-        Pair<String, String> passwdInfo = getReadPasswordInfo(db);
+        Credential cred = getReadPasswordInfo(db);
+        return fetchPasswordInfo(cred);
+    }
+
+    public static java.util.Properties fetchWritePasswordInfo(Datasource db) throws FalconException {
+        Credential cred = getWritePasswordInfo(db);
+        return fetchPasswordInfo(cred);
+    }
+
+    public static java.util.Properties fetchPasswordInfo(Credential cred) throws FalconException {
         java.util.Properties p = new java.util.Properties();
-        p.put("user", passwdInfo.first);
-        p.put("password", passwdInfo.second);
-        if (getReadPasswordType(db) == Credentialtype.PASSWORD_FILE) {
-            String actualPasswd = readPasswordInfoFromFile(passwdInfo.second);
+        p.put("user", cred.getUserName());
+        if (cred.getType() == Credentialtype.PASSWORD_TEXT) {
+            p.put("password", cred.getPasswordText());
+        } else if (cred.getType() == Credentialtype.PASSWORD_FILE) {
+            String actualPasswd = fetchPasswordInfoFromFile(cred.getPasswordFile());
+            p.put("password", actualPasswd);
+        } else if (cred.getType() == Credentialtype.PASSWORD_ALIAS) {
+            String actualPasswd = fetchPasswordInfoFromCredentialStore(cred.getPasswordAlias());
             p.put("password", actualPasswd);
         }
         return p;
     }
 
-    /**
-     * Given Datasource, return the read-only credential type. If read-only credential is missing,
-     * use interface's default credential.
-     *
-     * @param db
-     * @return Credentialtype
-     * @throws FalconException
-     */
-    public static Credentialtype getReadPasswordType(Datasource db) throws FalconException {
-        for (Interface ifs : db.getInterfaces().getInterfaces()) {
-            if ((ifs.getType() == Interfacetype.READONLY) && (ifs.getCredential() != null)) {
-                return getPasswordType(ifs.getCredential());
-            }
-        }
-        return getDefaultPasswordType(db.getInterfaces());
+    public static String buildJceksProviderPath(URI credURI) {
+        StringBuilder sb = new StringBuilder();
+        final String credProviderPath = sb.append("jceks:").append("//")
+                .append(credURI.getScheme()).append("@")
+                .append(credURI.getHost())
+                .append(credURI.getPath()).toString();
+        return credProviderPath;
     }
 
     /**
@@ -134,39 +162,61 @@ public final class DatasourceHelper {
         }
         return null;
     }
-    private static Credentialtype getPasswordType(Credential c) {
-        return c.getType();
-    }
 
-    private static Credentialtype getDefaultPasswordType(Interfaces ifs) throws FalconException {
+    private static Credential getDefaultPasswordInfo(Interfaces ifs) throws FalconException {
 
         if (ifs.getCredential() != null) {
-            return ifs.getCredential().getType();
+            return ifs.getCredential();
         } else {
             throw new FalconException("Missing Interfaces default credential");
         }
     }
 
-    private static Pair<String, String> getDefaultPasswordInfo(Interfaces ifs) throws FalconException {
-
-        if (ifs.getCredential() != null) {
-            return getPasswordInfo(ifs.getCredential());
-        } else {
-            throw new FalconException("Missing Interfaces default credential");
-        }
-    }
+    private static String fetchPasswordInfoFromCredentialStore(final PasswordAliasType c) throws FalconException {
+        try {
+            final String credPath = c.getProviderPath();
+            final URI credURI = new URI(credPath);
+            if (StringUtils.isBlank(credURI.getScheme())
+                || StringUtils.isBlank(credURI.getHost())
+                || StringUtils.isBlank(credURI.getPath())) {
+                throw new FalconException("Password alias jceks provider HDFS path is incorrect.");
+            }
+            final String alias = c.getAlias();
+            if (StringUtils.isBlank(alias)) {
+                throw new FalconException("Password alias is empty.");
+            }
 
-    private static Pair<String, String> getPasswordInfo(Credential c) throws FalconException {
-        String passwd = null;
-        if (c.getType() == Credentialtype.PASSWORD_FILE) {
-            passwd = c.getPasswordFile();
-        } else {
-            passwd = c.getPasswordText();
+            final String credProviderPath = buildJceksProviderPath(credURI);
+            LOG.info("Credential provider HDFS path : " + credProviderPath);
+
+            if (CredentialProviderHelper.isProviderAvailable()) {
+                UserGroupInformation ugi = CurrentUser.getProxyUGI();
+                String password = ugi.doAs(new PrivilegedExceptionAction<String>() {
+                    public String run() throws Exception {
+                        final Configuration conf = new Configuration();
+                        conf.set(HadoopClientFactory.FS_DEFAULT_NAME_KEY, credPath);
+                        conf.set(CredentialProviderHelper.CREDENTIAL_PROVIDER_PATH, credProviderPath);
+                        FileSystem fs = FileSystem.get(credURI, conf);
+                        if (!fs.exists(new Path(credPath))) {
+                            String msg = String.format("Credential provider hdfs path [%s] does not "
+                                   + "exist or access denied!", credPath);
+                            LOG.error(msg);
+                            throw new FalconException(msg);
+                        }
+                        return CredentialProviderHelper.resolveAlias(conf, alias);
+                    }
+                });
+                return password;
+            } else {
+                throw new FalconException("Credential Provider is not initialized");
+            }
+        } catch (Exception ioe) {
+            String msg = "Exception while trying to fetch credential alias";
+            LOG.error(msg, ioe);
+            throw new FalconException(msg, ioe);
         }
-        return new Pair<String, String>(c.getUserName(), passwd);
     }
-
-    private static String readPasswordInfoFromFile(String passwordFilePath) throws FalconException {
+    private static String fetchPasswordInfoFromFile(String passwordFilePath) throws FalconException {
         try {
             Path path = new Path(passwordFilePath);
             FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(path.toUri());

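For reference, the buildJceksProviderPath helper above rewrites the HDFS location of the keystore into the jceks-style URI that the Hadoop credential provider framework expects. A small illustrative sketch of the mapping, assuming a made-up namenode host and path (note that only URI.getHost() is used, so any port in the original URI is dropped):

    import java.net.URI;

    import org.apache.falcon.entity.DatasourceHelper;

    // Illustrative only; the host and path below are placeholders.
    public class JceksPathExample {
        public static void main(String[] args) throws Exception {
            URI credURI = new URI("hdfs://namenode/user/falcon/credential_provider.jceks");
            // Prints: jceks://hdfs@namenode/user/falcon/credential_provider.jceks
            System.out.println(DatasourceHelper.buildJceksProviderPath(credURI));
        }
    }
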
http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/common/src/main/java/org/apache/falcon/entity/parser/DatasourceEntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/DatasourceEntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/DatasourceEntityParser.java
index e58b1e9..998f952 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/DatasourceEntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/DatasourceEntityParser.java
@@ -49,29 +49,31 @@ public class DatasourceEntityParser extends EntityParser<Datasource> {
 
     @Override
     public void validate(Datasource db) throws FalconException {
-        ClassLoader previousClassLoader = Thread.currentThread().getContextClassLoader();
         try {
             ClassLoader hdfsClassLoader = HdfsClassLoader.load(db.getName(), db.getDriver().getJars());
-            Thread.currentThread().setContextClassLoader(hdfsClassLoader);
             validateInterface(db, Interfacetype.READONLY, hdfsClassLoader);
             validateInterface(db, Interfacetype.WRITE, hdfsClassLoader);
             validateACL(db);
         } catch(IOException io) {
             throw new ValidationException("Unable to copy driver jars to local dir: "
                     + Arrays.toString(db.getDriver().getJars().toArray()));
-        } finally {
-            Thread.currentThread().setContextClassLoader(previousClassLoader);
         }
     }
 
     private static void validateInterface(Datasource db, Interfacetype interfacetype, ClassLoader hdfsClassLoader)
         throws ValidationException {
         String endpoint = null;
+        Properties userPasswdInfo = null;
         try {
-            endpoint = DatasourceHelper.getReadOnlyEndpoint(db);
+            if (interfacetype == Interfacetype.READONLY) {
+                endpoint = DatasourceHelper.getReadOnlyEndpoint(db);
+                userPasswdInfo = DatasourceHelper.fetchReadPasswordInfo(db);
+            } else if (interfacetype == Interfacetype.WRITE) {
+                endpoint = DatasourceHelper.getWriteEndpoint(db);
+                userPasswdInfo = DatasourceHelper.fetchWritePasswordInfo(db);
+            }
             if (StringUtils.isNotBlank(endpoint)) {
-                LOG.info("Validating {0} endpoint {1} connection.", interfacetype.value(), endpoint);
-                Properties userPasswdInfo = DatasourceHelper.fetchReadPasswordInfo(db);
+                LOG.info("Validating {} endpoint {} connection.", interfacetype.value(), endpoint);
                 validateConnection(hdfsClassLoader, db.getDriver().getClazz(), endpoint, userPasswdInfo);
             }
         } catch(FalconException fe) {
@@ -85,9 +87,13 @@ public class DatasourceEntityParser extends EntityParser<Datasource> {
     private static void validateConnection(ClassLoader hdfsClassLoader, String driverClass,
                                     String connectUrl, Properties userPasswdInfo)
         throws FalconException {
+        ClassLoader previousClassLoader = Thread.currentThread().getContextClassLoader();
+        LOG.info("Preserving current classloader: {}", previousClassLoader.toString());
         try {
+            Thread.currentThread().setContextClassLoader(hdfsClassLoader);
+            LOG.info("Setting context classloader to : {}", hdfsClassLoader.toString());
             java.sql.Driver driver = (java.sql.Driver) hdfsClassLoader.loadClass(driverClass).newInstance();
-            LOG.info("Validating connection URL: {0} using driver: {1}", connectUrl, driver.getClass().toString());
+            LOG.info("Validating connection URL: {} using driver: {}", connectUrl, driver.getClass().toString());
             Connection con = driver.connect(connectUrl, userPasswdInfo);
             if (con == null) {
                 throw new FalconException("DriverManager.getConnection() return "
@@ -96,6 +102,9 @@ public class DatasourceEntityParser extends EntityParser<Datasource> {
         } catch (Exception ex) {
             LOG.error("Exception while validating connection : ", ex);
             throw new FalconException(ex);
+        } finally {
+            Thread.currentThread().setContextClassLoader(previousClassLoader);
+            LOG.info("Restoring original classloader {}", previousClassLoader.toString());
         }
     }
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/common/src/main/java/org/apache/falcon/security/CredentialProviderHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/CredentialProviderHelper.java b/common/src/main/java/org/apache/falcon/security/CredentialProviderHelper.java
new file mode 100644
index 0000000..fc4f745
--- /dev/null
+++ b/common/src/main/java/org/apache/falcon/security/CredentialProviderHelper.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.security;
+
+import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+/**
+ * Helper class for Hadoop credential provider functionality. Reflection is used to avoid
+ * directly referencing the classes and methods so that a version dependency is not introduced,
+ * as the Hadoop credential provider is only available in 2.6.0 and later.
+ */
+
+public final class CredentialProviderHelper {
+
+    private static final Logger LOG = LoggerFactory.getLogger(CredentialProviderHelper.class);
+
+    private static Class<?> clsCredProvider;
+    private static Class<?> clsCredProviderFactory;
+    private static Method methGetPassword;
+    private static Method methCreateCredEntry;
+    private static Method methFlush;
+    private static Method methGetProviders;
+
+    public static final String CREDENTIAL_PROVIDER_PATH = "hadoop.security.credential.provider.path";
+
+    static {
+        try {
+            LOG.debug("Reflecting credential provider classes and methods");
+            clsCredProvider = Class.forName("org.apache.hadoop.security.alias.CredentialProvider");
+            clsCredProviderFactory = Class.forName("org.apache.hadoop.security.alias.CredentialProviderFactory");
+            methCreateCredEntry = clsCredProvider.getMethod("createCredentialEntry", String.class, char[].class);
+            methFlush = clsCredProvider.getMethod("flush");
+            methGetPassword = Configuration.class.getMethod("getPassword", String.class);
+            methGetProviders = clsCredProviderFactory.getMethod("getProviders", new Class[] { Configuration.class });
+            LOG.debug("Found CredentialProviderFactory#getProviders");
+        } catch (ClassNotFoundException | NoSuchMethodException cnfe) {
+            LOG.debug("Ignoring exception", cnfe);
+        }
+    }
+
+    private CredentialProviderHelper() {
+
+    }
+
+    public static boolean isProviderAvailable() {
+        return !(clsCredProvider == null
+                || clsCredProviderFactory == null
+                || methCreateCredEntry == null
+                || methGetPassword == null
+                || methFlush == null);
+    }
+
+    public static String resolveAlias(Configuration conf, String alias) throws IOException {
+        try {
+            char[] cred = (char[]) methGetPassword.invoke(conf, alias);
+            if (cred == null) {
+                throw new IOException("The provided alias cannot be resolved");
+            }
+            return new String(cred);
+        } catch (InvocationTargetException ite) {
+            throw new RuntimeException("Error resolving password "
+                    + " from the credential providers ", ite.getTargetException());
+        } catch (IllegalAccessException iae) {
+            throw new RuntimeException("Error invoking the credential provider method", iae);
+        }
+    }
+}

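As a usage sketch for the new helper, resolving an alias amounts to pointing a Hadoop Configuration at the keystore and calling resolveAlias; the constant and method names come from the class above, while the jceks path and alias are placeholders:

    import org.apache.hadoop.conf.Configuration;

    import org.apache.falcon.security.CredentialProviderHelper;

    // Illustrative only; the provider path and alias below are placeholders.
    public class ResolveAliasExample {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            conf.set(CredentialProviderHelper.CREDENTIAL_PROVIDER_PATH,
                    "jceks://hdfs@namenode/user/falcon/credential_provider.jceks");
            if (CredentialProviderHelper.isProviderAvailable()) {
                String password = CredentialProviderHelper.resolveAlias(conf, "db1.password.alias");
                // Avoid logging the secret itself.
                System.out.println("Resolved a password of length " + password.length());
            }
        }
    }
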
http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/common/src/main/java/org/apache/falcon/util/HdfsClassLoader.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/util/HdfsClassLoader.java b/common/src/main/java/org/apache/falcon/util/HdfsClassLoader.java
index 786ffea..bacc092 100644
--- a/common/src/main/java/org/apache/falcon/util/HdfsClassLoader.java
+++ b/common/src/main/java/org/apache/falcon/util/HdfsClassLoader.java
@@ -54,11 +54,9 @@ public class HdfsClassLoader extends URLClassLoader {
         }
 
         synchronized (LOCK) {
-            LOG.info("Copying jar files from HDFS to local dir");
             final URL[] urls = copyHdfsJarFilesToTempDir(name, jarHdfsPath);
+            LOG.info("Copied jar files from HDFS to local dir");
             final ClassLoader parentClassLoader = HdfsClassLoader.class.getClassLoader();
-            LOG.info("Creating a new HdfsClassLoader for name = {0} with parent = {1} using classpath = {2}",
-                    name, parentClassLoader.toString(),  Arrays.toString(jarHdfsPath.toArray()));
             HdfsClassLoader hdfsClassLoader = java.security.AccessController.doPrivileged(
                     new java.security.PrivilegedAction<HdfsClassLoader>() {
                         @Override
@@ -67,6 +65,8 @@ public class HdfsClassLoader extends URLClassLoader {
                         }
                     }
             );
+            LOG.info("Created a new HdfsClassLoader for name = {} with parent = {} using classpath = {}",
+                    name, parentClassLoader.toString(),  Arrays.toString(jarHdfsPath.toArray()));
             classLoaderCache.put(name, hdfsClassLoader);
             return hdfsClassLoader;
         }

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/common/src/test/java/org/apache/falcon/entity/parser/DatasourceEntityParserTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/parser/DatasourceEntityParserTest.java b/common/src/test/java/org/apache/falcon/entity/parser/DatasourceEntityParserTest.java
index 9567eab..3893917 100644
--- a/common/src/test/java/org/apache/falcon/entity/parser/DatasourceEntityParserTest.java
+++ b/common/src/test/java/org/apache/falcon/entity/parser/DatasourceEntityParserTest.java
@@ -18,12 +18,16 @@
 
 package org.apache.falcon.entity.parser;
 
+import org.apache.falcon.cluster.util.EmbeddedCluster;
 import org.apache.falcon.entity.AbstractTestBase;
 import org.apache.falcon.entity.EntityUtil;
 import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.datasource.Datasource;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
@@ -34,11 +38,25 @@ import java.io.InputStream;
  */
 public class DatasourceEntityParserTest extends AbstractTestBase {
 
+    private EmbeddedCluster cluster;
+    private String hdfsUrl;
+
     private final DatasourceEntityParser datasourceEntityParser =
             (DatasourceEntityParser) EntityParserFactory.getParser(EntityType.DATASOURCE);
     private final FeedEntityParser feedEntityParser =
             (FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
 
+    @BeforeClass
+    public void start() throws Exception {
+        cluster = EmbeddedCluster.newCluster("test");
+        hdfsUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
+    }
+
+    @AfterClass
+    public void close() throws Exception {
+        cluster.shutdown();
+    }
+
     @BeforeMethod
     public void setup() throws Exception {
         cleanupStore();

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/common/src/test/resources/config/datasource/datasource-file-0.1.xml
----------------------------------------------------------------------
diff --git a/common/src/test/resources/config/datasource/datasource-file-0.1.xml b/common/src/test/resources/config/datasource/datasource-file-0.1.xml
index 3ee40ed..76bf3c3 100644
--- a/common/src/test/resources/config/datasource/datasource-file-0.1.xml
+++ b/common/src/test/resources/config/datasource/datasource-file-0.1.xml
@@ -22,20 +22,20 @@
         <interface type="readonly" endpoint="jdbc:hsqldb:localhost/db1">
             <credential type="password-file">
                 <userName>SA</userName>
-                <passwordFile>"jail://global:00/falcon/passwordfile"/></passwordFile>
+                <passwordFile>/falcon/passwordfile</passwordFile>
             </credential>
         </interface>
 
         <interface type="write" endpoint="jdbc:hsqldb:localhost/db1">
             <credential type="password-file">
                 <userName>SA</userName>
-                <passwordFile>"jail://global:00/falcon/passwordfile"/></passwordFile>
+                <passwordFile>/falcon/passwordfile</passwordFile>
             </credential>
         </interface>
 
         <credential type="password-file">
             <userName>SA</userName>
-            <passwordFile>"jail://global:00/falcon/passwordfile"/></passwordFile>
+            <passwordFile>/falcon/passwordfile</passwordFile>
         </credential>
     </interfaces>
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/common/src/test/resources/config/datasource/datasource-file-0.2.xml
----------------------------------------------------------------------
diff --git a/common/src/test/resources/config/datasource/datasource-file-0.2.xml b/common/src/test/resources/config/datasource/datasource-file-0.2.xml
new file mode 100644
index 0000000..3ee40ed
--- /dev/null
+++ b/common/src/test/resources/config/datasource/datasource-file-0.2.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+<datasource colo="west-coast" description="HSQL database on west coast" type="hsql" name="test-hsql-db" xmlns="uri:falcon:datasource:0.1">
+    <tags>owner=foobar@ambari.apache.org, consumer=phoe@ambari.apache.org</tags>
+    <interfaces>
+        <interface type="readonly" endpoint="jdbc:hsqldb:localhost/db1">
+            <credential type="password-file">
+                <userName>SA</userName>
+                <passwordFile>"jail://global:00/falcon/passwordfile"/></passwordFile>
+            </credential>
+        </interface>
+
+        <interface type="write" endpoint="jdbc:hsqldb:localhost/db1">
+            <credential type="password-file">
+                <userName>SA</userName>
+                <passwordFile>"jail://global:00/falcon/passwordfile"/></passwordFile>
+            </credential>
+        </interface>
+
+        <credential type="password-file">
+            <userName>SA</userName>
+            <passwordFile>"jail://global:00/falcon/passwordfile"/></passwordFile>
+        </credential>
+    </interfaces>
+
+    <driver>
+       <clazz>org.hsqldb.jdbcDriver</clazz>
+       <jar>/user/oozie/share/lib/lib_20150721010816/sqoop/hsqldb-1.8.0.7.jar</jar>
+    </driver>
+
+    <ACL owner="testuser" group="group" permission="0x755"/>
+</datasource>

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/oozie/src/main/java/org/apache/falcon/oozie/DatabaseExportWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/DatabaseExportWorkflowBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/DatabaseExportWorkflowBuilder.java
index f1fb337..d69611b 100644
--- a/oozie/src/main/java/org/apache/falcon/oozie/DatabaseExportWorkflowBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/oozie/DatabaseExportWorkflowBuilder.java
@@ -19,12 +19,10 @@
 package org.apache.falcon.oozie;
 
 import org.apache.falcon.FalconException;
-import org.apache.falcon.Pair;
 import org.apache.falcon.Tag;
 import org.apache.falcon.entity.DatasourceHelper;
 import org.apache.falcon.entity.FeedHelper;
 import org.apache.falcon.entity.v0.cluster.Cluster;
-import org.apache.falcon.entity.v0.datasource.Credentialtype;
 import org.apache.falcon.entity.v0.datasource.Datasource;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.feed.LoadMethod;
@@ -43,8 +41,6 @@ public class DatabaseExportWorkflowBuilder extends ExportWorkflowBuilder {
     protected static final String EXPORT_SQOOP_ACTION_TEMPLATE = "/action/feed/export-sqoop-database-action.xml";
     protected static final String EXPORT_ACTION_NAME="db-export-sqoop";
 
-    private static final String ARG_SEPARATOR = " ";
-
     public DatabaseExportWorkflowBuilder(Feed entity) { super(entity); }
 
     @Override
@@ -85,61 +81,50 @@ public class DatabaseExportWorkflowBuilder extends ExportWorkflowBuilder {
 
     private String buildSqoopCommand(Cluster cluster, Feed feed) throws FalconException {
         Map<String, String> extraArgs = getArguments(cluster);
-        StringBuilder sqoopCmd = new StringBuilder();
-        sqoopCmd.append("export").append(ARG_SEPARATOR);
-        //buildDriverArgs(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        buildConnectArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        buildTableArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        buildUserPasswordArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        buildNumMappers(sqoopCmd, extraArgs).append(ARG_SEPARATOR);
-        buildArguments(sqoopCmd, extraArgs).append(ARG_SEPARATOR);
-        buildLoadType(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        buildExportDirArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        return sqoopCmd.toString();
+        StringBuilder sqoopArgs = new StringBuilder();
+        StringBuilder sqoopOptions = new StringBuilder();
+
+        buildConnectArg(sqoopArgs, cluster).append(ImportExportCommon.ARG_SEPARATOR);
+        buildTableArg(sqoopArgs, cluster).append(ImportExportCommon.ARG_SEPARATOR);
+        ImportExportCommon.buildUserPasswordArg(sqoopArgs, sqoopOptions, cluster, entity)
+                .append(ImportExportCommon.ARG_SEPARATOR);
+        buildNumMappers(sqoopArgs, extraArgs).append(ImportExportCommon.ARG_SEPARATOR);
+        buildArguments(sqoopArgs, extraArgs).append(ImportExportCommon.ARG_SEPARATOR);
+        buildLoadType(sqoopArgs, cluster).append(ImportExportCommon.ARG_SEPARATOR);
+        buildExportDirArg(sqoopArgs, cluster).append(ImportExportCommon.ARG_SEPARATOR);
+
+        StringBuffer sqoopCmd = new StringBuffer();
+        return sqoopCmd.append("export").append(ImportExportCommon.ARG_SEPARATOR)
+                .append(sqoopOptions).append(ImportExportCommon.ARG_SEPARATOR)
+                .append(sqoopArgs).toString();
     }
 
     private StringBuilder buildDriverArgs(StringBuilder builder, Cluster cluster) throws FalconException {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
         Datasource db = DatasourceHelper.getDatasource(FeedHelper.getExportDatasourceName(feedCluster));
         if ((db.getDriver() != null) && (db.getDriver().getClazz() != null)) {
-            builder.append("--driver").append(ARG_SEPARATOR).append(db.getDriver().getClazz());
+            builder.append("--driver").append(ImportExportCommon.ARG_SEPARATOR).append(db.getDriver().getClazz());
         }
         return builder;
     }
 
     private StringBuilder buildConnectArg(StringBuilder builder, Cluster cluster) throws FalconException {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
-        return builder.append("--connect").append(ARG_SEPARATOR)
+        return builder.append("--connect").append(ImportExportCommon.ARG_SEPARATOR)
                 .append(DatasourceHelper.getReadOnlyEndpoint(
                         DatasourceHelper.getDatasource(FeedHelper.getExportDatasourceName(feedCluster))));
     }
 
     private StringBuilder buildTableArg(StringBuilder builder, Cluster cluster) throws FalconException {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
-        return builder.append("--table").append(ARG_SEPARATOR)
+        return builder.append("--table").append(ImportExportCommon.ARG_SEPARATOR)
                 .append(FeedHelper.getExportDataSourceTableName(feedCluster));
     }
 
-    private StringBuilder buildUserPasswordArg(StringBuilder builder, Cluster cluster) throws FalconException {
-        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
-        Datasource db = DatasourceHelper.getDatasource(FeedHelper.getExportDatasourceName(feedCluster));
-        Pair<String, String> userPasswdInfo = DatasourceHelper.getReadPasswordInfo(db);
-        builder.append("--username").append(ARG_SEPARATOR)
-                .append(userPasswdInfo.first)
-                .append(ARG_SEPARATOR);
-        if (DatasourceHelper.getReadPasswordType(db) == Credentialtype.PASSWORD_FILE) {
-            builder.append("--password-file");
-        } else {
-            builder.append("--password");
-        }
-        builder.append(ARG_SEPARATOR).append(userPasswdInfo.second);
-        return builder;
-    }
-
     private StringBuilder buildLoadType(StringBuilder builder, Cluster cluster)
         throws FalconException {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
-        builder.append("--update-mode").append(ARG_SEPARATOR);
+        builder.append("--update-mode").append(ImportExportCommon.ARG_SEPARATOR);
         String modeType = LoadMethod.UPDATEONLY.value();
         if (FeedHelper.getExportLoadMethod(feedCluster).getType() != null) {
             modeType = FeedHelper.getExportLoadMethod(feedCluster).getType().value();
@@ -149,7 +134,7 @@ public class DatabaseExportWorkflowBuilder extends ExportWorkflowBuilder {
 
     private StringBuilder buildExportDirArg(StringBuilder builder, Cluster cluster)
         throws FalconException {
-        return builder.append("--export-dir").append(ARG_SEPARATOR)
+        return builder.append("--export-dir").append(ImportExportCommon.ARG_SEPARATOR)
                 .append(String.format("${coord:dataIn('%s')}",
                         FeedExportCoordinatorBuilder.EXPORT_DATAIN_NAME));
     }
@@ -157,7 +142,8 @@ public class DatabaseExportWorkflowBuilder extends ExportWorkflowBuilder {
     private StringBuilder buildArguments(StringBuilder builder, Map<String, String> extraArgs)
         throws FalconException {
         for(Map.Entry<String, String> e : extraArgs.entrySet()) {
-            builder.append(e.getKey()).append(ARG_SEPARATOR).append(e.getValue()).append(ARG_SEPARATOR);
+            builder.append(e.getKey()).append(ImportExportCommon.ARG_SEPARATOR).append(e.getValue())
+                    .append(ImportExportCommon.ARG_SEPARATOR);
         }
         return builder;
     }
@@ -174,7 +160,7 @@ public class DatabaseExportWorkflowBuilder extends ExportWorkflowBuilder {
 
     private StringBuilder buildNumMappers(StringBuilder builder, Map<String, String> extraArgs) {
         if (!extraArgs.containsKey("--num-mappers")) {
-            builder.append("--num-mappers").append(ARG_SEPARATOR).append(1);
+            builder.append("--num-mappers").append(ImportExportCommon.ARG_SEPARATOR).append(1);
         }
         return builder;
     }

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/oozie/src/main/java/org/apache/falcon/oozie/DatabaseImportWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/DatabaseImportWorkflowBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/DatabaseImportWorkflowBuilder.java
index 19fa931..66bfa9b 100644
--- a/oozie/src/main/java/org/apache/falcon/oozie/DatabaseImportWorkflowBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/oozie/DatabaseImportWorkflowBuilder.java
@@ -19,12 +19,10 @@
 package org.apache.falcon.oozie;
 
 import org.apache.falcon.FalconException;
-import org.apache.falcon.Pair;
 import org.apache.falcon.Tag;
 import org.apache.falcon.entity.DatasourceHelper;
 import org.apache.falcon.entity.FeedHelper;
 import org.apache.falcon.entity.v0.cluster.Cluster;
-import org.apache.falcon.entity.v0.datasource.Credentialtype;
 import org.apache.falcon.entity.v0.datasource.Datasource;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.oozie.workflow.ACTION;
@@ -42,8 +40,6 @@ public class DatabaseImportWorkflowBuilder extends ImportWorkflowBuilder {
     protected static final String IMPORT_SQOOP_ACTION_TEMPLATE = "/action/feed/import-sqoop-database-action.xml";
     protected static final String IMPORT_ACTION_NAME="db-import-sqoop";
 
-    private static final String ARG_SEPARATOR = " ";
-
     public DatabaseImportWorkflowBuilder(Feed entity) { super(entity); }
 
     @Override
@@ -85,60 +81,49 @@ public class DatabaseImportWorkflowBuilder extends ImportWorkflowBuilder {
 
     private String buildSqoopCommand(Cluster cluster, Feed feed) throws FalconException {
         Map<String, String> extraArgs = getArguments(cluster);
-        StringBuilder sqoopCmd = new StringBuilder();
-        sqoopCmd.append("import").append(ARG_SEPARATOR);
-        buildDriverArgs(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        buildConnectArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        buildTableArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        buildUserPasswordArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        buildNumMappers(sqoopCmd, extraArgs).append(ARG_SEPARATOR);
-        buildArguments(sqoopCmd, extraArgs).append(ARG_SEPARATOR);
-        buildTargetDirArg(sqoopCmd, cluster).append(ARG_SEPARATOR);
-        return sqoopCmd.toString();
+        StringBuilder sqoopArgs = new StringBuilder();
+        StringBuilder sqoopOptions = new StringBuilder();
+        buildDriverArgs(sqoopArgs, cluster).append(ImportExportCommon.ARG_SEPARATOR);
+        buildConnectArg(sqoopArgs, cluster).append(ImportExportCommon.ARG_SEPARATOR);
+        buildTableArg(sqoopArgs, cluster).append(ImportExportCommon.ARG_SEPARATOR);
+        ImportExportCommon.buildUserPasswordArg(sqoopArgs, sqoopOptions, cluster, entity)
+                .append(ImportExportCommon.ARG_SEPARATOR);
+        buildNumMappers(sqoopArgs, extraArgs).append(ImportExportCommon.ARG_SEPARATOR);
+        buildArguments(sqoopArgs, extraArgs).append(ImportExportCommon.ARG_SEPARATOR);
+        buildTargetDirArg(sqoopArgs, cluster).append(ImportExportCommon.ARG_SEPARATOR);
+
+        StringBuffer sqoopCmd = new StringBuffer();
+        return sqoopCmd.append("import").append(ImportExportCommon.ARG_SEPARATOR)
+                .append(sqoopOptions).append(ImportExportCommon.ARG_SEPARATOR)
+                .append(sqoopArgs).toString();
     }
 
     private StringBuilder buildDriverArgs(StringBuilder builder, Cluster cluster) throws FalconException {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
         Datasource db = DatasourceHelper.getDatasource(FeedHelper.getImportDatasourceName(feedCluster));
         if ((db.getDriver() != null) && (db.getDriver().getClazz() != null)) {
-            builder.append("--driver").append(ARG_SEPARATOR).append(db.getDriver().getClazz());
+            builder.append("--driver").append(ImportExportCommon.ARG_SEPARATOR).append(db.getDriver().getClazz());
         }
         return builder;
     }
 
     private StringBuilder buildConnectArg(StringBuilder builder, Cluster cluster) throws FalconException {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
-        return builder.append("--connect").append(ARG_SEPARATOR)
+        return builder.append("--connect").append(ImportExportCommon.ARG_SEPARATOR)
                 .append(DatasourceHelper.getReadOnlyEndpoint(
                         DatasourceHelper.getDatasource(FeedHelper.getImportDatasourceName(feedCluster))));
     }
 
     private StringBuilder buildTableArg(StringBuilder builder, Cluster cluster) throws FalconException {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
-        return builder.append("--table").append(ARG_SEPARATOR)
+        return builder.append("--table").append(ImportExportCommon.ARG_SEPARATOR)
                                     .append(FeedHelper.getImportDataSourceTableName(feedCluster));
     }
 
-    private StringBuilder buildUserPasswordArg(StringBuilder builder, Cluster cluster) throws FalconException {
-        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
-        Datasource db = DatasourceHelper.getDatasource(FeedHelper.getImportDatasourceName(feedCluster));
-        Pair<String, String> userPasswdInfo = DatasourceHelper.getReadPasswordInfo(db);
-        builder.append("--username").append(ARG_SEPARATOR)
-                .append(userPasswdInfo.first)
-                .append(ARG_SEPARATOR);
-        if (DatasourceHelper.getReadPasswordType(db) == Credentialtype.PASSWORD_FILE) {
-            builder.append("--password-file");
-        } else {
-            builder.append("--password");
-        }
-        builder.append(ARG_SEPARATOR).append(userPasswdInfo.second);
-        return builder;
-    }
-
     private StringBuilder buildTargetDirArg(StringBuilder builder, Cluster cluster)
         throws FalconException {
-        return builder.append("--delete-target-dir").append(ARG_SEPARATOR)
-                .append("--target-dir").append(ARG_SEPARATOR)
+        return builder.append("--delete-target-dir").append(ImportExportCommon.ARG_SEPARATOR)
+                .append("--target-dir").append(ImportExportCommon.ARG_SEPARATOR)
                 .append(String.format("${coord:dataOut('%s')}",
                         FeedImportCoordinatorBuilder.IMPORT_DATAOUT_NAME));
     }
@@ -146,7 +131,8 @@ public class DatabaseImportWorkflowBuilder extends ImportWorkflowBuilder {
     private StringBuilder buildArguments(StringBuilder builder, Map<String, String> extraArgs)
         throws FalconException {
         for(Map.Entry<String, String> e : extraArgs.entrySet()) {
-            builder.append(e.getKey()).append(ARG_SEPARATOR).append(e.getValue()).append(ARG_SEPARATOR);
+            builder.append(e.getKey()).append(ImportExportCommon.ARG_SEPARATOR).append(e.getValue())
+                    .append(ImportExportCommon.ARG_SEPARATOR);
         }
         return builder;
     }
@@ -163,7 +149,7 @@ public class DatabaseImportWorkflowBuilder extends ImportWorkflowBuilder {
 
     private StringBuilder buildNumMappers(StringBuilder builder, Map<String, String> extraArgs) {
         if (!extraArgs.containsKey("--num-mappers")) {
-            builder.append("--num-mappers").append(ARG_SEPARATOR).append(1);
+            builder.append("--num-mappers").append(ImportExportCommon.ARG_SEPARATOR).append(1);
         }
         return builder;
     }

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/oozie/src/main/java/org/apache/falcon/oozie/ImportExportCommon.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/ImportExportCommon.java b/oozie/src/main/java/org/apache/falcon/oozie/ImportExportCommon.java
new file mode 100644
index 0000000..19b567c
--- /dev/null
+++ b/oozie/src/main/java/org/apache/falcon/oozie/ImportExportCommon.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.oozie;
+
+import org.apache.falcon.FalconException;
+import org.apache.falcon.entity.DatasourceHelper;
+import org.apache.falcon.entity.FeedHelper;
+import org.apache.falcon.entity.v0.cluster.Cluster;
+import org.apache.falcon.entity.v0.datasource.Credential;
+import org.apache.falcon.entity.v0.datasource.Credentialtype;
+import org.apache.falcon.entity.v0.datasource.Datasource;
+import org.apache.falcon.entity.v0.feed.Feed;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Helper class that implements common functions across Import and Export.
+ */
+
+public final class ImportExportCommon {
+
+    static final String ARG_SEPARATOR = " ";
+
+    private ImportExportCommon() {
+    }
+
+    static StringBuilder buildUserPasswordArg(StringBuilder builder, StringBuilder sqoopOpts,
+                                                 Cluster cluster, Feed entity) throws FalconException {
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
+        Datasource db = DatasourceHelper.getDatasource(FeedHelper.getImportDatasourceName(feedCluster));
+        Credential cred = DatasourceHelper.getReadPasswordInfo(db);
+        builder.append("--username").append(ARG_SEPARATOR)
+                .append(cred.getUserName())
+                .append(ARG_SEPARATOR);
+        if (cred.getType() == Credentialtype.PASSWORD_FILE) {
+            builder.append("--password-file");
+            builder.append(ARG_SEPARATOR).append(cred.getPasswordFile());
+        } else if (cred.getType() == Credentialtype.PASSWORD_ALIAS) {
+            try {
+                sqoopOpts.append("-D")
+                        .append(DatasourceHelper.HADOOP_CREDENTIAL_PROVIDER_FILEPATH)
+                        .append("=")
+                        .append(DatasourceHelper
+                                .buildJceksProviderPath(new URI(cred.getPasswordAlias().getProviderPath())));
+            } catch(URISyntaxException uriEx) {
+                throw new FalconException(uriEx);
+            }
+            builder.append("--password-alias");
+            builder.append(ARG_SEPARATOR).append(cred.getPasswordAlias().getAlias());
+        } else {
+            builder.append("--password");
+            builder.append(ARG_SEPARATOR).append(cred.getPasswordText());
+        }
+        return builder;
+    }
+}

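The net effect on the generated Sqoop invocation, when the credential type is password-alias, is an extra -D option carrying the jceks provider path plus a --password-alias argument in place of --password or --password-file. Roughly, with illustrative values throughout (wrapped here for readability; the builder emits a single space-separated string):

    import -Dhadoop.security.credential.provider.path=jceks://hdfs@namenode/user/falcon/credential_provider.jceks
        --connect jdbc:hsqldb:localhost/db1 --table customer
        --username SA --password-alias db1.password.alias ...
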
http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/oozie/src/main/java/org/apache/falcon/oozie/ImportWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/oozie/ImportWorkflowBuilder.java b/oozie/src/main/java/org/apache/falcon/oozie/ImportWorkflowBuilder.java
index 4892ecb..cae8497 100644
--- a/oozie/src/main/java/org/apache/falcon/oozie/ImportWorkflowBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/oozie/ImportWorkflowBuilder.java
@@ -39,6 +39,7 @@ import java.util.Properties;
 public abstract class ImportWorkflowBuilder extends OozieOrchestrationWorkflowBuilder<Feed> {
 
     public ImportWorkflowBuilder(Feed feed) {
+
         super(feed, LifeCycle.IMPORT);
     }
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 12672bd..fbf5913 100644
--- a/pom.xml
+++ b/pom.xml
@@ -249,11 +249,12 @@
                         <version>${hadoop.version}</version>
                         <scope>provided</scope>
                     </dependency>
+
                     <dependency>
-                        <groupId>org.apache.hadoop</groupId>
-                        <artifactId>hadoop-aws</artifactId>
-                        <version>${hadoop.version}</version>
-                        <scope>provided</scope>
+                    	<groupId>org.apache.hadoop</groupId>
+                    	<artifactId>hadoop-aws</artifactId>
+                    	<version>${hadoop.version}</version>
+                    	<scope>provided</scope>
                     </dependency>
                 </dependencies>
           </dependencyManagement>
@@ -299,6 +300,7 @@
                                 <exclude>**/db1.log</exclude>
                                 <exclude>**/db1.properties</exclude>
                                 <exclude>**/db1.script</exclude>
+                                <exclude>**/credential_provider.jceks</exclude>
                             </excludes>
                         </configuration>
                         <executions>

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/pom.xml
----------------------------------------------------------------------
diff --git a/webapp/pom.xml b/webapp/pom.xml
index 7ecfbaf..3996966 100644
--- a/webapp/pom.xml
+++ b/webapp/pom.xml
@@ -606,6 +606,7 @@
                   <exclude>**/db1.log</exclude>
                   <exclude>**/db1.properties</exclude>
                   <exclude>**/db1.script</exclude>
+                  <exclude>**/credential_provider.jceks</exclude>
                 </excludes>
               </configuration>
       	    </plugin>	

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/src/test/java/org/apache/falcon/lifecycle/FeedImportIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/lifecycle/FeedImportIT.java b/webapp/src/test/java/org/apache/falcon/lifecycle/FeedImportIT.java
index b55d660..c34bcfc 100644
--- a/webapp/src/test/java/org/apache/falcon/lifecycle/FeedImportIT.java
+++ b/webapp/src/test/java/org/apache/falcon/lifecycle/FeedImportIT.java
@@ -20,15 +20,23 @@ package org.apache.falcon.lifecycle;
 
 import junit.framework.Assert;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.commons.io.IOUtils;
+import org.apache.falcon.cluster.util.EmbeddedCluster;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.resource.TestContext;
 import org.apache.falcon.util.HsqldbTestUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
 import java.io.File;
+import java.io.InputStream;
 import java.util.Map;
 
 /**
@@ -37,11 +45,14 @@ import java.util.Map;
 
 @Test
 public class FeedImportIT {
-    public static final Log LOG = LogFactory.getLog(HsqldbTestUtils.class.getName());
+    public static final Logger LOG =  LoggerFactory.getLogger(FeedImportIT.class);
+
+    private static final String DATASOURCE_NAME_KEY = "datasourcename";
 
     @BeforeClass
     public void setUp() throws Exception {
         HsqldbTestUtils.start();
+        HsqldbTestUtils.createSqoopUser("sqoop_user", "sqoop");
         HsqldbTestUtils.changeSAPassword("sqoop");
         HsqldbTestUtils.createAndPopulateCustomerTable();
 
@@ -70,14 +81,17 @@ public class FeedImportIT {
         LOG.info("entity -submit -type cluster -file " + filePath);
         Assert.assertEquals(TestContext.executeWithURL("entity -submit -type cluster -file " + filePath), 0);
 
-        filePath = TestContext.overlayParametersOverTemplate(TestContext.DATASOURCE_TEMPLATE, overlay);
-        LOG.info("entity -submit -type datasource -file " + filePath);
+        // Put a new datasource name into the overlay so that DATASOURCE_TEMPLATE1 and FEED_TEMPLATE3
+        // are populated with the same datasource name
+        String dsName = "datasource-test-1";
+        overlay.put(DATASOURCE_NAME_KEY, dsName);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.DATASOURCE_TEMPLATE1, overlay);
+        LOG.info("Submit datatsource entity {} via entity -submit -type datasource -file {}", dsName, filePath);
         Assert.assertEquals(TestContext.executeWithURL("entity -submit -type datasource -file " + filePath), 0);
 
         filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE3, overlay);
-        LOG.info("entity -submitAndSchedule -type feed -file " + filePath);
-        Assert.assertEquals(0, TestContext.executeWithURL("entity -submitAndSchedule -type feed -file "
-                + filePath));
+        LOG.info("Submit feed with datasource {} via entity -submitAndSchedule -type feed -file {}", dsName, filePath);
+        Assert.assertEquals(0, TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath));
     }
 
     @Test
@@ -89,16 +103,127 @@ public class FeedImportIT {
         LOG.info("entity -submit -type cluster -file " + filePath);
         Assert.assertEquals(TestContext.executeWithURL("entity -submit -type cluster -file " + filePath), 0);
 
-        filePath = TestContext.overlayParametersOverTemplate(TestContext.DATASOURCE_TEMPLATE, overlay);
+        // Put a new datasource name into the overlay so that DATASOURCE_TEMPLATE1 and FEED_TEMPLATE3
+        // are populated with the same datasource name
+        String dsName = "datasource-test-delete";
+        overlay.put(DATASOURCE_NAME_KEY, dsName);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.DATASOURCE_TEMPLATE1, overlay);
         LOG.info("entity -submit -type datasource -file " + filePath);
         Assert.assertEquals(TestContext.executeWithURL("entity -submit -type datasource -file " + filePath), 0);
 
         filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE3, overlay);
-        LOG.info("entity -submit -type feed -file " + filePath);
-        Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type feed -file "
-                + filePath));
+        LOG.info("Submit FEED entity with datasource {} via entity -submit -type feed -file {}", dsName, filePath);
+        Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type feed -file " + filePath));
+
+        LOG.info("Delete datasource in-use via entity -delete -type datasource -name {}", dsName);
+        Assert.assertEquals(-1, TestContext.executeWithURL("entity -delete -type datasource -name " + dsName));
+    }
+
+    @Test
+    public void testSqoopImport2() throws Exception {
+        // create a TestContext and a test embedded cluster
+        TestContext context = new TestContext();
+        context.setCluster("test");
+        EmbeddedCluster cluster = context.getCluster();
+        Configuration conf = cluster.getConf();
+        FileSystem fs = FileSystem.get(conf);
+        Map<String, String> overlay = context.getUniqueOverlay();
+
+        String filePath = TestContext.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
+        context.setCluster(filePath);
+        LOG.info("entity -submit -type cluster -file " + filePath);
+        Assert.assertEquals(TestContext.executeWithURL("entity -submit -type cluster -file " + filePath), 0);
+
+        // Put a new datasource name into the overlay for use in DATASOURCE_TEMPLATE2 and FEED_TEMPLATE3
+        String dsName = "datasource-test-2";
+        overlay.put(DATASOURCE_NAME_KEY, dsName);
+
+        // create a password file on hdfs in the following location
+        String hdfsPasswordFile = "/falcon/passwordfile";
+        FSDataOutputStream fos = fs.create(new Path(hdfsPasswordFile));
+        IOUtils.write("sqoop", fos);
+        IOUtils.closeQuietly(fos);
+
+        // put the fully qualified HDFS password file path into overlay for substitution
+        String qualifiedHdfsPath = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY) + hdfsPasswordFile;
+        LOG.info("Qualifed HDFS filepath set in the overlay {}", qualifiedHdfsPath);
+        overlay.put("passwordfile", qualifiedHdfsPath);
+
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.DATASOURCE_TEMPLATE2, overlay);
+        LOG.info("Submit datasource entity {} via entity -submit -type datasource -file {}", dsName, filePath);
+        Assert.assertEquals(TestContext.executeWithURL("entity -submit -type datasource -file " + filePath), 0);
 
-        LOG.info("entity -delete -type datasource -name datasource-test");
-        Assert.assertEquals(-1, TestContext.executeWithURL("entity -delete -type datasource -name datasource-test"));
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE3, overlay);
+        LOG.info("Submit FEED entity with datasource {} via entity -submit -type feed -file {}", dsName, filePath);
+        Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type feed -file " + filePath));
     }
+
+    @Test
+    public void testSqoopImport3() throws Exception {
+        // create a TestContext and a test embedded cluster
+        TestContext context = new TestContext();
+        context.setCluster("test");
+        EmbeddedCluster cluster = context.getCluster();
+        Configuration conf = cluster.getConf();
+        FileSystem fs = FileSystem.get(conf);
+        Map<String, String> overlay = context.getUniqueOverlay();
+
+        String filePath = TestContext.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
+        context.setCluster(filePath);
+        LOG.info("entity -submit -type cluster -file " + filePath);
+        Assert.assertEquals(TestContext.executeWithURL("entity -submit -type cluster -file " + filePath), 0);
+
+        // Put a new datasource name into the overlay for use in DATASOURCE_TEMPLATE3 and FEED_TEMPLATE3
+        String dsName = "datasource-test-3";
+        overlay.put(DATASOURCE_NAME_KEY, dsName);
+
+        // read the jceks provider file from resources and copy to hdfs provider path
+        InputStream is = this.getClass().getResourceAsStream("/credential_provider.jceks");
+        LOG.info("Opened credential_provider.jceks file from resource {}", (is == null) ? false : true);
+
+        // create a jceks provider path on hdfs in the following location
+        String hdfsProviderPath = "/falcon/providerpath";
+        FSDataOutputStream fos = fs.create(new Path(hdfsProviderPath));
+        LOG.info("Opened embedded cluster hdfs file for writing jceks {}", (fos == null) ? false : true);
+        int numBytes = IOUtils.copy(is, fos);
+        LOG.info("Copied {} bytes to hdfs provider file from resource.", numBytes);
+        IOUtils.closeQuietly(is);
+        IOUtils.closeQuietly(fos);
+
+        // put the fully qualified HDFS provider path into overlay for substitution
+        String qualifiedHdfsPath = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY) + hdfsProviderPath;
+        LOG.info("Qualifed HDFS provider path set in the overlay {}", qualifiedHdfsPath);
+        overlay.put("providerpath", qualifiedHdfsPath);
+
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.DATASOURCE_TEMPLATE3, overlay);
+        LOG.info("Submit datasource entity {} via entity -submit -type datasource -file {}", dsName, filePath);
+        Assert.assertEquals(TestContext.executeWithURL("entity -submit -type datasource -file " + filePath), 0);
+
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE3, overlay);
+        LOG.info("Submit FEED entity with datasource {} via entity -submit -type feed -file {}", dsName, filePath);
+        Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type feed -file " + filePath));
+    }
+
+    @Test
+    public void testSqoopImportUsingDefaultCredential() throws Exception {
+        TestContext context = new TestContext();
+        Map<String, String> overlay = context.getUniqueOverlay();
+        String filePath = TestContext.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
+        context.setCluster(filePath);
+        LOG.info("entity -submit -type cluster -file " + filePath);
+        Assert.assertEquals(TestContext.executeWithURL("entity -submit -type cluster -file " + filePath), 0);
+
+        // Put a new datasource name into the overlay so that DATASOURCE_TEMPLATE4 and FEED_TEMPLATE3
+        // are populated with the same datasource name
+        String dsName = "datasource-test-4";
+        overlay.put(DATASOURCE_NAME_KEY, dsName);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.DATASOURCE_TEMPLATE4, overlay);
+        LOG.info("Submit datatsource entity {} via entity -submit -type datasource -file {}", dsName, filePath);
+        Assert.assertEquals(TestContext.executeWithURL("entity -submit -type datasource -file " + filePath), 0);
+
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE3, overlay);
+        LOG.info("Submit feed with datasource {} via entity -submitAndSchedule -type feed -file {}", dsName, filePath);
+        Assert.assertEquals(0, TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath));
+    }
+
 }

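For reference, the password-alias flow exercised by testSqoopImport3 relies on Hadoop's Credential Provider API: the alias named in the datasource entity is looked up in the JCEKS keystore pointed to by providerPath. The sketch below illustrates that lookup with stock Hadoop calls only; it is not Falcon's CredentialProviderHelper, and the provider URI and alias name are assumptions mirroring the test resources above.

    // Minimal sketch (not Falcon's CredentialProviderHelper): resolve a password
    // alias from a JCEKS keystore using Hadoop's Credential Provider API.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.alias.CredentialProviderFactory;

    public final class AliasLookupSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Assumed provider URI for illustration; in the tests the ##providerpath##
            // overlay value points at the .jceks file copied onto HDFS.
            conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
                    "jceks://hdfs/falcon/providerpath/credential_provider.jceks");
            // getPassword() consults the configured providers before falling back to plain config.
            char[] password = conf.getPassword("sqoop.read.password");
            System.out.println("Alias resolved: " + (password != null));
        }
    }
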
http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/src/test/java/org/apache/falcon/resource/TestContext.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/resource/TestContext.java b/webapp/src/test/java/org/apache/falcon/resource/TestContext.java
index 321a5cf..e3fe8bb 100644
--- a/webapp/src/test/java/org/apache/falcon/resource/TestContext.java
+++ b/webapp/src/test/java/org/apache/falcon/resource/TestContext.java
@@ -89,7 +89,10 @@ import java.util.regex.Pattern;
  */
 public class TestContext extends AbstractTestContext {
 
-    public static final String DATASOURCE_TEMPLATE = "/datasource-template.xml";
+    public static final String DATASOURCE_TEMPLATE1 = "/datasource-template1.xml";
+    public static final String DATASOURCE_TEMPLATE2 = "/datasource-template2.xml";
+    public static final String DATASOURCE_TEMPLATE3 = "/datasource-template3.xml";
+    public static final String DATASOURCE_TEMPLATE4 = "/datasource-template4.xml";
     public static final String CLUSTER_TEMPLATE = "/cluster-template.xml";
     public static final String PIG_PROCESS_TEMPLATE = "/pig-process-template.xml";
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/src/test/java/org/apache/falcon/util/HsqldbTestUtils.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/util/HsqldbTestUtils.java b/webapp/src/test/java/org/apache/falcon/util/HsqldbTestUtils.java
index a92629f..b58b882 100644
--- a/webapp/src/test/java/org/apache/falcon/util/HsqldbTestUtils.java
+++ b/webapp/src/test/java/org/apache/falcon/util/HsqldbTestUtils.java
@@ -18,6 +18,8 @@
 
 package org.apache.falcon.util;
 
+import java.io.File;
+import java.io.IOException;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
@@ -27,16 +29,17 @@ import java.sql.SQLException;
 
 import java.util.ArrayList;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.commons.io.FileUtils;
 import org.hsqldb.Server;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Create a simple hsqldb server and schema to use for testing.
  */
 public final class HsqldbTestUtils {
 
-    public static final Log LOG = LogFactory.getLog(HsqldbTestUtils.class.getName());
+    public static final Logger LOG = LoggerFactory.getLogger(HsqldbTestUtils.class);
 
     // singleton server instance.
     private static Server server;
@@ -45,6 +48,10 @@ public final class HsqldbTestUtils {
 
     private static boolean inMemoryDB = IN_MEM.equals(getServerHost());
 
+    private static String dbLocation;
+
+    private static String dbBaseDir;
+
     private HsqldbTestUtils() {}
 
     public static String getServerHost() {
@@ -57,7 +64,7 @@ public final class HsqldbTestUtils {
     // Database name can be altered too
     private static final String DATABASE_NAME = System.getProperty("hsql.database.name",  "db1");
     private static final String CUSTOMER_TABLE_NAME = "CUSTOMER";
-    private static final String DB_URL = "jdbc:hsqldb:" + getServerHost() + DATABASE_NAME;
+    private static final String DB_URL = "jdbc:hsqldb:hsql://" + getServerHost() + DATABASE_NAME;
     private static final String DRIVER_CLASS = "org.hsqldb.jdbcDriver";
 
     public static String getUrl() {
@@ -75,12 +82,12 @@ public final class HsqldbTestUtils {
         if (null == server) {
             LOG.info("Starting new hsqldb server; database=" + DATABASE_NAME);
             String tmpDir = System.getProperty("test.build.data", "/tmp/");
-            String dbLocation = tmpDir + "/falcon/testdb.file";
-            if (inMemoryDB) {dbLocation = IN_MEM; }
+            dbBaseDir = tmpDir + "/falcon";
+            dbLocation =  dbBaseDir + "/testdb.file";
+            deleteHSQLDir();
             server = new Server();
             server.setDatabaseName(0, DATABASE_NAME);
-            server.putPropertiesFromString("database.0=" + dbLocation
-                    + ";no_system_exit=true;");
+            server.putPropertiesFromString("database.0=" + dbLocation + ";no_system_exit=true;");
             server.start();
             LOG.info("Started server with url=" + DB_URL);
         }
@@ -90,6 +97,16 @@ public final class HsqldbTestUtils {
         if (null != server) {
             server.stop();
         }
+        deleteHSQLDir();
+    }
+
+    private static void deleteHSQLDir() {
+        try {
+            FileUtils.deleteDirectory(new File(dbBaseDir));
+            LOG.info("Ok, Deleted HSQL temp dir at {}", dbBaseDir);
+        } catch(IOException ioe) {
+            LOG.info("Error deleting HSQL temp dir at {}", dbBaseDir);
+        }
     }
 
     public static void tearDown() throws SQLException {
@@ -97,6 +114,29 @@ public final class HsqldbTestUtils {
         stop();
     }
 
+    public static void createSqoopUser(String user, String password) throws Exception {
+        Connection connection = null;
+        Statement st = null;
+
+        LOG.info("Creating user {} with password {}", user, password);
+
+        try {
+            connection = getConnectionSystem();
+            st = connection.createStatement();
+            boolean result = st.execute("CREATE USER " + user + " PASSWORD " + password + " ADMIN");
+            LOG.info("CREATE USER returned {}", result);
+            connection.commit();
+        } finally {
+            if (null != st) {
+                st.close();
+            }
+
+            if (null != connection) {
+                connection.close();
+            }
+        }
+    }
+
     public static void changeSAPassword(String passwd) throws Exception {
         Connection connection = null;
         Statement st = null;
@@ -106,7 +146,8 @@ public final class HsqldbTestUtils {
             connection = getConnectionSystem();
 
             st = connection.createStatement();
-            st.executeUpdate("SET PASSWORD \"" + passwd + "\"");
+            boolean result = st.execute("SET PASSWORD \"" + passwd + "\"");
+            LOG.info("Change PASSWORD for SA returned {}", result);
             connection.commit();
         } finally {
             if (null != st) {
@@ -123,7 +164,7 @@ public final class HsqldbTestUtils {
     }
 
     private static Connection getConnection() throws SQLException {
-        return getConnection("SA", "sqoop");
+        return getConnection("sqoop_user", "sqoop");
     }
     private static Connection getConnection(String user, String password) throws SQLException {
         try {
@@ -135,6 +176,7 @@ public final class HsqldbTestUtils {
         }
         Connection connection = DriverManager.getConnection(DB_URL, user, password);
         connection.setAutoCommit(false);
+        LOG.info("Connection for user {} password {} is open {}", user, password, !connection.isClosed());
         return connection;
     }
 
@@ -179,13 +221,18 @@ public final class HsqldbTestUtils {
             connection = getConnection();
 
             st = connection.createStatement();
-            st.executeUpdate("DROP TABLE " + CUSTOMER_TABLE_NAME + " IF EXISTS");
-            st.executeUpdate("CREATE TABLE " + CUSTOMER_TABLE_NAME + "(id INT NOT NULL PRIMARY KEY, name VARCHAR(64))");
-
-            st.executeUpdate("INSERT INTO " + CUSTOMER_TABLE_NAME + " VALUES(1, 'Apple')");
-            st.executeUpdate("INSERT INTO " + CUSTOMER_TABLE_NAME + " VALUES(2, 'Blackberry')");
-            st.executeUpdate("INSERT INTO " + CUSTOMER_TABLE_NAME + " VALUES(3, 'Caterpillar')");
-            st.executeUpdate("INSERT INTO " + CUSTOMER_TABLE_NAME + " VALUES(4, 'DuPont')");
+            boolean r = st.execute("DROP TABLE " + CUSTOMER_TABLE_NAME + " IF EXISTS");
+            r = st.execute("CREATE TABLE " + CUSTOMER_TABLE_NAME + "(id INT NOT NULL PRIMARY KEY, name VARCHAR(64))");
+            LOG.info("CREATE TABLE returned {}", r);
+
+            r=st.execute("INSERT INTO " + CUSTOMER_TABLE_NAME + " VALUES(1, 'Apple')");
+            LOG.info("INSERT INTO returned {}", r);
+            r=st.execute("INSERT INTO " + CUSTOMER_TABLE_NAME + " VALUES(2, 'Blackberry')");
+            LOG.info("INSERT INTO returned {}", r);
+            r=st.execute("INSERT INTO " + CUSTOMER_TABLE_NAME + " VALUES(3, 'Caterpillar')");
+            LOG.info("INSERT INTO returned {}", r);
+            r=st.execute("INSERT INTO " + CUSTOMER_TABLE_NAME + " VALUES(4, 'DuPont')");
+            LOG.info("INSERT INTO returned {}", r);
 
             connection.commit();
         } finally {

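For reference, a minimal sketch of how the utility above is meant to be driven, using only methods visible in this diff. The wrapper class and main() are hypothetical; FeedImportIT performs the same calls from its TestNG setUp/tearDown hooks.

    // Illustrative lifecycle only; FeedImportIT does this from @BeforeClass/@AfterClass.
    import org.apache.falcon.util.HsqldbTestUtils;

    public final class HsqldbLifecycleSketch {
        public static void main(String[] args) throws Exception {
            HsqldbTestUtils.start();                                // boot the file-backed hsqldb server
            HsqldbTestUtils.createSqoopUser("sqoop_user", "sqoop"); // user referenced by the datasource templates
            HsqldbTestUtils.changeSAPassword("sqoop");
            HsqldbTestUtils.createAndPopulateCustomerTable();       // seed the CUSTOMER table for import
            try {
                // ... run import tests against jdbc:hsqldb:hsql://localhost/db1 ...
            } finally {
                HsqldbTestUtils.tearDown();                         // stops the server and removes the temp dir
            }
        }
    }
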
http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/src/test/resources/credential_provider.jceks
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/credential_provider.jceks b/webapp/src/test/resources/credential_provider.jceks
new file mode 100644
index 0000000..b9495cc
Binary files /dev/null and b/webapp/src/test/resources/credential_provider.jceks differ

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/src/test/resources/datasource-template.xml
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/datasource-template.xml b/webapp/src/test/resources/datasource-template.xml
deleted file mode 100644
index fb7a329..0000000
--- a/webapp/src/test/resources/datasource-template.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-  -->
-
-<datasource colo="##colo##" description="" type="hsql" name="datasource-test" xmlns="uri:falcon:datasource:0.1">
-    <interfaces>
-        <interface type="readonly" endpoint="jdbc:hsqldb:localhost/db1">
-            <credential type="password-text">
-                <userName>SA</userName>
-                <passwordText></passwordText>
-            </credential>
-        </interface>
-
-        <interface type="write" endpoint="jdbc:hsqldb:localhost/db1">
-            <credential type="password-text">
-                <userName>SA</userName>
-                <passwordText>sqoop</passwordText>
-            </credential>
-        </interface>
-
-        <credential type="password-text">
-            <userName>SA</userName>
-            <passwordText>sqoop</passwordText>
-        </credential>
-    </interfaces>
-
-    <driver>
-       <clazz>org.hsqldb.jdbcDriver</clazz>
-       <jar>/user/oozie/share/lib/lib_20150721010816/sqoop/hsqldb-1.8.0.7.jar</jar>
-    </driver>
-</datasource>

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/src/test/resources/datasource-template1.xml
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/datasource-template1.xml b/webapp/src/test/resources/datasource-template1.xml
new file mode 100644
index 0000000..cc2abc8
--- /dev/null
+++ b/webapp/src/test/resources/datasource-template1.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+<datasource colo="##colo##" description="" type="hsql" name="##datasourcename##" xmlns="uri:falcon:datasource:0.1">
+    <interfaces>
+        <interface type="readonly" endpoint="jdbc:hsqldb:hsql://localhost/db1">
+            <credential type="password-text">
+                <userName>sqoop_user</userName>
+                <passwordText>sqoop</passwordText>
+            </credential>
+        </interface>
+
+        <interface type="write" endpoint="jdbc:hsqldb:hsql://localhost/db1">
+            <credential type="password-text">
+                <userName>sqoop_user</userName>
+                <passwordText>sqoop</passwordText>
+            </credential>
+        </interface>
+
+        <credential type="password-text">
+            <userName>sqoop_user</userName>
+            <passwordText>sqoop</passwordText>
+        </credential>
+    </interfaces>
+
+    <driver>
+        <clazz>org.hsqldb.jdbcDriver</clazz>
+        <jar>/user/oozie/share/lib/lib_20150721010816/sqoop/hsqldb-1.8.0.7.jar</jar>
+    </driver>
+</datasource>

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/src/test/resources/datasource-template2.xml
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/datasource-template2.xml b/webapp/src/test/resources/datasource-template2.xml
new file mode 100644
index 0000000..a4138e1
--- /dev/null
+++ b/webapp/src/test/resources/datasource-template2.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+<datasource colo="##colo##" description="" type="hsql" name="##datasourcename##" xmlns="uri:falcon:datasource:0.1">
+    <interfaces>
+        <interface type="readonly" endpoint="jdbc:hsqldb:hsql://localhost/db1">
+            <credential type="password-file">
+                <userName>sqoop_user</userName>
+                <passwordFile>##passwordfile##</passwordFile>
+            </credential>
+        </interface>
+
+        <interface type="write" endpoint="jdbc:hsqldb:hsql://localhost/db1">
+        <credential type="password-text">
+            <userName>SA</userName>
+            <passwordText>sqoop</passwordText>
+        </credential>
+    </interface>
+
+        <credential type="password-text">
+            <userName>SA</userName>
+            <passwordText>sqoop</passwordText>
+        </credential>
+    </interfaces>
+
+    <driver>
+       <clazz>org.hsqldb.jdbcDriver</clazz>
+       <jar>/user/oozie/share/lib/lib_20150721010816/sqoop/hsqldb-1.8.0.7.jar</jar>
+    </driver>
+</datasource>

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/src/test/resources/datasource-template3.xml
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/datasource-template3.xml b/webapp/src/test/resources/datasource-template3.xml
new file mode 100644
index 0000000..58ff5d9
--- /dev/null
+++ b/webapp/src/test/resources/datasource-template3.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+<datasource colo="##colo##" description="" type="hsql" name="##datasourcename##" xmlns="uri:falcon:datasource:0.1">
+    <interfaces>
+        <interface type="readonly" endpoint="jdbc:hsqldb:hsql://localhost/db1">
+            <credential type="password-alias">
+                <userName>sqoop_user</userName>
+                <passwordAlias>
+                    <alias>sqoop.read.password</alias>
+                    <providerPath>##providerpath##</providerPath>
+                </passwordAlias>
+            </credential>
+        </interface>
+
+        <interface type="write" endpoint="jdbc:hsqldb:hsql://localhost/db1">
+            <credential type="password-text">
+                <userName>SA</userName>
+                <passwordText>sqoop</passwordText>
+            </credential>
+        </interface>
+
+        <credential type="password-text">
+            <userName>SA</userName>
+            <passwordText>sqoop</passwordText>
+        </credential>
+    </interfaces>
+
+    <driver>
+       <clazz>org.hsqldb.jdbcDriver</clazz>
+       <jar>/user/oozie/share/lib/lib_20150721010816/sqoop/hsqldb-1.8.0.7.jar</jar>
+    </driver>
+</datasource>

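The ##providerpath## in this template is expected to point at a JCEKS keystore that already holds the sqoop.read.password alias (the checked-in credential_provider.jceks in the tests). Such a keystore is typically produced with the hadoop credential create CLI; the sketch below does the same programmatically through Hadoop's CredentialProvider API. The local file URI and the hard-coded password are assumptions for illustration only.

    // Illustrative only: create a JCEKS keystore holding the alias used above.
    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.alias.CredentialProvider;
    import org.apache.hadoop.security.alias.CredentialProviderFactory;

    public final class CreateKeystoreSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Assumed local path; the tests instead ship a pre-built keystore and copy it to HDFS.
            conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
                    "jceks://file/tmp/credential_provider.jceks");
            List<CredentialProvider> providers = CredentialProviderFactory.getProviders(conf);
            CredentialProvider provider = providers.get(0);
            provider.createCredentialEntry("sqoop.read.password", "sqoop".toCharArray());
            provider.flush(); // persist the keystore to the provider path
        }
    }
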
http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/src/test/resources/datasource-template4.xml
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/datasource-template4.xml b/webapp/src/test/resources/datasource-template4.xml
new file mode 100644
index 0000000..b1b09d8
--- /dev/null
+++ b/webapp/src/test/resources/datasource-template4.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+<datasource colo="##colo##" description="" type="hsql" name="##datasourcename##" xmlns="uri:falcon:datasource:0.1">
+    <interfaces>
+        <interface type="readonly" endpoint="jdbc:hsqldb:hsql://localhost/db1"/>
+
+        <interface type="write" endpoint="jdbc:hsqldb:hsql://localhost/db1"/>
+
+        <credential type="password-text">
+            <userName>SA</userName>
+            <passwordText>sqoop</passwordText>
+        </credential>
+    </interfaces>
+
+    <driver>
+       <clazz>org.hsqldb.jdbcDriver</clazz>
+       <jar>/user/oozie/share/lib/lib_20150721010816/sqoop/hsqldb-1.8.0.7.jar</jar>
+    </driver>
+</datasource>

http://git-wip-us.apache.org/repos/asf/falcon/blob/5abb1557/webapp/src/test/resources/feed-template3.xml
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/feed-template3.xml b/webapp/src/test/resources/feed-template3.xml
index a6c1d6b..912e1bf 100644
--- a/webapp/src/test/resources/feed-template3.xml
+++ b/webapp/src/test/resources/feed-template3.xml
@@ -29,7 +29,7 @@
             <retention limit="hours(24)" action="delete"/>
             <!-- Limit can be in Time or Instances 100, Action ENUM DELETE,ARCHIVE -->
             <import>
-                <source name="datasource-test" tableName="simple">
+                <source name="##datasourcename##" tableName="simple">
                     <extract type="full">
                         <mergepolicy>snapshot</mergepolicy>
                     </extract>

