ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From niti...@apache.org
Subject [2/2] ambari git commit: AMBARI-19528 : loading coresite and hdfssite and custom configs in view before creating hdfs connection. changed the view.xml of views for adding custom properties. updated pom.xml of hive-next, files, pig, hive20, wfmanager, uti
Date Mon, 16 Jan 2017 06:09:08 GMT
AMBARI-19528 : loading coresite and hdfssite and custom configs in view before creating hdfs connection. changed the view.xml of views for adding custom properties. updated pom.xml of hive-next, files, pig, hive20, wfmanager, utils and commons. (nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9cdd3e8a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9cdd3e8a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9cdd3e8a

Branch: refs/heads/trunk
Commit: 9cdd3e8a15f4aa6d1dd7b485ce6d530b06f0f7de
Parents: 56526a8
Author: Nitiraj Rathore <nitiraj.rathore@gmail.com>
Authored: Mon Jan 16 11:39:39 2017 +0530
Committer: Nitiraj Rathore <nitiraj.rathore@gmail.com>
Committed: Mon Jan 16 11:39:39 2017 +0530

----------------------------------------------------------------------
 .../apache/ambari/server/view/ClusterImpl.java  |  14 +-
 .../ambari/server/view/RemoteAmbariCluster.java |  55 +++++--
 .../org/apache/ambari/view/cluster/Cluster.java |   6 +
 contrib/views/commons/pom.xml                   |  20 +++
 .../view/commons/hdfs/FileOperationService.java |  36 +++--
 .../ambari/view/commons/hdfs/HdfsService.java   |  21 ++-
 .../ambari/view/commons/hdfs/UploadService.java |  43 +++--
 .../ambari/view/commons/hdfs/UserService.java   |  20 ++-
 .../view/commons/hdfs/ViewPropertyHelper.java   |  55 +++++++
 contrib/views/files/pom.xml                     |  44 +++--
 .../view/filebrowser/DownloadService.java       |  77 +++++----
 .../view/filebrowser/FileBrowserService.java    |  29 ++--
 .../view/filebrowser/FilePreviewService.java    |  16 +-
 .../ambari/view/filebrowser/HelpService.java    |  17 +-
 contrib/views/files/src/main/resources/view.xml |   7 +
 contrib/views/hive-next/pom.xml                 |  64 ++++++--
 .../view/hive2/internal/HdfsApiSupplier.java    |  10 +-
 .../view/hive2/resources/files/FileService.java |  21 ++-
 .../view/hive2/utils/SharedObjectsFactory.java  |  14 +-
 .../views/hive-next/src/main/resources/view.xml |   6 +
 contrib/views/hive20/pom.xml                    |  64 ++++++--
 .../view/hive20/internal/HdfsApiSupplier.java   |  10 +-
 .../hive20/resources/browser/FileService.java   |  12 +-
 .../hive20/resources/files/FileService.java     |  20 ++-
 .../view/hive20/utils/SharedObjectsFactory.java |  14 +-
 .../views/hive20/src/main/resources/view.xml    |   9 ++
 .../views/jobs/src/main/resources/ui/.gitignore |   3 +-
 contrib/views/pig/pom.xml                       |  34 +++-
 .../view/pig/resources/files/FileService.java   |  22 ++-
 .../ambari/view/pig/utils/UserLocalObjects.java |  16 +-
 contrib/views/pig/src/main/resources/view.xml   |   7 +
 contrib/views/pom.xml                           |   3 +-
 contrib/views/utils/pom.xml                     |  82 ++++++++--
 .../view/utils/hdfs/ConfigurationBuilder.java   | 159 ++++++++++---------
 .../apache/ambari/view/utils/hdfs/HdfsApi.java  |  29 ++--
 .../apache/ambari/view/utils/hdfs/HdfsUtil.java |  67 +++++---
 contrib/views/wfmanager/pom.xml                 |  20 +++
 .../apache/oozie/ambari/view/FileServices.java  |  20 ++-
 .../apache/oozie/ambari/view/HDFSFileUtils.java |  21 ++-
 .../views/wfmanager/src/main/resources/view.xml |   8 +
 40 files changed, 894 insertions(+), 301 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/ambari-server/src/main/java/org/apache/ambari/server/view/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ClusterImpl.java
index 8874a92..05bbaa2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ClusterImpl.java
@@ -18,13 +18,15 @@
 
 package org.apache.ambari.server.view;
 
-import java.util.ArrayList;
-import java.util.List;
-
+import com.google.common.collect.ImmutableMap;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.view.cluster.Cluster;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
 /**
  * View associated cluster implementation.
  */
@@ -64,6 +66,12 @@ public class ClusterImpl implements Cluster {
   }
 
   @Override
+  public Map<String, String> getConfigByType(String type) {
+    Config configs = cluster.getDesiredConfigByType(type);
+    return ImmutableMap.copyOf(configs.getProperties());
+  }
+
+  @Override
   public List<String> getHostsForServiceComponent(String serviceName, String componentName){
     List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(serviceName, componentName);
     List<String> hosts = new ArrayList<String>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/ambari-server/src/main/java/org/apache/ambari/server/view/RemoteAmbariCluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/RemoteAmbariCluster.java b/ambari-server/src/main/java/org/apache/ambari/server/view/RemoteAmbariCluster.java
index e006c3e..8b7bed3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/RemoteAmbariCluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/RemoteAmbariCluster.java
@@ -18,30 +18,33 @@
 
 package org.apache.ambari.server.view;
 
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonParser;
+import com.google.gson.reflect.TypeToken;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.orm.entities.RemoteAmbariClusterEntity;
+import org.apache.ambari.view.AmbariHttpException;
+import org.apache.ambari.view.AmbariStreamProvider;
+import org.apache.ambari.view.cluster.Cluster;
+import org.apache.commons.io.IOUtils;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.orm.entities.RemoteAmbariClusterEntity;
-import org.apache.ambari.view.AmbariHttpException;
-import org.apache.ambari.view.AmbariStreamProvider;
-import org.apache.ambari.view.cluster.Cluster;
-import org.apache.commons.io.IOUtils;
-
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonParser;
-
 /**
  * View associated  Remote cluster implementation.
  */
@@ -159,6 +162,32 @@ public class RemoteAmbariCluster implements Cluster {
   }
 
   @Override
+  public Map<String, String> getConfigByType(String type) {
+    JsonElement config = null;
+    try {
+      String desiredTag = getDesiredConfig(type);
+      if (desiredTag != null) {
+        config = configurationCache.get(String.format("%s/configurations?(type=%s&tag=%s)",this.clusterPath, type, desiredTag));
+      }
+    } catch (ExecutionException e) {
+      throw new RemoteAmbariConfigurationReadException("Can't retrieve configuration from Remote Ambari", e);
+    }
+    if (config == null || !config.isJsonObject()) return null;
+    JsonElement items = config.getAsJsonObject().get("items");
+
+    if (items == null || !items.isJsonArray()) return null;
+    JsonElement item = items.getAsJsonArray().get(0);
+
+    if (item == null || !item.isJsonObject()) return null;
+    JsonElement properties = item.getAsJsonObject().get("properties");
+
+    if (properties == null || !properties.isJsonObject()) return null;
+
+    Map<String, String> retMap = new Gson().fromJson(properties, new TypeToken<HashMap<String, String>>() {}.getType());
+    return retMap;
+  }
+
+  @Override
   public List<String> getHostsForServiceComponent(String serviceName, String componentName) {
     String url = String.format("%s/services/%s/components/%s?" +
       "fields=host_components/HostRoles/host_name", this.clusterPath, serviceName, componentName);

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/ambari-views/src/main/java/org/apache/ambari/view/cluster/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-views/src/main/java/org/apache/ambari/view/cluster/Cluster.java b/ambari-views/src/main/java/org/apache/ambari/view/cluster/Cluster.java
index 7f7c19b..9663c75 100644
--- a/ambari-views/src/main/java/org/apache/ambari/view/cluster/Cluster.java
+++ b/ambari-views/src/main/java/org/apache/ambari/view/cluster/Cluster.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.view.cluster;
 
 import java.util.List;
+import java.util.Map;
 
 /**
  * View associated cluster.  A cluster may be associated with a view instance so that the view instance may pull
@@ -43,6 +44,11 @@ public interface Cluster {
   public String getConfigurationValue(String type, String key);
 
   /**
+   * @param type the type (site) for which the configurations are required.
+   * @return a map containing all the key-value pairs of the configuration of the given type
+   */
+  public Map<String,String> getConfigByType(String type);
+  /**
   * Get the hosts for service and component
    *
    * @param serviceName

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/commons/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/commons/pom.xml b/contrib/views/commons/pom.xml
index 5018d46..27fed13 100644
--- a/contrib/views/commons/pom.xml
+++ b/contrib/views/commons/pom.xml
@@ -53,6 +53,14 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -65,6 +73,18 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
----------------------------------------------------------------------
diff --git a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
index bc3d11d..d6e484d 100644
--- a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
+++ b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
@@ -35,6 +35,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.ListIterator;
+import java.util.Map;
 
 /**
  * File operations service
@@ -50,6 +51,14 @@ public class FileOperationService extends HdfsService {
   }
 
   /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public FileOperationService(ViewContext context, Map<String, String> customProperties) {
+    super(context, customProperties);
+  }
+
+  /**
    * List dir
    * @param path path
    * @return response with dir content
@@ -60,8 +69,8 @@ public class FileOperationService extends HdfsService {
   public Response listdir(@QueryParam("path") String path) {
     try {
       JSONObject response = new JSONObject();
-      response.put("files", getApi(context).fileStatusToJSON(getApi(context).listdir(path)));
-      response.put("meta", getApi(context).fileStatusToJSON(getApi(context).getFileStatus(path)));
+      response.put("files", getApi().fileStatusToJSON(getApi().listdir(path)));
+      response.put("meta", getApi().fileStatusToJSON(getApi().getFileStatus(path)));
       return Response.ok(response).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -83,10 +92,10 @@ public class FileOperationService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response rename(final SrcDstFileRequest request) {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       if (api.rename(request.src, request.dst)) {
-        result = Response.ok(getApi(context).fileStatusToJSON(api
+        result = Response.ok(getApi().fileStatusToJSON(api
             .getFileStatus(request.dst)));
       } else {
         result = Response.ok(new FileOperationResult(false, "Can't move '" + request.src + "' to '" + request.dst + "'")).status(422);
@@ -110,10 +119,10 @@ public class FileOperationService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response chmod(final ChmodRequest request) {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       if (api.chmod(request.path, request.mode)) {
-        result = Response.ok(getApi(context).fileStatusToJSON(api
+        result = Response.ok(getApi().fileStatusToJSON(api
             .getFileStatus(request.path)));
       } else {
         result = Response.ok(new FileOperationResult(false, "Can't chmod '" + request.path + "'")).status(422);
@@ -138,7 +147,7 @@ public class FileOperationService extends HdfsService {
   public Response move(final MultiSrcDstFileRequest request,
                        @Context HttpHeaders headers, @Context UriInfo ui) {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       String message = "";
 
@@ -192,7 +201,7 @@ public class FileOperationService extends HdfsService {
   public Response copy(final MultiSrcDstFileRequest request,
                        @Context HttpHeaders headers, @Context UriInfo ui) {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       String message = "";
 
@@ -240,10 +249,10 @@ public class FileOperationService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response mkdir(final MkdirRequest request) {
     try{
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       if (api.mkdir(request.path)) {
-        result = Response.ok(getApi(context).fileStatusToJSON(api.getFileStatus(request.path)));
+        result = Response.ok(getApi().fileStatusToJSON(api.getFileStatus(request.path)));
       } else {
         result = Response.ok(new FileOperationResult(false, "Can't create dir '" + request.path + "'")).status(422);
       }
@@ -264,7 +273,7 @@ public class FileOperationService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response emptyTrash() {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       api.emptyTrash();
       return Response.ok(new FileOperationResult(true)).build();
     } catch (WebApplicationException ex) {
@@ -286,7 +295,7 @@ public class FileOperationService extends HdfsService {
   public Response moveToTrash(MultiRemoveRequest request) {
     try {
       ResponseBuilder result;
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       String trash = api.getTrashDirPath();
       String message = "";
 
@@ -343,7 +352,7 @@ public class FileOperationService extends HdfsService {
   public Response remove(MultiRemoveRequest request, @Context HttpHeaders headers,
                          @Context UriInfo ui) {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       String message = "";
       if(request.paths.size() == 0) {
@@ -417,7 +426,6 @@ public class FileOperationService extends HdfsService {
     return srcPath.substring(srcPath.lastIndexOf('/') + 1);
   }
 
-
   /**
    * Wrapper for json mapping of mkdir request
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/HdfsService.java
----------------------------------------------------------------------
diff --git a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/HdfsService.java b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/HdfsService.java
index 91eebcf..018f94f 100644
--- a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/HdfsService.java
+++ b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/HdfsService.java
@@ -18,9 +18,6 @@
 
 package org.apache.ambari.view.commons.hdfs;
 
-import javax.ws.rs.WebApplicationException;
-import javax.xml.bind.annotation.XmlRootElement;
-
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.commons.exceptions.ServiceFormattedException;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
@@ -29,6 +26,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.ws.rs.WebApplicationException;
+import javax.xml.bind.annotation.XmlRootElement;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
@@ -42,6 +41,7 @@ public abstract class HdfsService {
   protected static final Logger logger = LoggerFactory.getLogger(HdfsService.class);
 
   protected final ViewContext context;
+  private Map<String, String> customProperties;
 
   /**
    * Constructor
@@ -51,6 +51,11 @@ public abstract class HdfsService {
     this.context = context;
   }
 
+  public HdfsService(ViewContext context, Map<String, String> customProperties) {
+    this.context = context;
+    this.customProperties = customProperties;
+  }
+
   /**
    * Wrapper for json mapping of result of Multi Remove Request
    */
@@ -84,14 +89,18 @@ public abstract class HdfsService {
 
   /**
   * Get HdfsApi instance
-   * @param context View Context instance
    * @return HdfsApi business delegate
    */
-  public HdfsApi getApi(ViewContext context) {
+  public HdfsApi getApi() {
     if (_api == null) {
       try {
-        _api = HdfsUtil.connectToHDFSApi(context);
+        if(this.customProperties != null){
+          _api = HdfsUtil.connectToHDFSApi(context, customProperties);
+        }else{
+          _api = HdfsUtil.connectToHDFSApi(context);
+        }
       } catch (Exception ex) {
+        logger.error("Exception while connecting to hdfs : {}", ex.getMessage(), ex);
         throw new ServiceFormattedException("HdfsApi connection failed. Check \"webhdfs.url\" property", ex);
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java
index 97253ad..26a4873 100644
--- a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java
+++ b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java
@@ -18,23 +18,25 @@
 
 package org.apache.ambari.view.commons.hdfs;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
-
-import javax.ws.rs.*;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
+import com.sun.jersey.core.header.FormDataContentDisposition;
+import com.sun.jersey.multipart.FormDataParam;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.commons.exceptions.ServiceFormattedException;
-import org.apache.ambari.view.commons.hdfs.HdfsService;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.hadoop.fs.FSDataOutputStream;
 
-import com.sun.jersey.core.header.FormDataContentDisposition;
-import com.sun.jersey.multipart.FormDataParam;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Map;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
 
 /**
  * Upload service
@@ -49,13 +51,22 @@ public class UploadService extends HdfsService {
     super(context);
   }
 
+  /**
+   * takes context and any extra custom properties that needs to be included into config
+   * @param context
+   * @param customProperties
+   */
+  public UploadService(ViewContext context, Map<String, String> customProperties) {
+    super(context, customProperties);
+  }
+
   private void uploadFile(final String filePath, InputStream uploadedInputStream)
       throws IOException, InterruptedException {
     int read;
     byte[] chunk = new byte[1024];
     FSDataOutputStream out = null;
     try {
-      out = getApi(context).create(filePath, false);
+      out = getApi().create(filePath, false);
       while ((read = uploadedInputStream.read(chunk)) != -1) {
         out.write(chunk, 0, read);
       }
@@ -86,7 +97,7 @@ public class UploadService extends HdfsService {
       String filePath = path + contentDisposition.getFileName();
       uploadFile(filePath, uploadedInputStream);
       return Response.ok(
-          getApi(context).fileStatusToJSON(getApi(context).getFileStatus(filePath)))
+          getApi().fileStatusToJSON(getApi().getFileStatus(filePath)))
           .build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -117,7 +128,7 @@ public class UploadService extends HdfsService {
         path = path + "/";
       ZipInputStream zip = new ZipInputStream(uploadedInputStream);
       ZipEntry ze = zip.getNextEntry();
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       while (ze != null) {
         String filePath = path + ze.getName();
         if (ze.isDirectory()) {
@@ -127,7 +138,7 @@ public class UploadService extends HdfsService {
         }
         ze = zip.getNextEntry();
       }
-      return Response.ok(getApi(context).fileStatusToJSON(api.listdir(path))).build();
+      return Response.ok(getApi().fileStatusToJSON(api.listdir(path))).build();
     } catch (WebApplicationException ex) {
       throw ex;
     } catch (Exception ex) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UserService.java
----------------------------------------------------------------------
diff --git a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UserService.java b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UserService.java
index dc303ce..30e2985 100644
--- a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UserService.java
+++ b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UserService.java
@@ -30,6 +30,7 @@ import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.io.FileNotFoundException;
+import java.util.Map;
 
 /**
  * User related info service
@@ -45,6 +46,15 @@ public class UserService extends HdfsService {
   }
 
   /**
+   * takes context and any extra custom properties that needs to be included into config
+   * @param context
+   * @param customProperties
+   */
+  public UserService(ViewContext context, Map<String, String> customProperties) {
+    super(context, customProperties);
+  }
+
+  /**
    * Returns home directory
    * @return home directory
    */
@@ -53,9 +63,9 @@ public class UserService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response homeDir() {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       return Response
-        .ok(getApi(context).fileStatusToJSON(api.getFileStatus(api.getHomeDir()
+        .ok(getApi().fileStatusToJSON(api.getFileStatus(api.getHomeDir()
           .toString()))).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -73,7 +83,7 @@ public class UserService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response trashEnabled() {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       return Response.ok(new FileOperationResult(api.trashEnabled())).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -91,9 +101,9 @@ public class UserService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response trashdir() {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       return Response.ok(
-        getApi(context).fileStatusToJSON(api.getFileStatus(api.getTrashDir()
+        getApi().fileStatusToJSON(api.getFileStatus(api.getTrashDir()
           .toString()))).build();
     } catch (WebApplicationException ex) {
       throw ex;

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/ViewPropertyHelper.java
----------------------------------------------------------------------
diff --git a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/ViewPropertyHelper.java b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/ViewPropertyHelper.java
new file mode 100644
index 0000000..1a411eb
--- /dev/null
+++ b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/ViewPropertyHelper.java
@@ -0,0 +1,55 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.commons.hdfs;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Strings;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.utils.hdfs.ConfigurationBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ViewPropertyHelper {
+  private static final Logger LOG = LoggerFactory.getLogger(ViewPropertyHelper.class);
+
+  public static Optional<Map<String, String>> getViewConfigs(ViewContext context, String viewConfigPropertyName) {
+    Map<String, String> viewConfigs = new HashMap<>();
+    String keyValues = context.getProperties().get(viewConfigPropertyName);
+    LOG.debug("{} : {}", viewConfigPropertyName, keyValues);
+    if (Strings.isNullOrEmpty(keyValues)) {
+      LOG.info("No values found in {} property.", viewConfigPropertyName);
+      return Optional.absent();
+    }
+
+    for (String entry : keyValues.split(";")) {
+      String[] kv = entry.split("=");
+      if (kv.length != 2) {
+        LOG.error("Ignoring entry {}, because it is not formatted like key=value", entry);
+        continue;
+      }
+
+      viewConfigs.put(kv[0], kv[1]);
+    }
+
+    return Optional.of(viewConfigs);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/files/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/pom.xml b/contrib/views/files/pom.xml
index f42d635..eaad803 100644
--- a/contrib/views/files/pom.xml
+++ b/contrib/views/files/pom.xml
@@ -33,23 +33,43 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>junit</groupId>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
index 96d3541..10b7c9e 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
@@ -18,18 +18,21 @@
 
 package org.apache.ambari.view.filebrowser;
 
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.FileNameMap;
-import java.net.URLConnection;
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.Queue;
-import java.util.UUID;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
+import com.google.gson.Gson;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.exceptions.MisconfigurationFormattedException;
+import org.apache.ambari.view.commons.exceptions.NotFoundFormattedException;
+import org.apache.ambari.view.commons.exceptions.ServiceFormattedException;
+import org.apache.ambari.view.commons.hdfs.HdfsService;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.security.AccessControlException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.Consumes;
 import javax.ws.rs.GET;
@@ -46,22 +49,18 @@ import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.StreamingOutput;
 import javax.ws.rs.core.UriInfo;
 import javax.xml.bind.annotation.XmlElement;
-
-import com.google.gson.Gson;
-import org.apache.ambari.view.commons.exceptions.MisconfigurationFormattedException;
-import org.apache.ambari.view.commons.exceptions.NotFoundFormattedException;
-import org.apache.ambari.view.commons.exceptions.ServiceFormattedException;
-import org.apache.ambari.view.commons.hdfs.HdfsService;
-import org.apache.ambari.view.utils.hdfs.HdfsApi;
-import org.apache.ambari.view.utils.hdfs.HdfsApiException;
-import org.apache.ambari.view.utils.hdfs.HdfsUtil;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.ambari.view.ViewContext;
-import org.apache.hadoop.security.AccessControlException;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.FileNameMap;
+import java.net.URLConnection;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Queue;
+import java.util.UUID;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
 
 /**
  * Service to download and aggregate files
@@ -75,6 +74,14 @@ public class DownloadService extends HdfsService {
   }
 
   /**
+   * @param context the view context
+   * @param customProperties extra properties that need to be included in the configuration
+   */
+  public DownloadService(ViewContext context, Map<String, String> customProperties) {
+    super(context, customProperties);
+  }
+
+  /**
    * Download entire file
    * @param path path to file
    * @param download download as octet strem or as file mime type
@@ -92,7 +99,7 @@ public class DownloadService extends HdfsService {
                          @Context HttpHeaders headers, @Context UriInfo ui) {
     LOG.debug("browsing path : {} with download : {}", path, download);
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       FileStatus status = api.getFileStatus(path);
       FSDataInputStream fs = api.open(path);
       if(checkperm) {
@@ -127,7 +134,7 @@ public class DownloadService extends HdfsService {
 
   private void zipFile(ZipOutputStream zip, String path) {
     try {
-      FSDataInputStream in = getApi(context).open(path);
+      FSDataInputStream in = getApi().open(path);
       zip.putNextEntry(new ZipEntry(path.substring(1)));
       byte[] chunk = new byte[1024];
 
@@ -185,7 +192,7 @@ public class DownloadService extends HdfsService {
             ServiceFormattedException {
           ZipOutputStream zip = new ZipOutputStream(output);
           try {
-            HdfsApi api = getApi(context);
+            HdfsApi api = getApi();
             Queue<String> files = new LinkedList<String>();
             for (String file : request.entries) {
               files.add(file);
@@ -249,7 +256,7 @@ public class DownloadService extends HdfsService {
           for (String path : request.entries) {
             try {
               try {
-                in = getApi(context).open(path);
+                in = getApi().open(path);
               } catch (AccessControlException ex) {
                 LOG.error("Error in opening file {}. Ignoring concat of this files.", path.substring(1), ex);
                 continue;
@@ -380,7 +387,7 @@ public class DownloadService extends HdfsService {
 
   private DownloadRequest getDownloadRequest(String requestId) throws HdfsApiException, IOException, InterruptedException {
     String fileName = getFileNameForRequestData(requestId);
-    String json = HdfsUtil.readFile(getApi(context), fileName);
+    String json = HdfsUtil.readFile(getApi(), fileName);
     DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
 
     deleteFileFromHdfs(fileName);
@@ -399,7 +406,7 @@ public class DownloadService extends HdfsService {
   private void writeToHdfs(String uuid, String json) {
     String fileName = getFileNameForRequestData(uuid);
     try {
-      HdfsUtil.putStringToFile(getApi(context), fileName, json);
+      HdfsUtil.putStringToFile(getApi(), fileName, json);
     } catch (HdfsApiException e) {
       LOG.error("Failed to write request data to HDFS", e);
       throw new ServiceFormattedException("Failed to write request data to HDFS", e);
@@ -416,7 +423,7 @@ public class DownloadService extends HdfsService {
   }
 
   private void deleteFileFromHdfs(String fileName) throws IOException, InterruptedException {
-    getApi(context).delete(fileName, true);
+    getApi().delete(fileName, true);
   }
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
index adaa6c9..df7fde8 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
@@ -18,19 +18,23 @@
 
 package org.apache.ambari.view.filebrowser;
 
-import javax.ws.rs.Path;
-
-import org.apache.ambari.view.ViewContext;
-
+import com.google.common.base.Optional;
 import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.commons.hdfs.FileOperationService;
 import org.apache.ambari.view.commons.hdfs.UploadService;
 import org.apache.ambari.view.commons.hdfs.UserService;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
+
+import javax.ws.rs.Path;
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  * Root files service
  */
 public class FileBrowserService {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   @Inject
   ViewContext context;
@@ -41,7 +45,12 @@ public class FileBrowserService {
    */
   @Path("/download")
   public DownloadService download() {
-    return new DownloadService(context);
+    return new DownloadService(context, getViewConfigs());
+  }
+
+  private Map<String,String> getViewConfigs() {
+    Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+    return props.isPresent()? props.get() : new HashMap<String, String>();
   }
 
   /**
@@ -50,7 +59,7 @@ public class FileBrowserService {
    */
   @Path("/upload")
   public UploadService upload() {
-    return new UploadService(context);
+    return new UploadService(context, getViewConfigs());
   }
 
   /**
@@ -59,7 +68,7 @@ public class FileBrowserService {
    */
   @Path("/fileops")
   public FileOperationService fileOps() {
-    return new FileOperationService(context);
+    return new FileOperationService(context, getViewConfigs());
   }
 
   /**
@@ -68,7 +77,7 @@ public class FileBrowserService {
    */
   @Path("/help")
   public HelpService help() {
-    return new HelpService(context);
+    return new HelpService(context, getViewConfigs());
   }
 
   /**
@@ -76,7 +85,7 @@ public class FileBrowserService {
    * @return service
    */
   @Path("/user")
-  public UserService userService() { return new UserService(context); }
+  public UserService userService() { return new UserService(context, getViewConfigs()); }
 
   /**
    * @see org.apache.ambari.view.filebrowser.FilePreviewService
@@ -84,7 +93,7 @@ public class FileBrowserService {
    */
   @Path("/preview")
   public FilePreviewService preview() {
-    return new FilePreviewService(context);
+    return new FilePreviewService(context, getViewConfigs());
   }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FilePreviewService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FilePreviewService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FilePreviewService.java
index 051e40d..b4bf102 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FilePreviewService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FilePreviewService.java
@@ -37,6 +37,7 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.io.FileNotFoundException;
 import java.io.InputStream;
+import java.util.Map;
 
 /**
  * File Preview Service
@@ -49,6 +50,10 @@ public class FilePreviewService extends HdfsService {
   public FilePreviewService(ViewContext context) {
     super(context);
 
+    initCompressionCodecFactory();
+  }
+
+  private void initCompressionCodecFactory() {
     Configuration conf = new Configuration();
     conf.set("io.compression.codecs","org.apache.hadoop.io.compress.GzipCodec," +
       "org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec," +
@@ -57,13 +62,22 @@ public class FilePreviewService extends HdfsService {
     compressionCodecFactory = new CompressionCodecFactory(conf);
   }
 
+  /**
+   * @param context the view context
+   * @param viewConfigs extra properties that need to be included in the configuration
+   */
+  public FilePreviewService(ViewContext context, Map<String, String> viewConfigs) {
+    super(context, viewConfigs);
+    initCompressionCodecFactory();
+  }
+
   @GET
   @Path("/file")
   @Produces(MediaType.APPLICATION_JSON)
   public Response previewFile(@QueryParam("path") String path, @QueryParam("start") int start, @QueryParam("end") int end) {
     LOG.info("previewing file {}, from start {}, till end {}", path, start, end);
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       FileStatus status = api.getFileStatus(path);
 
       CompressionCodec codec = compressionCodecFactory.getCodec(status.getPath());

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
index 92af2d5..1177e15 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
@@ -18,15 +18,16 @@
 
 package org.apache.ambari.view.filebrowser;
 
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.HdfsService;
+import org.json.simple.JSONObject;
+
 import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.commons.hdfs.HdfsService;
-import org.json.simple.JSONObject;
+import java.util.Map;
 
 /**
  * Help service
@@ -42,6 +43,14 @@ public class HelpService extends HdfsService {
   }
 
   /**
+   * @param context the view context
+   * @param viewConfigs extra properties that need to be included in the configuration
+   */
+  public HelpService(ViewContext context, Map<String, String> viewConfigs) {
+    super(context, viewConfigs);
+  }
+
+  /**
    * Version
    * @return version
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/files/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/view.xml b/contrib/views/files/src/main/resources/view.xml
index 640cee9..6c80e01 100644
--- a/contrib/views/files/src/main/resources/view.xml
+++ b/contrib/views/files/src/main/resources/view.xml
@@ -141,6 +141,13 @@
         <default-value>/user/${username}/files-view/tmp</default-value>
         <required>true</required>
     </parameter>
+    <parameter>
+        <name>view.conf.keyvalues</name>
+        <description>The key-value pairs that will be copied verbatim to the HDFS connection configuration.
+          Format: key1=value1;key2=value2</description>
+        <label>View Configs</label>
+        <required>false</required>
+    </parameter>
 
     <resource>
         <name>files</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive-next/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/pom.xml b/contrib/views/hive-next/pom.xml
index 65c9902..09d0329 100644
--- a/contrib/views/hive-next/pom.xml
+++ b/contrib/views/hive-next/pom.xml
@@ -104,12 +104,20 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -124,6 +132,18 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -156,9 +176,21 @@
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
         <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
+        <exclusion>
           <groupId>tomcat</groupId>
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -175,6 +207,16 @@
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
       <version>0.9.0</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
@@ -214,16 +256,6 @@
       <version>2.4</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpclient</artifactId>
-      <version>4.5.2</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpcore</artifactId>
-      <version>4.4.3</version>
-    </dependency>
-    <dependency>
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-csv</artifactId>
       <version>1.1</version>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/internal/HdfsApiSupplier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/internal/HdfsApiSupplier.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/internal/HdfsApiSupplier.java
index 1cc1211..58ae8ad 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/internal/HdfsApiSupplier.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/internal/HdfsApiSupplier.java
@@ -20,6 +20,7 @@ package org.apache.ambari.view.hive2.internal;
 
 import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.apache.ambari.view.utils.hdfs.HdfsUtil;
@@ -30,6 +31,7 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
 public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   protected final Logger LOG =
     LoggerFactory.getLogger(getClass());
@@ -44,7 +46,13 @@ public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
         synchronized (lock) {
           if(!hdfsApiMap.containsKey(getKey(context))) {
             LOG.debug("Creating HDFSApi instance for Viewname: {}, Instance Name: {}", context.getViewName(), context.getInstanceName());
-            HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+            Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+            HdfsApi api;
+            if(props.isPresent()){
+              api = HdfsUtil.connectToHDFSApi(context, props.get());
+            }else{
+              api = HdfsUtil.connectToHDFSApi(context);
+            }
             hdfsApiMap.put(getKey(context), api);
             return Optional.of(api);
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/files/FileService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/files/FileService.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/files/FileService.java
index 654bfcc..64880bb 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/files/FileService.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/files/FileService.java
@@ -18,10 +18,12 @@
 
 package org.apache.ambari.view.hive2.resources.files;
 
+import com.google.common.base.Optional;
 import com.jayway.jsonpath.JsonPath;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.commons.hdfs.UserService;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.hive2.BaseService;
 import org.apache.ambari.view.hive2.utils.*;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
@@ -46,6 +48,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
 import java.util.HashMap;
+import java.util.Map;
 
 /**
  * File access resource
@@ -60,6 +63,8 @@ import java.util.HashMap;
  *      update file content
  */
 public class FileService extends BaseService {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
   public static final String FAKE_FILE = "fakefile://";
   public static final String JSON_PATH_FILE = "jsonpath:";
 
@@ -226,7 +231,13 @@ public class FileService extends BaseService {
    */
   public static void hdfsSmokeTest(ViewContext context) {
     try {
-      HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+      Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+      HdfsApi api;
+      if(props.isPresent()){
+        api = HdfsUtil.connectToHDFSApi(context, props.get());
+      }else{
+        api = HdfsUtil.connectToHDFSApi(context);
+      }
       api.getStatus();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -241,7 +252,7 @@ public class FileService extends BaseService {
    */
   public static void userhomeSmokeTest(ViewContext context) {
     try {
-      UserService userservice = new UserService(context);
+      UserService userservice = new UserService(context, getViewConfigs(context));
       userservice.homeDir();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -263,4 +274,10 @@ public class FileService extends BaseService {
     }
     return filePath;
   }
+
+  private static Map<String,String> getViewConfigs(ViewContext context) {
+    Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+    return props.isPresent()? props.get() : new HashMap<String, String>();
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java
index dd0b715..cfd6d04 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java
@@ -18,7 +18,9 @@
 
 package org.apache.ambari.view.hive2.utils;
 
+import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.hive2.persistence.IStorageFactory;
 import org.apache.ambari.view.hive2.persistence.Storage;
 import org.apache.ambari.view.hive2.persistence.utils.StorageFactory;
@@ -45,6 +47,8 @@ import java.util.concurrent.ConcurrentHashMap;
  * will use different connection.
  */
 public class SharedObjectsFactory implements IStorageFactory {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
   protected final static Logger LOG =
       LoggerFactory.getLogger(SharedObjectsFactory.class);
 
@@ -123,7 +127,15 @@ public class SharedObjectsFactory implements IStorageFactory {
   public HdfsApi getHdfsApi() {
     if (!localObjects.get(HdfsApi.class).containsKey(getTagName())) {
       try {
-        localObjects.get(HdfsApi.class).put(getTagName(), HdfsUtil.connectToHDFSApi(context));
+        Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+        HdfsApi api;
+        if(props.isPresent()){
+          api = HdfsUtil.connectToHDFSApi(context, props.get());
+        }else{
+          api = HdfsUtil.connectToHDFSApi(context);
+        }
+
+        localObjects.get(HdfsApi.class).put(getTagName(), api);
       } catch (HdfsApiException e) {
         String message = "F060 Couldn't open connection to HDFS";
         LOG.error(message);

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive-next/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/view.xml b/contrib/views/hive-next/src/main/resources/view.xml
index 1107cd1..4c131b5 100644
--- a/contrib/views/hive-next/src/main/resources/view.xml
+++ b/contrib/views/hive-next/src/main/resources/view.xml
@@ -216,6 +216,12 @@
         <required>true</required>
     </parameter>
 
+    <parameter>
+        <name>view.conf.keyvalues</name>
+        <description>The key-value pairs that will be copied verbatim to the HDFS connection configuration.</description>
+        <label>View Configs</label>
+        <required>false</required>
+    </parameter>
 
     <parameter>
         <name>use.hive.interactive.mode</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive20/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/pom.xml b/contrib/views/hive20/pom.xml
index e9bde1d..168ff98 100644
--- a/contrib/views/hive20/pom.xml
+++ b/contrib/views/hive20/pom.xml
@@ -105,12 +105,20 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -125,6 +133,18 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -157,9 +177,21 @@
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
         <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
+        <exclusion>
           <groupId>tomcat</groupId>
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -176,6 +208,16 @@
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
       <version>0.9.0</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
@@ -215,16 +257,6 @@
       <version>2.4</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpclient</artifactId>
-      <version>4.5.2</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpcore</artifactId>
-      <version>4.4.3</version>
-    </dependency>
-    <dependency>
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-csv</artifactId>
       <version>1.1</version>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java
index e66b9ab..8adac24 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java
@@ -20,6 +20,7 @@ package org.apache.ambari.view.hive20.internal;
 
 import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.apache.ambari.view.utils.hdfs.HdfsUtil;
@@ -30,6 +31,7 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
 public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   protected final Logger LOG =
     LoggerFactory.getLogger(getClass());
@@ -44,7 +46,13 @@ public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
         synchronized (lock) {
           if(!hdfsApiMap.containsKey(getKey(context))) {
             LOG.debug("Creating HDFSApi instance for Viewname: {}, Instance Name: {}", context.getViewName(), context.getInstanceName());
-            HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+            Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+            HdfsApi api;
+            if(props.isPresent()){
+              api = HdfsUtil.connectToHDFSApi(context, props.get());
+            }else{
+              api = HdfsUtil.connectToHDFSApi(context);
+            }
             hdfsApiMap.put(getKey(context), api);
             return Optional.of(api);
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
index 96e9554..fffc8a1 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
@@ -18,18 +18,28 @@
 
 package org.apache.ambari.view.hive20.resources.browser;
 
+import com.google.common.base.Optional;
 import org.apache.ambari.view.commons.hdfs.FileOperationService;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.hive20.BaseService;
 
 import javax.ws.rs.Path;
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  *
  */
 public class FileService extends BaseService {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   @Path("/ops")
   public FileOperationService fileOps() {
-    return new FileOperationService(context);
+    return new FileOperationService(context, getViewConfigs());
+  }
+
+  private Map<String,String> getViewConfigs() {
+    Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+    return props.isPresent()? props.get() : new HashMap<String, String>();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java
index a3623e9..d520705 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java
@@ -18,10 +18,12 @@
 
 package org.apache.ambari.view.hive20.resources.files;
 
+import com.google.common.base.Optional;
 import com.jayway.jsonpath.JsonPath;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.commons.hdfs.UserService;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.hive20.BaseService;
 import org.apache.ambari.view.hive20.utils.*;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
@@ -46,6 +48,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
 import java.util.HashMap;
+import java.util.Map;
 
 /**
  * File access resource
@@ -62,6 +65,7 @@ import java.util.HashMap;
 public class FileService extends BaseService {
   public static final String FAKE_FILE = "fakefile://";
   public static final String JSON_PATH_FILE = "jsonpath:";
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   @Inject
   ViewResourceHandler handler;
@@ -226,7 +230,14 @@ public class FileService extends BaseService {
    */
   public static void hdfsSmokeTest(ViewContext context) {
     try {
-      HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+      Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+      HdfsApi api;
+      if(props.isPresent()){
+        api = HdfsUtil.connectToHDFSApi(context, props.get());
+      }else{
+        api = HdfsUtil.connectToHDFSApi(context);
+      }
+
       api.getStatus();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -241,7 +252,7 @@ public class FileService extends BaseService {
    */
   public static void userhomeSmokeTest(ViewContext context) {
     try {
-      UserService userservice = new UserService(context);
+      UserService userservice = new UserService(context, getViewConfigs(context));
       userservice.homeDir();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -263,4 +274,9 @@ public class FileService extends BaseService {
     }
     return filePath;
   }
+
+  private static Map<String,String> getViewConfigs(ViewContext context) {
+    Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+    return props.isPresent()? props.get() : new HashMap<String, String>();
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java
index 5bc6070..2b9fb41 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java
@@ -18,7 +18,9 @@
 
 package org.apache.ambari.view.hive20.utils;
 
+import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.hive20.persistence.IStorageFactory;
 import org.apache.ambari.view.hive20.persistence.Storage;
 import org.apache.ambari.view.hive20.persistence.utils.StorageFactory;
@@ -45,6 +47,8 @@ import java.util.concurrent.ConcurrentHashMap;
  * will use different connection.
  */
 public class SharedObjectsFactory implements IStorageFactory {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
   protected final static Logger LOG =
       LoggerFactory.getLogger(SharedObjectsFactory.class);
 
@@ -123,7 +127,15 @@ public class SharedObjectsFactory implements IStorageFactory {
   public HdfsApi getHdfsApi() {
     if (!localObjects.get(HdfsApi.class).containsKey(getTagName())) {
       try {
-        localObjects.get(HdfsApi.class).put(getTagName(), HdfsUtil.connectToHDFSApi(context));
+        Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+        HdfsApi api;
+        if(props.isPresent()){
+          api = HdfsUtil.connectToHDFSApi(context, props.get());
+        }else{
+          api = HdfsUtil.connectToHDFSApi(context);
+        }
+
+        localObjects.get(HdfsApi.class).put(getTagName(), api);
       } catch (HdfsApiException e) {
         String message = "F060 Couldn't open connection to HDFS";
         LOG.error(message);

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/hive20/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/view.xml b/contrib/views/hive20/src/main/resources/view.xml
index 315d0a9..2cbfef0 100644
--- a/contrib/views/hive20/src/main/resources/view.xml
+++ b/contrib/views/hive20/src/main/resources/view.xml
@@ -243,6 +243,15 @@
         <required>true</required>
     </parameter>
 
+    <parameter>
+        <name>view.conf.keyvalues</name>
+        <description>The key=value pairs that will be copied verbatim into the HDFS connection configuration. Format: key1=value1;
+          key2=value2</description>
+        <label>View Configs</label>
+        <required>false</required>
+    </parameter>
+
+
     <resource>
         <name>savedQuery</name>
         <plural-name>savedQueries</plural-name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/jobs/src/main/resources/ui/.gitignore
----------------------------------------------------------------------
diff --git a/contrib/views/jobs/src/main/resources/ui/.gitignore b/contrib/views/jobs/src/main/resources/ui/.gitignore
index 503a8a9..45a7d0b 100644
--- a/contrib/views/jobs/src/main/resources/ui/.gitignore
+++ b/contrib/views/jobs/src/main/resources/ui/.gitignore
@@ -5,4 +5,5 @@ dist
 .tmp
 app/bower_components
 test/bower_components
-.editorconfig
\ No newline at end of file
+.editorconfig
+node
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/pig/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pig/pom.xml b/contrib/views/pig/pom.xml
index 2f03d1e..3b71985 100644
--- a/contrib/views/pig/pom.xml
+++ b/contrib/views/pig/pom.xml
@@ -87,22 +87,42 @@
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
         <exclusions>
-            <exclusion>
+          <exclusion>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+          </exclusion>
+          <exclusion>
                 <groupId>tomcat</groupId>
                 <artifactId>jasper-runtime</artifactId>
             </exclusion>
+            <exclusion>
+                <groupId>xerces</groupId>
+                <artifactId>xercesImpl</artifactId>
+            </exclusion>
         </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>javax.ws.rs</groupId>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
index 509b20e..2af0628 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
@@ -18,9 +18,11 @@
 
 package org.apache.ambari.view.pig.resources.files;
 
+import com.google.common.base.Optional;
 import com.google.inject.Inject;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.pig.services.BaseService;
 import org.apache.ambari.view.pig.utils.BadRequestFormattedException;
 import org.apache.ambari.view.pig.utils.FilePaginator;
@@ -53,8 +55,10 @@ import javax.ws.rs.core.Response;
 import javax.ws.rs.core.UriInfo;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 
 
 /**
@@ -70,6 +74,8 @@ import java.util.List;
  *      update file content
  */
 public class FileService extends BaseService {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
   @Inject
   ViewResourceHandler handler;
 
@@ -213,7 +219,14 @@ public class FileService extends BaseService {
    */
   public static void hdfsSmokeTest(ViewContext context) {
     try {
-      HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+      Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+      HdfsApi api;
+      if(props.isPresent()){
+        api = HdfsUtil.connectToHDFSApi(context, props.get());
+      }else{
+        api = HdfsUtil.connectToHDFSApi(context);
+      }
+
       api.getStatus();
     } catch (WebApplicationException ex) {
       LOG.error("Error occurred : ", ex);
@@ -231,7 +244,7 @@ public class FileService extends BaseService {
    */
   public static void userhomeSmokeTest(ViewContext context) {
     try {
-      UserService  userservice = new UserService(context);
+      UserService  userservice = new UserService(context, getViewConfigs(context));
       userservice.homeDir();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -253,4 +266,9 @@ public class FileService extends BaseService {
     }
     return filePath;
   }
+
+  private static Map<String,String> getViewConfigs(ViewContext context) {
+    Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+    return props.isPresent()? props.get() : new HashMap<String, String>();
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java
index 8293899..b22c6aa 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java
@@ -18,7 +18,9 @@
 
 package org.apache.ambari.view.pig.utils;
 
+import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.pig.templeton.client.TempletonApi;
 import org.apache.ambari.view.pig.templeton.client.TempletonApiFactory;
 import org.apache.ambari.view.utils.UserLocal;
@@ -28,7 +30,11 @@ import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.Map;
+
 public class UserLocalObjects {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
   private final static Logger LOG =
       LoggerFactory.getLogger(UserLocalObjects.class);
 
@@ -55,7 +61,15 @@ public class UserLocalObjects {
       @Override
       protected synchronized HdfsApi initialValue(ViewContext context) {
         try {
-          return HdfsUtil.connectToHDFSApi(context);
+          Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+          HdfsApi api;
+          if(props.isPresent()){
+            api = HdfsUtil.connectToHDFSApi(context, props.get());
+          }else{
+            api = HdfsUtil.connectToHDFSApi(context);
+          }
+
+          return api;
         } catch (HdfsApiException e) {
           throw new ServiceFormattedException(e);
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/pig/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/resources/view.xml b/contrib/views/pig/src/main/resources/view.xml
index da8ffdd..a4eb44f 100644
--- a/contrib/views/pig/src/main/resources/view.xml
+++ b/contrib/views/pig/src/main/resources/view.xml
@@ -194,6 +194,13 @@
         <required>false</required>
     </parameter>
 
+    <parameter>
+        <name>view.conf.keyvalues</name>
+        <description>The key=value pairs that will be copied verbatim into the HDFS connection configuration. Format: key1=value1;key2=value2</description>
+        <label>View Configs</label>
+        <required>false</required>
+    </parameter>
+
     <resource>
         <name>script</name>
         <plural-name>scripts</plural-name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pom.xml b/contrib/views/pom.xml
index 7f02a43..ee26174 100644
--- a/contrib/views/pom.xml
+++ b/contrib/views/pom.xml
@@ -31,7 +31,8 @@
   <properties>
     <ambari.version>2.0.0.0-SNAPSHOT</ambari.version>
     <ambari.dir>${project.parent.parent.basedir}</ambari.dir>
-    <hadoop.version>2.7.1</hadoop.version>
+    <hadoop.version>2.7.3</hadoop.version>
+    <aws-java-sdk.version>1.10.6</aws-java-sdk.version>
     <views.jars.dir>views-jars</views.jars.dir>
     <views.jars.dir.rel>../target/${views.jars.dir}</views.jars.dir.rel>
   </properties>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cdd3e8a/contrib/views/utils/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/utils/pom.xml b/contrib/views/utils/pom.xml
index f2b7013..c045f50 100644
--- a/contrib/views/utils/pom.xml
+++ b/contrib/views/utils/pom.xml
@@ -31,25 +31,69 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-aws</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.amazonaws</groupId>
+          <artifactId>aws-java-sdk</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-annotations</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-s3</artifactId>
+      <version>${aws-java-sdk.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -142,6 +186,20 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-azure</artifactId>
       <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>commons-validator</groupId>


Mime
View raw message