ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From maha...@apache.org
Subject [2/4] AMBARI-5704. Pig View Cleanup. (mahadev)
Date Wed, 07 May 2014 19:38:56 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/Request.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/Request.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/Request.java
index de9142f..a23f008 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/Request.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/Request.java
@@ -19,21 +19,16 @@
 package org.apache.ambari.view.pig.templeton.client;
 
 import com.google.gson.Gson;
-import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
 import com.sun.jersey.core.util.MultivaluedMapImpl;
-import org.apache.ambari.view.URLStreamProvider;
 import org.apache.ambari.view.ViewContext;
 import org.apache.commons.io.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.UriBuilder;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.StringWriter;
-import java.net.URI;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -42,172 +37,207 @@ import java.util.Map;
  * @param <RESPONSE> data type to deserialize response from JSON
  */
 public class Request<RESPONSE> {
-    protected final Class<RESPONSE> responseClass;
-    protected final ViewContext context;
-    protected final WebResource resource;
-
-    protected final Gson gson = new Gson();
-
-    protected final static Logger LOG =
-            LoggerFactory.getLogger(Request.class);
-
-    public Request(WebResource resource, Class<RESPONSE> responseClass, ViewContext context) {
-        this.resource = resource;
-        this.responseClass = responseClass;
-        this.context = context;
-    }
-
-    /**
-     * Main implementation of GET request
-     * @param resource resource
-     * @return unmarshalled response data
-     */
-    public RESPONSE get(WebResource resource) throws IOException {
-        LOG.debug("GET " + resource.toString());
-
-        InputStream inputStream = context.getURLStreamProvider().readFrom(resource.toString(), "GET",
-                null, new HashMap<String, String>());
-
-        String responseJson = IOUtils.toString(inputStream);
-        LOG.debug(String.format("RESPONSE => %s", responseJson));
-        return gson.fromJson(responseJson, responseClass);
-    }
-
-    public RESPONSE get() throws IOException {
-        return get(this.resource);
-    }
-
-    public RESPONSE get(MultivaluedMapImpl params) throws IOException {
-        return get(this.resource.queryParams(params));
-    }
-
-    /**
-     * Main implementation of POST request
-     * @param resource resource
-     * @param data post body
-     * @return unmarshalled response data
-     */
-    public RESPONSE post(WebResource resource, MultivaluedMapImpl data) throws IOException {
-        LOG.debug("POST " + resource.toString());
-        LOG.debug("data: " + data.toString());
-
-        UriBuilder builder = UriBuilder.fromPath("host/");
-        for(String key : data.keySet()) {
-            for(String value : data.get(key))
-                builder.queryParam(key, value);
-        }
-
-        if (data != null)
-            LOG.debug("... data: " + builder.build().getRawQuery());
-
-        Map<String, String> headers = new HashMap<String, String>();
-        headers.put("Content-Type", "application/x-www-form-urlencoded");
-
-        InputStream inputStream = context.getURLStreamProvider().readFrom(resource.toString(),
-                "POST", builder.build().getRawQuery(), headers);
-        String responseJson = IOUtils.toString(inputStream);
-
-        LOG.debug(String.format("RESPONSE => %s", responseJson));
-        return gson.fromJson(responseJson, responseClass);
-    }
-
-    public RESPONSE post(MultivaluedMapImpl data) throws IOException {
-        return post(resource, data);
-    }
-
-    public RESPONSE post() throws IOException {
-        return post(resource, new MultivaluedMapImpl());
-    }
-
-    public RESPONSE post(MultivaluedMapImpl params, MultivaluedMapImpl data) throws IOException {
-        return post(resource.queryParams(params), data);
-    }
-
-    public static void main(String[] args) {
-        UriBuilder builder = UriBuilder.fromPath("host/");
-        builder.queryParam("aa", "/tmp/.pigjobs/hue/test111_17-03-2014-16-50-37");
-        System.out.println(builder.build().getRawQuery());
-    }
-
-    /**
-     * Main implementation of PUT request
-     * @param resource resource
-     * @param data put body
-     * @return unmarshalled response data
-     */
-    public RESPONSE put(WebResource resource, MultivaluedMapImpl data) throws IOException {
-        LOG.debug("PUT " + resource.toString());
-
-        UriBuilder builder = UriBuilder.fromPath("host/");
-        for(String key : data.keySet()) {
-            for(String value : data.get(key))
-                builder.queryParam(key, value);
-        }
-
-        if (data != null)
-            LOG.debug("... data: " + builder.build().getRawQuery());
-
-        Map<String, String> headers = new HashMap<String, String>();
-        headers.put("Content-Type", "application/x-www-form-urlencoded");
-
-        InputStream inputStream = context.getURLStreamProvider().readFrom(resource.toString(),
-                "PUT", builder.build().getRawQuery(), headers);
-        String responseJson = IOUtils.toString(inputStream);
-
-        LOG.debug(String.format("RESPONSE => %s", responseJson));
-        return gson.fromJson(responseJson, responseClass);
-    }
-
-    public RESPONSE put(MultivaluedMapImpl data) throws IOException {
-        return put(resource, data);
-    }
-
-    public RESPONSE put() throws IOException {
-        return put(resource, new MultivaluedMapImpl());
-    }
-
-    public RESPONSE put(MultivaluedMapImpl params, MultivaluedMapImpl data) throws IOException {
-        return put(resource.queryParams(params), data);
-    }
-
-    /**
-     * Main implementation of DELETE request
-     * @param resource resource
-     * @param data delete body
-     * @return unmarshalled response data
-     */
-    public RESPONSE delete(WebResource resource, MultivaluedMapImpl data) throws IOException {
-        LOG.debug("DELETE " + resource.toString());
-
-        UriBuilder builder = UriBuilder.fromPath("host/");
-        for(String key : data.keySet()) {
-            for(String value : data.get(key))
-                builder.queryParam(key, value);
-        }
-
-        if (data != null)
-            LOG.debug("... data: " + builder.build().getRawQuery());
-
-        Map<String, String> headers = new HashMap<String, String>();
-        headers.put("Content-Type", "application/x-www-form-urlencoded");
-
-        InputStream inputStream = context.getURLStreamProvider().readFrom(resource.toString(),
-                "DELETE", builder.build().getRawQuery(), headers);
-        String responseJson = IOUtils.toString(inputStream);
-
-        LOG.debug(String.format("RESPONSE => %s", responseJson));
-        return gson.fromJson(responseJson, responseClass);
-    }
-
-    public RESPONSE delete(MultivaluedMapImpl data) throws IOException {
-        return delete(resource, data);
-    }
-
-    public RESPONSE delete() throws IOException {
-        return delete(resource, new MultivaluedMapImpl());
-    }
-
-    public RESPONSE delete(MultivaluedMapImpl params, MultivaluedMapImpl data) throws IOException {
-        return delete(resource.queryParams(params), data);
-    }
+  protected final Class<RESPONSE> responseClass;
+  protected final ViewContext context;
+  protected final WebResource resource;
+
+  protected final Gson gson = new Gson();
+
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(Request.class);
+
+  /**
+   * Constructor
+   * @param resource object that represents resource
+   * @param responseClass model class
+   * @param context View Context instance
+   */
+  public Request(WebResource resource, Class<RESPONSE> responseClass, ViewContext context) {
+    this.resource = resource;
+    this.responseClass = responseClass;
+    this.context = context;
+  }
+
+  /**
+   * Main implementation of GET request
+   * @param resource resource
+   * @return unmarshalled response data
+   * @throws IOException if the underlying HTTP request fails
+   */
+  public RESPONSE get(WebResource resource) throws IOException {
+    LOG.debug("GET " + resource.toString());
+
+    InputStream inputStream = context.getURLStreamProvider().readFrom(resource.toString(), "GET",
+        null, new HashMap<String, String>());
+
+    // NOTE(review): charset-less IOUtils.toString uses the platform default
+    // encoding, and the stream is never closed — consider toString(in, "UTF-8")
+    // in a try/finally. Same pattern repeats in post/put/delete.
+    String responseJson = IOUtils.toString(inputStream);
+    LOG.debug(String.format("RESPONSE => %s", responseJson));
+    return gson.fromJson(responseJson, responseClass);
+  }
+
+  /**
+   * Make GET request
+   * @see #get(WebResource)
+   */
+  public RESPONSE get() throws IOException {
+    return get(this.resource);
+  }
+
+  /**
+   * Make GET request
+   * @see #get(WebResource)
+   */
+  public RESPONSE get(MultivaluedMapImpl params) throws IOException {
+    return get(this.resource.queryParams(params));
+  }
+
+  /**
+   * Main implementation of POST request
+   * @param resource resource
+   * @param data post body; may be null or empty, sent as x-www-form-urlencoded
+   * @return unmarshalled response data
+   * @throws IOException if the underlying HTTP request fails
+   */
+  public RESPONSE post(WebResource resource, MultivaluedMapImpl data) throws IOException {
+    LOG.debug("POST " + resource.toString());
+
+    // Encode the form body. Guard BEFORE touching data: previously
+    // data.toString() and the iteration ran ahead of the null check,
+    // making "if (data != null)" dead code on the NPE path.
+    UriBuilder builder = UriBuilder.fromPath("host/");
+    if (data != null) {
+      for (String key : data.keySet()) {
+        for (String value : data.get(key))
+          builder.queryParam(key, value);
+      }
+      LOG.debug("... data: " + builder.build().getRawQuery());
+    }
+
+    Map<String, String> headers = new HashMap<String, String>();
+    headers.put("Content-Type", "application/x-www-form-urlencoded");
+
+    InputStream inputStream = context.getURLStreamProvider().readFrom(resource.toString(),
+        "POST", builder.build().getRawQuery(), headers);
+    String responseJson;
+    try {
+      // Decode explicitly as UTF-8; the charset-less overload uses the
+      // platform default and is environment-dependent.
+      responseJson = IOUtils.toString(inputStream, "UTF-8");
+    } finally {
+      inputStream.close(); // avoid leaking the response stream
+    }
+
+    LOG.debug(String.format("RESPONSE => %s", responseJson));
+    return gson.fromJson(responseJson, responseClass);
+  }
+
+  /**
+   * @see #post(WebResource, MultivaluedMapImpl)
+   */
+  public RESPONSE post(MultivaluedMapImpl data) throws IOException {
+    return post(resource, data);
+  }
+
+  /**
+   * @see #post(WebResource, MultivaluedMapImpl)
+   */
+  public RESPONSE post() throws IOException {
+    return post(resource, new MultivaluedMapImpl());
+  }
+
+  /**
+   * @see #post(WebResource, MultivaluedMapImpl)
+   */
+  public RESPONSE post(MultivaluedMapImpl params, MultivaluedMapImpl data) throws IOException {
+    return post(resource.queryParams(params), data);
+  }
+
+  /**
+   * Main implementation of PUT request
+   * @param resource resource
+   * @param data put body; may be null or empty, sent as x-www-form-urlencoded
+   * @return unmarshalled response data
+   * @throws IOException if the underlying HTTP request fails
+   */
+  public RESPONSE put(WebResource resource, MultivaluedMapImpl data) throws IOException {
+    LOG.debug("PUT " + resource.toString());
+
+    // Encode the form body. Guard BEFORE iterating: the old null check came
+    // after the loop over data, so it could never actually catch null.
+    UriBuilder builder = UriBuilder.fromPath("host/");
+    if (data != null) {
+      for (String key : data.keySet()) {
+        for (String value : data.get(key))
+          builder.queryParam(key, value);
+      }
+      LOG.debug("... data: " + builder.build().getRawQuery());
+    }
+
+    Map<String, String> headers = new HashMap<String, String>();
+    headers.put("Content-Type", "application/x-www-form-urlencoded");
+
+    InputStream inputStream = context.getURLStreamProvider().readFrom(resource.toString(),
+        "PUT", builder.build().getRawQuery(), headers);
+    String responseJson;
+    try {
+      // Explicit UTF-8 instead of the platform-default charset.
+      responseJson = IOUtils.toString(inputStream, "UTF-8");
+    } finally {
+      inputStream.close(); // avoid leaking the response stream
+    }
+
+    LOG.debug(String.format("RESPONSE => %s", responseJson));
+    return gson.fromJson(responseJson, responseClass);
+  }
+
+  /**
+   * @see #put(WebResource, MultivaluedMapImpl)
+   */
+  public RESPONSE put(MultivaluedMapImpl data) throws IOException {
+    return put(resource, data);
+  }
+
+  /**
+   * @see #put(WebResource, MultivaluedMapImpl)
+   */
+  public RESPONSE put() throws IOException {
+    return put(resource, new MultivaluedMapImpl());
+  }
+
+  /**
+   * @see #put(WebResource, MultivaluedMapImpl)
+   */
+  public RESPONSE put(MultivaluedMapImpl params, MultivaluedMapImpl data) throws IOException {
+    return put(resource.queryParams(params), data);
+  }
+
+  /**
+   * Main implementation of DELETE request
+   * @param resource resource
+   * @param data delete body; may be null or empty, sent as x-www-form-urlencoded
+   * @return unmarshalled response data
+   * @throws IOException if the underlying HTTP request fails
+   */
+  public RESPONSE delete(WebResource resource, MultivaluedMapImpl data) throws IOException {
+    LOG.debug("DELETE " + resource.toString());
+
+    // Encode the form body. Guard BEFORE iterating: the old null check came
+    // after the loop over data, so it could never actually catch null.
+    UriBuilder builder = UriBuilder.fromPath("host/");
+    if (data != null) {
+      for (String key : data.keySet()) {
+        for (String value : data.get(key))
+          builder.queryParam(key, value);
+      }
+      LOG.debug("... data: " + builder.build().getRawQuery());
+    }
+
+    Map<String, String> headers = new HashMap<String, String>();
+    headers.put("Content-Type", "application/x-www-form-urlencoded");
+
+    InputStream inputStream = context.getURLStreamProvider().readFrom(resource.toString(),
+        "DELETE", builder.build().getRawQuery(), headers);
+    String responseJson;
+    try {
+      // Explicit UTF-8 instead of the platform-default charset.
+      responseJson = IOUtils.toString(inputStream, "UTF-8");
+    } finally {
+      inputStream.close(); // avoid leaking the response stream
+    }
+
+    LOG.debug(String.format("RESPONSE => %s", responseJson));
+    return gson.fromJson(responseJson, responseClass);
+  }
+
+  /**
+   * @see #delete(WebResource, MultivaluedMapImpl)
+   */
+  public RESPONSE delete(MultivaluedMapImpl data) throws IOException {
+    return delete(resource, data);
+  }
+
+  /**
+   * @see #delete(WebResource, MultivaluedMapImpl)
+   */
+  public RESPONSE delete() throws IOException {
+    return delete(resource, new MultivaluedMapImpl());
+  }
+
+  /**
+   * @see #delete(WebResource, MultivaluedMapImpl)
+   */
+  public RESPONSE delete(MultivaluedMapImpl params, MultivaluedMapImpl data) throws IOException {
+    return delete(resource.queryParams(params), data);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApi.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApi.java
index 9675a1e..4fe61cd 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApi.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApi.java
@@ -40,125 +40,163 @@ import java.util.Map;
  * Templeton Business Delegate
  */
 public class TempletonApi {
-    private final Gson gson = new Gson();
-
-    protected final static Logger LOG =
-            LoggerFactory.getLogger(TempletonApi.class);
-
-    protected WebResource service;
-    private String username;
-    private String doAs;
-    private ViewContext context;
-
-    /**
-     * TempletonApi constructor
-     * @param api dataworker.templeton_url
-     * @param username templeton username
-     * @param doAs doAs argument
-     * @param context context with URLStreamProvider
-     */
-    public TempletonApi(String api, String username, String doAs, ViewContext context) {
-        this.username = username;
-        this.doAs = doAs;
-        this.context = context;
-        ClientConfig config = new DefaultClientConfig();
-        config.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
-        Client client = Client.create(config);
-        this.service = client.resource(api);
+  private final Gson gson = new Gson();
+
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(TempletonApi.class);
+
+  protected WebResource service;
+  private String username;
+  private String doAs;
+  private ViewContext context;
+
+  /**
+   * TempletonApi constructor
+   * @param api dataworker.templeton_url
+   * @param username templeton username
+   * @param doAs doAs argument
+   * @param context context with URLStreamProvider
+   */
+  public TempletonApi(String api, String username, String doAs, ViewContext context) {
+    this.username = username;
+    this.doAs = doAs;
+    this.context = context;
+    ClientConfig config = new DefaultClientConfig();
+    config.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
+    Client client = Client.create(config);
+    this.service = client.resource(api);
+  }
+
+  /**
+   * @see #TempletonApi(String,String,String,ViewContext)
+   */
+  public TempletonApi(String api, String username, ViewContext context) {
+    this(api, username, username, context);
+  }
+
+  /**
+   * Create and queue a Pig job.
+   * @param execute String containing an entire, short pig program to run. (e.g. pwd)
+   * @param pigFile HDFS file name of a pig program to run. (One of either "execute" or "file" is required )
+   * @param statusDir A directory where Templeton will write the status of the Pig job. If
+   *                  provided, it is the caller's responsibility to remove this directory when done.
+   * @param arg Set a program argument. Optional None
+   * @return id A string containing the job ID similar to "job_201110132141_0001".
+   *         info A JSON object containing the information returned when the job was queued.
+   */
+  public JobData runPigQuery(String execute, File pigFile, String statusDir, String arg) throws IOException {
+    MultivaluedMapImpl data = new MultivaluedMapImpl();
+    if (execute != null)
+      data.add("execute", execute);
+    if (pigFile != null)
+      data.add("file", pigFile.toString());
+    if (statusDir != null)
+      data.add("statusdir", statusDir);
+    if (arg != null && !arg.isEmpty()) {
+      for(String arg1 : arg.split("\t")) {
+        data.add("arg", arg1);
+      }
     }
 
-    public TempletonApi(String api, String username, ViewContext context) {
-        this(api, username, username, context);
-    }
-
-    /**
-     * Create and queue a Pig job.
-     * @param execute String containing an entire, short pig program to run. (e.g. pwd)
-     * @param pigFile HDFS file name of a pig program to run. (One of either "execute" or "file" is required )
-     * @param statusDir A directory where Templeton will write the status of the Pig job. If
-     *                  provided, it is the caller's responsibility to remove this directory when done.
-     * @param arg Set a program argument. Optional None
-     * @return id A string containing the job ID similar to "job_201110132141_0001".
-     *         info A JSON object containing the information returned when the job was queued.
-     */
-    public JobData runPigQuery(String execute, File pigFile, String statusDir, String arg) throws IOException {
-        MultivaluedMapImpl data = new MultivaluedMapImpl();
-        if (execute != null)
-            data.add("execute", execute);
-        if (pigFile != null)
-            data.add("file", pigFile.toString());
-        if (statusDir != null)
-            data.add("statusdir", statusDir);
-        if (arg != null && !arg.isEmpty()) {
-            for(String arg1 : arg.split("\t")) {
-                data.add("arg", arg1);
-            }
-        }
-
-        TempletonRequest<JobData> request =
-                new TempletonRequest<JobData>(service.path("pig"), JobData.class, username, doAs, context);
-
-        return request.post(data);
-    }
-
-    public JobData runPigQuery(File pigFile, String statusDir, String arg) throws IOException {
-        return runPigQuery(null, pigFile, statusDir, arg);
-    }
-
-    public JobData runPigQuery(String execute, String statusDir, String arg) throws IOException {
-        return runPigQuery(execute, null, statusDir, arg);
-    }
-
-    public JobData runPigQuery(String execute) throws IOException {
-        return runPigQuery(execute, null, null, null);
-    }
-
-    public JobInfo checkJob(String jobId) throws IOException {
-        TempletonRequest<JobInfo> request =
-                new TempletonRequest<JobInfo>(service.path("jobs").path(jobId), JobInfo.class, username, context);
-
-        return request.get();
-    }
-
-    public void killJob(String jobId) throws IOException {
-        TempletonRequest<JobInfo> request =
-                new TempletonRequest<JobInfo>(service.path("jobs").path(jobId), JobInfo.class, username, context);
-
-        try {
-             request.delete();
-        } catch (IOException e) {
-            //TODO: remove this after HIVE-5835 resolved
-            LOG.debug("Ignoring 500 response from webhcat (see HIVE-5835)");
-        }
-    }
-
-    public Status status() throws IOException {
-        TempletonRequest<Status> request =
-                new TempletonRequest<Status>(service.path("status"), Status.class,
-                username, doAs, context);
-        return request.get();
-    }
-
-    public class Status {
-        public String status;
-        public String version;
-    }
-
-    public class JobData {
-        public String id;
-    }
-
-    public class JobInfo {
-        public Map<String, Object> status;
-        public Map<String, Object> profile;
-        public Map<String, Object> userargs;
-
-        public String id;
-        public String parentId;
-        public String percentComplete;
-        public Integer exitValue;
-        public String user;
-        public String callback;
-        public String completed;
+    TempletonRequest<JobData> request =
+        new TempletonRequest<JobData>(service.path("pig"), JobData.class, username, doAs, context);
+
+    return request.post(data);
+  }
+
+  /**
+   * @see #runPigQuery(String, java.io.File, String, String)
+   */
+  public JobData runPigQuery(File pigFile, String statusDir, String arg) throws IOException {
+    return runPigQuery(null, pigFile, statusDir, arg);
+  }
+
+  /**
+   * @see #runPigQuery(String, java.io.File, String, String)
+   */
+  public JobData runPigQuery(String execute, String statusDir, String arg) throws IOException {
+    return runPigQuery(execute, null, statusDir, arg);
+  }
+
+  /**
+   * @see #runPigQuery(String, java.io.File, String, String)
+   */
+  public JobData runPigQuery(String execute) throws IOException {
+    return runPigQuery(execute, null, null, null);
+  }
+
+  /**
+   * Get Job information
+   * @param jobId templeton job identifier
+   * @return JobInfo object
+   * @throws IOException
+   */
+  public JobInfo checkJob(String jobId) throws IOException {
+    TempletonRequest<JobInfo> request =
+        new TempletonRequest<JobInfo>(service.path("jobs").path(jobId), JobInfo.class, username, context);
+
+    return request.get();
+  }
+
+  /**
+   * Kill templeton job
+   * @param jobId templeton job identifier
+   * @throws IOException
+   */
+  public void killJob(String jobId) throws IOException {
+    TempletonRequest<JobInfo> request =
+        new TempletonRequest<JobInfo>(service.path("jobs").path(jobId), JobInfo.class, username, context);
+
+    try {
+      request.delete();
+    } catch (IOException e) {
+      //TODO: remove this after HIVE-5835 resolved
+      LOG.debug("Ignoring 500 response from webhcat (see HIVE-5835)");
     }
+  }
+
+  /**
+   * Get templeton status (version)
+   * @return templeton status
+   * @throws IOException
+   */
+  public Status status() throws IOException {
+    TempletonRequest<Status> request =
+        new TempletonRequest<Status>(service.path("status"), Status.class,
+            username, doAs, context);
+    return request.get();
+  }
+
+  /**
+   * Wrapper for json mapping of status request
+   */
+  public class Status {
+    public String status;
+    public String version;
+  }
+
+  /**
+   * Wrapper for json mapping of runPigQuery request
+   * @see #runPigQuery(String, java.io.File, String, String)
+   */
+  public class JobData {
+    public String id;
+  }
+
+  /**
+   * Wrapper for json mapping of job status
+   */
+  public class JobInfo {
+    public Map<String, Object> status;
+    public Map<String, Object> profile;
+    public Map<String, Object> userargs;
+
+    public String id;
+    public String parentId;
+    public String percentComplete;
+    public Integer exitValue;
+    public String user;
+    public String callback;
+    public String completed;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonRequest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonRequest.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonRequest.java
index 38ec211..8b8b89e 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonRequest.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonRequest.java
@@ -32,49 +32,68 @@ import java.io.IOException;
  * @param <RESPONSE> data type to deserialize response from JSON
  */
 public class TempletonRequest<RESPONSE> extends Request<RESPONSE> {
-    private String username;
-    private String doAs;
+  private String username;
+  private String doAs;
 
-    protected final static Logger LOG =
-            LoggerFactory.getLogger(TempletonRequest.class);
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(TempletonRequest.class);
 
-    public TempletonRequest(WebResource resource, Class<RESPONSE> responseClass,
-                            String username, ViewContext context) {
-        this(resource, responseClass, username, username, context);
-    }
+  /**
+   * Constructor
+   * @param resource object that represents resource
+   * @param responseClass model class
+   * @param context View Context instance
+   * @param username user.name of templeton. user.name will be equal to doAs value
+   */
+  public TempletonRequest(WebResource resource, Class<RESPONSE> responseClass,
+                          String username, ViewContext context) {
+    this(resource, responseClass, username, username, context);
+  }
 
-    public TempletonRequest(WebResource resource, Class<RESPONSE> responseClass,
-                            String username, String doAs, ViewContext context) {
-        super(resource, responseClass, context);
-        this.username = username;
-        this.doAs = doAs;
-    }
+  /**
+   * Constructor
+   * @param resource object that represents resource
+   * @param responseClass model class
+   * @param context View Context instance
+   * @param username user.name of templeton
+   * @param doAs doAs user for templeton
+   */
+  public TempletonRequest(WebResource resource, Class<RESPONSE> responseClass,
+                          String username, String doAs, ViewContext context) {
+    super(resource, responseClass, context);
+    this.username = username;
+    this.doAs = doAs;
+  }
 
-    public RESPONSE get(WebResource resource) throws IOException {
-        MultivaluedMapImpl params = new MultivaluedMapImpl();
-        params.add("user.name", username);
-        params.add("doAs", doAs);
-        return super.get(resource.queryParams(params));
-    }
+  @Override
+  public RESPONSE get(WebResource resource) throws IOException {
+    MultivaluedMapImpl params = new MultivaluedMapImpl();
+    params.add("user.name", username);
+    params.add("doAs", doAs);
+    return super.get(resource.queryParams(params));
+  }
 
-    public RESPONSE put(WebResource resource, MultivaluedMapImpl data) throws IOException {
-        MultivaluedMapImpl params = new MultivaluedMapImpl();
-        params.add("user.name", username);
-        params.add("doAs", doAs);
-        return super.put(resource.queryParams(params), data);
-    }
+  @Override
+  public RESPONSE put(WebResource resource, MultivaluedMapImpl data) throws IOException {
+    MultivaluedMapImpl params = new MultivaluedMapImpl();
+    params.add("user.name", username);
+    params.add("doAs", doAs);
+    return super.put(resource.queryParams(params), data);
+  }
 
-    public RESPONSE delete(WebResource resource, MultivaluedMapImpl data) throws IOException {
-        MultivaluedMapImpl params = new MultivaluedMapImpl();
-        params.add("user.name", username);
-        params.add("doAs", doAs);
-        return super.delete(resource.queryParams(params), data);
-    }
+  @Override
+  public RESPONSE delete(WebResource resource, MultivaluedMapImpl data) throws IOException {
+    MultivaluedMapImpl params = new MultivaluedMapImpl();
+    params.add("user.name", username);
+    params.add("doAs", doAs);
+    return super.delete(resource.queryParams(params), data);
+  }
 
-    public RESPONSE post(WebResource resource, MultivaluedMapImpl data) throws IOException {
-        MultivaluedMapImpl params = new MultivaluedMapImpl();
-        params.add("user.name", username);
-        params.add("doAs", doAs);
-        return super.post(resource.queryParams(params), data);
-    }
+  @Override
+  public RESPONSE post(WebResource resource, MultivaluedMapImpl data) throws IOException {
+    MultivaluedMapImpl params = new MultivaluedMapImpl();
+    params.add("user.name", username);
+    params.add("doAs", doAs);
+    return super.post(resource.queryParams(params), data);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/FilePaginator.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/FilePaginator.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/FilePaginator.java
index 9312204..2d49c97 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/FilePaginator.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/FilePaginator.java
@@ -28,56 +28,81 @@ import java.util.Arrays;
 
 import static java.lang.Math.ceil;
 
+/**
+ * Pagination for HDFS file implementation
+ */
 public class FilePaginator {
-    private static int PAGE_SIZE = 1*1024*1024;  // 1MB
+  private static int PAGE_SIZE = 1*1024*1024;  // 1MB
 
-    private String filePath;
-    private HdfsApi hdfsApi;
+  private String filePath;
+  private HdfsApi hdfsApi;
 
-    public FilePaginator(String filePath, ViewContext context) {
-        this.filePath = filePath;
-        hdfsApi = BaseService.getHdfsApi(context);
-    }
+  /**
+   * Constructor
+   * @param filePath Path to file on HDFS
+   * @param context View Context instance
+   */
+  public FilePaginator(String filePath, ViewContext context) {
+    this.filePath = filePath;
+    hdfsApi = BaseService.getHdfsApi(context);
+  }
 
-    public static void setPageSize(int PAGE_SIZE) {
-        FilePaginator.PAGE_SIZE = PAGE_SIZE;
-    }
+  /**
+   * Set page size
+   * @param PAGE_SIZE size
+   */
+  public static void setPageSize(int PAGE_SIZE) {
+    FilePaginator.PAGE_SIZE = PAGE_SIZE;
+  }
 
-    public long pageCount() throws IOException, InterruptedException {
-        return (long)
-                ceil( hdfsApi.getFileStatus(filePath).getLen() / ((double)PAGE_SIZE) );
-    }
+  /**
+   * Get page count
+   * @return page count
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public long pageCount() throws IOException, InterruptedException {
+    return (long)
+        ceil( hdfsApi.getFileStatus(filePath).getLen() / ((double)PAGE_SIZE) );
+  }
 
-    public String readPage(long page) throws IOException, InterruptedException {
-        FSDataInputStream stream = hdfsApi.open(filePath);
-        try {
-            stream.seek(page * PAGE_SIZE);
-        } catch (IOException e) {
-            throw new IllegalArgumentException("Page " + page + " does not exists");
-        }
+  /**
+   * Read one page of size PAGE_SIZE
+   * @param page page index
+   * @return data in UTF-8
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public String readPage(long page) throws IOException, InterruptedException {
+    FSDataInputStream stream = hdfsApi.open(filePath);
+    try {
+      stream.seek(page * PAGE_SIZE);
+    } catch (IOException e) {
+      throw new IllegalArgumentException("Page " + page + " does not exists");
+    }
 
-        byte[] buffer = new byte[PAGE_SIZE];
-        int readCount = 0;
-        int read = 0;
-        while(read < PAGE_SIZE) {
-            try {
-                readCount = stream.read(buffer, read, PAGE_SIZE-read);
-            } catch (IOException e) {
-                stream.close();
-                throw e;
-            }
-            if (readCount == -1)
-                break;
-            read += readCount;
-        }
-        if (read != 0) {
-            byte[] readData = Arrays.copyOfRange(buffer, 0, read);
-            return new String(readData, Charset.forName("UTF-8"));
-        } else {
-            if (page == 0) {
-                return "";
-            }
-            throw new IllegalArgumentException("Page " + page + " does not exists");
-        }
+    byte[] buffer = new byte[PAGE_SIZE];
+    int readCount = 0;
+    int read = 0;
+    while(read < PAGE_SIZE) {
+      try {
+        readCount = stream.read(buffer, read, PAGE_SIZE-read);
+      } catch (IOException e) {
+        stream.close();
+        throw e;
+      }
+      if (readCount == -1)
+        break;
+      read += readCount;
+    }
+    if (read != 0) {
+      byte[] readData = Arrays.copyOfRange(buffer, 0, read);
+      return new String(readData, Charset.forName("UTF-8"));
+    } else {
+      if (page == 0) {
+        return "";
+      }
+      throw new IllegalArgumentException("Page " + page + " does not exists");
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/HdfsApi.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/HdfsApi.java
index 9068475..3c698d2 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/HdfsApi.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/HdfsApi.java
@@ -36,154 +36,226 @@ import org.json.simple.JSONArray;
 
 import java.util.LinkedHashMap;
 
+/**
+ * HDFS Business Delegate
+ */
 public class HdfsApi {
-    private Configuration conf = new Configuration();
-
-    private FileSystem fs;
-    private UserGroupInformation ugi;
-
-    public HdfsApi(String defaultFs, String username) throws IOException,
-        InterruptedException {
-        Thread.currentThread().setContextClassLoader(null);
-        conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
-        conf.set("fs.webhdfs.impl", WebHdfsFileSystem.class.getName());
-        conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
-        fs = FileSystem.get(URI.create(defaultFs), conf);
-        ugi = UserGroupInformation.createProxyUser(username,
-            UserGroupInformation.getLoginUser());
-    }
+  private Configuration conf = new Configuration();
 
-    public FileStatus[] listdir(final String path) throws FileNotFoundException,
-        IOException, InterruptedException {
-        return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
-            public FileStatus[] run() throws FileNotFoundException, Exception {
-                return fs.listStatus(new Path(path));
-            }
-        });
-    }
+  private FileSystem fs;
+  private UserGroupInformation ugi;
 
-    public FileStatus getFileStatus(final String path) throws IOException,
-        FileNotFoundException, InterruptedException {
-        return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
-            public FileStatus run() throws FileNotFoundException, IOException {
-                return fs.getFileStatus(new Path(path));
-            }
-        });
-    }
+  /**
+   * Constructor
+   * @param defaultFs hdfs uri
+   * @param username user.name
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public HdfsApi(String defaultFs, String username) throws IOException,
+      InterruptedException {
+    Thread.currentThread().setContextClassLoader(null);
+    conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
+    conf.set("fs.webhdfs.impl", WebHdfsFileSystem.class.getName());
+    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
+    fs = FileSystem.get(URI.create(defaultFs), conf);
+    ugi = UserGroupInformation.createProxyUser(username,
+        UserGroupInformation.getLoginUser());
+  }
 
-    public boolean mkdir(final String path) throws IOException,
-        InterruptedException {
-        return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-            public Boolean run() throws Exception {
-                return fs.mkdirs(new Path(path));
-            }
-        });
-    }
+  /**
+   * List dir operation
+   * @param path path
+   * @return array of FileStatus objects
+   * @throws FileNotFoundException
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public FileStatus[] listdir(final String path) throws FileNotFoundException,
+      IOException, InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
+      public FileStatus[] run() throws FileNotFoundException, Exception {
+        return fs.listStatus(new Path(path));
+      }
+    });
+  }
 
-    public boolean rename(final String src, final String dst) throws IOException,
-        InterruptedException {
-        return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-            public Boolean run() throws Exception {
-                return fs.rename(new Path(src), new Path(dst));
-            }
-        });
-    }
+  /**
+   * Get file status
+   * @param path path
+   * @return file status
+   * @throws IOException
+   * @throws FileNotFoundException
+   * @throws InterruptedException
+   */
+  public FileStatus getFileStatus(final String path) throws IOException,
+      FileNotFoundException, InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
+      public FileStatus run() throws FileNotFoundException, IOException {
+        return fs.getFileStatus(new Path(path));
+      }
+    });
+  }
 
-    public boolean delete(final String path, final boolean recursive)
-        throws IOException, InterruptedException {
-        return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-            public Boolean run() throws Exception {
-                return fs.delete(new Path(path), recursive);
-            }
-        });
-    }
+  /**
+   * Make directory
+   * @param path path
+   * @return success
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public boolean mkdir(final String path) throws IOException,
+      InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        return fs.mkdirs(new Path(path));
+      }
+    });
+  }
 
-    public FSDataOutputStream create(final String path, final boolean overwrite)
-        throws IOException, InterruptedException {
-        return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
-            public FSDataOutputStream run() throws Exception {
-                return fs.create(new Path(path), overwrite);
-            }
-        });
-    }
+  /**
+   * Rename
+   * @param src source path
+   * @param dst destination path
+   * @return success
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public boolean rename(final String src, final String dst) throws IOException,
+      InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        return fs.rename(new Path(src), new Path(dst));
+      }
+    });
+  }
 
-    public FSDataInputStream open(final String path) throws IOException,
-        InterruptedException {
-        return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
-            public FSDataInputStream run() throws Exception {
-                return fs.open(new Path(path));
-            }
-        });
-    }
+  /**
+   * Delete
+   * @param path path
+   * @param recursive delete recursive
+   * @return success
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public boolean delete(final String path, final boolean recursive)
+      throws IOException, InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        return fs.delete(new Path(path), recursive);
+      }
+    });
+  }
 
-    public boolean copy(final String src, final String dest) throws IOException,
-            InterruptedException {
-        return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-            public Boolean run() throws Exception {
-                return FileUtil.copy(fs, new Path(src), fs, new Path(dest), false, conf);
-            }
-        });
-    }
+  /**
+   * Create file
+   * @param path path
+   * @param overwrite overwrite existent file
+   * @return output stream
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public FSDataOutputStream create(final String path, final boolean overwrite)
+      throws IOException, InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
+      public FSDataOutputStream run() throws Exception {
+        return fs.create(new Path(path), overwrite);
+      }
+    });
+  }
 
-    /**
-     * Converts a Hadoop permission into a Unix permission symbolic representation
-     * (i.e. -rwxr--r--) or default if the permission is NULL.
-     *
-     * @param p
-     *          Hadoop permission.
-     * @return the Unix permission symbolic representation or default if the
-     *         permission is NULL.
-     */
-    private static String permissionToString(FsPermission p) {
-        return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
-            + p.getGroupAction().SYMBOL + p.getOtherAction().SYMBOL;
-    }
+  /**
+   * Open file
+   * @param path path
+   * @return input stream
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public FSDataInputStream open(final String path) throws IOException,
+      InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
+      public FSDataInputStream run() throws Exception {
+        return fs.open(new Path(path));
+      }
+    });
+  }
 
-    /**
-     * Converts a Hadoop <code>FileStatus</code> object into a JSON array object.
-     * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
-     * specified URL.
-     * <p/>
-     *
-     * @param status
-     *          Hadoop file status.
-     * @return The JSON representation of the file status.
-     */
-
-    public static Map<String, Object> fileStatusToJSON(FileStatus status) {
-        Map<String, Object> json = new LinkedHashMap<String, Object>();
-        json.put("path", status.getPath().toString());
-        json.put("isDirectory", status.isDirectory());
-        json.put("len", status.getLen());
-        json.put("owner", status.getOwner());
-        json.put("group", status.getGroup());
-        json.put("permission", permissionToString(status.getPermission()));
-        json.put("accessTime", status.getAccessTime());
-        json.put("modificationTime", status.getModificationTime());
-        json.put("blockSize", status.getBlockSize());
-        json.put("replication", status.getReplication());
-        return json;
-    }
+  /**
+   * Copy file
+   * @param src source path
+   * @param dest destination path
+   * @return success
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public boolean copy(final String src, final String dest) throws IOException,
+      InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+      public Boolean run() throws Exception {
+        return FileUtil.copy(fs, new Path(src), fs, new Path(dest), false, conf);
+      }
+    });
+  }
+
+  /**
+   * Converts a Hadoop permission into a Unix permission symbolic representation
+   * (i.e. -rwxr--r--) or default if the permission is NULL.
+   *
+   * @param p
+   *          Hadoop permission.
+   * @return the Unix permission symbolic representation or default if the
+   *         permission is NULL.
+   */
+  private static String permissionToString(FsPermission p) {
+    return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
+        + p.getGroupAction().SYMBOL + p.getOtherAction().SYMBOL;
+  }
+
+  /**
+   * Converts a Hadoop <code>FileStatus</code> object into a JSON array object.
+   * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
+   * specified URL.
+   * <p/>
+   *
+   * @param status
+   *          Hadoop file status.
+   * @return The JSON representation of the file status.
+   */
+
+  public static Map<String, Object> fileStatusToJSON(FileStatus status) {
+    Map<String, Object> json = new LinkedHashMap<String, Object>();
+    json.put("path", status.getPath().toString());
+    json.put("isDirectory", status.isDirectory());
+    json.put("len", status.getLen());
+    json.put("owner", status.getOwner());
+    json.put("group", status.getGroup());
+    json.put("permission", permissionToString(status.getPermission()));
+    json.put("accessTime", status.getAccessTime());
+    json.put("modificationTime", status.getModificationTime());
+    json.put("blockSize", status.getBlockSize());
+    json.put("replication", status.getReplication());
+    return json;
+  }
 
-    /**
-     * Converts a Hadoop <code>FileStatus</code> array into a JSON array object.
-     * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
-     * specified URL.
-     * <p/>
-     *
-     * @param status
-     *          Hadoop file status array.
-     * @return The JSON representation of the file status array.
-     */
-    @SuppressWarnings("unchecked")
-    public static JSONArray fileStatusToJSON(FileStatus[] status) {
-        JSONArray json = new JSONArray();
-        if (status != null) {
-            for (FileStatus s : status) {
-                json.add(fileStatusToJSON(s));
-            }
-        }
-        return json;
+  /**
+   * Converts a Hadoop <code>FileStatus</code> array into a JSON array object.
+   * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
+   * specified URL.
+   * <p/>
+   *
+   * @param status
+   *          Hadoop file status array.
+   * @return The JSON representation of the file status array.
+   */
+  @SuppressWarnings("unchecked")
+  public static JSONArray fileStatusToJSON(FileStatus[] status) {
+    JSONArray json = new JSONArray();
+    if (status != null) {
+      for (FileStatus s : status) {
+        json.add(fileStatusToJSON(s));
+      }
     }
+    return json;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/BasePigTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/BasePigTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/BasePigTest.java
index 7a237d7..7352157 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/BasePigTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/BasePigTest.java
@@ -33,64 +33,64 @@ import java.util.Map;
 import static org.easymock.EasyMock.*;
 
 public abstract class BasePigTest {
-    protected ViewResourceHandler handler;
-    protected ViewContext context;
-    protected static File pigStorageFile;
-    protected static File baseDir;
+  protected ViewResourceHandler handler;
+  protected ViewContext context;
+  protected static File pigStorageFile;
+  protected static File baseDir;
 
-    protected static String DATA_DIRECTORY = "./target/PigTest";
+  protected static String DATA_DIRECTORY = "./target/PigTest";
 
-    @BeforeClass
-    public static void startUp() throws Exception {
-        File baseDir = new File(DATA_DIRECTORY)
-                .getAbsoluteFile();
-        FileUtil.fullyDelete(baseDir);
-    }
+  @BeforeClass
+  public static void startUp() throws Exception {
+    File baseDir = new File(DATA_DIRECTORY)
+        .getAbsoluteFile();
+    FileUtil.fullyDelete(baseDir);
+  }
 
-    @Before
-    public void setUp() throws Exception {
-        handler = createNiceMock(ViewResourceHandler.class);
-        context = createNiceMock(ViewContext.class);
+  @Before
+  public void setUp() throws Exception {
+    handler = createNiceMock(ViewResourceHandler.class);
+    context = createNiceMock(ViewContext.class);
 
-        Map<String, String> properties = new HashMap<String, String>();
-        baseDir = new File(DATA_DIRECTORY)
-                .getAbsoluteFile();
-        pigStorageFile = new File("./target/BasePigTest/storage.dat")
-                .getAbsoluteFile();
+    Map<String, String> properties = new HashMap<String, String>();
+    baseDir = new File(DATA_DIRECTORY)
+        .getAbsoluteFile();
+    pigStorageFile = new File("./target/BasePigTest/storage.dat")
+        .getAbsoluteFile();
 
-        properties.put("dataworker.storagePath", pigStorageFile.toString());
-        properties.put("dataworker.templeton_url", "localhost:50111/templeton/v1");
-        properties.put("dataworker.templeton_user", "admin");
-        properties.put("dataworker.userScriptsPath", "/tmp/.pigscripts");
-        properties.put("dataworker.pigJobsPath", "/tmp/.pigjobs");
+    properties.put("dataworker.storagePath", pigStorageFile.toString());
+    properties.put("dataworker.templeton_url", "localhost:50111/templeton/v1");
+    properties.put("dataworker.templeton_user", "admin");
+    properties.put("dataworker.userScriptsPath", "/tmp/.pigscripts");
+    properties.put("dataworker.pigJobsPath", "/tmp/.pigjobs");
 
-        setupProperties(properties, baseDir);
+    setupProperties(properties, baseDir);
 
-        expect(context.getProperties()).andReturn(properties).anyTimes();
-        expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
+    expect(context.getProperties()).andReturn(properties).anyTimes();
+    expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
 
-        replay(handler, context);
-    }
+    replay(handler, context);
+  }
 
-    protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
+  protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
 
-    }
+  }
 
-    @After
-    public void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
 
-    }
+  }
 
-    protected static <T> T getService(Class<T> clazz,
+  protected static <T> T getService(Class<T> clazz,
                                     final ViewResourceHandler viewResourceHandler,
                                     final ViewContext viewInstanceContext) {
-        Injector viewInstanceInjector = Guice.createInjector(new AbstractModule() {
-            @Override
-            protected void configure() {
-                bind(ViewResourceHandler.class).toInstance(viewResourceHandler);
-                bind(ViewContext.class).toInstance(viewInstanceContext);
-            }
-        });
-        return viewInstanceInjector.getInstance(clazz);
-    }
+    Injector viewInstanceInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(ViewResourceHandler.class).toInstance(viewResourceHandler);
+        bind(ViewContext.class).toInstance(viewInstanceContext);
+      }
+    });
+    return viewInstanceInjector.getInstance(clazz);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/HDFSTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/HDFSTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/HDFSTest.java
index 214c6ef..85a67f6 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/HDFSTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/HDFSTest.java
@@ -28,33 +28,33 @@ import java.io.File;
 import java.util.Map;
 
 public abstract class HDFSTest extends BasePigTest {
-    protected static MiniDFSCluster hdfsCluster;
-    protected static String hdfsURI;
-
-    @BeforeClass
-    public static void startUp() throws Exception {
-        BasePigTest.startUp(); // super
-        File hdfsDir = new File("./target/PigTest/hdfs/")
-                .getAbsoluteFile();
-        FileUtil.fullyDelete(hdfsDir);
-
-        Configuration conf = new Configuration();
-        conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsDir.getAbsolutePath());
-
-        MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
-        hdfsCluster = builder.build();
-        hdfsURI = hdfsCluster.getURI().toString();
-    }
-
-    @AfterClass
-    public static void shutDown() throws Exception {
-        hdfsCluster.shutdown();
-        hdfsCluster = null;
-    }
-
-    @Override
-    protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
-        super.setupProperties(properties, baseDir);
-        properties.put("dataworker.defaultFs", hdfsURI);
-    }
+  protected static MiniDFSCluster hdfsCluster;
+  protected static String hdfsURI;
+
+  @BeforeClass
+  public static void startUp() throws Exception {
+    BasePigTest.startUp(); // super
+    File hdfsDir = new File("./target/PigTest/hdfs/")
+        .getAbsoluteFile();
+    FileUtil.fullyDelete(hdfsDir);
+
+    Configuration conf = new Configuration();
+    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsDir.getAbsolutePath());
+
+    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
+    hdfsCluster = builder.build();
+    hdfsURI = hdfsCluster.getURI().toString();
+  }
+
+  @AfterClass
+  public static void shutDown() throws Exception {
+    hdfsCluster.shutdown();
+    hdfsCluster = null;
+  }
+
+  @Override
+  protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
+    super.setupProperties(properties, baseDir);
+    properties.put("dataworker.defaultFs", hdfsURI);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/FileTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/FileTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/FileTest.java
index 1e78ee6..3ebb4f4 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/FileTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/FileTest.java
@@ -36,168 +36,168 @@ import java.util.UUID;
 import static org.easymock.EasyMock.*;
 
 public class FileTest extends HDFSTest {
-    private final static int PAGINATOR_PAGE_SIZE = 4;
-    private FileService fileService;
-
-    @Override
-    @Before
-    public void setUp() throws Exception {
-        super.setUp();
-        fileService = getService(FileService.class, handler, context);
-        FilePaginator.setPageSize(PAGINATOR_PAGE_SIZE);
-    }
-
-    @BeforeClass
-    public static void startUp() throws Exception {
-        HDFSTest.startUp(); // super
-    }
-
-    @AfterClass
-    public static void shutDown() throws Exception {
-        HDFSTest.shutDown(); // super
-        FileService.setHdfsApi(null); //cleanup API connection
-    }
-
-    private Response doCreateFile() throws IOException, InterruptedException {
-        replay(handler, context);
-        return doCreateFile("luke", "i'm your father");
-    }
-
-    private Response doCreateFile(String name, String content) throws IOException, InterruptedException {
-        return doCreateFile(name, content, "/tmp/");
-    }
-
-    private Response doCreateFile(String name, String content, String filePath) throws IOException, InterruptedException {
-        FileService.FileResourceRequest request = new FileService.FileResourceRequest();
-        request.file = new FileResource();
-        request.file.filePath = filePath + name;
-        request.file.fileContent = content;
-
-        HttpServletResponse resp_obj = createNiceMock(HttpServletResponse.class);
-        resp_obj.setHeader(eq("Location"), anyString());
-
-        UriInfo uriInfo = createNiceMock(UriInfo.class);
-        URI uri = UriBuilder.fromUri("http://host/a/b").build();
-        expect(uriInfo.getAbsolutePath()).andReturn(uri);
-
-        replay(resp_obj, uriInfo);
-        return fileService.createFile(request, resp_obj, uriInfo);
-    }
-
-    @Test
-    public void testCreateFile() throws IOException, InterruptedException {
-        String name = UUID.randomUUID().toString().replaceAll("-", "");
-        Response response = doCreateFile(name, "12323");
-        Assert.assertEquals(204, response.getStatus());
-
-        String name2 = UUID.randomUUID().toString().replaceAll("-", "");
-        Response response2 = doCreateFile(name2, "12323");
-        Assert.assertEquals(204, response2.getStatus());
-    }
-
-    @Test
-    public void testCreateFilePathNotExists() throws IOException, InterruptedException {
-        Response response = doCreateFile("Luke", null, "/non/existent/path/");
-        Assert.assertEquals(204, response.getStatus());  // path created automatically
-
-        Response response2 = doCreateFile("Leia", null, "/tmp/");
-        Assert.assertEquals(204, response2.getStatus());
-
-        Response response3 = doCreateFile("Leia", null, "/tmp/"); // file already exists
-        Assert.assertEquals(400, response3.getStatus());
-    }
-
-    @Test
-    public void testUpdateFileContent() throws Exception {
-        String name = UUID.randomUUID().toString().replaceAll("-", "");
-        String filePath = "/tmp/" + name;
-
-        Response createdFile = doCreateFile(name, "some content");
-        FileService.FileResourceRequest request = new FileService.FileResourceRequest();
-        request.file = new FileResource();
-        request.file.filePath = filePath;
-        request.file.fileContent = "1234567890";  // 10 bytes, 3*(4b page)
-
-        Response response = fileService.updateFile(request, filePath);
-        Assert.assertEquals(204, response.getStatus());
-
-        Response response2 = fileService.getFile(filePath, 0L);
-        Assert.assertEquals(200, response2.getStatus());
-
-        JSONObject obj = ((JSONObject) response2.getEntity());
-        Assert.assertTrue(obj.containsKey("file"));
-        Assert.assertEquals("1234", ((FileResource) obj.get("file")).fileContent);
-    }
-
-    @Test
-    public void testPagination() throws Exception {
-        String name = UUID.randomUUID().toString().replaceAll("-", "");
-        String filePath = "/tmp/" + name;
-
-        doCreateFile(name, "1234567890");
-
-        Response response = fileService.getFile(filePath, 0L);
-        Assert.assertEquals(200, response.getStatus());
-
-        JSONObject obj = ((JSONObject) response.getEntity());
-        Assert.assertTrue(obj.containsKey("file"));
-        Assert.assertEquals("1234", ((FileResource) obj.get("file")).fileContent);
-        Assert.assertEquals(3, ((FileResource) obj.get("file")).pageCount);
-        Assert.assertEquals(0, ((FileResource) obj.get("file")).page);
-        Assert.assertTrue(((FileResource) obj.get("file")).hasNext);
-        Assert.assertEquals(filePath, ((FileResource) obj.get("file")).filePath);
-
-        response = fileService.getFile(filePath, 1L);
-        Assert.assertEquals(200, response.getStatus());
-
-        obj = ((JSONObject) response.getEntity());
-        Assert.assertEquals("5678", ((FileResource) obj.get("file")).fileContent);
-        Assert.assertEquals(1, ((FileResource) obj.get("file")).page);
-        Assert.assertTrue(((FileResource) obj.get("file")).hasNext);
-
-        response = fileService.getFile(filePath, 2L);
-        Assert.assertEquals(200, response.getStatus());
-
-        obj = ((JSONObject) response.getEntity());
-        Assert.assertEquals("90", ((FileResource) obj.get("file")).fileContent);
-        Assert.assertEquals(2, ((FileResource) obj.get("file")).page);
-        Assert.assertFalse(((FileResource) obj.get("file")).hasNext);
-
-        response = fileService.getFile(filePath, 3L);
-        Assert.assertEquals(400, response.getStatus()); //page not found
-    }
-
-    @Test
-    public void testZeroLengthFile() throws Exception {
-        String name = UUID.randomUUID().toString().replaceAll("-", "");
-        String filePath = "/tmp/" + name;
-
-        doCreateFile(name, "");
-
-        Response response = fileService.getFile(filePath, 0L);
-        Assert.assertEquals(200, response.getStatus());
-        JSONObject obj = ((JSONObject) response.getEntity());
-        Assert.assertEquals("", ((FileResource) obj.get("file")).fileContent);
-        Assert.assertEquals(0, ((FileResource) obj.get("file")).page);
-        Assert.assertFalse(((FileResource) obj.get("file")).hasNext);
-    }
-
-    @Test
-    public void testFileNotFound() throws IOException, InterruptedException {
-        Response response1 = fileService.getFile("/tmp/notExistentFile", 2L);
-        Assert.assertEquals(404, response1.getStatus());
-    }
-
-    @Test
-    public void testDeleteFile() throws IOException, InterruptedException {
-        String name = UUID.randomUUID().toString().replaceAll("-", "");
-        String filePath = "/tmp/" + name;
-        Response createdFile = doCreateFile(name, "some content");
-
-        Response response = fileService.deleteFile(filePath);
-        Assert.assertEquals(204, response.getStatus());
-
-        Response response2 = fileService.getFile(filePath, 0L);
-        Assert.assertEquals(404, response2.getStatus());
-    }
+  // Small page size so pagination tests only need ~10 bytes of content.
+  private final static int PAGINATOR_PAGE_SIZE = 4;
+  private FileService fileService;
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    // Fresh service instance per test; handler/context mocks come from the base class.
+    fileService = getService(FileService.class, handler, context);
+    FilePaginator.setPageSize(PAGINATOR_PAGE_SIZE);
+  }
+
+  @BeforeClass
+  public static void startUp() throws Exception {
+    HDFSTest.startUp(); // super
+  }
+
+  @AfterClass
+  public static void shutDown() throws Exception {
+    HDFSTest.shutDown(); // super
+    FileService.setHdfsApi(null); //cleanup API connection
+  }
+
+  // Convenience overload: replays the base-class mocks, then creates a fixed file.
+  private Response doCreateFile() throws IOException, InterruptedException {
+    replay(handler, context);
+    return doCreateFile("luke", "i'm your father");
+  }
+
+  // Creates a file under /tmp/ with the given name and content.
+  private Response doCreateFile(String name, String content) throws IOException, InterruptedException {
+    return doCreateFile(name, content, "/tmp/");
+  }
+
+  // Builds a create-file request and invokes the service with mocked
+  // servlet response and URI info; returns the raw service Response.
+  private Response doCreateFile(String name, String content, String filePath) throws IOException, InterruptedException {
+    FileService.FileResourceRequest request = new FileService.FileResourceRequest();
+    request.file = new FileResource();
+    request.file.setFilePath(filePath + name);
+    request.file.setFileContent(content);
+
+    // createFile is expected to set a Location header on the servlet response.
+    HttpServletResponse resp_obj = createNiceMock(HttpServletResponse.class);
+    resp_obj.setHeader(eq("Location"), anyString());
+
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+    URI uri = UriBuilder.fromUri("http://host/a/b").build();
+    expect(uriInfo.getAbsolutePath()).andReturn(uri);
+
+    replay(resp_obj, uriInfo);
+    return fileService.createFile(request, resp_obj, uriInfo);
+  }
+
+  // Two files with distinct random names should both be created (204 No Content).
+  @Test
+  public void testCreateFile() throws IOException, InterruptedException {
+    String name = UUID.randomUUID().toString().replaceAll("-", "");
+    Response response = doCreateFile(name, "12323");
+    Assert.assertEquals(204, response.getStatus());
+
+    String name2 = UUID.randomUUID().toString().replaceAll("-", "");
+    Response response2 = doCreateFile(name2, "12323");
+    Assert.assertEquals(204, response2.getStatus());
+  }
+
+  // A missing parent path is created implicitly; creating the same file twice
+  // must fail with 400.
+  @Test
+  public void testCreateFilePathNotExists() throws IOException, InterruptedException {
+    Response response = doCreateFile("Luke", null, "/non/existent/path/");
+    Assert.assertEquals(204, response.getStatus());  // path created automatically
+
+    Response response2 = doCreateFile("Leia", null, "/tmp/");
+    Assert.assertEquals(204, response2.getStatus());
+
+    Response response3 = doCreateFile("Leia", null, "/tmp/"); // file already exists
+    Assert.assertEquals(400, response3.getStatus());
+  }
+
+  // Overwriting a file's content and reading page 0 back should return the
+  // first PAGINATOR_PAGE_SIZE (4) bytes of the new content.
+  @Test
+  public void testUpdateFileContent() throws Exception {
+    String name = UUID.randomUUID().toString().replaceAll("-", "");
+    String filePath = "/tmp/" + name;
+
+    Response createdFile = doCreateFile(name, "some content");
+    FileService.FileResourceRequest request = new FileService.FileResourceRequest();
+    request.file = new FileResource();
+    request.file.setFilePath(filePath);
+    request.file.setFileContent("1234567890");  // 10 bytes, 3*(4b page)
+
+    Response response = fileService.updateFile(request, filePath);
+    Assert.assertEquals(204, response.getStatus());
+
+    // Read back: page 0 must hold the first 4 bytes of the updated content.
+    Response response2 = fileService.getFile(filePath, 0L);
+    Assert.assertEquals(200, response2.getStatus());
+
+    JSONObject obj = ((JSONObject) response2.getEntity());
+    Assert.assertTrue(obj.containsKey("file"));
+    Assert.assertEquals("1234", ((FileResource) obj.get("file")).getFileContent());
+  }
+
+  // A 10-byte file with a 4-byte page size splits into 3 pages:
+  // "1234", "5678", "90"; reading past the last page yields 400.
+  @Test
+  public void testPagination() throws Exception {
+    String name = UUID.randomUUID().toString().replaceAll("-", "");
+    String filePath = "/tmp/" + name;
+
+    doCreateFile(name, "1234567890");
+
+    // Page 0: full page, more pages follow.
+    Response response = fileService.getFile(filePath, 0L);
+    Assert.assertEquals(200, response.getStatus());
+
+    JSONObject obj = ((JSONObject) response.getEntity());
+    Assert.assertTrue(obj.containsKey("file"));
+    Assert.assertEquals("1234", ((FileResource) obj.get("file")).getFileContent());
+    Assert.assertEquals(3, ((FileResource) obj.get("file")).getPageCount());
+    Assert.assertEquals(0, ((FileResource) obj.get("file")).getPage());
+    Assert.assertTrue(((FileResource) obj.get("file")).isHasNext());
+    Assert.assertEquals(filePath, ((FileResource) obj.get("file")).getFilePath());
+
+    // Page 1: middle page, still has a successor.
+    response = fileService.getFile(filePath, 1L);
+    Assert.assertEquals(200, response.getStatus());
+
+    obj = ((JSONObject) response.getEntity());
+    Assert.assertEquals("5678", ((FileResource) obj.get("file")).getFileContent());
+    Assert.assertEquals(1, ((FileResource) obj.get("file")).getPage());
+    Assert.assertTrue(((FileResource) obj.get("file")).isHasNext());
+
+    // Page 2: short final page, no successor.
+    response = fileService.getFile(filePath, 2L);
+    Assert.assertEquals(200, response.getStatus());
+
+    obj = ((JSONObject) response.getEntity());
+    Assert.assertEquals("90", ((FileResource) obj.get("file")).getFileContent());
+    Assert.assertEquals(2, ((FileResource) obj.get("file")).getPage());
+    Assert.assertFalse(((FileResource) obj.get("file")).isHasNext());
+
+    response = fileService.getFile(filePath, 3L);
+    Assert.assertEquals(400, response.getStatus()); //page not found
+  }
+
+  // An empty file still reads as page 0 with empty content and no next page.
+  @Test
+  public void testZeroLengthFile() throws Exception {
+    String name = UUID.randomUUID().toString().replaceAll("-", "");
+    String filePath = "/tmp/" + name;
+
+    doCreateFile(name, "");
+
+    Response response = fileService.getFile(filePath, 0L);
+    Assert.assertEquals(200, response.getStatus());
+    JSONObject obj = ((JSONObject) response.getEntity());
+    Assert.assertEquals("", ((FileResource) obj.get("file")).getFileContent());
+    Assert.assertEquals(0, ((FileResource) obj.get("file")).getPage());
+    Assert.assertFalse(((FileResource) obj.get("file")).isHasNext());
+  }
+
+  // Reading a nonexistent path yields 404.
+  @Test
+  public void testFileNotFound() throws IOException, InterruptedException {
+    Response response1 = fileService.getFile("/tmp/notExistentFile", 2L);
+    Assert.assertEquals(404, response1.getStatus());
+  }
+
+  // Delete returns 204; a subsequent read of the same path yields 404.
+  @Test
+  public void testDeleteFile() throws IOException, InterruptedException {
+    String name = UUID.randomUUID().toString().replaceAll("-", "");
+    String filePath = "/tmp/" + name;
+    Response createdFile = doCreateFile(name, "some content");
+
+    Response response = fileService.deleteFile(filePath);
+    Assert.assertEquals(204, response.getStatus());
+
+    Response response2 = fileService.getFile(filePath, 0L);
+    Assert.assertEquals(404, response2.getStatus());
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/HelpTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/HelpTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/HelpTest.java
index 5d633dc..167317d 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/HelpTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/HelpTest.java
@@ -27,33 +27,33 @@ import org.junit.*;
 import javax.ws.rs.core.Response;
 
 public class HelpTest extends HDFSTest {
-    private HelpService helpService;
-
-    @Override
-    @Before
-    public void setUp() throws Exception {
-        super.setUp();
-        helpService = new HelpService(context, handler);
-    }
-
-    @BeforeClass
-    public static void startUp() throws Exception {
-        HDFSTest.startUp(); // super
-    }
-
-    @AfterClass
-    public static void shutDown() throws Exception {
-        HDFSTest.shutDown(); // super
-        FileService.setHdfsApi(null); //cleanup API connection
-    }
-
-    @Test
-    public void configTest() {
-        Response response = helpService.config();
-        Assert.assertEquals(200, response.getStatus());
-
-        JSONObject obj = (JSONObject)response.getEntity();
-        Assert.assertTrue(obj.containsKey("dataworker.defaultFs"));
-        Assert.assertEquals(hdfsURI, obj.get("dataworker.defaultFs"));
-    }
+  private HelpService helpService;
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    // Service under test; context/handler mocks come from the base class.
+    helpService = new HelpService(context, handler);
+  }
+
+  @BeforeClass
+  public static void startUp() throws Exception {
+    HDFSTest.startUp(); // super
+  }
+
+  @AfterClass
+  public static void shutDown() throws Exception {
+    HDFSTest.shutDown(); // super
+    FileService.setHdfsApi(null); //cleanup API connection
+  }
+
+  // config() should expose the default filesystem URI of the mini cluster.
+  @Test
+  public void configTest() {
+    Response response = helpService.config();
+    Assert.assertEquals(200, response.getStatus());
+
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(obj.containsKey("dataworker.defaultFs"));
+    Assert.assertEquals(hdfsURI, obj.get("dataworker.defaultFs"));
+  }
 }


Mime
View raw message