ambari-commits mailing list archives

From: maha...@apache.org
Subject: [1/4] AMBARI-5704. Pig View Cleanup. (mahadev)
Date: Wed, 07 May 2014 19:38:55 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-1.6.0 40e5707bd -> c64261e26


http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/JobTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/JobTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/JobTest.java
index a8a8c34..6cf4bd7 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/JobTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/JobTest.java
@@ -38,373 +38,373 @@ import java.util.HashMap;
 import static org.easymock.EasyMock.*;
 
 public class JobTest extends BasePigTest {
-    private JobService jobService;
-
-    @Override
-    @Before
-    public void setUp() throws Exception {
-        super.setUp();
-        jobService = getService(JobService.class, handler, context);
-    }
-
-    @Override
-    @After
-    public void tearDown() throws Exception {
-        super.tearDown();
-        jobService.getResourceManager().setTempletonApi(null);
-        JobService.setHdfsApi(null);
-    }
-
-    private Response doCreateJob(String title, String pigScript, String templetonArguments) {
-        return doCreateJob(title, pigScript, templetonArguments, null);
-    }
-
-    private Response doCreateJob(String title, String pigScript, String templetonArguments, String forcedContent) {
-        JobService.PigJobRequest request = new JobService.PigJobRequest();
-        request.job = new PigJob();
-        request.job.setTitle(title);
-        request.job.setPigScript(pigScript);
-        request.job.setTempletonArguments(templetonArguments);
-        request.job.setForcedContent(forcedContent);
-
-        UriInfo uriInfo = createNiceMock(UriInfo.class);
-        URI uri = UriBuilder.fromUri("http://host/a/b").build();
-        expect(uriInfo.getAbsolutePath()).andReturn(uri);
-
-        HttpServletResponse resp_obj = createStrictMock(HttpServletResponse.class);
-
-        resp_obj.setHeader(eq("Location"), anyString());
-
-        replay(uriInfo, resp_obj);
-        return jobService.runJob(request, resp_obj, uriInfo);
-    }
-
-    @Test
-    public void testSubmitJob() throws Exception {
-        HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-        expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
-
-        ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
-
-        FSDataOutputStream stream = new FSDataOutputStream(do_stream);
-        expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
-        replay(hdfsApi);
-        JobService.setHdfsApi(hdfsApi);
-
-        TempletonApi api = createNiceMock(TempletonApi.class);
-        jobService.getResourceManager().setTempletonApi(api);
-        TempletonApi.JobData data = api.new JobData();
-        expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
-        replay(api);
-
-        Response response = doCreateJob("Test", "/tmp/script.pig", "-useHCatalog");
-
-        Assert.assertEquals("-useHCatalog", do_stream.toString());
-        Assert.assertEquals(201, response.getStatus());
-
-        JSONObject obj = (JSONObject)response.getEntity();
-        Assert.assertTrue(obj.containsKey("job"));
-        Assert.assertNotNull(((PigJob) obj.get("job")).getId());
-        Assert.assertFalse(((PigJob) obj.get("job")).getId().isEmpty());
-        Assert.assertTrue(((PigJob) obj.get("job")).getStatusDir().startsWith("/tmp/.pigjobs/admin/test"));
-
-        PigJob job = ((PigJob) obj.get("job"));
-        Assert.assertEquals(PigJob.Status.SUBMITTED, job.getStatus());
-        Assert.assertTrue(job.isInProgress());
-    }
-
-    @Test
-    public void testSubmitJobNoArguments() throws Exception {
-        HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-        expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
-
-        ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
-
-        FSDataOutputStream stream = new FSDataOutputStream(do_stream);
-        expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
-        replay(hdfsApi);
-        JobService.setHdfsApi(hdfsApi);
-
-        TempletonApi api = createNiceMock(TempletonApi.class);
-        jobService.getResourceManager().setTempletonApi(api);
-        TempletonApi.JobData data = api.new JobData();
-        expect(api.runPigQuery((File) anyObject(), anyString(), (String) isNull())).andReturn(data);
-        replay(api);
-
-        Response response = doCreateJob("Test", "/tmp/script.pig", null);
-
-        Assert.assertEquals("", do_stream.toString());
-        Assert.assertEquals(201, response.getStatus());
-
-        JSONObject obj = (JSONObject)response.getEntity();
-        Assert.assertTrue(obj.containsKey("job"));
-        Assert.assertNotNull(((PigJob) obj.get("job")).getId());
-        Assert.assertFalse(((PigJob) obj.get("job")).getId().isEmpty());
-        Assert.assertTrue(((PigJob) obj.get("job")).getStatusDir().startsWith("/tmp/.pigjobs/admin/test"));
-
-        PigJob job = ((PigJob) obj.get("job"));
-        Assert.assertEquals(PigJob.Status.SUBMITTED, job.getStatus());
-        Assert.assertTrue(job.isInProgress());
-    }
-
-    @Test
-    public void testSubmitJobNoFile() throws Exception {
-        HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-        expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
-
-        ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
-
-        FSDataOutputStream stream = new FSDataOutputStream(do_stream);
-        expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
-        replay(hdfsApi);
-        JobService.setHdfsApi(hdfsApi);
-
-        TempletonApi api = createNiceMock(TempletonApi.class);
-        jobService.getResourceManager().setTempletonApi(api);
-        TempletonApi.JobData data = api.new JobData();
-        expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
-        replay(api);
-
-        Response response = doCreateJob("Test", null, "-useHCatalog");
-        Assert.assertEquals(400, response.getStatus());
-        JSONObject obj = (JSONObject)response.getEntity();
-        Assert.assertTrue(((String)obj.get("message")).contains("No pigScript file or forcedContent specifed;"));
-    }
-
-    @Test
-    public void testSubmitJobForcedContent() throws Exception {
-        HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-
-        ByteArrayOutputStream baScriptStream = new ByteArrayOutputStream();
-        ByteArrayOutputStream baTempletonArgsStream = new ByteArrayOutputStream();
-
-        FSDataOutputStream scriptStream = new FSDataOutputStream(baScriptStream);
-        FSDataOutputStream templetonArgsStream = new FSDataOutputStream(baTempletonArgsStream);
-        expect(hdfsApi.create(endsWith("script.pig"), eq(true))).andReturn(scriptStream);
-        expect(hdfsApi.create(endsWith("params"), eq(true))).andReturn(templetonArgsStream);
-        replay(hdfsApi);
-        JobService.setHdfsApi(hdfsApi);
-
-        TempletonApi api = createNiceMock(TempletonApi.class);
-        jobService.getResourceManager().setTempletonApi(api);
-        TempletonApi.JobData data = api.new JobData();
-        expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
-        replay(api);
-
-        Response response = doCreateJob("Test", null, "-useHCatalog", "pwd");  // with forcedContent
-        Assert.assertEquals(201, response.getStatus());
-        Assert.assertEquals("-useHCatalog", baTempletonArgsStream.toString());
-        Assert.assertEquals("pwd", baScriptStream.toString());
-    }
-
-    @Test
-    public void testSubmitJobNoTitle() throws Exception {
-        HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-        expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
-
-        ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
-
-        FSDataOutputStream stream = new FSDataOutputStream(do_stream);
-        expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
-        replay(hdfsApi);
-        JobService.setHdfsApi(hdfsApi);
-
-        TempletonApi api = createNiceMock(TempletonApi.class);
-        jobService.getResourceManager().setTempletonApi(api);
-        TempletonApi.JobData data = api.new JobData();
-        expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
-        replay(api);
-
-        Response response = doCreateJob(null, "/tmp/1.pig", "-useHCatalog");
-        Assert.assertEquals(400, response.getStatus());
-        JSONObject obj = (JSONObject)response.getEntity();
-        Assert.assertTrue(((String)obj.get("message")).contains("No title specifed"));
-    }
-
-    @Test
-    public void testSubmitJobFailed() throws Exception {
-        HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-        expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(false);
-
-        ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
-
-        FSDataOutputStream stream = new FSDataOutputStream(do_stream);
-        expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
-        replay(hdfsApi);
-        JobService.setHdfsApi(hdfsApi);
-
-        TempletonApi api = createNiceMock(TempletonApi.class);
-        jobService.getResourceManager().setTempletonApi(api);
-        TempletonApi.JobData data = api.new JobData();
-        expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
-        replay(api);
-
-        Response response = doCreateJob("Test", "/tmp/script.pig", "-useHCatalog");
-        Assert.assertEquals(500, response.getStatus());
-        JSONObject obj = (JSONObject)response.getEntity();
-        Assert.assertTrue(((String)obj.get("message")).contains("Can't copy"));
-    }
-
-    @Test
-    public void testSubmitJobTempletonError() throws Exception {
-        HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-        expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
-
-        ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
-
-        FSDataOutputStream stream = new FSDataOutputStream(do_stream);
-        expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
-        replay(hdfsApi);
-        JobService.setHdfsApi(hdfsApi);
-
-        TempletonApi api = createNiceMock(TempletonApi.class);
-        jobService.getResourceManager().setTempletonApi(api);
-        TempletonApi.JobData data = api.new JobData();
-        // Templeton returns 500 e.g.
-        expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andThrow(new IOException());
-        replay(api);
-
-        Response response = doCreateJob("Test", "/tmp/script.pig", "-useHCatalog");
-        Assert.assertEquals(500, response.getStatus());
-        JSONObject obj = (JSONObject)response.getEntity();
-        Assert.assertTrue(((String) obj.get("message")).contains("Templeton"));
-    }
-
-    @Test
-    public void testKillJob() throws Exception {
-        HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-        expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
-
-        ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
-
-        FSDataOutputStream stream = new FSDataOutputStream(do_stream);
-        expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
-        replay(hdfsApi);
-        JobService.setHdfsApi(hdfsApi);
-
-        TempletonApi api = createStrictMock(TempletonApi.class);
-        jobService.getResourceManager().setTempletonApi(api);
-        TempletonApi.JobData data = api.new JobData();
-        data.id = "job_id_##";
-        expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
-        replay(api);
-
-        Response response = doCreateJob("Test", "/tmp/script.pig", "-useHCatalog");
-        Assert.assertEquals(201, response.getStatus());
-
-        reset(api);
-        api.killJob(eq("job_id_##"));
-        replay(api);
-        JSONObject obj = (JSONObject)response.getEntity();
-        PigJob job = ((PigJob)obj.get("job"));
-        response = jobService.killJob(job.getId());
-        Assert.assertEquals(204, response.getStatus());
-    }
-
-    @Test
-    public void testJobStatusFlow() throws Exception {
-        HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-        expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
-
-        ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
-
-        FSDataOutputStream stream = new FSDataOutputStream(do_stream);
-        expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
-        replay(hdfsApi);
-        JobService.setHdfsApi(hdfsApi);
-
-        TempletonApi api = createNiceMock(TempletonApi.class);
-        jobService.getResourceManager().setTempletonApi(api);
-        TempletonApi.JobData data = api.new JobData();
-        data.id = "job_id_#";
-        expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
-        replay(api);
-
-        Response response = doCreateJob("Test", "/tmp/script.pig", "-useHCatalog");
-
-        Assert.assertEquals("-useHCatalog", do_stream.toString());
-        Assert.assertEquals(201, response.getStatus());
-
-        PigJob job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
-        Assert.assertEquals(PigJob.Status.SUBMITTED, job.getStatus());
-        Assert.assertTrue(job.isInProgress());
-
-        // Retrieve status:
-        // SUBMITTED
-        reset(api);
-        TempletonApi.JobInfo info = api.new JobInfo();
-        expect(api.checkJob(eq("job_id_#"))).andReturn(info);
-        replay(api);
-        response = jobService.getJob(job.getId());
-        Assert.assertEquals(200, response.getStatus());
-        job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
-        Assert.assertEquals(PigJob.Status.SUBMITTED, job.getStatus());
-
-        // RUNNING
-        reset(api);
-        info = api.new JobInfo();
-        info.status = new HashMap<String, Object>();
-        info.status.put("runState", (double)PigJob.RUN_STATE_RUNNING);
-        info.percentComplete = "30% complete";
-        expect(api.checkJob(eq("job_id_#"))).andReturn(info);
-        replay(api);
-        response = jobService.getJob(job.getId());
-        Assert.assertEquals(200, response.getStatus());
-        job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
-        Assert.assertEquals(PigJob.Status.RUNNING, job.getStatus());
-        Assert.assertTrue(job.isInProgress());
-        Assert.assertEquals(30, (Object) job.getPercentComplete());
-
-        // SUCCEED
-        reset(api);
-        info = api.new JobInfo();
-        info.status = new HashMap<String, Object>();
-        info.status.put("runState", (double)PigJob.RUN_STATE_SUCCEEDED);
-        expect(api.checkJob(eq("job_id_#"))).andReturn(info);
-        replay(api);
-        response = jobService.getJob(job.getId());
-        Assert.assertEquals(200, response.getStatus());
-        job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
-        Assert.assertEquals(PigJob.Status.COMPLETED, job.getStatus());
-        Assert.assertFalse(job.isInProgress());
-        Assert.assertNull(job.getPercentComplete());
-
-        // PREP
-        reset(api);
-        info = api.new JobInfo();
-        info.status = new HashMap<String, Object>();
-        info.status.put("runState", (double)PigJob.RUN_STATE_PREP);
-        expect(api.checkJob(eq("job_id_#"))).andReturn(info);
-        replay(api);
-        response = jobService.getJob(job.getId());
-        Assert.assertEquals(200, response.getStatus());
-        job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
-        Assert.assertEquals(PigJob.Status.RUNNING, job.getStatus());
-
-        // FAILED
-        reset(api);
-        info = api.new JobInfo();
-        info.status = new HashMap<String, Object>();
-        info.status.put("runState", (double)PigJob.RUN_STATE_FAILED);
-        expect(api.checkJob(eq("job_id_#"))).andReturn(info);
-        replay(api);
-        response = jobService.getJob(job.getId());
-        Assert.assertEquals(200, response.getStatus());
-        job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
-        Assert.assertEquals(PigJob.Status.FAILED, job.getStatus());
-        Assert.assertFalse(job.isInProgress());
-
-        // KILLED
-        reset(api);
-        info = api.new JobInfo();
-        info.status = new HashMap<String, Object>();
-        info.status.put("runState", (double)PigJob.RUN_STATE_KILLED);
-        expect(api.checkJob(eq("job_id_#"))).andReturn(info);
-        replay(api);
-        response = jobService.getJob(job.getId());
-        Assert.assertEquals(200, response.getStatus());
-        job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
-        Assert.assertEquals(PigJob.Status.KILLED, job.getStatus());
-        Assert.assertFalse(job.isInProgress());
-    }
+  private JobService jobService;
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    jobService = getService(JobService.class, handler, context);
+  }
+
+  @Override
+  @After
+  public void tearDown() throws Exception {
+    super.tearDown();
+    jobService.getResourceManager().setTempletonApi(null);
+    JobService.setHdfsApi(null);
+  }
+
+  private Response doCreateJob(String title, String pigScript, String templetonArguments) {
+    return doCreateJob(title, pigScript, templetonArguments, null);
+  }
+
+  private Response doCreateJob(String title, String pigScript, String templetonArguments, String forcedContent) {
+    JobService.PigJobRequest request = new JobService.PigJobRequest();
+    request.job = new PigJob();
+    request.job.setTitle(title);
+    request.job.setPigScript(pigScript);
+    request.job.setTempletonArguments(templetonArguments);
+    request.job.setForcedContent(forcedContent);
+
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+    URI uri = UriBuilder.fromUri("http://host/a/b").build();
+    expect(uriInfo.getAbsolutePath()).andReturn(uri);
+
+    HttpServletResponse resp_obj = createStrictMock(HttpServletResponse.class);
+
+    resp_obj.setHeader(eq("Location"), anyString());
+
+    replay(uriInfo, resp_obj);
+    return jobService.runJob(request, resp_obj, uriInfo);
+  }
+
+  @Test
+  public void testSubmitJob() throws Exception {
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+
+    ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
+
+    FSDataOutputStream stream = new FSDataOutputStream(do_stream);
+    expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
+    replay(hdfsApi);
+    JobService.setHdfsApi(hdfsApi);
+
+    TempletonApi api = createNiceMock(TempletonApi.class);
+    jobService.getResourceManager().setTempletonApi(api);
+    TempletonApi.JobData data = api.new JobData();
+    expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
+    replay(api);
+
+    Response response = doCreateJob("Test", "/tmp/script.pig", "-useHCatalog");
+
+    Assert.assertEquals("-useHCatalog", do_stream.toString());
+    Assert.assertEquals(201, response.getStatus());
+
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(obj.containsKey("job"));
+    Assert.assertNotNull(((PigJob) obj.get("job")).getId());
+    Assert.assertFalse(((PigJob) obj.get("job")).getId().isEmpty());
+    Assert.assertTrue(((PigJob) obj.get("job")).getStatusDir().startsWith("/tmp/.pigjobs/admin/test"));
+
+    PigJob job = ((PigJob) obj.get("job"));
+    Assert.assertEquals(PigJob.Status.SUBMITTED, job.getStatus());
+    Assert.assertTrue(job.isInProgress());
+  }
+
+  @Test
+  public void testSubmitJobNoArguments() throws Exception {
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+
+    ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
+
+    FSDataOutputStream stream = new FSDataOutputStream(do_stream);
+    expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
+    replay(hdfsApi);
+    JobService.setHdfsApi(hdfsApi);
+
+    TempletonApi api = createNiceMock(TempletonApi.class);
+    jobService.getResourceManager().setTempletonApi(api);
+    TempletonApi.JobData data = api.new JobData();
+    expect(api.runPigQuery((File) anyObject(), anyString(), (String) isNull())).andReturn(data);
+    replay(api);
+
+    Response response = doCreateJob("Test", "/tmp/script.pig", null);
+
+    Assert.assertEquals("", do_stream.toString());
+    Assert.assertEquals(201, response.getStatus());
+
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(obj.containsKey("job"));
+    Assert.assertNotNull(((PigJob) obj.get("job")).getId());
+    Assert.assertFalse(((PigJob) obj.get("job")).getId().isEmpty());
+    Assert.assertTrue(((PigJob) obj.get("job")).getStatusDir().startsWith("/tmp/.pigjobs/admin/test"));
+
+    PigJob job = ((PigJob) obj.get("job"));
+    Assert.assertEquals(PigJob.Status.SUBMITTED, job.getStatus());
+    Assert.assertTrue(job.isInProgress());
+  }
+
+  @Test
+  public void testSubmitJobNoFile() throws Exception {
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+
+    ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
+
+    FSDataOutputStream stream = new FSDataOutputStream(do_stream);
+    expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
+    replay(hdfsApi);
+    JobService.setHdfsApi(hdfsApi);
+
+    TempletonApi api = createNiceMock(TempletonApi.class);
+    jobService.getResourceManager().setTempletonApi(api);
+    TempletonApi.JobData data = api.new JobData();
+    expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
+    replay(api);
+
+    Response response = doCreateJob("Test", null, "-useHCatalog");
+    Assert.assertEquals(400, response.getStatus());
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(((String)obj.get("message")).contains("No pigScript file or forcedContent specifed;"));
+  }
+
+  @Test
+  public void testSubmitJobForcedContent() throws Exception {
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+
+    ByteArrayOutputStream baScriptStream = new ByteArrayOutputStream();
+    ByteArrayOutputStream baTempletonArgsStream = new ByteArrayOutputStream();
+
+    FSDataOutputStream scriptStream = new FSDataOutputStream(baScriptStream);
+    FSDataOutputStream templetonArgsStream = new FSDataOutputStream(baTempletonArgsStream);
+    expect(hdfsApi.create(endsWith("script.pig"), eq(true))).andReturn(scriptStream);
+    expect(hdfsApi.create(endsWith("params"), eq(true))).andReturn(templetonArgsStream);
+    replay(hdfsApi);
+    JobService.setHdfsApi(hdfsApi);
+
+    TempletonApi api = createNiceMock(TempletonApi.class);
+    jobService.getResourceManager().setTempletonApi(api);
+    TempletonApi.JobData data = api.new JobData();
+    expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
+    replay(api);
+
+    Response response = doCreateJob("Test", null, "-useHCatalog", "pwd");  // with forcedContent
+    Assert.assertEquals(201, response.getStatus());
+    Assert.assertEquals("-useHCatalog", baTempletonArgsStream.toString());
+    Assert.assertEquals("pwd", baScriptStream.toString());
+  }
+
+  @Test
+  public void testSubmitJobNoTitle() throws Exception {
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+
+    ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
+
+    FSDataOutputStream stream = new FSDataOutputStream(do_stream);
+    expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
+    replay(hdfsApi);
+    JobService.setHdfsApi(hdfsApi);
+
+    TempletonApi api = createNiceMock(TempletonApi.class);
+    jobService.getResourceManager().setTempletonApi(api);
+    TempletonApi.JobData data = api.new JobData();
+    expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
+    replay(api);
+
+    Response response = doCreateJob(null, "/tmp/1.pig", "-useHCatalog");
+    Assert.assertEquals(400, response.getStatus());
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(((String)obj.get("message")).contains("No title specifed"));
+  }
+
+  @Test
+  public void testSubmitJobFailed() throws Exception {
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(false);
+
+    ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
+
+    FSDataOutputStream stream = new FSDataOutputStream(do_stream);
+    expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
+    replay(hdfsApi);
+    JobService.setHdfsApi(hdfsApi);
+
+    TempletonApi api = createNiceMock(TempletonApi.class);
+    jobService.getResourceManager().setTempletonApi(api);
+    TempletonApi.JobData data = api.new JobData();
+    expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
+    replay(api);
+
+    Response response = doCreateJob("Test", "/tmp/script.pig", "-useHCatalog");
+    Assert.assertEquals(500, response.getStatus());
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(((String)obj.get("message")).contains("Can't copy"));
+  }
+
+  @Test
+  public void testSubmitJobTempletonError() throws Exception {
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+
+    ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
+
+    FSDataOutputStream stream = new FSDataOutputStream(do_stream);
+    expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
+    replay(hdfsApi);
+    JobService.setHdfsApi(hdfsApi);
+
+    TempletonApi api = createNiceMock(TempletonApi.class);
+    jobService.getResourceManager().setTempletonApi(api);
+    TempletonApi.JobData data = api.new JobData();
+    // Templeton returns 500 e.g.
+    expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andThrow(new IOException());
+    replay(api);
+
+    Response response = doCreateJob("Test", "/tmp/script.pig", "-useHCatalog");
+    Assert.assertEquals(500, response.getStatus());
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(((String) obj.get("message")).contains("Templeton"));
+  }
+
+  @Test
+  public void testKillJob() throws Exception {
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+
+    ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
+
+    FSDataOutputStream stream = new FSDataOutputStream(do_stream);
+    expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
+    replay(hdfsApi);
+    JobService.setHdfsApi(hdfsApi);
+
+    TempletonApi api = createStrictMock(TempletonApi.class);
+    jobService.getResourceManager().setTempletonApi(api);
+    TempletonApi.JobData data = api.new JobData();
+    data.id = "job_id_##";
+    expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
+    replay(api);
+
+    Response response = doCreateJob("Test", "/tmp/script.pig", "-useHCatalog");
+    Assert.assertEquals(201, response.getStatus());
+
+    reset(api);
+    api.killJob(eq("job_id_##"));
+    replay(api);
+    JSONObject obj = (JSONObject)response.getEntity();
+    PigJob job = ((PigJob)obj.get("job"));
+    response = jobService.killJob(job.getId());
+    Assert.assertEquals(204, response.getStatus());
+  }
+
+  @Test
+  public void testJobStatusFlow() throws Exception {
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+
+    ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
+
+    FSDataOutputStream stream = new FSDataOutputStream(do_stream);
+    expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
+    replay(hdfsApi);
+    JobService.setHdfsApi(hdfsApi);
+
+    TempletonApi api = createNiceMock(TempletonApi.class);
+    jobService.getResourceManager().setTempletonApi(api);
+    TempletonApi.JobData data = api.new JobData();
+    data.id = "job_id_#";
+    expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
+    replay(api);
+
+    Response response = doCreateJob("Test", "/tmp/script.pig", "-useHCatalog");
+
+    Assert.assertEquals("-useHCatalog", do_stream.toString());
+    Assert.assertEquals(201, response.getStatus());
+
+    PigJob job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
+    Assert.assertEquals(PigJob.Status.SUBMITTED, job.getStatus());
+    Assert.assertTrue(job.isInProgress());
+
+    // Retrieve status:
+    // SUBMITTED
+    reset(api);
+    TempletonApi.JobInfo info = api.new JobInfo();
+    expect(api.checkJob(eq("job_id_#"))).andReturn(info);
+    replay(api);
+    response = jobService.getJob(job.getId());
+    Assert.assertEquals(200, response.getStatus());
+    job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
+    Assert.assertEquals(PigJob.Status.SUBMITTED, job.getStatus());
+
+    // RUNNING
+    reset(api);
+    info = api.new JobInfo();
+    info.status = new HashMap<String, Object>();
+    info.status.put("runState", (double)PigJob.RUN_STATE_RUNNING);
+    info.percentComplete = "30% complete";
+    expect(api.checkJob(eq("job_id_#"))).andReturn(info);
+    replay(api);
+    response = jobService.getJob(job.getId());
+    Assert.assertEquals(200, response.getStatus());
+    job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
+    Assert.assertEquals(PigJob.Status.RUNNING, job.getStatus());
+    Assert.assertTrue(job.isInProgress());
+    Assert.assertEquals(30, (Object) job.getPercentComplete());
+
+    // SUCCEED
+    reset(api);
+    info = api.new JobInfo();
+    info.status = new HashMap<String, Object>();
+    info.status.put("runState", (double)PigJob.RUN_STATE_SUCCEEDED);
+    expect(api.checkJob(eq("job_id_#"))).andReturn(info);
+    replay(api);
+    response = jobService.getJob(job.getId());
+    Assert.assertEquals(200, response.getStatus());
+    job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
+    Assert.assertEquals(PigJob.Status.COMPLETED, job.getStatus());
+    Assert.assertFalse(job.isInProgress());
+    Assert.assertNull(job.getPercentComplete());
+
+    // PREP
+    reset(api);
+    info = api.new JobInfo();
+    info.status = new HashMap<String, Object>();
+    info.status.put("runState", (double)PigJob.RUN_STATE_PREP);
+    expect(api.checkJob(eq("job_id_#"))).andReturn(info);
+    replay(api);
+    response = jobService.getJob(job.getId());
+    Assert.assertEquals(200, response.getStatus());
+    job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
+    Assert.assertEquals(PigJob.Status.RUNNING, job.getStatus());
+
+    // FAILED
+    reset(api);
+    info = api.new JobInfo();
+    info.status = new HashMap<String, Object>();
+    info.status.put("runState", (double)PigJob.RUN_STATE_FAILED);
+    expect(api.checkJob(eq("job_id_#"))).andReturn(info);
+    replay(api);
+    response = jobService.getJob(job.getId());
+    Assert.assertEquals(200, response.getStatus());
+    job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
+    Assert.assertEquals(PigJob.Status.FAILED, job.getStatus());
+    Assert.assertFalse(job.isInProgress());
+
+    // KILLED
+    reset(api);
+    info = api.new JobInfo();
+    info.status = new HashMap<String, Object>();
+    info.status.put("runState", (double)PigJob.RUN_STATE_KILLED);
+    expect(api.checkJob(eq("job_id_#"))).andReturn(info);
+    replay(api);
+    response = jobService.getJob(job.getId());
+    Assert.assertEquals(200, response.getStatus());
+    job = ((PigJob) ((JSONObject)response.getEntity()).get("job"));
+    Assert.assertEquals(PigJob.Status.KILLED, job.getStatus());
+    Assert.assertFalse(job.isInProgress());
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestHDFSUnmanaged.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestHDFSUnmanaged.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestHDFSUnmanaged.java
index 55a6d20..9eacbee 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestHDFSUnmanaged.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestHDFSUnmanaged.java
@@ -39,71 +39,71 @@ import java.util.Map;
 import static org.easymock.EasyMock.*;
 
 public class ScriptTestHDFSUnmanaged extends HDFSTest {
-    private ScriptService scriptService;
-
-    @BeforeClass
-    public static void startUp() throws Exception {
-        HDFSTest.startUp(); // super
-    }
-
-    @AfterClass
-    public static void shutDown() throws Exception {
-        HDFSTest.shutDown(); // super
-        FileService.setHdfsApi(null); //cleanup API connection
-    }
-
-    @Override
-    @Before
-    public void setUp() throws Exception {
-        handler = createNiceMock(ViewResourceHandler.class);
-        context = createNiceMock(ViewContext.class);
-        FileService.setHdfsApi(null); //cleanup API connection
-        StorageUtil.setStorage(null);
-    }
-
-    @Test(expected=WebServiceException.class)
-    public void createScriptAutoCreateNoScriptsPath() throws IOException, InterruptedException {
-        Map<String, String> properties = new HashMap<String, String>();
-        baseDir = new File(DATA_DIRECTORY)
-                .getAbsoluteFile();
-        pigStorageFile = new File("./target/BasePigTest/storage.dat")
-                .getAbsoluteFile();
-
-        properties.put("dataworker.storagePath", pigStorageFile.toString());
+  private ScriptService scriptService;
+
+  @BeforeClass
+  public static void startUp() throws Exception {
+    HDFSTest.startUp(); // super
+  }
+
+  @AfterClass
+  public static void shutDown() throws Exception {
+    HDFSTest.shutDown(); // super
+    FileService.setHdfsApi(null); //cleanup API connection
+  }
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    handler = createNiceMock(ViewResourceHandler.class);
+    context = createNiceMock(ViewContext.class);
+    FileService.setHdfsApi(null); //cleanup API connection
+    StorageUtil.setStorage(null);
+  }
+
+  @Test(expected=WebServiceException.class)
+  public void createScriptAutoCreateNoScriptsPath() throws IOException, InterruptedException {
+    Map<String, String> properties = new HashMap<String, String>();
+    baseDir = new File(DATA_DIRECTORY)
+        .getAbsoluteFile();
+    pigStorageFile = new File("./target/BasePigTest/storage.dat")
+        .getAbsoluteFile();
+
+    properties.put("dataworker.storagePath", pigStorageFile.toString());
 //        properties.put("dataworker.userScriptsPath", "/tmp/.pigscripts");
-        properties.put("dataworker.defaultFs", hdfsURI);
+    properties.put("dataworker.defaultFs", hdfsURI);
 
-        expect(context.getProperties()).andReturn(properties).anyTimes();
-        expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
+    expect(context.getProperties()).andReturn(properties).anyTimes();
+    expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
 
-        replay(handler, context);
-        scriptService = getService(ScriptService.class, handler, context);
+    replay(handler, context);
+    scriptService = getService(ScriptService.class, handler, context);
 
-        doCreateScript("Test", null);
-    }
+    doCreateScript("Test", null);
+  }
 
-    @Test
-    public void createScriptAutoCreateNoStoragePath() throws IOException, InterruptedException {
-        Map<String, String> properties = new HashMap<String, String>();
-        baseDir = new File(DATA_DIRECTORY)
-                .getAbsoluteFile();
-        pigStorageFile = new File("./target/BasePigTest/storage.dat")
-                .getAbsoluteFile();
+  @Test
+  public void createScriptAutoCreateNoStoragePath() throws IOException, InterruptedException {
+    Map<String, String> properties = new HashMap<String, String>();
+    baseDir = new File(DATA_DIRECTORY)
+        .getAbsoluteFile();
+    pigStorageFile = new File("./target/BasePigTest/storage.dat")
+        .getAbsoluteFile();
 
 //        properties.put("dataworker.storagePath", pigStorageFile.toString());
-        properties.put("dataworker.userScriptsPath", "/tmp/.pigscripts");
-        properties.put("dataworker.defaultFs", hdfsURI);
+    properties.put("dataworker.userScriptsPath", "/tmp/.pigscripts");
+    properties.put("dataworker.defaultFs", hdfsURI);
 
-        expect(context.getProperties()).andReturn(properties).anyTimes();
-        expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
+    expect(context.getProperties()).andReturn(properties).anyTimes();
+    expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
 
-        replay(handler, context);
+    replay(handler, context);
 
-        Storage storage = StorageUtil.getStorage(context);
-        Assert.assertEquals(InstanceKeyValueStorage.class.getSimpleName(), storage.getClass().getSimpleName());
-    }
+    Storage storage = StorageUtil.getStorage(context);
+    Assert.assertEquals(InstanceKeyValueStorage.class.getSimpleName(), storage.getClass().getSimpleName());
+  }
 
-    private Response doCreateScript(String title, String path) {
-        return ScriptTest.doCreateScript(title, path, scriptService);
-    }
+  private Response doCreateScript(String title, String path) {
+    return ScriptTest.doCreateScript(title, path, scriptService);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestUnmanaged.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestUnmanaged.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestUnmanaged.java
index 61d0004..a6138b5 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestUnmanaged.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestUnmanaged.java
@@ -51,42 +51,42 @@ import static org.easymock.EasyMock.*;
  * Tests without HDFS and predefined properties
  */
 public class ScriptTestUnmanaged extends BasePigTest {
-    private ScriptService scriptService;
-    private File pigStorageFile;
-    private File baseDir;
+  private ScriptService scriptService;
+  private File pigStorageFile;
+  private File baseDir;
 
-    @AfterClass
-    public static void shutDown() throws Exception {
-        FileService.setHdfsApi(null); //cleanup API connection
-    }
+  @AfterClass
+  public static void shutDown() throws Exception {
+    FileService.setHdfsApi(null); //cleanup API connection
+  }
 
-    @Before
-    public void setUp() throws Exception {
-        handler = createNiceMock(ViewResourceHandler.class);
-        context = createNiceMock(ViewContext.class);
+  @Before
+  public void setUp() throws Exception {
+    handler = createNiceMock(ViewResourceHandler.class);
+    context = createNiceMock(ViewContext.class);
 
-        baseDir = new File(DATA_DIRECTORY)
-                .getAbsoluteFile();
-        pigStorageFile = new File("./target/BasePigTest/storage.dat")
-                .getAbsoluteFile();
-    }
+    baseDir = new File(DATA_DIRECTORY)
+        .getAbsoluteFile();
+    pigStorageFile = new File("./target/BasePigTest/storage.dat")
+        .getAbsoluteFile();
+  }
 
-    private Response doCreateScript(String title, String path) {
-        return ScriptTest.doCreateScript(title, path, scriptService);
-    }
+  private Response doCreateScript(String title, String path) {
+    return ScriptTest.doCreateScript(title, path, scriptService);
+  }
 
-    @Test(expected=WebServiceException.class)
-    public void createScriptAutoCreateNoDefaultFS() {
-        Map<String, String> properties = new HashMap<String, String>();
-        properties.put("dataworker.storagePath", pigStorageFile.toString());
-        properties.put("dataworker.userScriptsPath", "/tmp/.pigscripts");
+  @Test(expected=WebServiceException.class)
+  public void createScriptAutoCreateNoDefaultFS() {
+    Map<String, String> properties = new HashMap<String, String>();
+    properties.put("dataworker.storagePath", pigStorageFile.toString());
+    properties.put("dataworker.userScriptsPath", "/tmp/.pigscripts");
 
-        expect(context.getProperties()).andReturn(properties).anyTimes();
-        expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
+    expect(context.getProperties()).andReturn(properties).anyTimes();
+    expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
 
-        replay(handler, context);
-        scriptService = getService(ScriptService.class, handler, context);
+    replay(handler, context);
+    scriptService = getService(ScriptService.class, handler, context);
 
-        doCreateScript("Test", null);
-    }
+    doCreateScript("Test", null);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c64261e2/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/UDFTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/UDFTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/UDFTest.java
index 533499a..6d33fd4 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/UDFTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/UDFTest.java
@@ -36,81 +36,81 @@ import static org.easymock.EasyMock.*;
 import static org.easymock.EasyMock.replay;
 
 public class UDFTest extends BasePigTest {
-    private UDFService udfService;
-
-    @Override
-    @Before
-    public void setUp() throws Exception {
-        super.setUp();
-        udfService = getService(UDFService.class, handler, context);
-    }
-
-    private Response doCreateUDF() {
-        UDFService.UDFRequest request = new UDFService.UDFRequest();
-        request.udf = new UDF();
-        request.udf.setPath("/tmp/udf.jar");
-        request.udf.setName("TestUDF");
-
-        UriInfo uriInfo = createNiceMock(UriInfo.class);
-        URI uri = UriBuilder.fromUri("http://host/a/b").build();
-        expect(uriInfo.getAbsolutePath()).andReturn(uri);
-
-        HttpServletResponse resp_obj = createNiceMock(HttpServletResponse.class);
-
-        resp_obj.setHeader(eq("Location"), anyString());
-
-        replay(uriInfo, resp_obj);
-        return udfService.createUDF(request, resp_obj, uriInfo);
-    }
-
-    @Test
-    public void createUDF() {
-        Response response = doCreateUDF();
-        Assert.assertEquals(201, response.getStatus());
-
-        JSONObject obj = (JSONObject)response.getEntity();
-        Assert.assertTrue(obj.containsKey("udf"));
-        Assert.assertNotNull(((UDF) obj.get("udf")).getId());
-        Assert.assertFalse(((UDF) obj.get("udf")).getId().isEmpty());
-    }
-
-    @Test
-    public void udfNotFound() {
-        Response response2 = udfService.getUDF("4242");
-        Assert.assertEquals(404, response2.getStatus());
-    }
-
-    @Test
-    public void updateUDF() {
-        Response createdUDF = doCreateUDF();
-        String createdUdfId = ((UDF) ((JSONObject) createdUDF.getEntity()).get("udf")).getId();
-
-        UDFService.UDFRequest request = new UDFService.UDFRequest();
-        request.udf = new UDF();
-        request.udf.setPath("/tmp/updatedUDF.jar");
-        request.udf.setName("TestUDF2");
-
-        Response response = udfService.updateUDF(request, createdUdfId);
-        Assert.assertEquals(204, response.getStatus());
-
-        Response response2 = udfService.getUDF(createdUdfId);
-        Assert.assertEquals(200, response2.getStatus());
-
-        JSONObject obj = ((JSONObject) response2.getEntity());
-        Assert.assertTrue(obj.containsKey("udf"));
-        Assert.assertEquals(((UDF) obj.get("udf")).getName(), request.udf.getName());
-        Assert.assertEquals(((UDF) obj.get("udf")).getPath(), request.udf.getPath());
-    }
-
-    @Test
-    public void deleteUDF() {
-        Response createdUDF = doCreateUDF();
-        String createdUdfId = ((UDF) ((JSONObject) createdUDF.getEntity()).get("udf")).getId();
-
-        Response response = udfService.deleteUDF(createdUdfId);
-        Assert.assertEquals(204, response.getStatus());
-
-        Response response2 = udfService.getUDF(createdUdfId);
-        Assert.assertEquals(404, response2.getStatus());
-    }
+  private UDFService udfService;
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    udfService = getService(UDFService.class, handler, context);
+  }
+
+  private Response doCreateUDF() {
+    UDFService.UDFRequest request = new UDFService.UDFRequest();
+    request.udf = new UDF();
+    request.udf.setPath("/tmp/udf.jar");
+    request.udf.setName("TestUDF");
+
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+    URI uri = UriBuilder.fromUri("http://host/a/b").build();
+    expect(uriInfo.getAbsolutePath()).andReturn(uri);
+
+    HttpServletResponse resp_obj = createNiceMock(HttpServletResponse.class);
+
+    resp_obj.setHeader(eq("Location"), anyString());
+
+    replay(uriInfo, resp_obj);
+    return udfService.createUDF(request, resp_obj, uriInfo);
+  }
+
+  @Test
+  public void createUDF() {
+    Response response = doCreateUDF();
+    Assert.assertEquals(201, response.getStatus());
+
+    JSONObject obj = (JSONObject)response.getEntity();
+    Assert.assertTrue(obj.containsKey("udf"));
+    Assert.assertNotNull(((UDF) obj.get("udf")).getId());
+    Assert.assertFalse(((UDF) obj.get("udf")).getId().isEmpty());
+  }
+
+  @Test
+  public void udfNotFound() {
+    Response response2 = udfService.getUDF("4242");
+    Assert.assertEquals(404, response2.getStatus());
+  }
+
+  @Test
+  public void updateUDF() {
+    Response createdUDF = doCreateUDF();
+    String createdUdfId = ((UDF) ((JSONObject) createdUDF.getEntity()).get("udf")).getId();
+
+    UDFService.UDFRequest request = new UDFService.UDFRequest();
+    request.udf = new UDF();
+    request.udf.setPath("/tmp/updatedUDF.jar");
+    request.udf.setName("TestUDF2");
+
+    Response response = udfService.updateUDF(request, createdUdfId);
+    Assert.assertEquals(204, response.getStatus());
+
+    Response response2 = udfService.getUDF(createdUdfId);
+    Assert.assertEquals(200, response2.getStatus());
+
+    JSONObject obj = ((JSONObject) response2.getEntity());
+    Assert.assertTrue(obj.containsKey("udf"));
+    Assert.assertEquals(((UDF) obj.get("udf")).getName(), request.udf.getName());
+    Assert.assertEquals(((UDF) obj.get("udf")).getPath(), request.udf.getPath());
+  }
+
+  @Test
+  public void deleteUDF() {
+    Response createdUDF = doCreateUDF();
+    String createdUdfId = ((UDF) ((JSONObject) createdUDF.getEntity()).get("udf")).getId();
+
+    Response response = udfService.deleteUDF(createdUdfId);
+    Assert.assertEquals(204, response.getStatus());
+
+    Response response2 = udfService.getUDF(createdUdfId);
+    Assert.assertEquals(404, response2.getStatus());
+  }
 }

