ambari-commits mailing list archives

From jlun...@apache.org
Subject [14/34] ambari git commit: AMBARI-17355 & AMBARI-17354: POC: FE & BE changes for first class support for Yarn hosted services
Date Mon, 27 Jun 2016 23:36:40 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQuery.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQuery.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQuery.java
new file mode 100644
index 0000000..f52cde3
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQuery.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.savedQueries;
+
+import org.apache.ambari.view.hive2.persistence.utils.PersonalResource;
+import org.apache.commons.beanutils.BeanUtils;
+
+import java.io.Serializable;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Map;
+
+/**
+ * Bean representing a saved query.
+ */
+public class SavedQuery implements Serializable, PersonalResource {
+  private String queryFile;
+  private String dataBase;
+  private String title;
+  private String shortQuery;
+
+  private String id;
+  private String owner;
+
+  public SavedQuery() {}
+  public SavedQuery(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
+    BeanUtils.populate(this, stringObjectMap);
+  }
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public String getOwner() {
+    return owner;
+  }
+
+  @Override
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  public String getQueryFile() {
+    return queryFile;
+  }
+
+  public void setQueryFile(String queryFile) {
+    this.queryFile = queryFile;
+  }
+
+  public String getDataBase() {
+    return dataBase;
+  }
+
+  public void setDataBase(String dataBase) {
+    this.dataBase = dataBase;
+  }
+
+  public String getTitle() {
+    return title;
+  }
+
+  public void setTitle(String title) {
+    this.title = title;
+  }
+
+  public String getShortQuery() {
+    return shortQuery;
+  }
+
+  public void setShortQuery(String shortQuery) {
+    this.shortQuery = shortQuery;
+  }
+}
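
For illustration, a minimal sketch of how this bean can be populated from a
property map, mirroring the map-based constructor above (the field values are
hypothetical):

    import org.apache.ambari.view.hive2.resources.savedQueries.SavedQuery;

    import java.util.HashMap;
    import java.util.Map;

    public class SavedQueryExample {
      public static void main(String[] args) throws Exception {
        // Map keys must match the bean property names for BeanUtils.populate
        // (called inside the map-based constructor) to copy them over.
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("title", "Top referrers");
        props.put("dataBase", "default");
        props.put("queryFile", "/user/admin/hive/scripts/hive-query-1.hql");

        SavedQuery query = new SavedQuery(props);
        System.out.println(query.getTitle() + " -> " + query.getQueryFile());
      }
    }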

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryResourceManager.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryResourceManager.java
new file mode 100644
index 0000000..ff8632f
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryResourceManager.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.savedQueries;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive2.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive2.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive2.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive2.utils.*;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Provides CRUD operations for saved query objects.
+ */
+public class SavedQueryResourceManager extends PersonalCRUDResourceManager<SavedQuery> {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SavedQueryResourceManager.class);
+
+  private SharedObjectsFactory sharedObjectsFactory;
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public SavedQueryResourceManager(ViewContext context, SharedObjectsFactory sharedObjectsFactory) {
+    super(SavedQuery.class, sharedObjectsFactory, context);
+    this.sharedObjectsFactory = sharedObjectsFactory;
+  }
+
+  @Override
+  public SavedQuery create(SavedQuery object) {
+    object = super.create(object);
+    try {
+
+      if (object.getQueryFile() == null || object.getQueryFile().isEmpty()) {
+        createDefaultQueryFile(object);
+      }
+
+    } catch (ServiceFormattedException e) {
+      cleanupAfterErrorAndThrowAgain(object, e);
+    }
+    return object;
+  }
+
+  private void createDefaultQueryFile(SavedQuery object) {
+    String userScriptsPath = context.getProperties().get("scripts.dir");
+    if (userScriptsPath == null) {
+      String msg = "scripts.dir is not configured!";
+      LOG.error(msg);
+      throw new MisconfigurationFormattedException("scripts.dir");
+    }
+
+    String normalizedName = String.format("hive-query-%s", object.getId());
+    String timestamp = new SimpleDateFormat("yyyy-MM-dd_hh-mm").format(new Date());
+    String baseFileName = String.format("%s/%s-%s",
+        userScriptsPath, normalizedName, timestamp);
+
+    String newFilePath = null;
+    try {
+      newFilePath = HdfsUtil.findUnallocatedFileName(sharedObjectsFactory.getHdfsApi(), baseFileName, ".hql");
+      HdfsUtil.putStringToFile(sharedObjectsFactory.getHdfsApi(), newFilePath, "");
+    } catch (HdfsApiException e) {
+      throw new ServiceFormattedException(e);
+    }
+
+    object.setQueryFile(newFilePath);
+    storageFactory.getStorage().store(SavedQuery.class, object);
+  }
+
+  @Override
+  public SavedQuery read(Object id) throws ItemNotFound {
+    SavedQuery savedQuery = super.read(id);
+    fillShortQueryField(savedQuery);
+    return savedQuery;
+  }
+
+  private void fillShortQueryField(SavedQuery savedQuery) {
+    if (savedQuery.getQueryFile() != null) {
+      FilePaginator paginator = new FilePaginator(savedQuery.getQueryFile(), sharedObjectsFactory.getHdfsApi());
+      String query = null;
+      try {
+        query = paginator.readPage(0);
+      } catch (IOException | InterruptedException e) {
+        LOG.error("Can't read query file " + savedQuery.getQueryFile(), e);
+        return;
+      }
+      savedQuery.setShortQuery(makeShortQuery(query));
+    }
+    storageFactory.getStorage().store(SavedQuery.class, savedQuery);
+  }
+
+  private void emptyShortQueryField(SavedQuery query) {
+    query.setShortQuery("");
+    storageFactory.getStorage().store(SavedQuery.class, query);
+  }
+
+  /**
+   * Generates a short preview of a query.
+   * Removes SET statements such as "set hive.execution.engine=tez;" from the
+   * beginning and trims the remainder to 42 characters; for example,
+   * "set x.y=z; SELECT * FROM logs" becomes "SELECT * FROM logs".
+   * @param query full query
+   * @return shortened query
+   */
+  protected static String makeShortQuery(String query) {
+    query = query.replaceAll("(?i)set\\s+[\\w\\-.]+(\\s*)=(\\s*)[\\w\\-.]+(\\s*);", "");
+    query = query.trim();
+    return query.substring(0, Math.min(query.length(), 42));
+  }
+
+  @Override
+  public SavedQuery update(SavedQuery newObject, String id) throws ItemNotFound {
+    SavedQuery savedQuery = super.update(newObject, id);
+    // Empty the short query so that the next read repopulates it with the
+    // proper value from the query file.
+    emptyShortQueryField(savedQuery);
+    return savedQuery;
+  }
+
+  @Override
+  public List<SavedQuery> readAll(FilteringStrategy filteringStrategy) {
+    List<SavedQuery> queries = super.readAll(filteringStrategy);
+    for(SavedQuery query : queries) {
+      String shortQuery = query.getShortQuery();
+      if(shortQuery == null || shortQuery.isEmpty()) {
+        fillShortQueryField(query);
+      }
+    }
+    return queries;
+  }
+
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    super.delete(resourceId);
+  }
+}
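
A short sketch of what makeShortQuery produces for a typical script (the input
is illustrative; the method is protected static, so the sketch assumes it runs
from the same package):

    public class ShortQueryExample {
      public static void main(String[] args) {
        String script = "set hive.execution.engine=tez;\n"
            + "SELECT referrer, count(*) FROM logs GROUP BY referrer ORDER BY 2 DESC";

        // The SET statement is stripped by the regex and the remainder is cut
        // to 42 characters: "SELECT referrer, count(*) FROM logs GROUP "
        System.out.println(SavedQueryResourceManager.makeShortQuery(script));
      }
    }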

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryResourceProvider.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryResourceProvider.java
new file mode 100644
index 0000000..87f3321
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryResourceProvider.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.savedQueries;
+
+import org.apache.ambari.view.*;
+import org.apache.ambari.view.hive2.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive2.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive2.utils.SharedObjectsFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Resource provider for SavedQuery
+ */
+public class SavedQueryResourceProvider implements ResourceProvider<SavedQuery> {
+  @Inject
+  ViewContext context;
+
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(SavedQueryResourceProvider.class);
+  private SharedObjectsFactory sharedObjectsFactory;
+
+  public SharedObjectsFactory getSharedObjectsFactory() {
+    if (sharedObjectsFactory == null) {
+      sharedObjectsFactory = new SharedObjectsFactory(context);
+    }
+    return sharedObjectsFactory;
+  }
+
+  protected synchronized SavedQueryResourceManager getResourceManager() {
+    return getSharedObjectsFactory().getSavedQueryResourceManager();
+  }
+
+  @Override
+  public SavedQuery getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      return getResourceManager().read(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+  }
+
+  @Override
+  public Set<SavedQuery> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    if (context == null) {
+      return new HashSet<SavedQuery>();
+    }
+    return new HashSet<SavedQuery>(getResourceManager().readAll(
+        new OnlyOwnersFilteringStrategy(this.context.getUsername())));
+  }
+
+  @Override
+  public void createResource(String s, Map<String, Object> stringObjectMap) throws SystemException, ResourceAlreadyExistsException, NoSuchResourceException, UnsupportedPropertyException {
+    SavedQuery item;
+    try {
+      item = new SavedQuery(stringObjectMap);
+    } catch (InvocationTargetException | IllegalAccessException e) {
+      throw new SystemException("error on creating resource", e);
+    }
+    getResourceManager().create(item);
+  }
+
+  @Override
+  public boolean updateResource(String resourceId, Map<String, Object> stringObjectMap) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    SavedQuery item;
+    try {
+      item = new SavedQuery(stringObjectMap);
+    } catch (InvocationTargetException | IllegalAccessException e) {
+      throw new SystemException("error on updating resource", e);
+    }
+    try {
+      getResourceManager().update(item, resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+
+  @Override
+  public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      getResourceManager().delete(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryService.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryService.java
new file mode 100644
index 0000000..ccc4512
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/savedQueries/SavedQueryService.java
@@ -0,0 +1,230 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.savedQueries;
+
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive2.BaseService;
+import org.apache.ambari.view.hive2.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive2.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive2.utils.NotFoundFormattedException;
+import org.apache.ambari.view.hive2.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.*;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.util.List;
+
+/**
+ * REST service for saved queries.
+ * API:
+ * GET /:id
+ *      read the SavedQuery
+ * POST /
+ *      create a new SavedQuery
+ *      Required: title, queryFile
+ * GET /
+ *      get all SavedQueries of the current user
+ */
+public class SavedQueryService extends BaseService {
+  @Inject
+  ViewResourceHandler handler;
+
+  protected SavedQueryResourceManager resourceManager = null;
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(SavedQueryService.class);
+
+  protected synchronized SavedQueryResourceManager getResourceManager() {
+    return getSharedObjectsFactory().getSavedQueryResourceManager();
+  }
+
+  protected void setResourceManager(SavedQueryResourceManager resourceManager) {
+    this.resourceManager = resourceManager;
+  }
+
+  /**
+   * Get single item
+   */
+  @GET
+  @Path("{queryId}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getOne(@PathParam("queryId") String queryId) {
+    try {
+      SavedQuery savedQuery = getResourceManager().read(queryId);
+      JSONObject object = new JSONObject();
+      object.put("savedQuery", savedQuery);
+      return Response.ok(object).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Delete single item
+   */
+  @DELETE
+  @Path("{queryId}")
+  public Response delete(@PathParam("queryId") String queryId) {
+    try {
+      getResourceManager().delete(queryId);
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Get all SavedQueries
+   */
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getList() {
+    try {
+      LOG.debug("Getting all SavedQueries");
+      List<SavedQuery> allSavedQueries = getResourceManager().readAll(
+          new OnlyOwnersFilteringStrategy(this.context.getUsername()));  //TODO: move strategy to PersonalCRUDRM
+
+      JSONObject object = new JSONObject();
+      object.put("savedQueries", allSavedQueries);
+      return Response.ok(object).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Update item
+   */
+  @PUT
+  @Path("{queryId}")
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response update(SavedQueryRequest request,
+                         @PathParam("queryId") String queryId) {
+    try {
+      getResourceManager().update(request.savedQuery, queryId);
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Create savedQuery
+   */
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response create(SavedQueryRequest request, @Context HttpServletResponse response,
+                         @Context UriInfo ui) {
+    try {
+      getResourceManager().create(request.savedQuery);
+
+      SavedQuery item = getResourceManager().read(request.savedQuery.getId());
+
+      response.setHeader("Location",
+          String.format("%s/%s", ui.getAbsolutePath().toString(), request.savedQuery.getId()));
+
+      JSONObject object = new JSONObject();
+      object.put("savedQuery", item);
+      return Response.ok(object).status(201).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Get default settings for query
+   */
+  @GET
+  @Path("defaultSettings")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getDefaultSettings() {
+    try {
+      String defaultsFile = context.getProperties().get("scripts.settings.defaults-file");
+      HdfsApi hdfsApi = getSharedObjectsFactory().getHdfsApi();
+
+      String defaults = "{\"settings\": {}}";
+      if (hdfsApi.exists(defaultsFile)) {
+        defaults = HdfsUtil.readFile(hdfsApi, defaultsFile);
+      }
+      return Response.ok(JSONValue.parse(defaults)).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Set default settings for query (overwrites if present)
+   */
+  @POST
+  @Path("defaultSettings")
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response setDefaultSettings(JSONObject settings) {
+    try {
+      String defaultsFile = context.getProperties().get("scripts.settings.defaults-file");
+      HdfsApi hdfsApi = getSharedObjectsFactory().getHdfsApi();
+
+      HdfsUtil.putStringToFile(hdfsApi, defaultsFile,
+          settings.toString());
+      String defaults = HdfsUtil.readFile(hdfsApi, defaultsFile);
+      return Response.ok(JSONValue.parse(defaults)).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Wrapper object for json mapping
+   */
+  public static class SavedQueryRequest {
+    public SavedQuery savedQuery;
+  }
+}
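
A hedged sketch of how a client could call the create endpoint with the
standard JAX-RS 2.0 client API; the base URL is a placeholder, and the JSON
body follows the SavedQueryRequest wrapper defined above:

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.client.Entity;
    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.Response;

    public class SavedQueryClientExample {
      public static void main(String[] args) {
        // Placeholder URL; the real path depends on the view instance.
        String base = "http://ambari.example.com:8080/api/v1/views/HIVE/instances/hive/resources/savedQueries";

        // Body matching the SavedQueryRequest wrapper: {"savedQuery": {...}}
        String body = "{\"savedQuery\": {\"title\": \"Top referrers\", \"dataBase\": \"default\"}}";

        Client client = ClientBuilder.newClient();
        Response response = client.target(base)
            .request(MediaType.APPLICATION_JSON)
            .post(Entity.json(body));

        // On success the service responds with 201 and echoes the stored savedQuery.
        System.out.println(response.getStatus());
        client.close();
      }
    }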

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDF.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDF.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDF.java
new file mode 100644
index 0000000..aa863a6
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDF.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.udfs;
+
+import org.apache.ambari.view.hive2.persistence.utils.PersonalResource;
+import org.apache.commons.beanutils.BeanUtils;
+
+import java.io.Serializable;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Map;
+
+/**
+ * Bean to represent UDF
+ */
+public class UDF implements Serializable, PersonalResource {
+  private String name;
+  private String classname;
+  private String fileResource;
+
+  private String id;
+  private String owner;
+
+  public UDF() {}
+  public UDF(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
+    BeanUtils.populate(this, stringObjectMap);
+  }
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public String getOwner() {
+    return owner;
+  }
+
+  @Override
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getClassname() {
+    return classname;
+  }
+
+  public void setClassname(String classname) {
+    this.classname = classname;
+  }
+
+  public String getFileResource() {
+    return fileResource;
+  }
+
+  public void setFileResource(String fileResource) {
+    this.fileResource = fileResource;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFResourceManager.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFResourceManager.java
new file mode 100644
index 0000000..9be6c15
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFResourceManager.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.udfs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive2.persistence.IStorageFactory;
+import org.apache.ambari.view.hive2.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive2.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive2.resources.PersonalCRUDResourceManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+/**
+ * Provides CRUD operations for UDF objects.
+ */
+public class UDFResourceManager extends PersonalCRUDResourceManager<UDF> {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(UDFResourceManager.class);
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public UDFResourceManager(IStorageFactory storageFactory, ViewContext context) {
+    super(UDF.class, storageFactory, context);
+  }
+
+  @Override
+  public UDF read(Object id) throws ItemNotFound {
+    return super.read(id);
+  }
+
+  @Override
+  public List<UDF> readAll(FilteringStrategy filteringStrategy) {
+    return super.readAll(filteringStrategy);
+  }
+
+  @Override
+  public UDF create(UDF object) {
+    return super.create(object);
+  }
+
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    super.delete(resourceId);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFResourceProvider.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFResourceProvider.java
new file mode 100644
index 0000000..1b18a14
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFResourceProvider.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.udfs;
+
+import org.apache.ambari.view.*;
+import org.apache.ambari.view.hive2.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive2.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive2.utils.SharedObjectsFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Resource provider for UDFs.
+ */
+public class UDFResourceProvider implements ResourceProvider<UDF> {
+  @Inject
+  ViewContext context;
+
+  protected UDFResourceManager resourceManager = null;
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(UDFResourceProvider.class);
+
+
+  protected synchronized UDFResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new UDFResourceManager(new SharedObjectsFactory(context), context);
+    }
+    return resourceManager;
+  }
+
+  @Override
+  public UDF getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      return getResourceManager().read(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+  }
+
+  @Override
+  public Set<UDF> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    if (context == null) {
+      return new HashSet<UDF>();
+    }
+    return new HashSet<UDF>(getResourceManager().readAll(
+        new OnlyOwnersFilteringStrategy(this.context.getUsername())));
+  }
+
+  @Override
+  public void createResource(String s, Map<String, Object> stringObjectMap) throws SystemException, ResourceAlreadyExistsException, NoSuchResourceException, UnsupportedPropertyException {
+    UDF item;
+    try {
+      item = new UDF(stringObjectMap);
+    } catch (InvocationTargetException | IllegalAccessException e) {
+      throw new SystemException("error on creating resource", e);
+    }
+    getResourceManager().create(item);
+  }
+
+  @Override
+  public boolean updateResource(String resourceId, Map<String, Object> stringObjectMap) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    UDF item;
+    try {
+      item = new UDF(stringObjectMap);
+    } catch (InvocationTargetException | IllegalAccessException e) {
+      throw new SystemException("error on updating resource", e);
+    }
+    try {
+      getResourceManager().update(item, resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+
+  @Override
+  public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      getResourceManager().delete(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFService.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFService.java
new file mode 100644
index 0000000..4fad828
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/udfs/UDFService.java
@@ -0,0 +1,193 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.udfs;
+
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive2.BaseService;
+import org.apache.ambari.view.hive2.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive2.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive2.resources.resources.FileResourceResourceManager;
+import org.apache.ambari.view.hive2.utils.NotFoundFormattedException;
+import org.apache.ambari.view.hive2.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.*;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.util.List;
+
+/**
+ * REST service for UDFs.
+ * API:
+ * GET /:id
+ *      read a UDF
+ * POST /
+ *      create a new UDF
+ * GET /
+ *      get all UDFs of the current user
+ */
+public class UDFService extends BaseService {
+  @Inject
+  ViewResourceHandler handler;
+
+  protected UDFResourceManager resourceManager = null;
+  protected FileResourceResourceManager fileResourceResourceManager = null;
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(UDFService.class);
+
+  protected synchronized UDFResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new UDFResourceManager(getSharedObjectsFactory(), context);
+    }
+    return resourceManager;
+  }
+
+  protected synchronized FileResourceResourceManager getFileResourceResourceManager() {
+    if (fileResourceResourceManager == null) {
+      fileResourceResourceManager = new FileResourceResourceManager(getSharedObjectsFactory(), context);
+    }
+    return fileResourceResourceManager;
+  }
+
+  /**
+   * Get single item
+   */
+  @GET
+  @Path("{id}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getOne(@PathParam("id") String id) {
+    try {
+      UDF udf = getResourceManager().read(id);
+      JSONObject object = new JSONObject();
+      object.put("udf", udf);
+      return Response.ok(object).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Delete single item
+   */
+  @DELETE
+  @Path("{id}")
+  public Response delete(@PathParam("id") String id) {
+    try {
+      getResourceManager().delete(id);
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Get all UDFs
+   */
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getList() {
+    try {
+      LOG.debug("Getting all UDFs");
+      List<UDF> items = getResourceManager().readAll(
+          new OnlyOwnersFilteringStrategy(this.context.getUsername()));  //TODO: move strategy to PersonalCRUDRM
+
+      JSONObject object = new JSONObject();
+      object.put("udfs", items);
+      return Response.ok(object).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Update item
+   */
+  @PUT
+  @Path("{id}")
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response update(UDFRequest request,
+                         @PathParam("id") String id) {
+    try {
+      if (request.udf.getFileResource() != null) {
+        getFileResourceResourceManager().read(request.udf.getFileResource());
+      }
+      getResourceManager().update(request.udf, id);
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Create udf
+   */
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response create(UDFRequest request, @Context HttpServletResponse response,
+                         @Context UriInfo ui) {
+    try {
+      if (request.udf.getFileResource() != null) {
+        getFileResourceResourceManager().read(request.udf.getFileResource());
+      }
+      getResourceManager().create(request.udf);
+
+      UDF item = getResourceManager().read(request.udf.getId());
+
+      response.setHeader("Location",
+          String.format("%s/%s", ui.getAbsolutePath().toString(), request.udf.getId()));
+
+      JSONObject object = new JSONObject();
+      object.put("udf", item);
+      return Response.ok(object).status(201).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Wrapper object for json mapping
+   */
+  public static class UDFRequest {
+    public UDF udf;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/ColumnDescriptionImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/ColumnDescriptionImpl.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/ColumnDescriptionImpl.java
new file mode 100644
index 0000000..6d095e3
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/ColumnDescriptionImpl.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads;
+
+import org.apache.ambari.view.hive2.client.ColumnDescription;
+
+import java.io.Serializable;
+
+/**
+ * Implementation of ColumnDescription that also includes scale and precision.
+ */
+public class ColumnDescriptionImpl implements ColumnDescription, Serializable {
+  private String name;
+  private String type;
+  private int position;
+  /**
+   * can be null
+   */
+  private Integer precision;
+  /**
+   * can be null
+   */
+  private Integer scale;
+
+  public ColumnDescriptionImpl() {
+  }
+
+  public ColumnDescriptionImpl(String name, String type, int position) {
+    this.name = name;
+    this.type = type;
+    this.position = position;
+  }
+
+  public ColumnDescriptionImpl(String name, String type, int position, int precision) {
+    this.name = name;
+    this.type = type;
+    this.position = position;
+    this.precision = precision;
+  }
+
+  public ColumnDescriptionImpl(String name, String type, int position, int precision, int scale) {
+    this.name = name;
+    this.type = type;
+    this.position = position;
+    this.precision = precision;
+    this.scale = scale;
+  }
+
+  @Override
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public String getType() {
+    return type;
+  }
+
+  @Override
+  public int getPosition() {
+    return this.position;
+  }
+
+  public Integer getPrecision() {
+    return precision;
+  }
+
+  public Integer getScale() {
+    return scale;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    ColumnDescriptionImpl that = (ColumnDescriptionImpl) o;
+
+    if (position != that.position) return false;
+    if (!name.equals(that.name)) return false;
+    return type.equals(that.type);
+
+  }
+
+  @Override
+  public int hashCode() {
+    int result = name.hashCode();
+    result = 31 * result + type.hashCode();
+    result = 31 * result + position;
+    return result;
+  }
+
+  @Override
+  public String toString() {
+    return new StringBuilder().append("ColumnDescriptionImpl[")
+            .append("name : ").append(name)
+            .append(", type : " + type)
+            .append(", position : " + position)
+            .append(", precision : " + precision)
+            .append(", scale : " + scale)
+            .append("]").toString();
+  }
+}
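
A brief sketch showing where the optional precision and scale come into play,
e.g. for a DECIMAL column (names and values are illustrative):

    import org.apache.ambari.view.hive2.resources.uploads.ColumnDescriptionImpl;

    public class ColumnExample {
      public static void main(String[] args) {
        // A DECIMAL(10,2) column in the third position: precision 10, scale 2.
        ColumnDescriptionImpl price = new ColumnDescriptionImpl("price", "DECIMAL", 2, 10, 2);

        // equals()/hashCode() intentionally compare only name, type and
        // position, so precision and scale do not affect equality.
        ColumnDescriptionImpl plain = new ColumnDescriptionImpl("price", "DECIMAL", 2);
        System.out.println(price.equals(plain)); // true
      }
    }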

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/HiveFileType.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/HiveFileType.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/HiveFileType.java
new file mode 100644
index 0000000..82217eb
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/HiveFileType.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads;
+
+public enum HiveFileType {
+  SEQUENCEFILE,
+  TEXTFILE,
+  RCFILE,
+  ORC,
+  PARQUET,
+  AVRO,
+  INPUTFORMAT
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/TableDataReader.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/TableDataReader.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/TableDataReader.java
new file mode 100644
index 0000000..d7dba85
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/TableDataReader.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads;
+
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVPrinter;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Iterator;
+
+/**
+ * Takes a row iterator as input, iterates over the rows, and produces a
+ * CSV-formatted character stream with records separated by the newline "\n".
+ * Note: column values must not contain "\n".
+ */
+public class TableDataReader extends Reader {
+
+  private static final int CAPACITY = 1024;
+  private StringReader stringReader = new StringReader("");
+
+  private Iterator<Row> iterator;
+  private static final CSVFormat CSV_FORMAT = CSVFormat.DEFAULT.withRecordSeparator("\n");
+
+  public TableDataReader(Iterator<Row> rowIterator) {
+    this.iterator = rowIterator;
+  }
+
+  @Override
+  public int read(char[] cbuf, int off, int len) throws IOException {
+
+    int totalLen = len;
+    int count = 0;
+    do {
+      int n = stringReader.read(cbuf, off, len);
+
+      if (n != -1) {
+        // n characters were read in this pass
+        len = len - n; // len more to be read
+        off = off + n; // off now shifted to n more
+        count += n;
+      }
+
+      if (count == totalLen) return count; // all totalLen characters were read
+
+      if (iterator.hasNext()) { // keep reading as long as we keep getting rows
+        StringWriter stringWriter = new StringWriter(CAPACITY);
+        CSVPrinter csvPrinter = new CSVPrinter(stringWriter, CSV_FORMAT);
+        Row row = iterator.next();
+        csvPrinter.printRecord(row.getRow());
+        stringReader.close(); // close the old string reader
+        stringReader = new StringReader(stringWriter.getBuffer().toString());
+        csvPrinter.close();
+        stringWriter.close();
+      } else {
+        return count == 0 ? -1 : count;
+      }
+    } while (count < totalLen);
+
+    return count;
+  }
+
+  @Override
+  public void close() throws IOException {
+    // no-op: the transient string readers are created and replaced in read()
+  }
+}
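
A sketch of how the reader might be driven end to end; it assumes Row can be
constructed from an array of cell values (the Row class itself is not part of
this commit):

    import org.apache.ambari.view.hive2.client.Row;

    import java.io.IOException;
    import java.io.StringWriter;
    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class TableDataReaderExample {
      public static void main(String[] args) throws IOException {
        // Assumption: Row wraps an Object[] of cell values.
        List<Row> rows = Arrays.asList(
            new Row(new Object[]{"1", "alice"}),
            new Row(new Object[]{"2", "bob"}));
        Iterator<Row> iterator = rows.iterator();

        TableDataReader reader = new TableDataReader(iterator);
        StringWriter out = new StringWriter();
        char[] buffer = new char[128];
        int n;
        while ((n = reader.read(buffer, 0, buffer.length)) != -1) {
          out.write(buffer, 0, n);
        }
        reader.close();

        // Expected output: two CSV records, "1,alice" and "2,bob",
        // each terminated by "\n".
        System.out.print(out);
      }
    }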

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/TableInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/TableInput.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/TableInput.java
new file mode 100644
index 0000000..9c27b24
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/TableInput.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads;
+
+import java.util.List;
+
+/**
+ * Used as input in REST calls.
+ */
+class TableInput {
+  public Boolean isFirstRowHeader = Boolean.FALSE;
+  public List<ColumnDescriptionImpl> header;
+  public String tableName;
+  public String databaseName;
+  /**
+   * The format of the file created for the table inside Hive: ORC, TEXTFILE, etc.
+   */
+  public String fileType;
+
+  public TableInput() {
+  }
+
+  public Boolean getIsFirstRowHeader() {
+    return isFirstRowHeader;
+  }
+
+  public void setIsFirstRowHeader(Boolean isFirstRowHeader) {
+    this.isFirstRowHeader = isFirstRowHeader;
+  }
+
+  public List<ColumnDescriptionImpl> getHeader() {
+    return header;
+  }
+
+  public void setHeader(List<ColumnDescriptionImpl> header) {
+    this.header = header;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public String getFileType() {
+    return fileType;
+  }
+
+  public void setFileType(String fileType) {
+    this.fileType = fileType;
+  }
+
+  public void validate() {
+    if (this.getFileType() == null) {
+      throw new IllegalArgumentException("fileType parameter cannot be null.");
+    }
+    if (this.getTableName() == null) {
+      throw new IllegalArgumentException("tableName parameter cannot be null.");
+    }
+    if (this.getDatabaseName() == null) {
+      throw new IllegalArgumentException("databaseName parameter cannot be null.");
+    }
+  }
+}
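
The validate() guard above fails fast on missing required fields; a minimal
sketch of the intended call pattern (values are illustrative; TableInput is
package-private, so this assumes the same package):

    public class TableInputExample {
      public static void main(String[] args) {
        TableInput input = new TableInput();
        input.setDatabaseName("default");
        input.setTableName("web_logs");
        input.setFileType("ORC");

        // Throws IllegalArgumentException if fileType, tableName or
        // databaseName is still null.
        input.validate();
      }
    }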

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java
new file mode 100644
index 0000000..7a7f77c
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads;
+
+import java.io.Serializable;
+
+public class UploadFromHdfsInput implements Serializable {
+  private Boolean isFirstRowHeader;
+  private String inputFileType;
+  private String hdfsPath;
+  private String tableName;
+  private String databaseName;
+
+  public UploadFromHdfsInput() {
+  }
+
+  public UploadFromHdfsInput(Boolean isFirstRowHeader, String inputFileType, String hdfsPath, String tableName, String databaseName) {
+    this.isFirstRowHeader = isFirstRowHeader;
+    this.inputFileType = inputFileType;
+    this.hdfsPath = hdfsPath;
+    this.tableName = tableName;
+    this.databaseName = databaseName;
+  }
+
+  public Boolean getIsFirstRowHeader() {
+    return isFirstRowHeader;
+  }
+
+  public void setIsFirstRowHeader(Boolean firstRowHeader) {
+    this.isFirstRowHeader = firstRowHeader;
+  }
+
+  public String getInputFileType() {
+    return inputFileType;
+  }
+
+  public void setInputFileType(String inputFileType) {
+    this.inputFileType = inputFileType;
+  }
+
+  public String getHdfsPath() {
+    return hdfsPath;
+  }
+
+  public void setHdfsPath(String hdfsPath) {
+    this.hdfsPath = hdfsPath;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  @Override
+  public String toString() {
+    return "UploadFromHdfsInput{" +
+            "isFirstRowHeader=" + isFirstRowHeader +
+            ", inputFileType='" + inputFileType + '\'' +
+            ", hdfsPath='" + hdfsPath + '\'' +
+            ", tableName='" + tableName + '\'' +
+            ", databaseName='" + databaseName + '\'' +
+            '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java
new file mode 100644
index 0000000..59e969d
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java
@@ -0,0 +1,463 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads;
+
+import com.sun.jersey.core.header.FormDataContentDisposition;
+import com.sun.jersey.multipart.FormDataParam;
+import org.apache.ambari.view.hive2.BaseService;
+import org.apache.ambari.view.hive2.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive2.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobController;
+import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobResourceManager;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.PreviewData;
+import org.apache.ambari.view.hive2.resources.uploads.query.*;
+import org.apache.ambari.view.hive2.utils.ServiceFormattedException;
+import org.apache.ambari.view.hive2.utils.SharedObjectsFactory;
+import org.apache.ambari.view.utils.ambari.AmbariApi;
+import org.apache.commons.io.input.ReaderInputStream;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.*;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.*;
+import java.lang.reflect.InvocationTargetException;
+import java.util.*;
+
+/**
+ * UI-driven endpoints for creating a new Hive table and inserting data into it.
+ * The workflow: upload a file, partially parse it based on its type to generate
+ * a preview, create a temporary Hive table (stored as CSV) along with the actual
+ * Hive table, upload the file again, parse it into a CSV stream, upload that
+ * stream into the temporary table on HDFS, insert the rows from the temporary
+ * table into the actual table, and finally delete the temporary table.
+ * <p/>
+ * API:
+ * POST /preview : takes a stream, parses it and returns preview rows, headers and column type suggestions
+ * POST /createTable : runs a Hive query to create the table in Hive
+ * POST /upload : takes a stream, parses it, converts it into CSV and uploads it to the temporary table
+ * POST /insertIntoTable : runs a Hive query to insert data from the temporary table into the actual Hive table
+ * POST /deleteTable : deletes the temporary table
+ */
+public class UploadService extends BaseService {
+
+  private AmbariApi ambariApi;
+  protected JobResourceManager resourceManager;
+
+  private static final String HIVE_METASTORE_LOCATION_KEY = "hive.metastore.warehouse.dir";
+  private static final String HIVE_SITE = "hive-site";
+  private static final String HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY = HIVE_METASTORE_LOCATION_KEY;
+  private static final String HIVE_DEFAULT_METASTORE_LOCATION = "/apps/hive/warehouse";
+  private static final String HIVE_DEFAULT_DB = "default";
+
+  public void validateForUploadFile(UploadFromHdfsInput input) {
+    if (null == input.getInputFileType()) {
+      throw new IllegalArgumentException("inputFileType parameter cannot be null.");
+    }
+    if (null == input.getHdfsPath()) {
+      throw new IllegalArgumentException("hdfsPath parameter cannot be null.");
+    }
+    if (null == input.getTableName()) {
+      throw new IllegalArgumentException("tableName parameter cannot be null.");
+    }
+    if (null == input.getDatabaseName()) {
+      throw new IllegalArgumentException("databaseName parameter cannot be null.");
+    }
+
+    if (input.getIsFirstRowHeader() == null) {
+      input.setIsFirstRowHeader(false);
+    }
+  }
+
+  public void validateForPreview(UploadFromHdfsInput input) {
+    if (input.getIsFirstRowHeader() == null) {
+      input.setIsFirstRowHeader(false);
+    }
+
+    if (null == input.getInputFileType()) {
+      throw new IllegalArgumentException("inputFileType parameter cannot be null.");
+    }
+    if (null == input.getHdfsPath()) {
+      throw new IllegalArgumentException("hdfsPath parameter cannot be null.");
+    }
+  }
+
+  @POST
+  @Path("/previewFromHdfs")
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response uploadForPreviewFromHDFS(UploadFromHdfsInput input) {
+    this.validateForPreview(input); // validate before touching HDFS, so bad input fails with a clear message
+
+    InputStream uploadedInputStream = null;
+    try {
+      uploadedInputStream = getHDFSFileStream(input.getHdfsPath());
+      PreviewData pd = generatePreview(input.getIsFirstRowHeader(), input.getInputFileType(), uploadedInputStream);
+      String tableName = getBasenameFromPath(input.getHdfsPath());
+      return createPreviewResponse(pd, input.getIsFirstRowHeader(), tableName);
+    } catch (Exception e) {
+      LOG.error("Exception occurred while generating preview for hdfs file : " + input.getHdfsPath(), e);
+      throw new ServiceFormattedException(e.getMessage(), e);
+    } finally {
+      if (null != uploadedInputStream) {
+        try {
+          uploadedInputStream.close();
+        } catch (IOException e) {
+          LOG.error("Exception occurred while closing the HDFS file stream for path " + input.getHdfsPath(), e);
+        }
+      }
+    }
+  }
+
+  @POST
+  @Path("/preview")
+  @Consumes(MediaType.MULTIPART_FORM_DATA)
+  public Response uploadForPreview(
+    @FormDataParam("file") InputStream uploadedInputStream,
+    @FormDataParam("file") FormDataContentDisposition fileDetail,
+    @FormDataParam("isFirstRowHeader") Boolean isFirstRowHeader,
+    @FormDataParam("inputFileType") String inputFileType
+  ) {
+    try {
+      if (null == inputFileType) {
+        throw new IllegalArgumentException("inputFileType parameter cannot be null.");
+      }
+
+      if (null == isFirstRowHeader) {
+        isFirstRowHeader = false;
+      }
+
+      PreviewData pd = generatePreview(isFirstRowHeader, inputFileType, uploadedInputStream);
+      return createPreviewResponse(pd, isFirstRowHeader, getBasename(fileDetail.getFileName()));
+    } catch (Exception e) {
+      LOG.error("Exception occurred while generating preview for local file", e);
+      throw new ServiceFormattedException(e.getMessage(), e);
+    }
+  }
+
+
+  @Path("/createTable")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response createTable(TableInput tableInput) {
+    try {
+      tableInput.validate();
+      List<ColumnDescriptionImpl> header = tableInput.getHeader();
+      String databaseName = tableInput.getDatabaseName();
+      String tableName = tableInput.getTableName();
+      Boolean isFirstRowHeader = tableInput.getIsFirstRowHeader();
+      String fileTypeStr = tableInput.getFileType();
+      HiveFileType hiveFileType = HiveFileType.valueOf(fileTypeStr);
+
+      TableInfo ti = new TableInfo(databaseName, tableName, header, hiveFileType);
+      String tableCreationQuery = generateCreateQuery(ti);
+
+      LOG.info("tableCreationQuery : {}", tableCreationQuery);
+
+      Job actualTableJob = createJob(tableCreationQuery, databaseName);
+      String actualTableJobId = actualTableJob.getId();
+
+      JSONObject jobObject = new JSONObject();
+      jobObject.put("jobId", actualTableJobId);
+
+      LOG.info("table creation jobId {}", actualTableJobId);
+      return Response.ok(jobObject).status(201).build();
+    } catch (Throwable e) {
+      LOG.error("Exception occurred while creating table with input : " + tableInput, e);
+      throw new ServiceFormattedException(e.getMessage(), e);
+    }
+  }
+
+  @Path("/uploadFromHDFS")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response uploadFileFromHdfs(UploadFromHdfsInput input) {
+    this.validateForUploadFile(input);
+
+    if (ParseOptions.InputFileType.CSV.toString().equals(input.getInputFileType()) && Boolean.FALSE.equals(input.getIsFirstRowHeader())) {
+      // upload using the LOAD query
+      LoadQueryInput loadQueryInput = new LoadQueryInput(input.getHdfsPath(), input.getDatabaseName(), input.getTableName());
+      String loadQuery = new QueryGenerator().generateLoadQuery(loadQueryInput);
+
+      try {
+        Job job = createJob(loadQuery, input.getDatabaseName());
+
+        JSONObject jo = new JSONObject();
+        jo.put("jobId", job.getId());
+
+        return Response.ok(jo).build();
+      } catch (Throwable e) {
+        LOG.error("Exception occurred while creating job for Load From HDFS query : " + loadQuery, e);
+        throw new ServiceFormattedException(e.getMessage(), e);
+      }
+
+    } else {
+      // create stream and upload
+      InputStream hdfsStream = null;
+      try {
+        hdfsStream = getHDFSFileStream(input.getHdfsPath());
+        String path = uploadFileFromStream(hdfsStream, input.getIsFirstRowHeader(), input.getInputFileType(), input.getTableName(), input.getDatabaseName());
+
+        JSONObject jo = new JSONObject();
+        jo.put("uploadedPath", path);
+
+        return Response.ok(jo).build();
+      } catch (Exception e) {
+        LOG.error("Exception occurred while uploading the file from HDFS with path : " + input.getHdfsPath(), e);
+        throw new ServiceFormattedException(e.getMessage(), e);
+      } finally {
+        if (null != hdfsStream) {
+          try {
+            hdfsStream.close();
+          } catch (IOException e) {
+            LOG.error("Exception occurred while closing the HDFS stream for path : " + input.getHdfsPath(), e);
+          }
+        }
+      }
+    }
+  }
+
+  @Path("/upload")
+  @POST
+  @Consumes(MediaType.MULTIPART_FORM_DATA)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response uploadFile(
+    @FormDataParam("file") InputStream uploadedInputStream,
+    @FormDataParam("file") FormDataContentDisposition fileDetail,
+    @FormDataParam("isFirstRowHeader") Boolean isFirstRowHeader,
+    @FormDataParam("inputFileType") String inputFileType,   // the format of the file uploaded. CSV/JSON etc.
+    @FormDataParam("tableName") String tableName,
+    @FormDataParam("databaseName") String databaseName
+  ) {
+    try {
+
+      String path = uploadFileFromStream(uploadedInputStream, isFirstRowHeader, inputFileType, tableName, databaseName);
+
+      JSONObject jo = new JSONObject();
+      jo.put("uploadedPath", path);
+      return Response.ok(jo).build();
+    } catch (Exception e) {
+      throw new ServiceFormattedException(e.getMessage(), e);
+    }
+  }
+
+  @Path("/insertIntoTable")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response insertFromTempTable(InsertFromQueryInput input) {
+    try {
+      String insertQuery = generateInsertFromQuery(input);
+      LOG.info("insertQuery : {}", insertQuery);
+
+      Job job = createJob(insertQuery, "default");
+
+      JSONObject jo = new JSONObject();
+      jo.put("jobId", job.getId());
+
+      return Response.ok(jo).build();
+    } catch (Throwable e) {
+      throw new ServiceFormattedException(e.getMessage(), e);
+    }
+  }
+
+  @Path("/deleteTable")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response deleteTable(DeleteQueryInput input) {
+    try {
+      String deleteQuery = generateDeleteQuery(input);
+      LOG.info("deleteQuery : {}", deleteQuery);
+
+      Job job = createJob(deleteQuery, "default");
+
+      JSONObject jo = new JSONObject();
+      jo.put("jobId", job.getId());
+
+      return Response.ok(jo).build();
+    } catch (Throwable e) {
+      throw new ServiceFormattedException(e.getMessage(), e);
+    }
+  }
+
+  private String uploadIntoTable(Reader reader, String databaseName, String tempTableName) {
+    try {
+      String basePath = getHiveMetaStoreLocation();
+
+      if (!basePath.endsWith("/")) {
+        basePath = basePath + "/";
+      }
+
+      if (databaseName != null && !databaseName.equals(HIVE_DEFAULT_DB)) {
+        basePath = basePath + databaseName + ".db/";
+      }
+
+      String fullPath = basePath + tempTableName + "/" + tempTableName + ".csv";
+
+      LOG.info("Uploading file into : {}", fullPath);
+
+      uploadFile(fullPath, new ReaderInputStream(reader));
+
+      return fullPath;
+    } catch (Exception e) {
+      throw new ServiceFormattedException(e.getMessage(), e);
+    }
+  }
+
+  private synchronized JobResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      SharedObjectsFactory connectionsFactory = getSharedObjectsFactory();
+      resourceManager = new JobResourceManager(connectionsFactory, context);
+    }
+    return resourceManager;
+  }
+
+  private synchronized AmbariApi getAmbariApi() {
+    if (null == ambariApi) {
+      ambariApi = new AmbariApi(this.context);
+    }
+    return ambariApi;
+  }
+
+  private String generateCreateQuery(TableInfo ti) {
+    return new QueryGenerator().generateCreateQuery(ti);
+  }
+
+  private String generateInsertFromQuery(InsertFromQueryInput input) {
+    return new QueryGenerator().generateInsertFromQuery(input);
+  }
+
+  private String generateDeleteQuery(DeleteQueryInput deleteQueryInput) {
+    return new QueryGenerator().generateDropTableQuery(deleteQueryInput);
+  }
+
+  private Job createJob(String query, String databaseName) throws Throwable {
+    Map jobInfo = new HashMap<String, String>();
+    jobInfo.put("title", "Internal Table Creation");
+    jobInfo.put("forcedContent", query);
+    jobInfo.put("dataBase", databaseName);
+
+    LOG.info("jobInfo : " + jobInfo);
+    Job job = new JobImpl(jobInfo);
+    LOG.info("job : " + job);
+    getResourceManager().create(job);
+
+    JobController createdJobController = getResourceManager().readController(job.getId());
+    createdJobController.submit();
+    getResourceManager().saveIfModified(createdJobController);
+
+    return job;
+  }
+
+  private String getHiveMetaStoreLocation() {
+    String dir = context.getProperties().get(HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY);
+    if (dir != null && !dir.trim().isEmpty()) {
+      return dir;
+    } else {
+      LOG.debug("Neither an associated cluster nor the view property {} was found. Returning default location : {}", HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY, HIVE_DEFAULT_METASTORE_LOCATION);
+      return HIVE_DEFAULT_METASTORE_LOCATION;
+    }
+  }
+
+  private void uploadFile(final String filePath, InputStream uploadedInputStream)
+    throws IOException, InterruptedException {
+    byte[] chunk = new byte[1024];
+    FSDataOutputStream out = getSharedObjectsFactory().getHdfsApi().create(filePath, false);
+    try {
+      int n;
+      while ((n = uploadedInputStream.read(chunk)) != -1) {
+        out.write(chunk, 0, n);
+      }
+    } finally {
+      out.close(); // always release the HDFS stream, even if the copy fails
+    }
+  }
+
+  private PreviewData generatePreview(Boolean isFirstRowHeader, String inputFileType, InputStream uploadedInputStream) throws Exception {
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
+    if (inputFileType.equals(ParseOptions.InputFileType.CSV.toString())) {
+      if (isFirstRowHeader) {
+        parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+      } else {
+        parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.NONE.toString());
+      }
+    } else {
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+    }
+
+    LOG.info("isFirstRowHeader : {}, inputFileType : {}", isFirstRowHeader, inputFileType);
+
+    DataParser dataParser = new DataParser(new InputStreamReader(uploadedInputStream), parseOptions);
+
+    return dataParser.parsePreview();
+  }
+
+  private Response createPreviewResponse(PreviewData pd, Boolean isFirstRowHeader, String tableName) {
+    Map<String, Object> retData = new HashMap<String, Object>();
+    retData.put("header", pd.getHeader());
+    retData.put("rows", pd.getPreviewRows());
+    retData.put("isFirstRowHeader", isFirstRowHeader);
+    retData.put("tableName", tableName);
+
+    JSONObject jsonObject = new JSONObject(retData);
+    return Response.ok(jsonObject).build();
+  }
+
+  private InputStream getHDFSFileStream(String path) throws IOException, InterruptedException {
+    return getSharedObjectsFactory().getHdfsApi().open(path);
+  }
+
+  private String uploadFileFromStream(
+    InputStream uploadedInputStream,
+    Boolean isFirstRowHeader,
+    String inputFileType,   // the format of the uploaded file: CSV, JSON etc.
+    String tableName,
+    String databaseName
+  ) throws Exception {
+    LOG.info("uploading file into databaseName {}, tableName {}", databaseName, tableName);
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
+
+    DataParser dataParser = new DataParser(new InputStreamReader(uploadedInputStream), parseOptions);
+
+    if (inputFileType.equals(ParseOptions.InputFileType.CSV.toString()) && isFirstRowHeader) {
+      dataParser.extractHeader(); // removes the header line, if any, from the stream
+    }
+
+    Reader csvReader = dataParser.getTableDataReader();
+    return uploadIntoTable(csvReader, databaseName, tableName);
+  }
+
+  private String getBasenameFromPath(String path) {
+    String fileName = new File(path).getName();
+    return getBasename(fileName);
+  }
+
+  private String getBasename(String fileName) {
+    // strip the extension, if any; lastIndexOf keeps names like "my.data.csv" as "my.data"
+    int index = fileName.lastIndexOf('.');
+    if (index != -1) {
+      return fileName.substring(0, index);
+    }
+    return fileName;
+  }
+}
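
As a rough sketch of the workflow described in the class comment, assuming an
already-initialized UploadService instance named service (how the view
framework wires up BaseService subclasses is out of scope here) and a
hypothetical CSV at /user/admin/data/visits.csv:

    UploadFromHdfsInput input = new UploadFromHdfsInput(
        Boolean.FALSE, "CSV", "/user/admin/data/visits.csv", "visits", "default");

    // step 1: preview rows, header and suggested column types
    Response preview = service.uploadForPreviewFromHDFS(input);

    // step 2: the UI builds a TableInput from the preview and POSTs /createTable

    // step 3: a headerless CSV takes the LOAD query shortcut and returns a jobId;
    // other inputs are re-parsed to CSV and written under the warehouse, e.g.
    // /apps/hive/warehouse/visits/visits.csv for the default database
    Response load = service.uploadFileFromHdfs(input);

    // steps 4 and 5: POST /insertIntoTable, then POST /deleteTable for the
    // temporary table (not needed on the LOAD shortcut path)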

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java
new file mode 100644
index 0000000..6edc97c
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.csv.CSVParser;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.json.JSONParser;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.xml.XMLParser;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Iterator;
+
+/**
+ * Wrapper/Decorator over the Stream parsers.
+ * Supports XML/JSON/CSV parsing.
+ */
+public class DataParser implements IParser {
+
+  private IParser parser;
+
+  public DataParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    Object fileType = parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE);
+    if (ParseOptions.InputFileType.CSV.toString().equals(fileType)) {
+      parser = new CSVParser(reader, parseOptions);
+    } else if (ParseOptions.InputFileType.JSON.toString().equals(fileType)) {
+      parser = new JSONParser(reader, parseOptions);
+    } else if (ParseOptions.InputFileType.XML.toString().equals(fileType)) {
+      parser = new XMLParser(reader, parseOptions);
+    } else {
+      // fail fast instead of leaving parser null and hitting an NPE later
+      throw new IllegalArgumentException("Unsupported file type : " + fileType);
+    }
+  }
+
+  @Override
+  public Reader getTableDataReader() {
+    return parser.getTableDataReader();
+  }
+
+  @Override
+  public PreviewData parsePreview() {
+    return parser.parsePreview();
+  }
+
+  @Override
+  public Row extractHeader() {
+    return parser.extractHeader();
+  }
+
+  @Override
+  public void close() throws IOException {
+    parser.close();
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return parser.iterator();
+  }
+}
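
A minimal usage sketch, mirroring what UploadService.uploadFileFromStream does
for a CSV whose first record is the header (the sample data is made up; Row and
Reader come from the imports above, StringReader from java.io):

    ParseOptions options = new ParseOptions();
    options.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
    options.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());

    DataParser parser = new DataParser(new java.io.StringReader("id,name\n1,alice\n2,bob"), options);
    Row header = parser.extractHeader();            // consumes the "id,name" line
    Reader tableData = parser.getTableDataReader(); // remaining records as CSV text, ready for HDFS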

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/EndOfDocumentException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/EndOfDocumentException.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/EndOfDocumentException.java
new file mode 100644
index 0000000..f5adc4e
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/EndOfDocumentException.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers;
+
+
+public class EndOfDocumentException extends Exception {
+  public EndOfDocumentException() {
+  }
+
+  public EndOfDocumentException(String message) {
+    super(message);
+  }
+
+  public EndOfDocumentException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public EndOfDocumentException(Throwable cause) {
+    super(cause);
+  }
+
+  public EndOfDocumentException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java
new file mode 100644
index 0000000..401ef48
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive2.client.Row;
+
+import java.io.IOException;
+import java.io.Reader;
+
+/**
+ * Interface defining the methods for parsers that can be used for generating
+ * a preview and for uploading a table into Hive.
+ */
+public interface IParser extends Iterable<Row> {
+
+  /**
+   * @return a Reader over the table data rendered as CSV text, suitable for
+   * uploading directly to HDFS
+   */
+  Reader getTableDataReader();
+
+  PreviewData parsePreview();
+
+  Row extractHeader();
+
+  void close() throws IOException;
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseOptions.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseOptions.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseOptions.java
new file mode 100644
index 0000000..53af85a
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseOptions.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers;
+
+import java.util.HashMap;
+
+public class ParseOptions {
+  public enum InputFileType {
+    CSV,
+    JSON,
+    XML
+  }
+
+  public enum HEADER {
+    FIRST_RECORD,
+    PROVIDED_BY_USER, // not used right now, but could be used when some file metadata provides this information
+    EMBEDDED, // for JSON/XML and possibly other file formats where the header is embedded with the data
+    NONE   // the file does not contain any header information at all
+  }
+
+  public static final String OPTIONS_FILE_TYPE = "FILE_TYPE";
+  public static final String OPTIONS_HEADER = "HEADER";
+  public static final String OPTIONS_NUMBER_OF_PREVIEW_ROWS = "NUMBER_OF_PREVIEW_ROWS";
+
+  private HashMap<String, Object> options = new HashMap<>();
+
+  public void setOption(String key, Object value) {
+    this.options.put(key, value);
+  }
+
+  public Object getOption(String key) {
+    return this.options.get(key);
+  }
+}
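
For reference, these are the three combinations that UploadService.generatePreview
configures, shown here as a sketch:

    ParseOptions csvWithHeader = new ParseOptions();
    csvWithHeader.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
    csvWithHeader.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());

    ParseOptions csvWithoutHeader = new ParseOptions();
    csvWithoutHeader.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
    csvWithoutHeader.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.NONE.toString());

    ParseOptions jsonOrXml = new ParseOptions(); // for JSON/XML the field names travel with the data
    jsonOrXml.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
    jsonOrXml.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());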

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java
new file mode 100644
index 0000000..fefacce
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive2.client.ColumnDescription;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+public class ParseUtils {
+
+  // SimpleDateFormat patterns; note that month is upper-case MM (lower-case mm means minutes)
+  public static final String[] DATE_FORMATS = {"MM/dd/yyyy", "dd/MM/yyyy", "MM-dd-yyyy" /* add more formats */};
+
+  public static boolean isInteger(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Integer)
+      return true;
+
+    try {
+      Integer.parseInt(object.toString());
+      return true;
+    } catch (NumberFormatException nfe) {
+      return false;
+    }
+  }
+
+  public static boolean isBoolean(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Boolean)
+      return true;
+
+    String strValue = object.toString();
+    return strValue.equalsIgnoreCase("true") || strValue.equalsIgnoreCase("false");
+  }
+
+  public static boolean isLong(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Long)
+      return true;
+
+    try {
+      Long.parseLong(object.toString());
+      return true;
+    } catch (NumberFormatException nfe) {
+      return false;
+    }
+  }
+
+  public static boolean isDouble(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Double)
+      return true;
+
+    try {
+      Double.parseDouble(object.toString());
+      return true;
+    } catch (NumberFormatException nfe) {
+      return false;
+    }
+  }
+
+  public static boolean isChar(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Character)
+      return true;
+
+    String str = object.toString().trim();
+    return str.length() == 1;
+  }
+
+  public static boolean isDate(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Date)
+      return true;
+
+    String str = object.toString();
+    for (String format : DATE_FORMATS) {
+      try {
+        SimpleDateFormat sdf = new SimpleDateFormat(format);
+        sdf.setLenient(false); // reject values that would only parse via lenient rollover
+        sdf.parse(str);
+        return true;
+      } catch (Exception e) {
+        // not in this format; try the next one
+      }
+    }
+
+    return false;
+  }
+
+  public static ColumnDescription.DataTypes detectHiveDataType(Object object) {
+    // checks are ordered from the most restrictive type to the least; STRING is the fallback
+    if (isInteger(object)) return ColumnDescription.DataTypes.INT;
+    if (isLong(object)) return ColumnDescription.DataTypes.BIGINT;
+    if (isBoolean(object)) return ColumnDescription.DataTypes.BOOLEAN;
+    if (isDouble(object)) return ColumnDescription.DataTypes.DOUBLE;
+    if (isDate(object)) return ColumnDescription.DataTypes.DATE;
+    if (isChar(object)) return ColumnDescription.DataTypes.CHAR;
+
+    return ColumnDescription.DataTypes.STRING;
+  }
+}
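
Some illustrative inputs (made up) and the types the cascade in
detectHiveDataType reports for them:

    ParseUtils.detectHiveDataType("42");          // INT
    ParseUtils.detectHiveDataType("8589934592");  // BIGINT - overflows int but fits in long
    ParseUtils.detectHiveDataType("true");        // BOOLEAN
    ParseUtils.detectHiveDataType("3.14");        // DOUBLE
    ParseUtils.detectHiveDataType("01/31/2016");  // DATE - matches MM/dd/yyyy
    ParseUtils.detectHiveDataType("x");           // CHAR
    ParseUtils.detectHiveDataType("hello");       // STRING - the fallback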

