ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jlun...@apache.org
Subject [13/34] ambari git commit: AMBARI-17355 & AMBARI-17354: POC: FE & BE changes for first class support for Yarn hosted services
Date Mon, 27 Jun 2016 23:36:39 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java
new file mode 100644
index 0000000..79c5482
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive2.client.ColumnDescription;
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive2.resources.uploads.TableDataReader;
+
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+/**
+ * Provides the general implementation for parsing JSON/CSV/XML input
+ * to generate preview rows, headers and column datatypes.
+ * Also provides a TableDataReader for converting any supported type to CSV.
+ */
+public abstract class Parser implements IParser {
+
+  protected Reader reader; // the underlying reader of the stream being parsed
+  protected ParseOptions parseOptions;
+  private int numberOfPreviewRows = 10; // default preview size; may be overridden via parse options
+
+  public Parser(Reader originalReader, ParseOptions parseOptions) {
+    this.reader = originalReader;
+    this.parseOptions = parseOptions;
+  }
+
+  /**
+   * Returns which datatype was detected for the maximum number of times in the given column.
+   *
+   * @param typeCounts per-column histogram of datatype occurrences,
+   *                   indexed by [column][DataTypes ordinal]
+   * @param colNum     the column to inspect
+   * @return the ordinal of the most frequently detected datatype
+   */
+  private int getLikelyDataType(int[][] typeCounts, int colNum) {
+    int[] colArray = typeCounts[colNum];
+    int maxIndex = 0;
+    for (int i = 1; i < colArray.length; i++) {
+      if (colArray[i] > colArray[maxIndex]) {
+        maxIndex = i;
+      }
+    }
+
+    return maxIndex;
+  }
+
+  @Override
+  public Reader getTableDataReader() {
+    return new TableDataReader(this.iterator());
+  }
+
+  @Override
+  public PreviewData parsePreview() {
+    // Honour an explicit preview-row count only when the option is present and
+    // well-typed; otherwise keep the default. (The original swallowed any
+    // exception here to get the same fallback; this makes the intent explicit.)
+    Object previewRowsOption = parseOptions.getOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS);
+    if (previewRowsOption instanceof Integer) {
+      numberOfPreviewRows = (Integer) previewRowsOption;
+    }
+
+    int numberOfRows = numberOfPreviewRows;
+    List<Row> previewRows = new ArrayList<Row>(numberOfPreviewRows + 1); // size including the header.
+
+    // Implementations return the same iterator instance on every call (see
+    // CSVParser/JSONParser) -- parsing would already be broken otherwise, since
+    // hasNext()/next() below must observe the same stream position. Fetch once.
+    Iterator<Row> rowIterator = this.iterator();
+
+    Row headerRow = null;
+    Integer numOfCols = null;
+    int[][] typeCounts = null;
+
+    Object headerOption = parseOptions.getOption(ParseOptions.OPTIONS_HEADER);
+    if (headerOption != null && headerOption.equals(ParseOptions.HEADER.FIRST_RECORD.toString())) {
+      if (!rowIterator.hasNext()) {
+        throw new NoSuchElementException("Cannot parse Header");
+      }
+      headerRow = extractHeader();
+      numOfCols = headerRow.getRow().length;
+      typeCounts = new int[numOfCols][ColumnDescription.DataTypes.values().length];
+      previewRows.add(headerRow);
+    }
+
+    // find data types.
+    Row r;
+    if (rowIterator.hasNext()) {
+      r = rowIterator.next();
+      if (null == numOfCols) {
+        // no header row: infer the column count from the first data row.
+        numOfCols = r.getRow().length;
+        typeCounts = new int[numOfCols][ColumnDescription.DataTypes.values().length];
+      }
+    } else {
+      throw new NoSuchElementException("No rows in the file.");
+    }
+
+    while (true) {
+      Object[] values = r.getRow();
+
+      // pad with nulls if fewer columns were detected, drop extra columns if any.
+      Object[] newValues = new Object[numOfCols];
+
+      for (int colNum = 0; colNum < numOfCols; colNum++) {
+        if (colNum < values.length) {
+          // detect and count the type so the dominant one can be picked later.
+          ColumnDescription.DataTypes type = ParseUtils.detectHiveDataType(values[colNum]);
+          typeCounts[colNum][type.ordinal()]++;
+          newValues[colNum] = values[colNum];
+        } else {
+          newValues[colNum] = null;
+        }
+      }
+
+      previewRows.add(new Row(newValues));
+
+      numberOfRows--;
+      if (numberOfRows <= 0 || !rowIterator.hasNext()) {
+        break;
+      }
+
+      r = rowIterator.next();
+    }
+
+    // defensive: at least one row was added above, but keep the guard.
+    if (previewRows.isEmpty()) {
+      throw new NoSuchElementException("Does not contain any rows.");
+    }
+
+    List<ColumnDescription> header = new ArrayList<>(numOfCols);
+    for (int colNum = 0; colNum < numOfCols; colNum++) {
+      int dataTypeId = getLikelyDataType(typeCounts, colNum);
+      ColumnDescription.DataTypes type = ColumnDescription.DataTypes.values()[dataTypeId];
+      // use the header cell as the column name when available, else generate one.
+      String colName = (null != headerRow) ? (String) headerRow.getRow()[colNum] : "Column" + colNum;
+
+      header.add(new ColumnDescriptionImpl(colName, type.toString(), colNum));
+    }
+
+    return new PreviewData(header, previewRows);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/PreviewData.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/PreviewData.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/PreviewData.java
new file mode 100644
index 0000000..b00e2fb
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/PreviewData.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive2.client.ColumnDescription;
+import org.apache.ambari.view.hive2.client.Row;
+
+import java.util.List;
+
+/**
+ * Value holder for the data produced while previewing an upload:
+ * the detected header columns and the sampled preview rows.
+ */
+public class PreviewData {
+  private List<ColumnDescription> header;
+  private List<Row> previewRows;
+
+  /** Creates an empty holder; header and rows can be populated via setters. */
+  public PreviewData() {
+  }
+
+  /**
+   * @param header      detected column descriptions
+   * @param previewRows sampled rows (possibly including the header row)
+   */
+  public PreviewData(List<ColumnDescription> header, List<Row> previewRows) {
+    this.previewRows = previewRows;
+    this.header = header;
+  }
+
+  public List<ColumnDescription> getHeader() {
+    return this.header;
+  }
+
+  public void setHeader(List<ColumnDescription> header) {
+    this.header = header;
+  }
+
+  public List<Row> getPreviewRows() {
+    return this.previewRows;
+  }
+
+  public void setPreviewRows(List<Row> previewRows) {
+    this.previewRows = previewRows;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/RowIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/RowIterator.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/RowIterator.java
new file mode 100644
index 0000000..747e7ff
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/RowIterator.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive2.client.Row;
+
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+
+/**
+ * Adapts a RowMapIterator (ordered column-name to value maps, as produced by the
+ * JSON/XML parsers) into an iterator of Row objects with a fixed column order.
+ */
+public class RowIterator implements Iterator<Row> {
+
+  private LinkedList<String> headers = null;
+  private RowMapIterator iterator;
+
+  /**
+   * Creates a Row iterator over the map values of the given iterator;
+   * the key set of the first map (if any) is kept as the header.
+   *
+   * @param iterator source of ordered column-name/value maps
+   */
+  public RowIterator(RowMapIterator iterator) {
+    this.iterator = iterator;
+    LinkedHashMap<String, String> firstRow = iterator.peek();
+    if (firstRow != null) {
+      this.headers = new LinkedList<>(firstRow.keySet());
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return this.iterator.hasNext();
+  }
+
+  @Override
+  public Row next() {
+    LinkedHashMap<String, String> rowMap = this.iterator.next();
+    return rowMap == null ? null : convertToRow(rowMap);
+  }
+
+  @Override
+  public void remove() {
+    this.iterator.remove();
+  }
+
+  /**
+   * @return ordered collection of header name strings
+   */
+  public LinkedList<String> extractHeaders() {
+    return this.headers;
+  }
+
+  /**
+   * Projects the given map onto the header order, producing a Row.
+   * Values are trimmed because \n etc. is used as a separator for rows in TableDataReader.
+   *
+   * @param rowMap ordered column-name/value map for one row
+   * @return the converted Row
+   */
+  private Row convertToRow(LinkedHashMap<String, String> rowMap) {
+    Object[] values = new Object[this.headers.size()];
+    int index = 0;
+    for (String headerName : this.headers) {
+      String value = rowMap.get(headerName);
+      values[index++] = (value == null) ? null : value.trim();
+    }
+
+    return new Row(values);
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/RowMapIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/RowMapIterator.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/RowMapIterator.java
new file mode 100644
index 0000000..de13347
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/RowMapIterator.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers;
+
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+
+/**
+ * Iterator which generates an ordered map of column name to value for each row
+ * read from streams like JSON and XML.
+ */
+public interface RowMapIterator extends Iterator<LinkedHashMap<String, String>> {
+  /** @return the next row's map without advancing the iterator; null when no further row is available (see JSONIterator) */
+  LinkedHashMap<String, String> peek() ;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVIterator.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVIterator.java
new file mode 100644
index 0000000..c4abb99
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVIterator.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers.csv;
+
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.commons.csv.CSVRecord;
+
+import java.util.Iterator;
+
+/**
+ * Iterates over the parsed CSV records and turns each record into a Row object.
+ */
+class CSVIterator implements Iterator<Row> {
+
+  private Iterator<CSVRecord> iterator;
+
+  public CSVIterator(Iterator<CSVRecord> iterator) {
+    this.iterator = iterator;
+  }
+
+  @Override
+  public boolean hasNext() {
+    return this.iterator.hasNext();
+  }
+
+  @Override
+  public Row next() {
+    CSVRecord record = this.iterator.next();
+    int size = record.size();
+    Object[] values = new Object[size];
+    for (int col = 0; col < size; col++) {
+      values[col] = record.get(col);
+    }
+    return new Row(values);
+  }
+
+  @Override
+  public void remove() {
+    this.iterator.remove();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java
new file mode 100644
index 0000000..74cc060
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive2.resources.uploads.parsers.csv;
+
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.Parser;
+import org.apache.commons.csv.CSVFormat;
+
+import java.io.*;
+import java.util.*;
+
+/**
+ * Reads a CSV stream from the given Reader and exposes its records as Rows,
+ * letting the base Parser extract headers and detect column datatypes.
+ */
+public class CSVParser extends Parser {
+
+  private CSVIterator iterator;
+  private org.apache.commons.csv.CSVParser parser;
+
+  public CSVParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    this.parser = new org.apache.commons.csv.CSVParser(this.reader, CSVFormat.EXCEL);
+    this.iterator = new CSVIterator(this.parser.iterator());
+  }
+
+  /** The header is simply the first record of the stream. */
+  @Override
+  public Row extractHeader() {
+    return this.iterator().next();
+  }
+
+  @Override
+  public void close() throws IOException {
+    this.parser.close();
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return this.iterator; // only one iterator per parser.
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONIterator.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONIterator.java
new file mode 100644
index 0000000..54aa0a9
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONIterator.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers.json;
+
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonToken;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.EndOfDocumentException;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.RowMapIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+
+/**
+ * Iterates over the JsonReader and creates row data from it.
+ * Assumes the document is a single array of flat json objects.
+ * eg : [ { "col1Name" : "value-1-1", "col2Name" : "value-1-2"}, { "col1Name" : "value-2-1", "col2Name" : "value-2-2"}]
+ */
+class JSONIterator implements RowMapIterator {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(JSONIterator.class);
+
+  // one-row look-ahead buffer; null once the document is exhausted
+  private LinkedHashMap<String, String> nextObject = null;
+
+  /**
+   * Reads the next flat json object from the stream into an ordered
+   * (column name -> string value) map.
+   *
+   * @param reader positioned inside the top-level json array
+   * @return the ordered map of the object's fields (empty map on END_DOCUMENT)
+   * @throws IOException            on underlying read failures
+   * @throws EndOfDocumentException when the closing ']' of the array is reached
+   */
+  private LinkedHashMap<String, String> readNextObject(JsonReader reader) throws IOException, EndOfDocumentException {
+    LinkedHashMap<String, String> row = new LinkedHashMap<>();
+    boolean objectStarted = false;
+    boolean shouldBeName = false; // true when only a field name (or the object end) is legal next
+    String currentName = null;
+
+    while (true) {
+      JsonToken token = reader.peek();
+      switch (token) {
+        case BEGIN_ARRAY:
+          throw new IllegalArgumentException("Row data cannot have an array.");
+        case END_ARRAY:
+          // enclosing array ended: signal the caller that there are no more rows
+          throw new EndOfDocumentException("End of Json Array document.");
+        case BEGIN_OBJECT:
+          if (objectStarted == true) {
+            throw new IllegalArgumentException("Nested objects not supported.");
+          }
+          if (shouldBeName == true) {
+            throw new IllegalArgumentException("name expected, got begin_object");
+          }
+          objectStarted = true;
+          shouldBeName = true;
+          reader.beginObject();
+          break;
+        case END_OBJECT:
+          if (shouldBeName == false) {
+            throw new IllegalArgumentException("value expected, got end_object");
+          }
+          reader.endObject();
+          return row; // one complete row read
+        case NAME:
+          if (shouldBeName == false) {
+            throw new IllegalArgumentException("name not expected at this point.");
+          }
+          shouldBeName = false;
+          currentName = reader.nextName();
+          break;
+        case NUMBER:
+        case STRING:
+          if (shouldBeName == true) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          // numbers are read as strings too; datatype detection happens later in Parser
+          String n = reader.nextString();
+          row.put(currentName, n);
+          shouldBeName = true;
+          break;
+        case BOOLEAN:
+          if (shouldBeName == true) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          String b = String.valueOf(reader.nextBoolean());
+          row.put(currentName, b);
+          shouldBeName = true;
+          break;
+        case NULL:
+          if (shouldBeName == true) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          // json null is mapped to an empty string value
+          reader.nextNull();
+          row.put(currentName, "");
+          shouldBeName = true;
+          break;
+        case END_DOCUMENT:
+          return row;
+
+        default:
+          throw new IllegalArgumentException("Illegal token detected inside json: token : " + token.toString());
+      }
+    }
+  }
+
+  private JsonReader reader;
+
+  public JSONIterator(JsonReader reader) throws IOException {
+    this.reader = reader;
+    // test the start of array
+    JsonToken jt = reader.peek();
+    if (jt != JsonToken.BEGIN_ARRAY) {
+      throw new IllegalArgumentException("Expected the whole document to contain a single JsonArray.");
+    }
+
+    reader.beginArray(); // read the start of array
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      // intentionally ignored: an empty array leaves nextObject null, so hasNext() reports false
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return null != nextObject;
+  }
+
+  /** @return the buffered next row without advancing, or null when exhausted */
+  public LinkedHashMap<String, String> peek() {
+    return nextObject;
+  }
+
+  @Override
+  public LinkedHashMap<String, String> next() {
+    LinkedHashMap<String, String> currObject = nextObject;
+    try {
+      // refill the look-ahead buffer for the subsequent call
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      LOG.debug("End of Json document reached with next character ending the JSON Array.");
+      nextObject = null;
+    } catch (Exception e){
+      // for any other exception throw error right away
+      throw new IllegalArgumentException(e);
+    }
+    return currObject;
+  }
+
+  @Override
+  public void remove() {
+    // no operation.
+    LOG.info("No operation when remove called on JSONIterator.");
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java
new file mode 100644
index 0000000..772b6fd
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers.json;
+
+import com.google.gson.stream.JsonReader;
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.Parser;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.RowIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Collection;
+import java.util.Iterator;
+
+
+/**
+ * Parses the input data from the reader as JSON and provides an iterator over the rows.
+ *
+ * Expects the input reader to contain a JsonArray in which each element is a JsonObject
+ * corresponding to one row.
+ * eg. :
+ *
+ * [
+ *  {row1-col1, row1-col2, row1-col3},
+ *  {row2-col1, row2-col2, row2-col3}
+ * ]
+ *
+ */
+public class JSONParser extends Parser {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(JSONParser.class);
+
+  private RowIterator iterator;
+  private JsonReader jsonReader;
+  private JSONIterator jsonIterator;
+
+  public JSONParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    this.jsonReader = new JsonReader(this.reader);
+    this.jsonIterator = new JSONIterator(this.jsonReader);
+    this.iterator = new RowIterator(this.jsonIterator);
+  }
+
+  /** Builds the header Row out of the keys of the first json object. */
+  @Override
+  public Row extractHeader() {
+    Collection<String> headers = this.iterator.extractHeaders();
+    return new Row(headers.toArray());
+  }
+
+  @Override
+  public void close() throws IOException {
+    this.jsonReader.close();
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return this.iterator;
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLIterator.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLIterator.java
new file mode 100644
index 0000000..6b43735
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLIterator.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers.xml;
+
+import org.apache.ambari.view.hive2.resources.uploads.parsers.EndOfDocumentException;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.RowMapIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.namespace.QName;
+import javax.xml.stream.XMLEventReader;
+import javax.xml.stream.XMLStreamConstants;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.events.*;
+import java.io.IOException;
+import java.util.LinkedHashMap;
+
+/**
+ * assumes XML of following format
+ * <table>
+ * <row>
+ * <col name="col1Name">row1-col1-Data</col>
+ * <col name="col2Name">row1-col2-Data</col>
+ * <col name="col3Name">row1-col3-Data</col>
+ * <col name="col4Name">row1-col4-Data</col>
+ * </row>
+ * <row>
+ * <col name="col1Name">row2-col1-Data</col>
+ * <col name="col2Name">row2-col2-Data</col>
+ * <col name="col3Name">row2-col3-Data</col>
+ * <col name="col4Name">row2-col4-Data</col>
+ * </row>
+ * </table>
+ */
+class XMLIterator implements RowMapIterator {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(XMLIterator.class);
+
+  private LinkedHashMap<String, String> nextObject = null;
+  private static final String TAG_TABLE = "table";
+  private static final String TAG_ROW = "row";
+  private static final String TAG_COL = "col";
+  private boolean documentStarted = false;
+  private XMLEventReader reader;
+
+  public XMLIterator(XMLEventReader reader) throws IOException {
+    this.reader = reader;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      LOG.debug("error : {}", e);
+    } catch (XMLStreamException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return null != nextObject;
+  }
+
+  public LinkedHashMap<String, String> peek() {
+    return nextObject;
+  }
+
+  @Override
+  public LinkedHashMap<String, String> next() {
+    LinkedHashMap<String, String> currObject = nextObject;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (IOException e) {
+      LOG.error("Exception occured while reading the next row from XML : {} ", e);
+      nextObject = null;
+    } catch (EndOfDocumentException e) {
+      LOG.debug("End of XML document reached with next character ending the XML.");
+      nextObject = null;
+    } catch (XMLStreamException e) {
+      LOG.error("Exception occured while reading the next row from XML : {} ", e);
+      nextObject = null;
+    }
+    return currObject;
+  }
+
+  @Override
+  public void remove() {
+    // no operation.
+    LOG.info("No operation when remove called.");
+  }
+
+  private LinkedHashMap<String, String> readNextObject(XMLEventReader reader) throws IOException, EndOfDocumentException, XMLStreamException {
+    LinkedHashMap<String, String> row = new LinkedHashMap<>();
+    boolean objectStarted = false;
+    String currentName = null;
+
+    while (true) {
+      XMLEvent event = reader.nextEvent();
+      switch (event.getEventType()) {
+        case XMLStreamConstants.START_ELEMENT:
+          StartElement startElement = event.asStartElement();
+          String qName = startElement.getName().getLocalPart();
+          LOG.debug("startName : {}" , qName);
+          switch (qName) {
+            case TAG_TABLE:
+              if (documentStarted) {
+                throw new IllegalArgumentException("Cannot have a <table> tag nested inside another <table> tag");
+              } else {
+                documentStarted = true;
+              }
+              break;
+            case TAG_ROW:
+              if (objectStarted) {
+                throw new IllegalArgumentException("Cannot have a <row> tag nested inside another <row> tag");
+              } else {
+                objectStarted = true;
+              }
+              break;
+            case TAG_COL:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray tag " + qName);
+              }
+              Attribute nameAttr = startElement.getAttributeByName( new QName("name"));
+              if( null == nameAttr ){
+                throw new IllegalArgumentException("Missing name attribute in col tag.");
+              }
+              currentName = nameAttr.getValue();
+              break;
+            default:
+              throw new IllegalArgumentException("Illegal start tag " + qName + " encountered.");
+          }
+          break;
+        case XMLStreamConstants.END_ELEMENT:
+          EndElement endElement = event.asEndElement();
+          String name = endElement.getName().getLocalPart();
+          LOG.debug("endName : {}", name);
+          switch (name) {
+            case TAG_TABLE:
+              if (!documentStarted) {
+                throw new IllegalArgumentException("Stray </table> tag.");
+              }
+              throw new EndOfDocumentException("End of XML document.");
+
+            case TAG_ROW:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray </row> tag.");
+              }
+              return row;
+
+            case TAG_COL:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray tag " + name);
+              }
+              currentName = null;
+              break;
+
+            default:
+              throw new IllegalArgumentException("Illegal start ending " + name + " encountered.");
+          }
+          break;
+        case XMLStreamConstants.CHARACTERS:
+          Characters characters = event.asCharacters();
+          if (characters.isWhiteSpace() && currentName == null)
+            break;
+          String data = characters.getData();
+          LOG.debug("character data : {}", data);
+          if (currentName == null) {
+            throw new IllegalArgumentException("Illegal characters outside any tag : " + data);
+          } else {
+            String oldData = row.get(currentName);
+            if (null != oldData) {
+              data = oldData + data;
+            }
+            row.put(currentName, data);
+          }
+          break;
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java
new file mode 100644
index 0000000..51671e5
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.parsers.xml;
+
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.Parser;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.RowIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.stream.XMLEventReader;
+import javax.xml.stream.XMLInputFactory;
+import javax.xml.stream.XMLStreamException;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Collection;
+import java.util.Iterator;
+
+/**
+ * assumes XML of following format
+ * <table>
+ * <row>
+ * <col name="col1Name">row1-col1-Data</col>
+ * <col name="col2Name">row1-col2-Data</col>
+ * <col name="col3Name">row1-col3-Data</col>
+ * <col name="col4Name">row1-col4-Data</col>
+ * </row>
+ * <row>
+ * <col name="col1Name">row2-col1-Data</col>
+ * <col name="col2Name">row2-col2-Data</col>
+ * <col name="col3Name">row2-col3-Data</col>
+ * <col name="col4Name">row2-col4-Data</col>
+ * </row>
+ * </table>
+ */
+public class XMLParser extends Parser {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(XMLParser.class);
+
+  private RowIterator iterator;
+  private XMLEventReader xmlReader;
+  private XMLIterator xmlIterator;
+
+  public XMLParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    XMLInputFactory factory = XMLInputFactory.newInstance();
+    try {
+      this.xmlReader = factory.createXMLEventReader(reader);
+    } catch (XMLStreamException e) {
+      LOG.error("error occurred while creating xml reader : ", e);
+      throw new IOException("error occurred while creating xml reader : ", e);
+    }
+    xmlIterator = new XMLIterator(this.xmlReader);
+    iterator = new RowIterator(xmlIterator);
+  }
+
+  @Override
+  public Row extractHeader() {
+    Collection<String> headers = this.iterator.extractHeaders();
+    Object[] objs = new Object[headers.size()];
+    Iterator<String> iterator = headers.iterator();
+    for (int i = 0; i < headers.size(); i++) {
+      objs[i] = iterator.next();
+    }
+
+    return new Row(objs);
+  }
+
+  @Override
+  public void close() throws IOException {
+    try {
+      this.xmlReader.close();
+    } catch (XMLStreamException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return iterator;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/DeleteQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/DeleteQueryInput.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/DeleteQueryInput.java
new file mode 100644
index 0000000..dde5704
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/DeleteQueryInput.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.query;
+
/**
 * Identifies the table to be dropped: a (database, table) name pair.
 * Plain bean — no-arg constructor plus getters/setters — so it can be
 * populated from a request body.
 */
public class DeleteQueryInput {

  private String database;
  private String table;

  /** No-arg constructor for bean-style instantiation. */
  public DeleteQueryInput() {
  }

  /**
   * @param database database that owns the table
   * @param table    table to drop
   */
  public DeleteQueryInput(String database, String table) {
    this.database = database;
    this.table = table;
  }

  public String getDatabase() {
    return database;
  }

  public String getTable() {
    return table;
  }

  public void setDatabase(String database) {
    this.database = database;
  }

  public void setTable(String table) {
    this.table = table;
  }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/InsertFromQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/InsertFromQueryInput.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/InsertFromQueryInput.java
new file mode 100644
index 0000000..22dec67
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/InsertFromQueryInput.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.query;
+
/**
 * Carries the source and destination (database, table) pairs for an
 * "insert into ... select * from ..." query. Plain bean with a no-arg
 * constructor so it can be deserialized from a request body.
 */
public class InsertFromQueryInput {

  private String fromDatabase;
  private String fromTable;
  private String toDatabase;
  private String toTable;

  /** No-arg constructor for bean-style instantiation. */
  public InsertFromQueryInput() {
  }

  /**
   * @param fromDatabase database of the source table
   * @param fromTable    source table
   * @param toDatabase   database of the destination table
   * @param toTable      destination table
   */
  public InsertFromQueryInput(String fromDatabase, String fromTable, String toDatabase, String toTable) {
    this.fromDatabase = fromDatabase;
    this.fromTable = fromTable;
    this.toDatabase = toDatabase;
    this.toTable = toTable;
  }

  public String getFromDatabase() { return fromDatabase; }

  public void setFromDatabase(String fromDatabase) { this.fromDatabase = fromDatabase; }

  public String getFromTable() { return fromTable; }

  public void setFromTable(String fromTable) { this.fromTable = fromTable; }

  public String getToDatabase() { return toDatabase; }

  public void setToDatabase(String toDatabase) { this.toDatabase = toDatabase; }

  public String getToTable() { return toTable; }

  public void setToTable(String toTable) { this.toTable = toTable; }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/LoadQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/LoadQueryInput.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/LoadQueryInput.java
new file mode 100644
index 0000000..6ce705d
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/LoadQueryInput.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.query;
+
/**
 * Input for QueryGenerator when building a "LOAD DATA INPATH" query:
 * the HDFS path of the uploaded file and the destination database/table.
 */
public class LoadQueryInput {
  private String hdfsFilePath;
  private String databaseName;
  private String tableName;

  /**
   * @param hdfsFilePath HDFS path of the file to load
   * @param databaseName destination database
   * @param tableName    destination table
   */
  public LoadQueryInput(String hdfsFilePath, String databaseName, String tableName) {
    this.hdfsFilePath = hdfsFilePath;
    this.databaseName = databaseName;
    this.tableName = tableName;
  }

  public String getHdfsFilePath() { return hdfsFilePath; }

  public void setHdfsFilePath(String hdfsFilePath) { this.hdfsFilePath = hdfsFilePath; }

  public String getDatabaseName() { return databaseName; }

  public void setDatabaseName(String databaseName) { this.databaseName = databaseName; }

  public String getTableName() { return tableName; }

  public void setTableName(String tableName) { this.tableName = tableName; }

  @Override
  public String toString() {
    // Same rendering as before, expressed as a single format string.
    return String.format("LoadQueryInput{hdfsFilePath='%s', databaseName='%s', tableName='%s'}",
            hdfsFilePath, databaseName, tableName);
  }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/QueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/QueryGenerator.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/QueryGenerator.java
new file mode 100644
index 0000000..a9bc2b9
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/QueryGenerator.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.query;
+
import org.apache.ambari.view.hive2.client.ColumnDescription;
import org.apache.ambari.view.hive2.resources.uploads.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
+
+/**
+ * generates the sql query from given data
+ */
+public class QueryGenerator {
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(QueryGenerator.class);
+
+  public String generateCreateQuery(TableInfo tableInfo) {
+    String tableName = tableInfo.getTableName();
+    List<ColumnDescriptionImpl> cdList = tableInfo.getColumns();
+
+    StringBuilder query = new StringBuilder();
+    query.append("create table " + tableName + " (");
+    Collections.sort(cdList, new Comparator<ColumnDescription>() {
+      @Override
+      public int compare(ColumnDescription o1, ColumnDescription o2) {
+        return o1.getPosition() - o2.getPosition();
+      }
+    });
+
+    boolean first = true;
+    for (ColumnDescriptionImpl cd : cdList) {
+      if (first) {
+        first = false;
+      } else {
+        query.append(", ");
+      }
+
+      query.append(cd.getName() + " " + cd.getType());
+      if (cd.getPrecision() != null) {
+        query.append("(").append(cd.getPrecision());
+        if (cd.getScale() != null) {
+          query.append(",").append(cd.getScale());
+        }
+        query.append(")");
+      }
+
+    }
+
+    query.append(")");
+
+    if (tableInfo.getHiveFileType() == HiveFileType.TEXTFILE)
+      query.append(" ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;");
+    else
+      query.append(" STORED AS " + tableInfo.getHiveFileType() + ";");
+
+    String queryString = query.toString();
+    LOG.info("Query : {}", queryString);
+    return queryString;
+  }
+
+  public String generateInsertFromQuery(InsertFromQueryInput ifqi) {
+    String insertQuery = "insert into table " + ifqi.getToDatabase() + "." + ifqi.getToTable() + " select * from " + ifqi.getFromDatabase() + "." + ifqi.getFromTable();
+    LOG.info("Insert Query : {}", insertQuery);
+    return insertQuery;
+  }
+
+  public String generateDropTableQuery(DeleteQueryInput deleteQueryInput) {
+    String dropQuery = "drop table " + deleteQueryInput.getDatabase() + "." + deleteQueryInput.getTable();
+    LOG.info("Drop Query : {}", dropQuery);
+    return dropQuery;
+  }
+
+  public String generateLoadQuery(LoadQueryInput loadQueryInput) {
+    String loadFromQuery = "LOAD DATA INPATH '"  + loadQueryInput.getHdfsFilePath() + "' INTO TABLE " + loadQueryInput.getDatabaseName() + "." + loadQueryInput.getTableName() + ";" ;
+    LOG.info("Load From Query : {}", loadFromQuery);
+    return loadFromQuery;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/TableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/TableInfo.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/TableInfo.java
new file mode 100644
index 0000000..2b91d7b
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/query/TableInfo.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.uploads.query;
+
+import org.apache.ambari.view.hive2.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive2.resources.uploads.HiveFileType;
+
+import java.util.List;
+
+/**
+ * used as input in Query generation
+ */
+public class TableInfo {
+  private String tableName;
+  private String databaseName;
+  private List<ColumnDescriptionImpl> columns;
+  private HiveFileType hiveFileType;
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public List<ColumnDescriptionImpl> getColumns() {
+    return columns;
+  }
+
+  public void setColumns(List<ColumnDescriptionImpl> columns) {
+    this.columns = columns;
+  }
+
+  public HiveFileType getHiveFileType() {
+    return hiveFileType;
+  }
+
+  public void setHiveFileType(HiveFileType hiveFileType) {
+    this.hiveFileType = hiveFileType;
+  }
+
+  public TableInfo(String databaseName, String tableName, List<ColumnDescriptionImpl> columns, HiveFileType hiveFileType) {
+    this.tableName = tableName;
+    this.databaseName = databaseName;
+    this.columns = columns;
+    this.hiveFileType = hiveFileType;
+  }
+
+  public TableInfo(TableInfo tableInfo) {
+    this.tableName = tableInfo.tableName;
+    this.databaseName = tableInfo.databaseName;
+    this.columns = tableInfo.columns;
+    this.hiveFileType = tableInfo.hiveFileType;
+  }
+
+  public TableInfo() {
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/BadRequestFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/BadRequestFormattedException.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/BadRequestFormattedException.java
new file mode 100644
index 0000000..cf56be2
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/BadRequestFormattedException.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.utils;
+
+public class BadRequestFormattedException extends ServiceFormattedException {
+  private final static int STATUS = 400;
+
+  public BadRequestFormattedException(String message, Throwable exception) {
+    super(message, exception, STATUS);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/FilePaginator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/FilePaginator.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/FilePaginator.java
new file mode 100644
index 0000000..cf30f55
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/FilePaginator.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.utils;
+
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.hadoop.fs.FSDataInputStream;
+
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+
+import static java.lang.Math.ceil;
+
+/**
+ * Pagination for HDFS file implementation
+ */
+public class FilePaginator {
+  public static int MB = 1024*1024;
+  public static int PAGE_SIZE = 1*MB;
+
+  private String filePath;
+  private HdfsApi hdfsApi;
+
+  /**
+   * Constructor
+   * @param filePath Path to file on HDFS
+   * @param hdfsApi hdfs api
+   */
+  public FilePaginator(String filePath, HdfsApi hdfsApi) {
+    this.filePath = filePath;
+    this.hdfsApi = hdfsApi;
+  }
+
+  /**
+   * Set page size
+   * @param PAGE_SIZE size
+   */
+  public static void setPageSize(int PAGE_SIZE) {
+    FilePaginator.PAGE_SIZE = PAGE_SIZE;
+  }
+
+  /**
+   * Get page count
+   * @return page count
+   * @throws java.io.IOException
+   * @throws InterruptedException
+   */
+  public long pageCount() throws IOException, InterruptedException {
+    return (long)
+        ceil( hdfsApi.getFileStatus(filePath).getLen() / ((double)PAGE_SIZE) );
+  }
+
+  /**
+   * Read one page of size PAGE_SIZE
+   * @param page page index
+   * @return data in UTF-8
+   * @throws java.io.IOException
+   * @throws InterruptedException
+   */
+  public String readPage(long page) throws IOException, InterruptedException {
+    FSDataInputStream stream = hdfsApi.open(filePath);
+    try {
+      stream.seek(page * PAGE_SIZE);
+    } catch (IOException e) {
+      throw new IllegalArgumentException("Page " + page + " does not exists");
+    }
+
+    byte[] buffer = new byte[PAGE_SIZE];
+    int readCount = 0;
+    int read = 0;
+    while(read < PAGE_SIZE) {
+      try {
+        readCount = stream.read(buffer, read, PAGE_SIZE-read);
+      } catch (IOException e) {
+        stream.close();
+        throw e;
+      }
+      if (readCount == -1)
+        break;
+      read += readCount;
+    }
+    if (read != 0) {
+      byte[] readData = Arrays.copyOfRange(buffer, 0, read);
+      return new String(readData, Charset.forName("UTF-8"));
+    } else {
+      if (page == 0) {
+        return "";
+      }
+      throw new IllegalArgumentException("Page " + page + " does not exists");
+    }
+  }
+
+  public String readFull(long sizeLimit) throws IOException, InterruptedException {
+    StringBuilder builder = new StringBuilder();
+    int i = 0;
+    while (true) {
+      try {
+        builder.append(readPage(i++));
+      } catch (IllegalArgumentException ex) {
+        break;
+      }
+      if (sizeLimit != -1 && (i+1)*PAGE_SIZE > sizeLimit)
+        break;
+    }
+    return builder.toString();
+  }
+
+  public String readFull() throws IOException, InterruptedException {
+    return readFull(-1);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/HiveActorConfiguration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/HiveActorConfiguration.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/HiveActorConfiguration.java
new file mode 100644
index 0000000..d6cb6e8
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/HiveActorConfiguration.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.utils;
+
+import org.apache.ambari.view.ViewContext;
+
+/**
+ * This fetches the configuration for the actor system from ambari.properties
+ */
+/**
+ * Fetches actor-system timeout configuration from ambari.properties.
+ * Lookup order: instance-specific key, then the "default" key, then the
+ * caller-supplied default value.
+ */
+public class HiveActorConfiguration {
+  private static final String DEFAULT_CONFIG = "default";
+  private static final String CONNECTION_PREFIX = "views.ambari.hive.";
+  private static final String CONNECTION_INACTIVITY_TIMEOUT_PATTERN = CONNECTION_PREFIX + "%s.connection.inactivity.timeout";
+  private static final String CONNECTION_TERMINATION_TIMEOUT_PATTERN = CONNECTION_PREFIX + "%s.connection.termination.timeout";
+  private static final String SYNC_QUERY_TIMEOUT_PATTERN = CONNECTION_PREFIX + "%s.sync.query.timeout";
+  private static final String RESULT_FETCH_TIMEOUT_PATTERN = CONNECTION_PREFIX + "%s.result.fetch.timeout";
+
+  private final ViewContext context;
+
+  public HiveActorConfiguration(ViewContext context) {
+    this.context = context;
+  }
+
+  /** @return connection inactivity timeout, or {@code defaultValue} if not configured. */
+  public long getInactivityTimeout(long defaultValue) {
+    return getLongProperty(CONNECTION_INACTIVITY_TIMEOUT_PATTERN, defaultValue);
+  }
+
+  /** @return connection termination timeout, or {@code defaultValue} if not configured. */
+  public long getTerminationTimeout(long defaultValue) {
+    return getLongProperty(CONNECTION_TERMINATION_TIMEOUT_PATTERN, defaultValue);
+  }
+
+  /** @return synchronous query timeout, or {@code defaultValue} if not configured. */
+  public long getSyncQueryTimeout(long defaultValue) {
+    return getLongProperty(SYNC_QUERY_TIMEOUT_PATTERN, defaultValue);
+  }
+
+  /** @return result fetch timeout, or {@code defaultValue} if not configured. */
+  public long getResultFetchTimeout(long defaultValue) {
+    return getLongProperty(RESULT_FETCH_TIMEOUT_PATTERN, defaultValue);
+  }
+
+  /**
+   * Resolves the property and parses it as a long. A malformed value in
+   * ambari.properties falls back to the default instead of throwing
+   * NumberFormatException at the caller.
+   */
+  private long getLongProperty(String keyPattern, long defaultValue) {
+    String value = getPropertiesFromContext(keyPattern, String.valueOf(defaultValue));
+    try {
+      return Long.parseLong(value);
+    } catch (NumberFormatException e) {
+      return defaultValue;
+    }
+  }
+
+  /**
+   * Tries to get the specific configuration with the instance name. If not found then tries to
+   * find the default set in ambari.properties. If not found then returns the default value passed
+   * @param keyPattern Pattern used to generate ambari.properties key
+   * @param defaultValue Returned when the value is not found in ambari.properties
+   * @return value of the property
+   */
+  private String getPropertiesFromContext(String keyPattern, String defaultValue) {
+    String value;
+    value = context.getAmbariProperty(String.format(keyPattern, context.getInstanceName()));
+    if(value == null) {
+      value = context.getAmbariProperty(String.format(keyPattern, DEFAULT_CONFIG));
+    }
+    return value == null ? defaultValue: value;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/HiveClientFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/HiveClientFormattedException.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/HiveClientFormattedException.java
new file mode 100644
index 0000000..3edadb2
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/HiveClientFormattedException.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.utils;
+
+/**
+ * Wraps a failure coming from the Hive client as a formatted service error,
+ * using the cause's own message as the response message.
+ */
+public class HiveClientFormattedException extends ServiceFormattedException {
+
+  /**
+   * @param exception the Hive client failure; its message and stack trace are
+   *                  surfaced in the JSON error entity built by the superclass
+   */
+  public HiveClientFormattedException(Throwable exception) {
+    super(exception.getMessage(), exception);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/LoggingOutputStream.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/LoggingOutputStream.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/LoggingOutputStream.java
new file mode 100644
index 0000000..7695893
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/LoggingOutputStream.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.utils;
+
+import org.slf4j.Logger;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * An OutputStream that buffers bytes until a newline and then emits each
+ * complete line to an SLF4J logger at a fixed level. A trailing '\r' (CRLF
+ * input) is stripped before logging.
+ */
+public class LoggingOutputStream extends OutputStream {
+
+    private final ByteArrayOutputStream baos = new ByteArrayOutputStream(1000);
+    private final Logger logger;
+    private final LogLevel level;
+
+    /** Log level the buffered lines are emitted at. */
+    public enum LogLevel {
+        TRACE, DEBUG, INFO, WARN, ERROR,
+    }
+
+    /**
+     * @param logger destination logger
+     * @param level  level every line is logged at
+     */
+    public LoggingOutputStream(Logger logger, LogLevel level) {
+        this.logger = logger;
+        this.level = level;
+    }
+
+    @Override
+    public void write(int b) {
+        if (b == '\n') {
+            // NOTE: baos.toString() uses the platform default charset,
+            // matching the original behavior.
+            String line = baos.toString();
+            baos.reset();
+            // Strip the '\r' of CRLF line endings so logged lines are clean.
+            if (line.endsWith("\r")) {
+                line = line.substring(0, line.length() - 1);
+            }
+            log(line);
+        } else {
+            baos.write(b);
+        }
+    }
+
+    /** Emits one complete line at the configured level. */
+    private void log(String line) {
+        switch (level) {
+            case TRACE:
+                logger.trace(line);
+                break;
+            case DEBUG:
+                logger.debug(line);
+                break;
+            case ERROR:
+                logger.error(line);
+                break;
+            case INFO:
+                logger.info(line);
+                break;
+            case WARN:
+                logger.warn(line);
+                break;
+        }
+    }
+
+    /**
+     * Closes this stream. Any partially buffered line (data written since the
+     * last newline) is flushed to the logger first so it is not silently lost.
+     *
+     * @throws IOException if an I/O error occurs.
+     */
+    @Override
+    public void close() throws IOException {
+        if (baos.size() > 0) {
+            // Treat the remaining buffer as a final line.
+            write('\n');
+        }
+        baos.close();
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/MisconfigurationFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/MisconfigurationFormattedException.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/MisconfigurationFormattedException.java
new file mode 100644
index 0000000..bee3fc1
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/MisconfigurationFormattedException.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.utils;
+
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.util.HashMap;
+
+/**
+ * Thrown when a required view parameter is unset; renders a JSON error entity
+ * with HTTP status 500.
+ */
+public class MisconfigurationFormattedException extends WebApplicationException {
+  private final static int STATUS = 500;
+  private final static String MESSAGE_FORMAT = "Parameter \"%s\" is set to null";
+
+  /**
+   * @param name the name of the misconfigured parameter, interpolated into the message
+   */
+  public MisconfigurationFormattedException(String name) {
+    super(errorEntity(name));
+  }
+
+  /** Builds the JSON response body: message, null trace, and the status code. */
+  protected static Response errorEntity(String name) {
+    HashMap<String, Object> response = new HashMap<String, Object>();
+    response.put("message", String.format(MESSAGE_FORMAT, name));
+    response.put("trace", null);
+    response.put("status", STATUS);
+    return Response.status(STATUS).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON).build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/NotFoundFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/NotFoundFormattedException.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/NotFoundFormattedException.java
new file mode 100644
index 0000000..b6b7dd0
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/NotFoundFormattedException.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.utils;
+
+/**
+ * A formatted service error that reports HTTP 404 (Not Found) instead of the
+ * superclass's suggested status.
+ */
+public class NotFoundFormattedException extends ServiceFormattedException {
+  private final static int STATUS = 404;
+
+  /**
+   * @param message   human-readable description of what was not found
+   * @param exception underlying cause; may carry the stack trace into the response
+   */
+  public NotFoundFormattedException(String message, Throwable exception) {
+    super(message, exception, STATUS);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/ServiceFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/ServiceFormattedException.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/ServiceFormattedException.java
new file mode 100644
index 0000000..8c14bce
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/ServiceFormattedException.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.utils;
+
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.security.AccessControlException;
+import java.util.HashMap;
+
+public class ServiceFormattedException extends WebApplicationException {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(ServiceFormattedException.class);
+
+  public ServiceFormattedException(String message) {
+    super(errorEntity(message, null, suggestStatus(null), null));
+  }
+
+  public ServiceFormattedException(Throwable exception) {
+    super(errorEntity(null, exception, suggestStatus(exception), null));
+  }
+
+  public ServiceFormattedException(String message, Throwable exception) {
+    super(errorEntity(message, exception, suggestStatus(exception), null));
+  }
+
+  public ServiceFormattedException(String message, Throwable exception, int status) {
+    super(errorEntity(message, exception, status, null));
+  }
+
+  public ServiceFormattedException(String message, Exception ex, String curl) {
+    super(errorEntity(message, ex, suggestStatus(ex), curl));
+  }
+
+  private static int suggestStatus(Throwable exception) {
+    int status = 500;
+    if (exception == null) {
+      return status;
+    }
+    if (exception instanceof AccessControlException) {
+      status = 403;
+    }
+    /*if (exception instanceof HiveInvalidQueryException) {
+      status = 400;
+    }*/
+    return status;
+  }
+
+  protected static Response errorEntity(String message, Throwable e, int status, String header) {
+    HashMap<String, Object> response = new HashMap<String, Object>();
+
+    String trace = null;
+
+    response.put("message", message);
+    if (e != null) {
+      trace = e.toString() + "\n\n";
+      StringWriter sw = new StringWriter();
+      e.printStackTrace(new PrintWriter(sw));
+      trace += sw.toString();
+
+      if (message == null) {
+        String innerMessage = e.getMessage();
+        String autoMessage;
+
+        if (innerMessage != null)
+          autoMessage = String.format("E090 %s [%s]", innerMessage, e.getClass().getSimpleName());
+        else
+          autoMessage = "E090 " + e.getClass().getSimpleName();
+        response.put("message", autoMessage);
+      }
+    }
+    response.put("trace", trace);
+    response.put("status", status);
+
+    if(message != null && status != 400) LOG.error(message);
+    if(trace != null && status != 400) LOG.error(trace);
+
+    Response.ResponseBuilder responseBuilder = Response.status(status).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON);
+    if (header != null)
+      responseBuilder.header("X-INFO", header);
+    return responseBuilder.build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java
new file mode 100644
index 0000000..dd0b715
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.utils;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive2.persistence.IStorageFactory;
+import org.apache.ambari.view.hive2.persistence.Storage;
+import org.apache.ambari.view.hive2.persistence.utils.StorageFactory;
+import org.apache.ambari.view.hive2.resources.jobs.atsJobs.ATSParser;
+import org.apache.ambari.view.hive2.resources.jobs.atsJobs.ATSParserFactory;
+import org.apache.ambari.view.hive2.resources.jobs.rm.RMParser;
+import org.apache.ambari.view.hive2.resources.jobs.rm.RMParserFactory;
+import org.apache.ambari.view.hive2.resources.jobs.viewJobs.IJobControllerFactory;
+import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobControllerFactory;
+import org.apache.ambari.view.hive2.resources.savedQueries.SavedQueryResourceManager;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Generates shared connections. Clients with same tag will get the same connection.
+ * e.g. user 'admin' using view instance 'HIVE1' will use one connection, another user
+ * will use different connection.
+ */
+public class SharedObjectsFactory implements IStorageFactory {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(SharedObjectsFactory.class);
+
+  private ViewContext context;
+  private final IStorageFactory storageFactory;
+  private final ATSParserFactory atsParserFactory;
+  private final RMParserFactory rmParserFactory;
+
+  private static final Map<Class, Map<String, Object>> localObjects = new ConcurrentHashMap<Class, Map<String, Object>>();
+
+  public SharedObjectsFactory(ViewContext context) {
+    this.context = context;
+    this.storageFactory = new StorageFactory(context);
+    this.atsParserFactory = new ATSParserFactory(context);
+    this.rmParserFactory = new RMParserFactory(context);
+
+    synchronized (localObjects) {
+      if (localObjects.size() == 0) {
+        //localObjects.put(OperationHandleControllerFactory.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(Storage.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(IJobControllerFactory.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(ATSParser.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(SavedQueryResourceManager.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(HdfsApi.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(RMParser.class, new ConcurrentHashMap<String, Object>());
+      }
+    }
+  }
+
+  // =============================
+
+  /*public OperationHandleControllerFactory getOperationHandleControllerFactory() {
+    if (!localObjects.get(OperationHandleControllerFactory.class).containsKey(getTagName()))
+      localObjects.get(OperationHandleControllerFactory.class).put(getTagName(), new OperationHandleControllerFactory(context, this));
+    return (OperationHandleControllerFactory) localObjects.get(OperationHandleControllerFactory.class).get(getTagName());
+  }*/
+
+  // =============================
+  @Override
+  public Storage getStorage() {
+    if (!localObjects.get(Storage.class).containsKey(getTagName()))
+      localObjects.get(Storage.class).put(getTagName(), storageFactory.getStorage());
+    return (Storage) localObjects.get(Storage.class).get(getTagName());
+  }
+
+  // =============================
+  public IJobControllerFactory getJobControllerFactory() {
+    if (!localObjects.get(IJobControllerFactory.class).containsKey(getTagName()))
+      localObjects.get(IJobControllerFactory.class).put(getTagName(), new JobControllerFactory(context, this));
+    return (IJobControllerFactory) localObjects.get(IJobControllerFactory.class).get(getTagName());
+  }
+
+  // =============================
+
+  public SavedQueryResourceManager getSavedQueryResourceManager() {
+    if (!localObjects.get(SavedQueryResourceManager.class).containsKey(getTagName()))
+      localObjects.get(SavedQueryResourceManager.class).put(getTagName(), new SavedQueryResourceManager(context, this));
+    return (SavedQueryResourceManager) localObjects.get(SavedQueryResourceManager.class).get(getTagName());
+  }
+
+  // =============================
+  public ATSParser getATSParser() {
+    if (!localObjects.get(ATSParser.class).containsKey(getTagName()))
+      localObjects.get(ATSParser.class).put(getTagName(), atsParserFactory.getATSParser());
+    return (ATSParser) localObjects.get(ATSParser.class).get(getTagName());
+  }
+
+  // =============================
+  public RMParser getRMParser() {
+    if (!localObjects.get(RMParser.class).containsKey(getTagName()))
+      localObjects.get(RMParser.class).put(getTagName(), rmParserFactory.getRMParser());
+    return (RMParser) localObjects.get(RMParser.class).get(getTagName());
+  }
+
+  // =============================
+  public HdfsApi getHdfsApi() {
+    if (!localObjects.get(HdfsApi.class).containsKey(getTagName())) {
+      try {
+        localObjects.get(HdfsApi.class).put(getTagName(), HdfsUtil.connectToHDFSApi(context));
+      } catch (HdfsApiException e) {
+        String message = "F060 Couldn't open connection to HDFS";
+        LOG.error(message);
+        throw new ServiceFormattedException(message, e);
+      }
+    }
+    return (HdfsApi) localObjects.get(HdfsApi.class).get(getTagName());
+  }
+
+  /**
+   * Generates tag name. Clients with same tag will share one connection.
+   * @return tag name
+   */
+  public String getTagName() {
+    if (context == null)
+      return "<null>";
+    return String.format("%s:%s", context.getInstanceName(), context.getUsername());
+  }
+
+  /**
+   * For testing purposes, ability to substitute some local object
+   */
+  public void setInstance(Class clazz, Object object) {
+    localObjects.get(clazz).put(getTagName(), object);
+  }
+
+  /**
+   * For testing purposes, ability to clear all local objects of particular class
+   */
+  public void clear(Class clazz) {
+    localObjects.get(clazz).clear();
+  }
+
+  /**
+   * For testing purposes, ability to clear all connections
+   */
+  public void clear() {
+    for(Map<String, Object> map : localObjects.values()) {
+      map.clear();
+    }
+  }
+
+  /**
+   *
+   * Drops all objects for give instance name.
+   *
+   * @param instanceName
+   */
+  public static void dropInstanceCache(String instanceName){
+    for(Map<String,Object> cache : localObjects.values()){
+      for(Iterator<Map.Entry<String, Object>> it = cache.entrySet().iterator(); it.hasNext();){
+        Map.Entry<String, Object> entry = it.next();
+        if(entry.getKey().startsWith(instanceName+":")){
+          it.remove();
+        }
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b88db3cc/contrib/views/hive-next/src/main/resources/application.conf
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/application.conf b/contrib/views/hive-next/src/main/resources/application.conf
new file mode 100644
index 0000000..cb0cc7b
--- /dev/null
+++ b/contrib/views/hive-next/src/main/resources/application.conf
@@ -0,0 +1,57 @@
+#
+# Copyright 2011 The Apache Software Foundation
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+akka {
+
+  # Log level used by the configured loggers (see "loggers") as soon
+  # as they have been started; before that, see "stdout-loglevel"
+  # Options: OFF, ERROR, WARNING, INFO, DEBUG
+  loglevel = "DEBUG"
+
+  # Log level for the very basic logger activated during ActorSystem startup.
+  # This logger prints the log messages to stdout (System.out).
+  # Options: OFF, ERROR, WARNING, INFO, DEBUG
+  stdout-loglevel = "INFO"
+
+  actor {
+
+    # Dispatcher to be used for creating Jdbc Connector actors (sync/Async)
+    jdbc-connector-dispatcher {
+      fork-join-executor {
+        parallelism-factor = 5.0
+      }
+    }
+
+    # Dispatcher to be used for creating Resultset related actors
+    result-dispatcher {
+      fork-join-executor {
+        parallelism-factor = 10.0
+      }
+    }
+
+    # Dispatcher to be used for creating miscellaneous actors like logger, exceptionWriter etc
+    misc-dispatcher {
+      fork-join-executor {
+        parallelism-factor = 5.0
+      }
+    }
+  }
+
+}
\ No newline at end of file


Mime
View raw message