ambari-commits mailing list archives

From pallav...@apache.org
Subject [1/3] ambari git commit: AMBARI-15046. Hive view Upload table feature now supports upload from HDFS and Local. Supports JSON/XML/CSV as uploaded files and ORC and all other hive internal storage types. (Nitiraj Rathore via pallavkul)
Date Fri, 26 Feb 2016 10:00:21 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk a5f81f861 -> 2fee278f6


http://git-wip-us.apache.org/repos/asf/ambari/blob/2fee278f/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
index d811e34..b7d9011 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
@@ -16,77 +16,153 @@
 * limitations under the License.
 }}
 
-<div class="pull-right">
-  <i class="query-menu-tab fa queries-icon query-context-tab fa-envelope" {{ action 'toggleErrors'}}></i>
-</div>
-
 <div style="width : 90%">
-<div class="main-content">
-  {{#if showErrors}}
-    {{render 'messages'}}
-  {{/if}}
+  <div class="main-content">
+    {{#if showErrors}}
+      {{render 'messages'}}
+    {{/if}}
+  </div>
 </div>
+
+<div id="uploadProgressModal" class="modal fade" role="dialog" data-backdrop="static">
+  <div class="modal-dialog">
+
+    <!-- Modal content-->
+    <div class="modal-content">
+      <div class="modal-header">
+        <h4 class="modal-title">Upload Progress</h4>
+      </div>
+      <div class="modal-body">
+        <p>
+        <ul>
+          {{uploadProgressInfo}}
+        </ul>
+        </p>
+      </div>
+    </div>
+
+  </div>
 </div>
 
-{{#unless showErrors}}
-<div>
-  {{file-upload filesUploaded="filesUploaded"}}
+<div class="pull-right">
+  <i class="query-menu-tab fa queries-icon fa-envelope" {{ action 'toggleErrors'}}></i>
 </div>
+<div {{bind-attr class="showErrors:hide-data:show-data"}}>
+  <div>
+    <table class="table data-upload-form pull-left">
+      <tr>
+        <td class="data-upload-form-label"><label>Upload from Local</label></td>
+        <td  class="data-upload-form-field"> {{radio-button value='local' checked=uploadSource}}</td>
 
-  <!--<div class='fa query-menu-tab fa queries-icon query-context-tab fa-envelope'></div>-->
+        <td class="data-upload-form-label"><label>Upload from HDFS</label></td>
+        <td  class="data-upload-form-field">{{radio-button value='hdfs' checked=uploadSource}}</td>
+      </tr>
+      <tr>
+        <td class="data-upload-form-label"><label>File type</label></td>
+        <td class="data-upload-form-field">
+          {{typeahead-widget
+          content=inputFileTypes
+          optionValuePath="id"
+          optionLabelPath="name"
+          selection=inputFileType
+          placeholder="Select File Type"}}
+        </td>
+
+
+        {{#if isLocalUpload }}
+          <td class="data-upload-form-label"><label>Select from local</label></td>
+          <td class="data-upload-form-field">{{file-upload  filesUploaded="filesUploaded"}}</td>
+        {{else}}
+          <td class="data-upload-form-label"><label>HDFS Path</label></td>
+          <td class="data-upload-form-field" id="hdfs-param">{{input type="text" class="form-control" placeholder="Enter full HDFS path" value=hdfsPath }}
+            <button style="margin-left: 5px; padding-top: 6px;padding-bottom: 6px; padding-right: 10px; padding-left: 10px;" type="button" {{action "previewFromHdfs"}}
+            {{bind-attr class=":btn :btn-sm :btn-default"}}>{{t "buttons.showPreview"}}</button></td>
+        {{/if}}
+      </tr>
+      {{#if rows}}
+        <tr>
+          <td class="data-upload-form-label"><label>Database</label></td>
+          <td class="data-upload-form-field">
+            {{typeahead-widget
+            content=controllers.databases.databases
+            optionValuePath="id"
+            optionLabelPath="name"
+            selection=selectedDatabase
+            placeholder="Select a Database"
+            }}
+          </td>
+
+          <td class="data-upload-form-label"><label>Table name</label></td>
+          <td
+            class="data-upload-form-field">{{input type="text" class="form-control" placeholder="Table Name" value=tableName }}</td>
+        </tr>
+        <tr>
+          <td class="data-upload-form-label"><label>Stored as</label></td>
+          <td class="data-upload-form-field">
+            {{typeahead-widget
+            content=fileTypes
+            selection=selectedFileType}}
+          </td>
+          {{#if inputFileTypeCSV }}
+            <td class="data-upload-form-label"><label>Is first row header ?</label></td>
+            <td class="data-upload-form-field">
+              {{input id="isFirstRowHeader" type="checkbox" checked=isFirstRowHeader }}
+            </td>
+
+          {{/if}}
+        </tr>
+      {{/if}}
+    </table>
+
+    <table class="pull-right">
+      <tr>
+        <td>
+          {{#if rows}}
+            <button type="button" {{action "uploadTable"}}
+              {{bind-attr class=":btn :btn-sm :btn-default"}}>{{t "buttons.uploadTable"}}</button>
+          {{/if}}
+        </td>
+      </tr>
+    </table>
 
-  {{#if rows}}
-  <div class="query-results-tools">
-    <div class="pull-right">
-      <button type="button" {{action
-      "createTableAndUploadFile"}}
-      {{bind-attr class=":btn :btn-sm :btn-default"}}>{{t "buttons.uploadTable"}}</button>
-    </div>
   </div>
 
   <div>
-    <div class="col-md-3">Database :
-      {{typeahead-widget
-      content=controllers.databases.databases
-      optionValuePath="id"
-      optionLabelPath="name"
-      selection=selectedDatabase
-      placeholder="Select a Database"
-      }}
-    </div>
-    <div class="col-md-3">Table Name : {{input type="text" class="form-control" placeHolder="Table Name" value=tableName }}
-    </div>
-    <div class="col-md-3">Is First Row Header? :{{input id="isFirstRowHeader" type="checkbox" class="form-control" checked=isFirstRowHeader }}
-    </div>
+    {{#if rows}}
+      <div id="upload-table">
+        <table class="table table-expandable no-border">
+          <thead>
+          <tr>
+            {{#each column in header}}
+              <th>{{input placeholder="column-name" type="text" class="form-control" value=column.name}}</th>
+            {{/each}}
+          </tr>
+          <tr id="upload-controls">
+            {{#each column in header}}
+              <th>
+                <table>
+                  <tbody>
+                  <tr>
+                    <td>{{typeahead-widget content=dataTypes selection=column.type }}</td>
+                    {{input-header column=column dataTypes=dataTypes}}
+                  </tr>
+                  </tbody>
+                </table>
+              </th>
+            {{/each}}
+          </tr>
+          </thead>
+          <tbody>
+          {{#each row in rows}}
+            <tr>
+              {{#each item in row.row}}
+                <td>{{item}}</td>
+              {{/each}}
+            </tr>
+          {{/each}}
+          </tbody>
+        </table>
+      </div>
+    {{/if}}
   </div>
-<div id="upload-table">
-  <table class="table table-expandable">
-    <thead>
-    <tr>
-      {{#each column in header}}
-      <th> {{input type="text" class="form-control" value=column.name}}</th>
-      {{/each}}
-    </tr>
-    <tr>
-      {{#each column in header}}
-      <th> {{typeahead-widget
-        content=dataTypes
-        selection=column.type
-        }}
-      </th>
-      {{/each}}
-    </tr>
-    </thead>
-    <tbody>
-    {{#each row in rows}}
-    <tr>
-      {{#each item in row.row}}
-      <td>{{item}}</td>
-      {{/each}}
-    </tr>
-    {{/each}}
-    </tbody>
-  </table>
-</div>
-{{/if}}
-{{/unless}}
\ No newline at end of file
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fee278f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java
new file mode 100644
index 0000000..fb4c4da
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.csv.CSVParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.json.JSONParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class CSVParserTest {
+
+  /**
+   * No exception should be thrown when creating a CSVParser from an empty stream.
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyStream() throws IOException {
+    String csv = "";
+
+    StringReader sr = new StringReader(csv);
+
+    CSVParser jp = null;
+
+    try {
+
+      jp = new CSVParser(sr, null);
+
+      Assert.assertEquals("There should not be any rows.",false, jp.iterator().hasNext());
+
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * In CSV, an empty line is still considered a row.
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyRow() throws IOException {
+    String csv = "       ";
+    StringReader sr = new StringReader(csv);
+
+    CSVParser jp = null;
+
+    try {
+      jp = new CSVParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should not be empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should not be empty",new Object[]{"       "},iterator.next().getRow());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws IOException {
+    String csv = "value1,c,10,10.1";
+
+    StringReader sr = new StringReader(csv);
+
+    CSVParser jp = null;
+
+    try {
+      jp = new CSVParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws IOException {
+
+    String csv = "value1,c,10,10.1\n" +
+            "value2,c2,102,true";
+
+    StringReader sr = new StringReader(csv);
+
+    CSVParser jp = null;
+
+    try {
+      jp = new CSVParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+}
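
The tests above manage the parser lifecycle by hand, with null checks and try/finally around every CSVParser. A minimal sketch of the same iterate-and-close flow using try-with-resources, assuming CSVParser implements AutoCloseable (only its close() method is visible in this diff, so that interface is an assumption); otherwise the explicit try/finally pattern above still applies:

import java.io.StringReader;
import java.util.Arrays;
import java.util.Iterator;

import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.parsers.csv.CSVParser;

public class CsvParserSketch {
  public static void main(String[] args) throws Exception {
    String csv = "value1,c,10,10.1\nvalue2,c2,102,true";
    // Resources close in reverse declaration order: the parser first, then the reader.
    // Assumes CSVParser implements AutoCloseable; the tests above call close() explicitly.
    try (StringReader reader = new StringReader(csv);
         CSVParser parser = new CSVParser(reader, null)) {
      Iterator<Row> rows = parser.iterator();
      while (rows.hasNext()) {
        // Each CSV line becomes a Row whose cells are plain strings.
        System.out.println(Arrays.toString(rows.next().getRow()));
      }
    }
  }
}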

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fee278f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
new file mode 100644
index 0000000..479c934
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import com.google.gson.JsonObject;
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonToken;
+import com.google.gson.stream.JsonWriter;
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringReader;
+
+public class DataParserCSVTest {
+  @Test
+  public void testParsePreviewCSV() throws IOException {
+    String str = "1,a\n" +
+            "2,b\n" +
+            "3,c\n";
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(3, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
+              new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "1";
+      cols1[1] = "a";
+      Row row1 = new Row(cols1);
+
+      Object cols2[] = new Object[2];
+      cols2[0] = "2";
+      cols2[1] = "b";
+      Row row2 = new Row(cols2);
+
+      Object cols3[] = new Object[2];
+      cols3[0] = "3";
+      cols3[1] = "c";
+      Row row3 = new Row(cols3);
+
+      Row[] rows = {row1, row2, row3};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Extra columns in a row are ignored. The number of columns is decided by the first row;
+   * if a later row contains more columns, the extra ones are dropped.
+   * Here the first row has 2 columns and the second row has 3, so the value 'x' is ignored.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVMoreColumns() throws IOException {
+    String str = "1,a\n" +
+            "2,b,x\n" +  // contains 3 columns, one more than the first row
+            "3,c\n";
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Row row2 = new Row(new Object[]{"2","b"});
+
+      Assert.assertArrayEquals("Additional columns not properly handled.", row2.getRow(),pd.getPreviewRows().get(1).getRow());
+    } finally {
+      if (null != dp) {
+        dp.close();
+      }
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Missing values are treated as null. The number of columns is decided by the first row of the table;
+   * if a later row has fewer columns, the missing values are set to null.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVLessColumns() throws IOException {
+    String str = "1,a\n" +
+            "2\n" +  // contains 1 column, fewer than the first row
+            "3,c\n";
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+//    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Missing value not detected as null.",pd.getPreviewRows().get(1).getRow()[1],null);
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Empty values are treated as empty strings.
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyColumn() throws IOException {
+    String str = "1,a,x\n" +
+            "2,,y\n" +  // second column is empty
+            "3,c,z\n";
+//    System.out.println("str : " + str);
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(1).getRow()[1],"");
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Empty values are treated as empty strings, including trailing empty columns.
+   * @throws IOException
+   */
+  @Test
+  public void testLastEmptyColumn() throws IOException {
+    String str = "1,a,x\n" +
+            "2,,\n" +  // second and third columns are empty
+            "3,c,z\n";
+//    System.out.println("str : " + str);
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(1).getRow()[1],"");
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(1).getRow()[2],"");
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+}
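
Read together, the javadoc above pins down the CSV preview contract: the first row fixes the column count, extra cells are dropped, missing cells become null, and empty cells stay empty strings. A condensed sketch of the same DataParser preview flow, using only the classes and options exercised in these tests (the sample data is made up):

import java.io.StringReader;
import java.util.Arrays;

import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;

public class DataParserPreviewSketch {
  public static void main(String[] args) throws Exception {
    StringReader reader = new StringReader("id,name\n1,a\n2,b\n");

    ParseOptions options = new ParseOptions();
    options.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
    // Treat the first record as the header, as in testParsePreviewCSV above.
    options.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());

    DataParser parser = new DataParser(reader, options);
    try {
      PreviewData preview = parser.parsePreview();
      // Detected header: column name, type and position for each column.
      System.out.println(Arrays.toString(preview.getHeader().toArray()));
      // Preview rows; cell counts follow the first data row.
      for (Row row : preview.getPreviewRows()) {
        System.out.println(Arrays.toString(row.getRow()));
      }
    } finally {
      parser.close();
      reader.close();
    }
  }
}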

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fee278f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
new file mode 100644
index 0000000..555d8e3
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
@@ -0,0 +1,209 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserJSONTest {
+
+  @Test
+  public void testParsePreviewJSON() throws IOException {
+    String str = "[ {\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"5.4\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" },"
+            + "{\"col1\": \"e\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"8.4\" },"
+            + "{\"col1\": \"f\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"9.4\" },"
+            + "{\"col1\": \"g\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"10.4\" },"
+            + "{\"col1\": \"h\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"11.4\" },"
+            + "{\"col1\": \"i\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4\" },"
+            + "{\"col1\": \"j\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"5\" },"
+            + "{\"col1\": \"k\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6\" },"
+            + "{\"col1\": \"l\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7\" },"
+            + "{\"col1\": \"m\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"24.4\" },"
+            + "{\"col1\": \"n\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"14.4\" },"
+            + "{\"col1\": \"o\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"34.4\" },"
+            + "{\"col1\": \"p\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"44.4\" },"
+            + "{\"col1\": \"q\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"54.4\" },"
+            + "{\"col1\": \"r\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"64.4\" }"
+            + "]";
+
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS, 7);
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(8, pd.getPreviewRows().size()); // header row + preview rows
+      Assert.assertEquals(14, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+              new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
+              new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+              new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.STRING.toString(), 3),
+              new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.STRING.toString(), 4),
+              new ColumnDescriptionImpl("col6", ColumnDescriptionShort.DataTypes.STRING.toString(), 5),
+              new ColumnDescriptionImpl("col7", ColumnDescriptionShort.DataTypes.STRING.toString(), 6),
+              new ColumnDescriptionImpl("col8", ColumnDescriptionShort.DataTypes.STRING.toString(), 7),
+              new ColumnDescriptionImpl("col9", ColumnDescriptionShort.DataTypes.STRING.toString(), 8),
+              new ColumnDescriptionImpl("col10", ColumnDescriptionShort.DataTypes.STRING.toString(), 9),
+              new ColumnDescriptionImpl("col11", ColumnDescriptionShort.DataTypes.STRING.toString(), 10),
+              new ColumnDescriptionImpl("col12", ColumnDescriptionShort.DataTypes.STRING.toString(), 11),
+              new ColumnDescriptionImpl("col13", ColumnDescriptionShort.DataTypes.STRING.toString(), 12),
+              new ColumnDescriptionImpl("col14", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 13)};
+
+      Row row1 = new Row(new Object[]{"col1", "col2", "col3", "col4", "col5", "col6", "col7", "col8", "col9", "col10", "col11", "col12", "col13", "col14"});
+      Row row2 = new Row(new Object[]{"a", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "4.4"});
+      Row row3 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "5.4"});
+      Row row4 = new Row(new Object[]{"c", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "6.4"});
+      Row row5 = new Row(new Object[]{"d", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "7.4"});
+      Row row6 = new Row(new Object[]{"e", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "8.4"});
+      Row row7 = new Row(new Object[]{"f", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "9.4"});
+      Row row8 = new Row(new Object[]{"g", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "10.4"});
+
+      Row[] rows = {row1, row2, row3, row4, row5, row6, row7, row8};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Additional columns in JSON records are ignored; the column set is decided by the first record.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVMoreColumns() throws IOException {
+    String str = "[ {\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" , \"col14\" : \"43.4\" ,\"col15\" : \"asafsfa\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }"
+            + "]";
+
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "43.4"});
+      Assert.assertArrayEquals("More number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Missing columns in a JSON record are set to null.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVLessColumns() throws IOException {
+    String str = "[ " +
+            "{\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\"  },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }"
+            + "]";
+
+    StringReader sr = new StringReader(str);
+    DataParser dp = null;
+    try {
+      ParseOptions parseOptions = new ParseOptions();
+      parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+
+      Assert.assertNull(pd.getPreviewRows().get(2).getRow()[13]);
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Illegal JSON format results in an error.
+   *
+   * @throws IOException
+   */
+  @Test(expected = IllegalArgumentException.class)
+  public void testWrongJsonFormat() throws IOException {
+    String str = "[ " +
+            "{\"col1\" : \"a\", \n\"col2\": \"abcd\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\"  },"
+            + "{\"col1\": \"d\",, \n\"col2\": \"abcd\"  }"       // extra comma in this line
+            + "]";
+    DataParser dp = null;
+    StringReader sr = new StringReader(str);
+
+    try {
+      ParseOptions parseOptions = new ParseOptions();
+      parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fee278f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserTest.java
deleted file mode 100644
index 3d77d29..0000000
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.upload;
-
-import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.apache.ambari.view.hive.client.ColumnDescriptionShort;
-import org.apache.ambari.view.hive.client.Row;
-import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
-import org.apache.ambari.view.hive.resources.uploads.DataParser;
-import org.apache.ambari.view.hive.resources.uploads.ParseOptions;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-
-public class DataParserTest {
-
-  @Test
-  public void testDataParser() throws IOException {
-    String str = "1,a\n" +
-            "2,b\n" +
-            "3,c\n";
-    StringReader sr = new StringReader(str);
-
-    ParseOptions parseOptions = new ParseOptions();
-    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.FILE_TYPE_CSV);
-    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER_FIRST_RECORD);
-
-    DataParser dp = new DataParser(sr, parseOptions);
-    dp.parsePreview();
-    Assert.assertNotNull(dp.getPreviewRows());
-    Assert.assertNotNull(dp.getHeader());
-    Assert.assertEquals(3, dp.getPreviewRows().size());
-    Assert.assertEquals(2, dp.getHeader().size());
-    ColumnDescription[] cd = {new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
-            new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
-
-    Assert.assertArrayEquals("Header Not Correct.", cd, dp.getHeader().toArray());
-
-    // TODO : include testing of each row element. Below comparison does not work properly.
-    // Object[] rows = {new Row(new Object[]{'1','a'}),new Row(new Object[]{'2','b'}),new Row(new Object[]{'3','c'})};
-    // Assert.assertArrayEquals("Rows Not Correct.", rows, dp.getPreviewRows().toArray());
-
-    sr.close();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fee278f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
new file mode 100644
index 0000000..f925dc3
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserXMLTest {
+
+  @Test
+  public void testParsePreviewXML() throws IOException {
+    String str = "<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(3, pd.getPreviewRows().size()); // header row + preview rows
+      Assert.assertEquals(5, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+              new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
+              new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+              new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.INT.toString(), 3),
+              new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.INT.toString(), 4)
+      };
+
+      Row row1 = new Row(new Object[]{"col1", "col2", "col3", "col4", "col5"});
+      Row row2 = new Row(new Object[]{"row1-col1-Value", "row1-col2-Value", "row1-col3-Value", "10", "11"});
+      Row row3 = new Row(new Object[]{"row2-col1-Value", "row2-col2-Value", "row2-col3-Value", "20", "21"});
+
+      Row[] rows = {row1, row2, row3};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+
+  /**
+   * Additional columns in XML rows are ignored.
+   * The number of columns is decided by the first row; here the second row contains extra columns, so they are ignored.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVMoreColumns() throws IOException {
+    String str ="<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra column
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value","20","21"});
+      Assert.assertArrayEquals("More number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Missing columns in an XML row are set to null.
+   * The number of columns is decided by the first row; here the second row does not contain col99 and col100,
+   * so those values are set to null.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVLessColumns() throws IOException {
+    String str = "<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra column
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    StringReader sr = new StringReader(str);
+    DataParser dp = null;
+    try {
+      ParseOptions parseOptions = new ParseOptions();
+      parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value",null,null,"20","21"});
+      Assert.assertArrayEquals("Less number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(2).getRow());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Illegal XML format results in an error; here an unexpected tag triggers it.
+   *
+   * @throws IOException
+   */
+  @Test(expected = IllegalArgumentException.class)
+  public void testWrongXMLFormat() throws IOException {
+    String str = "<table>" +
+            "<row>" +
+            "<ccc></ccc>" +   // illegal tag.
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra column
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+    DataParser dp = null;
+    StringReader sr = new StringReader(str);
+
+    try {
+      ParseOptions parseOptions = new ParseOptions();
+      parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fee278f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/JsonParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/JsonParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/JsonParserTest.java
new file mode 100644
index 0000000..38ed225
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/JsonParserTest.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.json.JSONParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.xml.XMLParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class JsonParserTest {
+
+  @Test(expected = IOException.class)
+  public void testEmptyStream() throws IOException {
+    String json = "";
+
+    StringReader sr = new StringReader(json);
+
+    JSONParser jp = null;
+
+    try {
+
+      jp = new JSONParser(sr, null);
+
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testEmptyRow() throws IOException {
+    JsonObject jo = new JsonObject();
+    JsonArray ja = new JsonArray();
+    ja.add(jo);
+    String json = ja.toString();
+
+    StringReader sr = new StringReader(json);
+
+    JSONParser jp = null;
+
+    try {
+      jp = new JSONParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should not be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should be empty",new Object[]{},iterator.next().getRow());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+
+  @Test
+  public void testEmptyTable() throws IOException {
+    JsonArray ja = new JsonArray();
+    String json = ja.toString();
+
+    StringReader sr = new StringReader(json);
+
+    JSONParser jp = null;
+
+    try {
+      jp = new JSONParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws IOException {
+    JsonObject jo = new JsonObject();
+    jo.addProperty("key1","value1");
+    jo.addProperty("key2",'c');
+    jo.addProperty("key3",10);
+    jo.addProperty("key4",10.1);
+
+    JsonArray ja = new JsonArray();
+    ja.add(jo);
+    String json = ja.toString();
+
+    StringReader sr = new StringReader(json);
+
+    JSONParser jp = null;
+
+    try {
+      jp = new JSONParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws IOException {
+    JsonObject jo1 = new JsonObject();
+    jo1.addProperty("key1","value1");
+    jo1.addProperty("key2","c");
+    jo1.addProperty("key3","10");
+    jo1.addProperty("key4","10.1");
+
+    JsonObject jo2 = new JsonObject();
+    jo2.addProperty("key1","value2");
+    jo2.addProperty("key2","c2");
+    jo2.addProperty("key3","102");
+    jo2.addProperty("key4",true);
+
+
+    JsonArray ja = new JsonArray();
+    ja.add(jo1);
+    ja.add(jo2);
+
+    String json = ja.toString();
+    StringReader sr = new StringReader(json);
+
+    JSONParser jp = null;
+
+    try {
+      jp = new JSONParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+}
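
The tests above pin down the JSONParser contract: input is a JSON array of objects, each object becomes a Row whose cells are the property values rendered as strings, and an empty stream is rejected with an IOException. The following is a minimal usage sketch built only from the API exercised above; the JSON payload and class name are made up for illustration, and the second constructor argument is passed as null exactly as in the tests (its type and purpose are not shown in this diff).

import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.parsers.json.JSONParser;

import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;
import java.util.Iterator;

public class JsonParserUsageSketch {
  public static void main(String[] args) throws IOException {
    // One JSON object per row, mirroring the fixtures built with Gson in the tests above.
    String json = "[{\"key1\":\"value1\",\"key2\":\"c\",\"key3\":10,\"key4\":10.1}]";
    StringReader reader = new StringReader(json);
    JSONParser parser = null;
    try {
      parser = new JSONParser(reader, null);
      Iterator<Row> rows = parser.iterator();
      while (rows.hasNext()) {
        // Each Row carries the column values as strings, e.g. [value1, c, 10, 10.1].
        System.out.println(Arrays.toString(rows.next().getRow()));
      }
    } finally {
      if (parser != null) {
        parser.close();
      }
      reader.close();
    }
  }
}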

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fee278f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java
new file mode 100644
index 0000000..5941aaaa
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.HiveFileType;
+import org.apache.ambari.view.hive.resources.uploads.query.DeleteQueryInput;
+import org.apache.ambari.view.hive.resources.uploads.query.InsertFromQueryInput;
+import org.apache.ambari.view.hive.resources.uploads.query.QueryGenerator;
+import org.apache.ambari.view.hive.resources.uploads.query.TableInfo;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class QueryGeneratorTest {
+  @Test
+  public void testCreateTextFile() {
+
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(5);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.TEXTFILE);
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("Create query for text file not correct ","create table tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;",qg.generateCreateQuery(ti));
+  }
+
+  @Test
+  public void testCreateORC() {
+
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(5);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.ORC);
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("Create query for ORC file not correct", "create table tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) STORED AS ORC;", qg.generateCreateQuery(ti));
+  }
+
+  @Test
+  public void testInsertFromQuery() {
+
+    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable");
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("insert from one table to another not correct ","insert into table toDB.toTable select * from fromDB.fromTable",qg.generateInsertFromQuery(ifqi));
+  }
+
+  @Test
+  public void testDropTableQuery() {
+
+    DeleteQueryInput deleteQueryInput = new DeleteQueryInput("dbName","tableName");
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("drop table query not correct ","drop table dbName.tableName",qg.generateDropTableQuery(deleteQueryInput ));
+  }
+}
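
Taken together, these tests document the DDL/DML that QueryGenerator is expected to emit: a CREATE with the column list and storage clause, an INSERT ... SELECT between tables, and a DROP TABLE. The sketch below shows, under the same assumptions as the tests (zero-based column positions, ORC needing no ROW FORMAT clause), how a caller might build a TableInfo and obtain the CREATE statement; the database, table, and column names here are hypothetical.

import org.apache.ambari.view.hive.client.ColumnDescription;
import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
import org.apache.ambari.view.hive.resources.uploads.HiveFileType;
import org.apache.ambari.view.hive.resources.uploads.query.QueryGenerator;
import org.apache.ambari.view.hive.resources.uploads.query.TableInfo;

import java.util.ArrayList;
import java.util.List;

public class QueryGeneratorUsageSketch {
  public static void main(String[] args) {
    List<ColumnDescriptionImpl> columns = new ArrayList<>(2);
    columns.add(new ColumnDescriptionImpl("id", ColumnDescription.DataTypes.INT.toString(), 0));
    columns.add(new ColumnDescriptionImpl("name", ColumnDescription.DataTypes.STRING.toString(), 1));

    // ORC-backed target table; per testCreateORC above, no ROW FORMAT clause is appended.
    TableInfo table = new TableInfo("mydb", "people", columns, HiveFileType.ORC);

    QueryGenerator generator = new QueryGenerator();
    // Expected, going by the assertions above:
    // create table people (id INT, name STRING) STORED AS ORC;
    System.out.println(generator.generateCreateQuery(table));
  }
}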

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fee278f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java
new file mode 100644
index 0000000..d94eace
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.TableDataReader;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+
+public class TableDataReaderTest {
+
+  private class RowIter implements Iterator<Row> {
+    int numberOfRows;
+    int numberOfCols;
+    int index = 0;
+    ArrayList<Row> rows = new ArrayList<Row>();
+    public RowIter(int numberOfRows, int numberOfCols){
+      this.numberOfRows = numberOfRows;
+      this.numberOfCols = numberOfCols;
+      int x = 0;
+      for (int i = 0; i < this.numberOfRows; i++) {
+        Object[] objArray = new Object[this.numberOfCols]; // one cell per column
+        for (int j = 0; j < this.numberOfCols; j++) {
+          objArray[j] = x++ + "";
+        }
+        rows.add(new Row(objArray));
+      }
+    }
+    @Override
+    public boolean hasNext() {
+      return index < numberOfRows;
+    }
+
+    @Override
+    public Row next() {
+      return rows.get(index++);
+    }
+
+    @Override
+    public void remove() {
+      throw new UnsupportedOperationException("remove not supported.");
+    }
+
+    @Override
+    public String toString() {
+      return "RowIter{" +
+              "index=" + index +
+              ", rows=" + rows +
+              '}';
+    }
+  }
+
+  @Test
+  public void testCSVReader() throws IOException {
+    RowIter rowIter = new RowIter(10,10);
+
+    TableDataReader tableDataReader = new TableDataReader(rowIter);
+
+    char[] first10 = "0,1,2,3,4,".toCharArray();
+    char[] buf = new char[10];
+    tableDataReader.read(buf, 0, 10);
+
+    Assert.assertArrayEquals(first10, buf);
+
+    char[] next11 = "5,6,7,8,9\n1".toCharArray();
+    char[] buf1 = new char[11];
+    tableDataReader.read(buf1, 0, 11);
+
+    Assert.assertArrayEquals(next11, buf1);
+
+    // drain the reader; buf retains whatever the final reads wrote
+    while (tableDataReader.read(buf, 0, 10) != -1);
+
+    char[] last10 = "97,98,99\n,".toCharArray(); // the trailing comma is left over from the previous read
+
+    Assert.assertArrayEquals(last10, buf);
+  }
+
+  @Test
+  public void testEmptyCSVReader() throws IOException {
+    RowIter rowIter = new RowIter(0,0);
+
+    TableDataReader tableDataReader = new TableDataReader(rowIter);
+
+    // both arrays stay zero-filled ('\0') because there is no row data to read
+    char[] first10 = new char[10];
+    char[] buf = new char[10];
+
+    tableDataReader.read(buf, 0, 10);
+
+    Assert.assertArrayEquals(first10, buf);
+  }
+}
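
TableDataReader evidently adapts an Iterator<Row> into a character stream of comma-separated lines, which is what the upload flow can hand to Hive as a CSV payload. A short sketch of that use follows, assuming read(char[], int, int) follows the usual java.io.Reader contract (returns the number of characters read, -1 at end of data), as exercised above; the row values and class name are made up for illustration.

import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.TableDataReader;

import java.io.IOException;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.Iterator;

public class TableDataReaderUsageSketch {
  public static void main(String[] args) throws IOException {
    // Two hypothetical rows to serialize as CSV.
    Iterator<Row> rows = Arrays.asList(
        new Row(new Object[]{"1", "alice"}),
        new Row(new Object[]{"2", "bob"})).iterator();

    TableDataReader csv = new TableDataReader(rows);
    StringWriter out = new StringWriter();
    char[] buf = new char[64];
    int n;
    while ((n = csv.read(buf, 0, buf.length)) != -1) {
      out.write(buf, 0, n);
    }
    // Prints one comma-separated line per Row, '\n' terminated: 1,alice\n2,bob\n
    System.out.print(out);
  }
}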

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fee278f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/XMLParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/XMLParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/XMLParserTest.java
new file mode 100644
index 0000000..d048a62
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/XMLParserTest.java
@@ -0,0 +1,180 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.xml.XMLParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class XMLParserTest {
+
+  @Test(expected = IOException.class)
+  public void testEmptyStream() throws IOException {
+    String xml = "";
+
+    StringReader sr = new StringReader(xml);
+
+    XMLParser jp = null;
+
+    try {
+
+      jp = new XMLParser(sr, null);
+
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testEmptyRow() throws IOException {
+    String xml = "<table><row></row></table>";
+
+    StringReader sr = new StringReader(xml);
+
+    XMLParser jp = null;
+
+    try {
+      jp = new XMLParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should not be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should be empty",new Object[]{},iterator.next().getRow());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+
+  @Test
+  public void testEmptyTable() throws IOException {
+    String xml = "<table></table>";
+    StringReader sr = new StringReader(xml);
+
+    XMLParser jp = null;
+
+    try {
+      jp = new XMLParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should be empty!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws IOException {
+
+    String xml =
+    "<table>"
+    + "<row>"
+    + "<col name=\"key1\">value1</col>"
+    + "<col name=\"key2\">c</col>"
+    + "<col name=\"key3\">10</col>"
+    + "<col name=\"key4\">10.1</col>"
+    + "</row>"
+    + "</table>"  ;
+
+    StringReader sr = new StringReader(xml);
+
+    XMLParser jp = null;
+
+    try {
+      jp = new XMLParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws IOException {
+    String xml =
+    "<table>"
+    + "<row>"
+    + "<col name=\"key1\">value1</col>"
+    + "<col name=\"key2\">c</col>"
+    + "<col name=\"key3\">10</col>"
+    + "<col name=\"key4\">10.1</col>"
+    + "</row>"
+    + "<row>"
+    + "<col name=\"key1\">value2</col>"
+    + "<col name=\"key2\">c2</col>"
+    + "<col name=\"key3\">102</col>"
+    + "<col name=\"key4\">true</col>"
+    + "</row>"
+    + "</table>"  ;
+
+
+
+    StringReader sr = new StringReader(xml);
+
+    XMLParser jp = null;
+
+    try {
+      jp = new XMLParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+}

