ambari-commits mailing list archives

From pallav...@apache.org
Subject ambari git commit: AMBARI-16266. [Hive View]Upload Table- Error on uploading csv with 1 row (Nitiraj Rathore via pallavkul)
Date Mon, 16 May 2016 13:30:15 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.4 16ef5b1e3 -> afd68d9c9


AMBARI-16266. [Hive View]Upload Table- Error on uploading csv with 1 row (Nitiraj Rathore via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/afd68d9c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/afd68d9c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/afd68d9c

Branch: refs/heads/branch-2.4
Commit: afd68d9c9cfe0ae0ec2733ac66cc0b39809cc563
Parents: 16ef5b1
Author: Pallav Kulshreshtha <pallav.kul@gmail.com>
Authored: Mon May 16 18:55:44 2016 +0530
Committer: Pallav Kulshreshtha <pallav.kul@gmail.com>
Committed: Mon May 16 18:59:37 2016 +0530

----------------------------------------------------------------------
 .../view/hive/resources/uploads/TableInput.java |  22 ++--
 .../resources/uploads/UploadFromHdfsInput.java  |  16 +--
 .../hive/resources/uploads/UploadService.java   |  55 +++++++++-
 .../resources/uploads/parsers/ParseOptions.java |   4 +-
 .../hive/resources/uploads/parsers/Parser.java  |  11 +-
 .../ui/hive-web/app/controllers/upload-table.js |   8 +-
 .../ui/hive-web/app/templates/upload-table.hbs  |   9 +-
 .../resources/upload/DataParserCSVTest.java     |  90 +++++++++++++---
 .../resources/upload/DataParserJSONTest.java    | 102 +++++++++++++++++--
 .../resources/upload/DataParserXMLTest.java     | 101 +++++++++++++++++-
 10 files changed, 354 insertions(+), 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/afd68d9c/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
index 4a50e93..f7fbbba 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
@@ -24,7 +24,7 @@ import java.util.List;
  * used as input in REST call
  */
 class TableInput {
-  public Boolean isFirstRowHeader;
+  public Boolean isFirstRowHeader = Boolean.FALSE;
   public List<ColumnDescriptionImpl> header;
   public String tableName;
   public String databaseName;
@@ -32,10 +32,6 @@ class TableInput {
    * the format of the file created for the table inside hive : ORC TEXTFILE etc.
    */
   public String fileType;
-  /**
-   * the format of the file uploaded. CSV, JSON, XML etc.
-   */
-  public String fileFormat;
 
   public TableInput() {
   }
@@ -80,11 +76,15 @@ class TableInput {
     this.fileType = fileType;
   }
 
-  public String getFileFormat() {
-    return fileFormat;
-  }
-
-  public void setFileFormat(String fileFormat) {
-    this.fileFormat = fileFormat;
+  public void validate(){
+    if( null == this.getFileType()){
+      throw new IllegalArgumentException("fileType parameter cannot be null.");
+    }
+    if( null == this.getTableName()){
+      throw new IllegalArgumentException("tableName parameter cannot be null.");
+    }
+    if( null == this.getDatabaseName()){
+      throw new IllegalArgumentException("databaseName parameter cannot be null.");
+    }
   }
 }
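
For reference, a minimal sketch (not part of the patch) of how the new validate() guard behaves when a create-table request omits fileType; the field values below are illustrative and the fields are public as declared above:

    TableInput tableInput = new TableInput();
    tableInput.tableName = "t1";              // illustrative values
    tableInput.databaseName = "default";
    // fileType intentionally left null
    tableInput.validate();                    // throws IllegalArgumentException("fileType parameter cannot be null.")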

http://git-wip-us.apache.org/repos/asf/ambari/blob/afd68d9c/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
index 9b052ab..14bd27a 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
@@ -21,7 +21,7 @@ package org.apache.ambari.view.hive.resources.uploads;
 import java.io.Serializable;
 
 public class UploadFromHdfsInput implements Serializable{
-  private Boolean isFirstRowHeader;
+  private Boolean isFirstRowHeader = Boolean.FALSE;
   private String inputFileType;
   private String hdfsPath;
   private String tableName;
@@ -80,12 +80,12 @@ public class UploadFromHdfsInput implements Serializable{
 
   @Override
   public String toString() {
-    return "UploadFromHdfsInput{" +
-            "isFirstRowHeader=" + isFirstRowHeader +
-            ", inputFileType='" + inputFileType + '\'' +
-            ", hdfsPath='" + hdfsPath + '\'' +
-            ", tableName='" + tableName + '\'' +
-            ", databaseName='" + databaseName + '\'' +
-            '}';
+    return new StringBuilder("UploadFromHdfsInput{" )
+            .append("isFirstRowHeader=").append( isFirstRowHeader )
+            .append(", inputFileType='" ).append(inputFileType)
+            .append(", hdfsPath='").append(hdfsPath)
+            .append(", tableName='").append( tableName )
+            .append(", databaseName='").append(databaseName )
+            .append('}').toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/afd68d9c/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
index 4afe80b..b6b000a 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
@@ -70,6 +70,38 @@ public class UploadService extends BaseService {
   private static final String HIVE_DEFAULT_METASTORE_LOCATION = "/apps/hive/warehouse" ;
   final private static String HIVE_DEFAULT_DB = "default";
 
+  public void validateForUploadFile(UploadFromHdfsInput input){
+    if( null == input.getInputFileType()){
+      throw new IllegalArgumentException("inputFileType parameter cannot be null.");
+    }
+    if( null == input.getHdfsPath()){
+      throw new IllegalArgumentException("hdfsPath parameter cannot be null.");
+    }
+    if( null == input.getTableName()){
+      throw new IllegalArgumentException("tableName parameter cannot be null.");
+    }
+    if( null == input.getDatabaseName()){
+      throw new IllegalArgumentException("databaseName parameter cannot be null.");
+    }
+
+    if( input.getIsFirstRowHeader() == null ){
+      input.setIsFirstRowHeader(false);
+    }
+  }
+
+  public void validateForPreview(UploadFromHdfsInput input){
+    if( input.getIsFirstRowHeader() == null ){
+      input.setIsFirstRowHeader(false);
+    }
+
+    if( null == input.getInputFileType()){
+      throw new IllegalArgumentException("inputFileType parameter cannot be null.");
+    }
+    if( null == input.getHdfsPath()){
+      throw new IllegalArgumentException("hdfsPath parameter cannot be null.");
+    }
+  }
+
   @POST
   @Path("/previewFromHdfs")
   @Consumes(MediaType.APPLICATION_JSON)
@@ -79,6 +111,7 @@ public class UploadService extends BaseService {
     InputStream uploadedInputStream = null;
     try {
       uploadedInputStream = getHDFSFileStream(input.getHdfsPath());
+      this.validateForPreview(input);
       PreviewData pd = generatePreview(input.getIsFirstRowHeader(), input.getInputFileType(), uploadedInputStream);
       String tableName = getBasenameFromPath(input.getHdfsPath());
       return createPreviewResponse(pd, input.getIsFirstRowHeader(),tableName);
@@ -106,6 +139,12 @@ public class UploadService extends BaseService {
           @FormDataParam("inputFileType") String inputFileType
   ) {
     try {
+      if( null == inputFileType)
+        throw new IllegalArgumentException("inputFileType parameter cannot be null.");
+
+      if( null == isFirstRowHeader )
+        isFirstRowHeader = false;
+
       PreviewData pd = generatePreview(isFirstRowHeader, inputFileType, uploadedInputStream);
       return createPreviewResponse(pd, isFirstRowHeader,getBasename(fileDetail.getFileName()));
     } catch (Exception e) {
@@ -121,6 +160,7 @@ public class UploadService extends BaseService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response createTable(TableInput tableInput) {
     try {
+      tableInput.validate();
       List<ColumnDescriptionImpl> header = tableInput.getHeader();
       String databaseName = tableInput.getDatabaseName();
       String tableName = tableInput.getTableName();
@@ -128,7 +168,6 @@ public class UploadService extends BaseService {
       String fileTypeStr = tableInput.getFileType();
       HiveFileType hiveFileType = HiveFileType.valueOf(fileTypeStr);
 
-
       TableInfo ti = new TableInfo(databaseName, tableName, header, hiveFileType);
       String tableCreationQuery = generateCreateQuery(ti);
 
@@ -153,7 +192,9 @@ public class UploadService extends BaseService {
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
   public Response uploadFileFromHdfs(UploadFromHdfsInput input ) {
-    if (ParseOptions.InputFileType.CSV.toString().equals(input.getInputFileType()) && input.getIsFirstRowHeader().equals(Boolean.FALSE)) {
+    this.validateForUploadFile(input);
+
+    if (ParseOptions.InputFileType.CSV.toString().equals(input.getInputFileType()) && Boolean.FALSE.equals(input.getIsFirstRowHeader())) {
       // upload using the LOAD query
       LoadQueryInput loadQueryInput = new LoadQueryInput(input.getHdfsPath(), input.getDatabaseName(), input.getTableName());
       String loadQuery = new QueryGenerator().generateLoadQuery(loadQueryInput);
@@ -352,10 +393,14 @@ public class UploadService extends BaseService {
   private PreviewData generatePreview(Boolean isFirstRowHeader, String inputFileType, InputStream uploadedInputStream) throws IOException {
     ParseOptions parseOptions = new ParseOptions();
     parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
-    if (inputFileType.equals(ParseOptions.InputFileType.CSV.toString()) && !isFirstRowHeader)
-      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+    if (inputFileType.equals(ParseOptions.InputFileType.CSV.toString())){
+      if(isFirstRowHeader)
+        parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+      else
+        parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.NONE.toString());
+    }
     else
-      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
 
     LOG.info("isFirstRowHeader : {}, inputFileType : {}", isFirstRowHeader, inputFileType);
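
For reference, a minimal sketch (not part of the patch) of the case this change targets: a one-row CSV previewed with isFirstRowHeader = false now maps to HEADER.NONE, so the single row survives as data instead of being consumed as a header. It mirrors the DataParser usage in the tests below; imports and exception handling are omitted:

    ParseOptions parseOptions = new ParseOptions();
    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.NONE.toString());

    DataParser dp = new DataParser(new StringReader("1,a\n"), parseOptions);
    PreviewData pd = dp.parsePreview();
    // expected: one preview row ["1", "a"] and generated column names Column1, Column2
    dp.close();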
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/afd68d9c/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
index adbdf7f..e592b5f 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
@@ -29,7 +29,9 @@ public class ParseOptions {
 
   public enum HEADER {
     FIRST_RECORD,
-    PROVIDED_BY_USER
+    PROVIDED_BY_USER, // not used right now, but can be used when some metadata of the file provides this information
+    EMBEDDED, // for JSON/XML and possibly other file formats where the header is embedded with the data
+    NONE   // if the file does not contain header information at all
   }
   final public static String OPTIONS_FILE_TYPE = "FILE_TYPE";
   final public static String OPTIONS_HEADER = "HEADER";
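
As a usage note (a sketch, not part of the patch), EMBEDDED is the value the service now selects for JSON and XML uploads, where the field names travel with the records; the JSON and XML test changes below exercise the same combination:

    ParseOptions options = new ParseOptions();
    options.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
    // header is taken from the embedded field names, not from a separate first record
    options.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());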

http://git-wip-us.apache.org/repos/asf/ambari/blob/afd68d9c/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
index d94ad3c..1644c13 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
@@ -85,14 +85,13 @@ public abstract class Parser implements IParser {
     Integer numOfCols = null;
     int[][] typeCounts = null;
 
-    if (parseOptions.getOption(ParseOptions.OPTIONS_HEADER) != null && parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.FIRST_RECORD.toString())) {
-      if (!this.iterator().hasNext()) {
-        throw new NoSuchElementException("Cannot parse Header");
-      }
+    if (parseOptions.getOption(ParseOptions.OPTIONS_HEADER) != null &&
+      ( parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.FIRST_RECORD.toString()) ||
+        parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.EMBEDDED.toString())
+      )) {
       headerRow = extractHeader();
       numOfCols = headerRow.getRow().length;
       typeCounts = new int[numOfCols][ColumnDescription.DataTypes.values().length];
-      previewRows.add(headerRow);
     }
 
     // find data types.
@@ -141,7 +140,7 @@ public abstract class Parser implements IParser {
     for (int colNum = 0; colNum < numOfCols; colNum++) {
       int dataTypeId = getLikelyDataType(typeCounts, colNum);
       ColumnDescription.DataTypes type = ColumnDescription.DataTypes.values()[dataTypeId];
-      String colName = "Column" + colNum;
+      String colName = "Column" + (colNum + 1);
       if (null != headerRow)
         colName = (String) headerRow.getRow()[colNum];
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/afd68d9c/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
index d883743..8b8415c 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
@@ -31,7 +31,7 @@ export default Ember.Controller.extend({
   showErrors: false,
   uploader: Uploader.create(),
   baseUrl: "/resources/upload",
-  isFirstRowHeader: true, // is first row  header
+  isFirstRowHeader: false, // is first row  header
   header: null,  // header received from server
   files: null, // files that need to be uploaded only file[0] is relevant
   firstRow: [], // the actual first row of the table.
@@ -41,6 +41,7 @@ export default Ember.Controller.extend({
   filePath: null,
   tableName: null,
   uploadProgressInfos : [],
+  showPreview : false,
   onChangeUploadSource : function(){
     this.clearFields();
   }.observes("uploadSource"),
@@ -135,11 +136,12 @@ export default Ember.Controller.extend({
   },
 
   clearFields: function () {
+    this.set("showPreview",false);
     this.set("hdfsPath");
     this.set("header");
     this.set("rows");
     this.set("error");
-    this.set('isFirstRowHeader',true);
+    this.set('isFirstRowHeader',false);
     this.set('files');
     this.set("firstRow");
     this.set("selectedDatabase",null);
@@ -242,12 +244,14 @@ export default Ember.Controller.extend({
 
   onGeneratePreviewSuccess: function (data) {
     console.log("onGeneratePreviewSuccess");
+    this.set("showPreview",true);
     this.hideUploadModal();
     this.previewTable(data);
   },
 
   onGeneratePreviewFailure: function (error) {
     console.log("onGeneratePreviewFailure");
+    this.set("showPreview",false);
     this.hideUploadModal();
     this.setError(error);
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/afd68d9c/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
index b7d9011..eb95292 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
@@ -79,7 +79,7 @@
             {{bind-attr class=":btn :btn-sm :btn-default"}}>{{t "buttons.showPreview"}}</button></td>
         {{/if}}
       </tr>
-      {{#if rows}}
+      {{#if showPreview}}
         <tr>
           <td class="data-upload-form-label"><label>Database</label></td>
           <td class="data-upload-form-field">
@@ -114,21 +114,20 @@
       {{/if}}
     </table>
 
+    {{#if showPreview}}
     <table class="pull-right">
       <tr>
         <td>
-          {{#if rows}}
             <button type="button" {{action "uploadTable"}}
               {{bind-attr class=":btn :btn-sm :btn-default"}}>{{t "buttons.uploadTable"}}</button>
-          {{/if}}
         </td>
       </tr>
     </table>
-
+    {{/if}}
   </div>
 
   <div>
-    {{#if rows}}
+    {{#if showPreview}}
       <div id="upload-table">
         <table class="table table-expandable no-border">
           <thead>

http://git-wip-us.apache.org/repos/asf/ambari/blob/afd68d9c/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
index 479c934..99f758b 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
@@ -55,16 +55,11 @@ public class DataParserCSVTest {
       PreviewData pd = dp.parsePreview();
       Assert.assertNotNull(pd.getPreviewRows());
       Assert.assertNotNull(pd.getHeader());
-      Assert.assertEquals(3, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getPreviewRows().size()); // now it will not return the first row, which is the header
       Assert.assertEquals(2, pd.getHeader().size());
       ColumnDescription[] cd = {new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
               new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
 
-      Object cols1[] = new Object[2];
-      cols1[0] = "1";
-      cols1[1] = "a";
-      Row row1 = new Row(cols1);
-
       Object cols2[] = new Object[2];
       cols2[0] = "2";
       cols2[1] = "b";
@@ -75,7 +70,7 @@ public class DataParserCSVTest {
       cols3[1] = "c";
       Row row3 = new Row(cols3);
 
-      Row[] rows = {row1, row2, row3};
+      Row[] rows = { row2, row3};
 
       Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
       Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
@@ -88,6 +83,74 @@ public class DataParserCSVTest {
   }
 
   /**
+   * A one-row CSV will give default column names and the 1st row in preview if HEADER.PROVIDED_BY_USER is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowCSV() throws IOException {
+    String str = "1,a\n" ;
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("Column1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
+        new ColumnDescriptionImpl("Column2", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "1";
+      cols1[1] = "a";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * A one-row CSV will throw an exception in preview if HEADER.FIRST_RECORD is selected.
+   * @throws IOException
+   */
+  @Test(expected = java.util.NoSuchElementException.class)
+  public void testParsePreview1RowCSVFirstRowHeader() throws IOException {
+    String str = "col1,col2\n" ;
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
   * more columns in a row => ignore the extra columns. The number of columns is decided by the first row.
    * If other row contains more columns then those columns will be ignored
    * Here first row has 2 columns and second row has 3 columns so the value 'x' is ignored
@@ -109,9 +172,9 @@ public class DataParserCSVTest {
       dp = new DataParser(sr, parseOptions);
 
       PreviewData pd = dp.parsePreview();
-      Row row2 = new Row(new Object[]{"2","b"});
+      Row row = new Row(new Object[]{"2","b"});
 
-      Assert.assertArrayEquals("Additional columns not properly handled.", row2.getRow(),pd.getPreviewRows().get(1).getRow());
+      Assert.assertArrayEquals("Additional columns not properly handled.", row.getRow(),pd.getPreviewRows().get(0).getRow());
     } finally {
       if (null != dp) {
         dp.close();
@@ -135,7 +198,6 @@ public class DataParserCSVTest {
 
     ParseOptions parseOptions = new ParseOptions();
     parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
-//    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
 
     DataParser dp = null;
     try {
@@ -160,7 +222,6 @@ public class DataParserCSVTest {
     String str = "1,a,x\n" +
             "2,,y\n" +  // contains 1 col, less number of columns
             "3,c,z\n";
-//    System.out.println("str : " + str);
     StringReader sr = new StringReader(str);
 
     ParseOptions parseOptions = new ParseOptions();
@@ -172,7 +233,7 @@ public class DataParserCSVTest {
       dp = new DataParser(sr, parseOptions);
 
       PreviewData pd = dp.parsePreview();
-      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(1).getRow()[1],"");
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[1],"");
     } finally {
       if (null != dp)
         dp.close();
@@ -190,7 +251,6 @@ public class DataParserCSVTest {
     String str = "1,a,x\n" +
             "2,,\n" +  // contains 1 col, less number of columns
             "3,c,z\n";
-//    System.out.println("str : " + str);
     StringReader sr = new StringReader(str);
 
     ParseOptions parseOptions = new ParseOptions();
@@ -202,8 +262,8 @@ public class DataParserCSVTest {
       dp = new DataParser(sr, parseOptions);
 
       PreviewData pd = dp.parsePreview();
-      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(1).getRow()[1],"");
-      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(1).getRow()[2],"");
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[1],"");
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[2],"");
     } finally {
       if (null != dp)
         dp.close();

http://git-wip-us.apache.org/repos/asf/ambari/blob/afd68d9c/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
index 555d8e3..79166bb 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
@@ -59,7 +59,7 @@ public class DataParserJSONTest {
 
     ParseOptions parseOptions = new ParseOptions();
     parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
-    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
     parseOptions.setOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS, 7);
 
     DataParser dp = null;
@@ -69,7 +69,7 @@ public class DataParserJSONTest {
       PreviewData pd = dp.parsePreview();
       Assert.assertNotNull(pd.getPreviewRows());
       Assert.assertNotNull(pd.getHeader());
-      Assert.assertEquals(8, pd.getPreviewRows().size()); // header row + preview rows
+      Assert.assertEquals(7, pd.getPreviewRows().size()); // only preview rows now, header row is no longer included
       Assert.assertEquals(14, pd.getHeader().size());
       ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
               new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
@@ -86,7 +86,6 @@ public class DataParserJSONTest {
               new ColumnDescriptionImpl("col13", ColumnDescriptionShort.DataTypes.STRING.toString(), 12),
               new ColumnDescriptionImpl("col14", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 13)};
 
-      Row row1 = new Row(new Object[]{"col1", "col2", "col3", "col4", "col5", "col6", "col7", "col8", "col9", "col10", "col11", "col12", "col13", "col14"});
       Row row2 = new Row(new Object[]{"a", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "4.4"});
       Row row3 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "5.4"});
       Row row4 = new Row(new Object[]{"c", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "6.4"});
@@ -95,7 +94,7 @@ public class DataParserJSONTest {
       Row row7 = new Row(new Object[]{"f", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "9.4"});
       Row row8 = new Row(new Object[]{"g", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "10.4"});
 
-      Row[] rows = {row1, row2, row3, row4, row5, row6, row7, row8};
+      Row[] rows = { row2, row3, row4, row5, row6, row7, row8};
 
       Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
       Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
@@ -160,13 +159,13 @@ public class DataParserJSONTest {
     try {
       ParseOptions parseOptions = new ParseOptions();
       parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
-      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
 
       dp = new DataParser(sr, parseOptions);
 
       PreviewData pd = dp.parsePreview();
 
-      Assert.assertNull(pd.getPreviewRows().get(2).getRow()[13]);
+      Assert.assertNull(pd.getPreviewRows().get(1).getRow()[13]);
     } finally {
       if (null != dp)
         dp.close();
@@ -194,7 +193,7 @@ public class DataParserJSONTest {
     try {
       ParseOptions parseOptions = new ParseOptions();
       parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
-      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
 
       dp = new DataParser(sr, parseOptions);
 
@@ -206,4 +205,93 @@ public class DataParserJSONTest {
       sr.close();
     }
   }
+
+  /**
+   * A one-row JSON file will give embedded column names and the 1st row in preview if HEADER.EMBEDDED is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowJSON() throws IOException {
+    String str = "[ "
+      + "{\"col1\": \"d\", \n\"col2\": \"abcd\"  }"       // single JSON record
+      + "]";
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+        new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "d";
+      cols1[1] = "abcd";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * A one-row JSON file will give default column names and the 1st row in preview if HEADER.PROVIDED_BY_USER is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowJSONHeaderProvided() throws IOException {
+    String str = "[ "
+      + "{\"col1\": \"d\", \n\"col2\": \"abcd\"  }"       // single JSON record
+      + "]";
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("Column1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+        new ColumnDescriptionImpl("Column2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "d";
+      cols1[1] = "abcd";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/afd68d9c/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
index f925dc3..4199257 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
@@ -65,7 +65,7 @@ public class DataParserXMLTest {
       PreviewData pd = dp.parsePreview();
       Assert.assertNotNull(pd.getPreviewRows());
       Assert.assertNotNull(pd.getHeader());
-      Assert.assertEquals(3, pd.getPreviewRows().size()); // header row + preview rows
+      Assert.assertEquals(2, pd.getPreviewRows().size()); // only preview rows now, header row is no longer included
       Assert.assertEquals(5, pd.getHeader().size());
       ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
               new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
@@ -74,11 +74,10 @@ public class DataParserXMLTest {
               new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.INT.toString(), 4)
       };
 
-      Row row1 = new Row(new Object[]{"col1", "col2", "col3", "col4", "col5"});
       Row row2 = new Row(new Object[]{"row1-col1-Value", "row1-col2-Value", "row1-col3-Value", "10", "11"});
       Row row3 = new Row(new Object[]{"row2-col1-Value", "row2-col2-Value", "row2-col3-Value", "20", "21"});
 
-      Row[] rows = {row1, row2, row3};
+      Row[] rows = {row2, row3};
 
       Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
       Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
@@ -177,7 +176,7 @@ public class DataParserXMLTest {
       PreviewData pd = dp.parsePreview();
 
       Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value",null,null,"20","21"});
-      Assert.assertArrayEquals("Less number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(2).getRow());
+      Assert.assertArrayEquals("Less number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
     } finally {
       if (null != dp)
         dp.close();
@@ -230,4 +229,98 @@ public class DataParserXMLTest {
       sr.close();
     }
   }
+
+  /**
+   * A one-row XML file will give embedded column names and the 1st row in preview if HEADER.EMBEDDED is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowXML() throws IOException {
+    String str = "<table>" +
+                      "<row>" +
+                      "<col name=\"col1\">row1-col1-Value</col>" +
+                      "<col name=\"col2\">11</col>" +
+                      "</row>" +
+                 "</table>";
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+        new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "row1-col1-Value";
+      cols1[1] = "11";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * A one-row XML file will give default column names and the 1st row in preview if HEADER.PROVIDED_BY_USER is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowXMLHeaderProvided() throws IOException {
+    String str = "<table>" +
+                    "<row>" +
+                    "<col name=\"col1\">row1-col1-Value</col>" +
+                    "<col name=\"col2\">11</col>" +
+                    "</row>" +
+                 "</table>";
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("Column1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+        new ColumnDescriptionImpl("Column2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "row1-col1-Value";
+      cols1[1] = "11";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
 }

