hadoop-mapreduce-commits mailing list archives

From tomwh...@apache.org
Subject svn commit: r796732 [2/2] - in /hadoop/mapreduce/trunk: ./ src/contrib/sqoop/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/lib/ src/contr...
Date Wed, 22 Jul 2009 14:10:13 GMT
Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestImportOptions.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestImportOptions.java?rev=796732&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestImportOptions.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestImportOptions.java Wed Jul 22 14:10:12 2009
@@ -0,0 +1,187 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop;
+
+import junit.framework.TestCase;
+
+
+/**
+ * Test aspects of the ImportOptions class
+ */
+public class TestImportOptions extends TestCase {
+
+  // tests for the toChar() parser
+  public void testNormalChar() throws ImportOptions.InvalidOptionsException {
+    assertEquals('a', ImportOptions.toChar("a"));
+  }
+
+  public void testEmptyString() throws ImportOptions.InvalidOptionsException {
+    try {
+      ImportOptions.toChar("");
+      fail("Expected exception");
+    } catch (ImportOptions.InvalidOptionsException ioe) {
+      // expect this.
+    }
+  }
+
+  public void testNullString() throws ImportOptions.InvalidOptionsException {
+    try {
+      ImportOptions.toChar(null);
+      fail("Expected exception");
+    } catch (ImportOptions.InvalidOptionsException ioe) {
+      // expect this.
+    }
+  }
+
+  public void testTooLong() throws ImportOptions.InvalidOptionsException {
+    // Should just use the first character and log a warning.
+    assertEquals('x', ImportOptions.toChar("xyz"));
+  }
+
+  public void testHexChar1() throws ImportOptions.InvalidOptionsException {
+    assertEquals(0xF, ImportOptions.toChar("\\0xf"));
+  }
+
+  public void testHexChar2() throws ImportOptions.InvalidOptionsException {
+    assertEquals(0xF, ImportOptions.toChar("\\0xF"));
+  }
+
+  public void testHexChar3() throws ImportOptions.InvalidOptionsException {
+    assertEquals(0xF0, ImportOptions.toChar("\\0xf0"));
+  }
+
+  public void testHexChar4() throws ImportOptions.InvalidOptionsException {
+    assertEquals(0xF0, ImportOptions.toChar("\\0Xf0"));
+  }
+
+  public void testEscapeChar1() throws ImportOptions.InvalidOptionsException {
+    assertEquals('\n', ImportOptions.toChar("\\n"));
+  }
+
+  public void testEscapeChar2() throws ImportOptions.InvalidOptionsException {
+    assertEquals('\\', ImportOptions.toChar("\\\\"));
+  }
+
+  public void testEscapeChar3() throws ImportOptions.InvalidOptionsException {
+    assertEquals('\\', ImportOptions.toChar("\\"));
+  }
+
+  public void testUnknownEscape1() throws ImportOptions.InvalidOptionsException {
+    try {
+      ImportOptions.toChar("\\Q");
+      fail("Expected exception");
+    } catch (ImportOptions.InvalidOptionsException ioe) {
+      // expect this.
+    }
+  }
+
+  public void testUnknownEscape2() throws ImportOptions.InvalidOptionsException {
+    try {
+      ImportOptions.toChar("\\nn");
+      fail("Expected exception");
+    } catch (ImportOptions.InvalidOptionsException ioe) {
+      // expect this.
+    }
+  }
+
+  public void testEscapeNul1() throws ImportOptions.InvalidOptionsException {
+    assertEquals('\000', ImportOptions.toChar("\\0"));
+  }
+
+  public void testEscapeNul2() throws ImportOptions.InvalidOptionsException {
+    assertEquals('\000', ImportOptions.toChar("\\00"));
+  }
+
+  public void testEscapeNul3() throws ImportOptions.InvalidOptionsException {
+    assertEquals('\000', ImportOptions.toChar("\\0000"));
+  }
+
+  public void testEscapeNul4() throws ImportOptions.InvalidOptionsException {
+    assertEquals('\000', ImportOptions.toChar("\\0x0"));
+  }
+
+  public void testOctalChar1() throws ImportOptions.InvalidOptionsException {
+    assertEquals(04, ImportOptions.toChar("\\04"));
+  }
+
+  public void testOctalChar2() throws ImportOptions.InvalidOptionsException {
+    assertEquals(045, ImportOptions.toChar("\\045"));
+  }
+
+  public void testErrOctalChar() throws ImportOptions.InvalidOptionsException {
+    try {
+      ImportOptions.toChar("\\095");
+      fail("Expected exception");
+    } catch (NumberFormatException nfe) {
+      // expected.
+    }
+  }
+
+  public void testErrHexChar() throws ImportOptions.InvalidOptionsException {
+    try {
+      ImportOptions.toChar("\\0x9K5");
+      fail("Expected exception");
+    } catch (NumberFormatException nfe) {
+      // expected.
+    }
+  }
+
+  // test that setting output delimiters also sets input delimiters 
+  public void testDelimitersInherit() throws ImportOptions.InvalidOptionsException {
+    String [] args = {
+        "--fields-terminated-by",
+        "|"
+    };
+
+    ImportOptions opts = new ImportOptions();
+    opts.parse(args);
+    assertEquals('|', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  // test that setting output delimiters and setting input delims separately works
+  public void testDelimOverride1() throws ImportOptions.InvalidOptionsException {
+    String [] args = {
+        "--fields-terminated-by",
+        "|",
+        "--input-fields-terminated-by",
+        "*"
+    };
+
+    ImportOptions opts = new ImportOptions();
+    opts.parse(args);
+    assertEquals('*', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  // test that the order in which delims are specified doesn't matter
+  public void testDelimOverride2() throws ImportOptions.InvalidOptionsException {
+    String [] args = {
+        "--input-fields-terminated-by",
+        "*",
+        "--fields-terminated-by",
+        "|"
+    };
+
+    ImportOptions opts = new ImportOptions();
+    opts.parse(args);
+    assertEquals('*', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+}
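
The tests above pin down the delimiter-specification grammar accepted on the command line: a bare character ("a"), backslash escapes such as "\n" and "\\", octal forms such as "\045", and hex forms such as "\0xf0". The following is a minimal sketch of a parser satisfying those assertions; it is not the committed ImportOptions.toChar() implementation, and the class and helper names (DelimParserSketch, requireLength) are hypothetical.

    public final class DelimParserSketch {

      public static class InvalidOptionsException extends Exception {
        public InvalidOptionsException(String msg) {
          super(msg);
        }
      }

      public static char toChar(String str) throws InvalidOptionsException {
        if (null == str || str.length() == 0) {
          throw new InvalidOptionsException("Delimiter may not be null or empty");
        }

        if (str.charAt(0) != '\\') {
          // A plain character stands for itself; extra characters ("xyz" -> 'x')
          // are ignored, where a real implementation would also log a warning.
          return str.charAt(0);
        }

        if (str.length() == 1) {
          return '\\'; // A lone backslash denotes the backslash character.
        }

        char escape = str.charAt(1);
        switch (escape) {
        case 'n':
          requireLength(str, 2);
          return '\n';
        case '\\':
          requireLength(str, 2);
          return '\\';
        case '0': {
          if (str.length() == 2) {
            return '\000'; // "\0" alone is the NUL character.
          }
          String digits = str.substring(2);
          if (digits.startsWith("x") || digits.startsWith("X")) {
            // Hexadecimal: "\0xf0" -> 0xF0. Bad digits raise NumberFormatException.
            return (char) Integer.parseInt(digits.substring(1), 16);
          }
          // Octal: "\045" -> 045. "\095" raises NumberFormatException.
          return (char) Integer.parseInt(digits, 8);
        }
        default:
          throw new InvalidOptionsException("Unknown escape sequence: \\" + escape);
        }
      }

      private static void requireLength(String str, int len) throws InvalidOptionsException {
        if (str.length() != len) {
          throw new InvalidOptionsException("Malformed escape sequence: " + str);
        }
      }
    }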

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiCols.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiCols.java?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiCols.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiCols.java Wed Jul 22 14:10:12 2009
@@ -28,8 +28,6 @@
  * some of which may contain null values.
  *
  * Also test loading only selected columns from the db.
- * 
- *
  */
 public class TestMultiCols extends ImportJobTestCase {
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestOrderBy.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestOrderBy.java?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestOrderBy.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestOrderBy.java Wed Jul 22 14:10:12 2009
@@ -35,8 +35,6 @@
 
 /**
  * Test that --order-by works
- * 
- *
  */
 public class TestOrderBy extends ImportJobTestCase {
 
@@ -152,11 +150,11 @@
 
   public void testOrderByFirstCol() throws IOException {
     String orderByCol = "INTFIELD1";
-    runOrderByTest(orderByCol, "1,8", HsqldbTestServer.getFirstColSum());
+    runOrderByTest(orderByCol, "1,8\n", HsqldbTestServer.getFirstColSum());
   }
 
   public void testOrderBySecondCol() throws IOException {
     String orderByCol = "INTFIELD2";
-    runOrderByTest(orderByCol, "7,2", HsqldbTestServer.getFirstColSum());
+    runOrderByTest(orderByCol, "7,2\n", HsqldbTestServer.getFirstColSum());
   }
 }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java Wed Jul 22 14:10:12 2009
@@ -38,8 +38,6 @@
  * Methods essentially copied out of the other Test* classes.
  * TODO(kevin or aaron): Factor out these common test methods
  * so that every new Test* class doesn't need to copy the code.
- * 
- *
  */
 public class TestWhere extends ImportJobTestCase {
 
@@ -97,8 +95,8 @@
     return Integer.parseInt(parts[0]);
   }
 
-  public void runWhereTest(String whereClause, String firstValStr, int numExpectedResults, int expectedSum)
-      throws IOException {
+  public void runWhereTest(String whereClause, String firstValStr, int numExpectedResults,
+      int expectedSum) throws IOException {
 
     String [] columns = HsqldbTestServer.getFieldNames();
     ClassLoader prevClassLoader = null;
@@ -160,11 +158,11 @@
 
   public void testSingleClauseWhere() throws IOException {
     String whereClause = "INTFIELD2 > 4";
-    runWhereTest(whereClause, "1,8", 2, 4);
+    runWhereTest(whereClause, "1,8\n", 2, 4);
   }
 
   public void testMultiClauseWhere() throws IOException {
     String whereClause = "INTFIELD1 > 4 AND INTFIELD2 < 3";
-    runWhereTest(whereClause, "7,2", 1, 7);
+    runWhereTest(whereClause, "7,2\n", 1, 7);
   }
 }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java Wed Jul 22 14:10:12 2009
@@ -42,7 +42,7 @@
    * Create the argv to pass to Sqoop
    * @return the argv as an array of strings.
    */
-  private String [] getArgv(boolean includeHadoopFlags) {
+  private String [] getArgv(boolean includeHadoopFlags, String [] moreArgs) {
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {
@@ -64,13 +64,19 @@
     args.add("--order-by");
     args.add(getColNames()[0]);
 
+    if (null != moreArgs) {
+      for (String arg: moreArgs) {
+        args.add(arg);
+      }
+    }
+
     return args.toArray(new String[0]);
   }
 
-  private ImportOptions getImportOptions() {
+  private ImportOptions getImportOptions(String [] extraArgs) {
     ImportOptions opts = new ImportOptions();
     try {
-      opts.parse(getArgv(false));
+      opts.parse(getArgv(false, extraArgs));
     } catch (ImportOptions.InvalidOptionsException ioe) {
       fail("Invalid options: " + ioe.toString());
     }
@@ -79,7 +85,7 @@
   }
 
   private void runImportTest(String tableName, String [] types, String [] values,
-      String verificationScript) throws IOException {
+      String verificationScript, String [] extraArgs) throws IOException {
 
     // create a table and populate it with a row...
     setCurTableName(tableName);
@@ -87,14 +93,14 @@
     
     // set up our mock hive shell to compare our generated script
     // against the correct expected one.
-    ImportOptions options = getImportOptions();
+    ImportOptions options = getImportOptions(extraArgs);
     String hiveHome = options.getHiveHome();
     assertNotNull("hive.home was not set", hiveHome);
     Path testDataPath = new Path(new Path(hiveHome), "scripts/" + verificationScript);
     System.setProperty("expected.script", testDataPath.toString());
 
     // verify that we can import it correctly into hive.
-    runImport(getArgv(true));
+    runImport(getArgv(true, extraArgs));
   }
 
   /** Test that strings and ints are handled in the normal fashion */
@@ -102,7 +108,7 @@
   public void testNormalHiveImport() throws IOException {
     String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
     String [] vals = { "'test'", "42", "'somestring'" };
-    runImportTest("NORMAL_HIVE_IMPORT", types, vals, "normalImport.q");
+    runImportTest("NORMAL_HIVE_IMPORT", types, vals, "normalImport.q", null);
   }
 
   /** Test that dates are coerced properly to strings */
@@ -110,7 +116,7 @@
   public void testDate() throws IOException {
     String [] types = { "VARCHAR(32)", "DATE" };
     String [] vals = { "'test'", "'2009-05-12'" };
-    runImportTest("DATE_HIVE_IMPORT", types, vals, "dateImport.q");
+    runImportTest("DATE_HIVE_IMPORT", types, vals, "dateImport.q", null);
   }
 
   /** Test that NUMERICs are coerced to doubles */
@@ -118,7 +124,7 @@
   public void testNumeric() throws IOException {
     String [] types = { "NUMERIC", "CHAR(64)" };
     String [] vals = { "3.14159", "'foo'" };
-    runImportTest("NUMERIC_HIVE_IMPORT", types, vals, "numericImport.q");
+    runImportTest("NUMERIC_HIVE_IMPORT", types, vals, "numericImport.q", null);
   }
 
   /** If bin/hive returns an error exit status, we should get an IOException */
@@ -129,7 +135,7 @@
     String [] types = { "NUMERIC", "CHAR(64)" };
     String [] vals = { "3.14159", "'foo'" };
     try {
-      runImportTest("FAILING_HIVE_IMPORT", types, vals, "failingImport.q");
+      runImportTest("FAILING_HIVE_IMPORT", types, vals, "failingImport.q", null);
       // If we get here, then the run succeeded -- which is incorrect.
       fail("FAILING_HIVE_IMPORT test should have thrown IOException");
     } catch (IOException ioe) {
@@ -137,5 +143,14 @@
     }
   }
 
+  /** Test that we can set delimiters how we want them */
+  @Test
+  public void testCustomDelimiters() throws IOException {
+    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
+    String [] vals = { "'test'", "42", "'somestring'" };
+    String [] extraArgs = { "--fields-terminated-by", ",", "--lines-terminated-by", "|" };
+    runImportTest("CUSTOM_DELIM_IMPORT", types, vals, "customDelimImport.q", extraArgs);
+  }
+
 }
 

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java?rev=796732&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java Wed Jul 22 14:10:12 2009
@@ -0,0 +1,143 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.lib;
+
+import java.util.ArrayList;
+import java.util.List;
+import junit.framework.TestCase;
+
+
+/**
+ * Test that the field formatter works in a variety of configurations
+ */
+public class TestFieldFormatter extends TestCase {
+  
+  public void testAllEmpty() {
+    char [] chars = new char[0];
+    String result = FieldFormatter.escapeAndEnclose("", "", "", chars, false);
+    assertEquals("", result);
+  }
+
+  public void testNullArgs() {
+    String result = FieldFormatter.escapeAndEnclose("", null, null, null, false);
+    assertEquals("", result);
+  }
+
+  public void testBasicStr() {
+    String result = FieldFormatter.escapeAndEnclose("foo", null, null, null, false);
+    assertEquals("foo", result);
+  }
+
+  public void testEscapeSlash() {
+    String result = FieldFormatter.escapeAndEnclose("foo\\bar", "\\", "\"", null, false);
+    assertEquals("foo\\\\bar", result);
+  }
+
+  public void testMustEnclose() {
+    String result = FieldFormatter.escapeAndEnclose("foo", null, "\"", null, true);
+    assertEquals("\"foo\"", result);
+  }
+
+  public void testEncloseComma1() {
+    char [] chars = { ',' };
+
+    String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "\"", chars, false);
+    assertEquals("\"foo,bar\"", result);
+  }
+
+  public void testEncloseComma2() {
+    char [] chars = { '\n', ',' };
+
+    String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "\"", chars, false);
+    assertEquals("\"foo,bar\"", result);
+  }
+
+  public void testEncloseComma3() {
+    char [] chars = { ',', '\n' };
+
+    String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "\"", chars, false);
+    assertEquals("\"foo,bar\"", result);
+  }
+
+  public void testNoNeedToEnclose() {
+    char [] chars = { ',', '\n' };
+
+    String result = FieldFormatter.escapeAndEnclose(
+        "just another string", "\\", "\"", chars, false);
+    assertEquals("just another string", result);
+  }
+
+  public void testCannotEnclose1() {
+    char [] chars = { ',', '\n' };
+
+    // can't enclose because encloser is ""
+    String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", "", chars, false);
+    assertEquals("foo,bar", result);
+  }
+
+  public void testCannotEnclose2() {
+    char [] chars = { ',', '\n' };
+
+    // can't enclose because encloser is null
+    String result = FieldFormatter.escapeAndEnclose("foo,bar", "\\", null, chars, false);
+    assertEquals("foo,bar", result);
+  }
+
+  public void testEmptyCharToEscapeString() {
+    // test what happens when the escape char is null. It should encode the null char.
+
+    char nul = '\000';
+    String s = "" + nul;
+    assertEquals("\000", s);
+  }
+  
+  public void testEscapeCentralQuote() {
+    String result = FieldFormatter.escapeAndEnclose("foo\"bar", "\\", "\"", null, false);
+    assertEquals("foo\\\"bar", result);
+  }
+
+  public void testEscapeMultiCentralQuote() {
+    String result = FieldFormatter.escapeAndEnclose("foo\"\"bar", "\\", "\"", null, false);
+    assertEquals("foo\\\"\\\"bar", result);
+  }
+
+  public void testDoubleEscape() {
+    String result = FieldFormatter.escapeAndEnclose("foo\\\"bar", "\\", "\"", null, false);
+    assertEquals("foo\\\\\\\"bar", result);
+  }
+
+  public void testReverseEscape() {
+    String result = FieldFormatter.escapeAndEnclose("foo\"\\bar", "\\", "\"", null, false);
+    assertEquals("foo\\\"\\\\bar", result);
+  }
+
+  public void testQuotedEncloser() {
+    char [] chars = { ',', '\n' };
+    
+    String result = FieldFormatter.escapeAndEnclose("foo\",bar", "\\", "\"", chars, false);
+    assertEquals("\"foo\\\",bar\"", result);
+  }
+
+  public void testQuotedEscape() {
+    char [] chars = { ',', '\n' };
+    
+    String result = FieldFormatter.escapeAndEnclose("foo\\,bar", "\\", "\"", chars, false);
+    assertEquals("\"foo\\\\,bar\"", result);
+  }
+}
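
The formatter tests above describe an escape-then-enclose policy: occurrences of the escape string and of the encloser are escaped inside the field, and the field is wrapped in the encloser only when enclosing is required or when the field contains one of the "enclose-for" characters (typically the field and record delimiters). Below is a minimal sketch consistent with those assertions; it is not the committed FieldFormatter, and the class and helper names are hypothetical.

    public final class FieldFormatterSketch {

      /**
       * @param str the raw field value
       * @param escapedBy escape string; null or "" disables escaping
       * @param enclosedBy enclosing string; null or "" disables enclosing
       * @param encloseFor characters whose presence forces enclosing
       * @param encloseRequired if true, always enclose (when an encloser is set)
       */
      public static String escapeAndEnclose(String str, String escapedBy,
          String enclosedBy, char [] encloseFor, boolean encloseRequired) {

        boolean canEscape = escapedBy != null && escapedBy.length() > 0;
        boolean canEnclose = enclosedBy != null && enclosedBy.length() > 0;

        String out = str;
        if (canEscape) {
          // Escape the escape string itself first, then any embedded encloser.
          out = out.replace(escapedBy, escapedBy + escapedBy);
          if (canEnclose) {
            out = out.replace(enclosedBy, escapedBy + enclosedBy);
          }
        }

        boolean mustEnclose = canEnclose
            && (encloseRequired || containsAny(str, encloseFor));
        return mustEnclose ? enclosedBy + out + enclosedBy : out;
      }

      private static boolean containsAny(String str, char [] chars) {
        if (null == chars) {
          return false;
        }
        for (char c : chars) {
          if (str.indexOf(c) != -1) {
            return true;
          }
        }
        return false;
      }
    }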

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestRecordParser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestRecordParser.java?rev=796732&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestRecordParser.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestRecordParser.java Wed Jul 22 14:10:12 2009
@@ -0,0 +1,356 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.lib;
+
+import java.util.ArrayList;
+import java.util.List;
+import junit.framework.TestCase;
+
+
+/**
+ * Test that the record parser works in a variety of configurations.
+ */
+public class TestRecordParser extends TestCase {
+
+  private void assertListsEqual(String msg, List<String> expected, List<String> actual) {
+    if (expected == null && actual != null) {
+      if (null == msg) {
+        msg = "expected null list";
+      }
+
+      fail(msg);
+    } else if (expected != null && actual == null) {
+      if (null == msg) {
+        msg = "expected non-null list";
+      }
+
+      fail(msg);
+    }
+
+    if (expected == null && actual == null) {
+      return; // ok. Both null; nothing to do.
+    }
+
+    int expectedLen = expected.size();
+    int actualLen = actual.size();
+
+    if (expectedLen != actualLen) {
+      if (null == msg) {
+        msg = "Expected list of length " + expectedLen + "; got " + actualLen;
+      }
+
+      fail(msg);
+    }
+
+    // Check the list contents.
+    for (int i = 0; i < expectedLen; i++) {
+      String expectedElem = expected.get(i);
+      String actualElem = actual.get(i);
+
+      if (expectedElem == null && actualElem != null) {
+        if (null == msg) {
+          msg = "Expected null element at position " + i + "; got [" + actualElem + "]";
+        }
+
+        fail(msg);
+      }
+
+      if (!expectedElem.equals(actualElem)) {
+        if (null == msg) {
+          msg = "Expected [" + expectedElem + "] at position " + i + "; got [" + actualElem + "]";
+        }
+
+        fail(msg);
+      }
+    }
+  }
+
+  private List<String> list(String [] items) {
+
+    if (null == items) {
+      return null;
+    }
+
+    ArrayList<String> asList = new ArrayList<String>();
+    for (int i = 0; i < items.length; i++) {
+      asList.add(items[i]);
+    }
+
+    return asList;
+  }
+  
+  public void testEmptyLine() throws RecordParser.ParseError {
+    // an empty line should return no fields.
+
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { };
+    assertListsEqual(null, list(strings), parser.parseRecord(""));
+  }
+
+  public void testJustEOR() throws RecordParser.ParseError {
+    // a line with just a newline char should return a single zero-length field.
+
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\n"));
+  }
+
+  public void testOneField() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the field" };
+    assertListsEqual(null, list(strings), parser.parseRecord("the field"));
+  }
+
+  public void testOneField2() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the field" };
+    assertListsEqual(null, list(strings), parser.parseRecord("the field\n"));
+  }
+
+  public void testQuotedField1() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the field" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"the field\"\n"));
+  }
+
+  public void testQuotedField2() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the field" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"the field\""));
+  }
+
+  public void testQuotedField3() throws RecordParser.ParseError {
+    // quoted containing EOF
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the ,field" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"the ,field\""));
+  }
+
+  public void testQuotedField4() throws RecordParser.ParseError {
+    // quoted containing multiple EOFs
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the ,,field" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"the ,,field\""));
+  }
+
+  public void testQuotedField5() throws RecordParser.ParseError {
+    // quoted containing EOF and EOR
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the ,\nfield" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"the ,\nfield\""));
+  }
+
+  public void testQuotedField6() throws RecordParser.ParseError {
+    // quoted containing EOR
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the \nfield" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"the \nfield\""));
+  }
+
+  public void testQuotedField7() throws RecordParser.ParseError {
+    // quoted containing multiple EORs
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the \n\nfield" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"the \n\nfield\""));
+  }
+
+  public void testQuotedField8() throws RecordParser.ParseError {
+    // quoted containing escaped quoted char
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the \"field" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"the \\\"field\""));
+  }
+
+  public void testUnquotedEscape1() throws RecordParser.ParseError {
+    // field without quotes with an escaped EOF char.
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the ,field" };
+    assertListsEqual(null, list(strings), parser.parseRecord("the \\,field"));
+  }
+
+  public void testUnquotedEscape2() throws RecordParser.ParseError {
+    // field without quotes with an escaped escape char.
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "the \\field" };
+    assertListsEqual(null, list(strings), parser.parseRecord("the \\\\field"));
+  }
+
+  public void testTwoFields1() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "field1", "field2" };
+    assertListsEqual(null, list(strings), parser.parseRecord("field1,field2"));
+  }
+
+  public void testTwoFields2() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "field1", "field2" };
+    assertListsEqual(null, list(strings), parser.parseRecord("field1,field2\n"));
+  }
+
+  public void testTwoFields3() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "field1", "field2" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"field1\",field2\n"));
+  }
+
+  public void testTwoFields4() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "field1", "field2" };
+    assertListsEqual(null, list(strings), parser.parseRecord("field1,\"field2\"\n"));
+  }
+
+  public void testTwoFields5() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "field1", "field2" };
+    assertListsEqual(null, list(strings), parser.parseRecord("field1,\"field2\""));
+  }
+  
+  public void testRequiredQuotes0() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
+    String [] strings = { "field1", "field2" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"field1\",\"field2\"\n"));
+  }
+
+  public void testRequiredQuotes1() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
+    String [] strings = { "field1", "field2" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\"field1\",\"field2\""));
+  }
+
+  public void testRequiredQuotes2() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
+    String [] strings = { "field1", "field2" };
+    try {
+      parser.parseRecord("\"field1\",field2");
+      fail("Expected parse error for required quotes");
+    } catch (RecordParser.ParseError pe) {
+      // ok. expected.
+    }
+  }
+
+  public void testRequiredQuotes3() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
+    String [] strings = { "field1", "field2" };
+    try {
+      parser.parseRecord("field1,\"field2\"");
+      fail("Expected parse error for required quotes");
+    } catch (RecordParser.ParseError pe) {
+      // ok. expected.
+    }
+  }
+
+  public void testRequiredQuotes4() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
+    String [] strings = { "field1", "field2" };
+    try {
+      parser.parseRecord("field1,\"field2\"\n");
+      fail("Expected parse error for required quotes");
+    } catch (RecordParser.ParseError pe) {
+      // ok. expected.
+    }
+  }
+
+  public void testNull() {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', true);
+    String input = null;
+    try {
+      parser.parseRecord(input);
+      fail("Expected parse error for null string");
+    } catch (RecordParser.ParseError pe) {
+      // ok. expected.
+    }
+  }
+
+
+  public void testEmptyFields1() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "", ""};
+    assertListsEqual(null, list(strings), parser.parseRecord(","));
+  }
+
+  public void testEmptyFields2() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "", "" };
+    assertListsEqual(null, list(strings), parser.parseRecord(",\n"));
+  }
+
+  public void testEmptyFields3() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "", "", "" };
+    assertListsEqual(null, list(strings), parser.parseRecord(",,\n"));
+  }
+
+  public void testEmptyFields4() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "", "foo", "" };
+    assertListsEqual(null, list(strings), parser.parseRecord(",foo,\n"));
+  }
+
+  public void testEmptyFields5() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "", "foo", "" };
+    assertListsEqual(null, list(strings), parser.parseRecord(",foo,"));
+  }
+
+  public void testEmptyFields6() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "foo", "" };
+    assertListsEqual(null, list(strings), parser.parseRecord("foo,"));
+  }
+
+  public void testTrailingText() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "foo", "bar" };
+    assertListsEqual(null, list(strings), parser.parseRecord("foo,bar\nbaz"));
+  }
+
+  public void testTrailingText2() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\nbaz"));
+  }
+
+  public void testLeadingEscape() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);
+    String [] strings = { "\nbaz" };
+    assertListsEqual(null, list(strings), parser.parseRecord("\\\nbaz"));
+  }
+
+  public void testEofIsEor() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', ',', '\"', '\\', false);
+    String [] strings = { "three", "different", "fields" };
+    assertListsEqual(null, list(strings), parser.parseRecord("three,different,fields"));
+  }
+
+  public void testEofIsEor2() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', ',', '\"', '\\', false);
+    String [] strings = { "three", "different", "fields" };
+    assertListsEqual(null, list(strings), parser.parseRecord("three,\"different\",fields"));
+  }
+
+  public void testRepeatedParse() throws RecordParser.ParseError {
+    RecordParser parser = new RecordParser(',', ',', '\"', '\\', false);
+    String [] strings = { "three", "different", "fields" };
+    assertListsEqual(null, list(strings), parser.parseRecord("three,\"different\",fields"));
+
+    String [] strings2 = { "foo", "bar" };
+    assertListsEqual(null, list(strings2), parser.parseRecord("foo,\"bar\""));
+  }
+
+}
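
As a quick usage illustration of the parser exercised above, the following hypothetical snippet combines behaviours the tests assert separately: an enclosed field may contain the field delimiter, and a trailing record terminator does not produce an extra field. The sample record and expected output are illustrative only.

    import java.util.List;

    import org.apache.hadoop.sqoop.lib.RecordParser;

    public class RecordParserUsageSketch {
      public static void main(String [] args) throws RecordParser.ParseError {
        // Field delim ',', record delim '\n', encloser '"', escape '\\', enclosers optional.
        RecordParser parser = new RecordParser(',', '\n', '\"', '\\', false);

        // The enclosed middle field contains a ',' and the record ends with '\n';
        // per the tests above this parses to exactly three fields.
        List<String> fields = parser.parseRecord("1,\"Bob, Jr.\",2009-04-20\n");
        System.out.println(fields); // [1, Bob, Jr., 2009-04-20]
      }
    }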

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java Wed Jul 22 14:10:12 2009
@@ -39,6 +39,7 @@
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.sqoop.ImportOptions;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
+import org.apache.hadoop.sqoop.util.FileListing;
 
 /**
  * Test the LocalMySQLManager implementation.
@@ -180,13 +181,11 @@
     }
   }
 
-  private String [] getArgv(boolean includeHadoopFlags) {
+  private String [] getArgv(boolean mysqlOutputDelims) {
     ArrayList<String> args = new ArrayList<String>();
 
-    if (includeHadoopFlags) {
-      args.add("-D");
-      args.add("fs.default.name=file:///");
-    }
+    args.add("-D");
+    args.add("fs.default.name=file:///");
 
     args.add("--table");
     args.add(TABLE_NAME);
@@ -200,12 +199,27 @@
     args.add("--where");
     args.add("id > 1");
 
+    if (mysqlOutputDelims) {
+      args.add("--mysql-delimiters");
+    }
+
     return args.toArray(new String[0]);
   }
 
-  @Test
-  public void testLocalBulkImport() {
-    String [] argv = getArgv(true);
+  private void doLocalBulkImport(boolean mysqlOutputDelims, String [] expectedResults)
+      throws IOException {
+
+    Path warehousePath = new Path(this.getWarehouseDir());
+    Path tablePath = new Path(warehousePath, TABLE_NAME);
+    Path filePath = new Path(tablePath, "data-00000");
+
+    File tableFile = new File(tablePath.toString());
+    if (tableFile.exists() && tableFile.isDirectory()) {
+      // remove the directory before running the import.
+      FileListing.recursiveDeleteDir(tableFile);
+    }
+
+    String [] argv = getArgv(mysqlOutputDelims);
     try {
       runImport(argv);
     } catch (IOException ioe) {
@@ -214,18 +228,15 @@
       fail(ioe.toString());
     }
 
-    Path warehousePath = new Path(this.getWarehouseDir());
-    Path tablePath = new Path(warehousePath, TABLE_NAME);
-    Path filePath = new Path(tablePath, "data-00000");
-
     File f = new File(filePath.toString());
     assertTrue("Could not find imported data file", f.exists());
     BufferedReader r = null;
     try {
       // Read through the file and make sure it's all there.
       r = new BufferedReader(new InputStreamReader(new FileInputStream(f)));
-      assertEquals("2,'Bob','2009-04-20',400,'sales'", r.readLine());
-      assertEquals("3,'Fred','2009-01-23',15,'marketing'", r.readLine());
+      for (String expectedLine : expectedResults) {
+        assertEquals(expectedLine, r.readLine());
+      }
     } catch (IOException ioe) {
       LOG.error("Got IOException verifying results: " + ioe.toString());
       ioe.printStackTrace();
@@ -234,4 +245,26 @@
       IOUtils.closeStream(r);
     }
   }
+
+  @Test
+  public void testLocalBulkImportWithDefaultDelims() throws IOException {
+    // no quoting of strings allowed.
+    String [] expectedResults = {
+        "2,Bob,2009-04-20,400,sales",
+        "3,Fred,2009-01-23,15,marketing"
+    };
+
+    doLocalBulkImport(false, expectedResults);
+  }
+
+  @Test
+  public void testLocalBulkImportWithMysqlQuotes() throws IOException {
+    // mysql quotes all string-based output.
+    String [] expectedResults = {
+        "2,'Bob','2009-04-20',400,'sales'",
+        "3,'Fred','2009-01-23',15,'marketing'"
+    };
+
+    doLocalBulkImport(true, expectedResults);
+  }
 }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java Wed Jul 22 14:10:12 2009
@@ -149,6 +149,7 @@
     args.add(AUTH_TEST_USER);
     args.add("--password");
     args.add(AUTH_TEST_PASS);
+    args.add("--mysql-delimiters");
 
     return args.toArray(new String[0]);
   }

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java?rev=796732&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java Wed Jul 22 14:10:12 2009
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.orm;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.mapred.RawKeyTextOutputFormat;
+import org.apache.hadoop.sqoop.orm.CompilationManager;
+import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
+import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
+import org.apache.hadoop.sqoop.testutil.ReparseMapper;
+import org.apache.hadoop.sqoop.util.ClassLoaderStack;
+
+/**
+ * Test that the parse() methods generated in user SqoopRecord implementations
+ * work.
+ */
+public class TestParseMethods extends ImportJobTestCase {
+
+  /**
+   * Create the argv to pass to Sqoop
+   * @return the argv as an array of strings.
+   */
+  private String [] getArgv(boolean includeHadoopFlags, String fieldTerminator, 
+      String lineTerminator, String encloser, String escape, boolean encloserRequired) {
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      args.add("-D");
+      args.add("mapred.job.tracker=local");
+      args.add("-D");
+      args.add("mapred.map.tasks=1");
+      args.add("-D");
+      args.add("fs.default.name=file:///");
+    }
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--connect");
+    args.add(HsqldbTestServer.getUrl());
+    args.add("--as-textfile");
+    args.add("--order-by");
+    args.add("DATA_COL0"); // always order by first column.
+    args.add("--fields-terminated-by");
+    args.add(fieldTerminator);
+    args.add("--lines-terminated-by");
+    args.add(lineTerminator);
+    args.add("--escaped-by");
+    args.add(escape);
+    if (encloserRequired) {
+      args.add("--enclosed-by");
+    } else {
+      args.add("--optionally-enclosed-by");
+    }
+    args.add(encloser);
+
+
+    return args.toArray(new String[0]);
+  }
+
+  public void runParseTest(String fieldTerminator, String lineTerminator, String encloser,
+      String escape, boolean encloseRequired) throws IOException {
+
+    ClassLoader prevClassLoader = null;
+
+    String [] argv = getArgv(true, fieldTerminator, lineTerminator, encloser, escape,
+        encloseRequired);
+    runImport(argv);
+    try {
+      ImportOptions opts = new ImportOptions();
+
+      String tableClassName = getTableName();
+
+      opts.parse(getArgv(false, fieldTerminator, lineTerminator, encloser, escape,
+          encloseRequired));
+
+      CompilationManager compileMgr = new CompilationManager(opts);
+      String jarFileName = compileMgr.getJarFilename();
+
+      // make sure the user's class is loaded into our address space.
+      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, tableClassName);
+
+      JobConf job = new JobConf();
+      job.setJar(jarFileName);
+
+      // Tell the job what class we're testing.
+      job.set(ReparseMapper.USER_TYPE_NAME_KEY, tableClassName);
+
+      // use local mode in the same JVM.
+      job.set("mapred.job.tracker", "local");
+      job.set("fs.default.name", "file:///");
+
+      String warehouseDir = getWarehouseDir();
+      Path warehousePath = new Path(warehouseDir);
+      Path inputPath = new Path(warehousePath, getTableName());
+      Path outputPath = new Path(warehousePath, getTableName() + "-out");
+
+      job.setMapperClass(ReparseMapper.class);
+      job.setNumReduceTasks(0);
+      FileInputFormat.addInputPath(job, inputPath);
+      FileOutputFormat.setOutputPath(job, outputPath);
+
+      job.setOutputKeyClass(Text.class);
+      job.setOutputValueClass(NullWritable.class);
+      job.setOutputFormat(RawKeyTextOutputFormat.class);
+
+      JobClient.runJob(job);
+    } catch (InvalidOptionsException ioe) {
+      fail(ioe.toString());
+    } finally {
+      if (null != prevClassLoader) {
+        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+      }
+    }
+  }
+
+  public void testDefaults() throws IOException {
+    String [] types = { "INTEGER", "VARCHAR(32)", "INTEGER" };
+    String [] vals = { "64", "'foo'", "128" };
+
+    createTableWithColTypes(types, vals);
+    runParseTest(",", "\\n", "\\\"", "\\", false);
+  }
+
+  public void testRequiredEnclose() throws IOException {
+    String [] types = { "INTEGER", "VARCHAR(32)", "INTEGER" };
+    String [] vals = { "64", "'foo'", "128" };
+
+    createTableWithColTypes(types, vals);
+    runParseTest(",", "\\n", "\\\"", "\\", true);
+  }
+
+  public void testStringEscapes() throws IOException {
+    String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
+    String [] vals = { "'foo'", "'foo,bar'", "'foo''bar'", "'foo\\bar'", "'foo,bar''baz'" };
+
+    createTableWithColTypes(types, vals);
+    runParseTest(",", "\\n", "\\\'", "\\", false);
+  }
+
+  public void testNumericTypes() throws IOException {
+    String [] types = { "INTEGER", "REAL", "FLOAT", "DATE", "TIME",
+        "TIMESTAMP", "NUMERIC", "BOOLEAN" };
+    String [] vals = { "42", "36.0", "127.1", "'2009-07-02'", "'11:24:00'",
+        "'2009-08-13 20:32:00.1234567'", "92104916282869291837672829102857271948687.287475322",
+        "true" };
+    
+    createTableWithColTypes(types, vals);
+    runParseTest(",", "\\n", "\\\'", "\\", false);
+  }
+}
+

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java Wed Jul 22 14:10:12 2009
@@ -372,10 +372,12 @@
       assertTrue("Error: " + dataFilePath.toString() + " does not exist", f.exists());
 
       Object readValue = SeqFileReader.getFirstValue(dataFilePath.toString());
+      // add trailing '\n' to expected value since SqoopRecord.toString() encodes the record delim
       if (null == expectedVal) {
-        assertEquals("Error validating result from SeqFile", "null", readValue.toString());
+        assertEquals("Error validating result from SeqFile", "null\n", readValue.toString());
       } else {
-        assertEquals("Error validating result from SeqFile", expectedVal, readValue.toString());
+        assertEquals("Error validating result from SeqFile", expectedVal + "\n",
+            readValue.toString());
       }
     } catch (IOException ioe) {
       fail("IOException: " + ioe.toString());

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ReparseMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ReparseMapper.java?rev=796732&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ReparseMapper.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ReparseMapper.java Wed Jul 22 14:10:12 2009
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.testutil;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.sqoop.lib.RecordParser;
+import org.apache.hadoop.sqoop.lib.SqoopRecord;
+import org.apache.hadoop.util.ReflectionUtils;
+
+
+/**
+ * Test harness mapper. Instantiate the user's specific type, parse() the input 
+ * line of text, and throw an IOException if the output toString() line of text
+ * differs.
+ */
+public class ReparseMapper extends MapReduceBase
+    implements Mapper<LongWritable, Text, Text, NullWritable> {
+
+  public static final Log LOG = LogFactory.getLog(ReparseMapper.class.getName());
+
+  public static final String USER_TYPE_NAME_KEY = "sqoop.user.class";
+
+  private SqoopRecord userRecord;
+
+  public void configure(JobConf job) {
+    String userTypeName = job.get(USER_TYPE_NAME_KEY);
+    if (null == userTypeName) {
+      throw new RuntimeException("Unconfigured parameter: " + USER_TYPE_NAME_KEY);
+    }
+
+    LOG.info("User type name set to " + userTypeName);
+
+    this.userRecord = null;
+
+    try {
+      Configuration conf = new Configuration();
+      Class userClass = Class.forName(userTypeName, true,
+          Thread.currentThread().getContextClassLoader());
+      this.userRecord =
+          (SqoopRecord) ReflectionUtils.newInstance(userClass, conf);
+    } catch (ClassNotFoundException cnfe) {
+      // handled by the next block.
+      LOG.error("ClassNotFound exception: " + cnfe.toString());
+    } catch (Exception e) {
+      LOG.error("Got an exception reflecting user class: " + e.toString());
+    }
+
+    if (null == this.userRecord) {
+      LOG.error("Could not instantiate user record of type " + userTypeName);
+      throw new RuntimeException("Could not instantiate user record of type " + userTypeName);
+    }
+  }
+
+  public void map(LongWritable key, Text val, OutputCollector<Text, NullWritable> out, Reporter r)
+      throws IOException {
+
+    LOG.info("Mapper input line: " + val.toString());
+
+    try {
+      // Use the user's record class to parse the line back in.
+      userRecord.parse(val);
+    } catch (RecordParser.ParseError pe) {
+      LOG.error("Got parse error: " + pe.toString());
+      throw new IOException(pe);
+    }
+
+    LOG.info("Mapper output line: " + userRecord.toString());
+
+    out.collect(new Text(userRecord.toString()), NullWritable.get());
+
+    if (!userRecord.toString().equals(val.toString() + "\n")) {
+      // misparsed.
+      throw new IOException("Returned string has value [" + userRecord.toString() + "] when ["
+          + val.toString() + "\n] was expected.");
+    }
+  }
+}
+

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q?rev=796732&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q Wed Jul 22 14:10:12 2009
@@ -0,0 +1,2 @@
+CREATE TABLE CUSTOM_DELIM_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\054' LINES TERMINATED BY '\0174' STORED AS TEXTFILE;
+LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/CUSTOM_DELIM_IMPORT' INTO TABLE CUSTOM_DELIM_IMPORT;
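
The octal escapes in the expected DDL above correspond to the delimiters that TestHiveImport.testCustomDelimiters passes via --fields-terminated-by "," and --lines-terminated-by "|". A quick, hypothetical check of that mapping:

    public class DelimOctalCheck {
      public static void main(String [] args) {
        System.out.println((char) 054 == ',');   // '\054' is octal for ','
        System.out.println((char) 0174 == '|');  // '\0174' is octal for '|'
      }
    }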

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/dateImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/dateImport.q?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/dateImport.q (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/dateImport.q Wed Jul 22 14:10:12 2009
@@ -1,2 +1,2 @@
-CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE;
+CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/DATE_HIVE_IMPORT' INTO TABLE DATE_HIVE_IMPORT;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/failingImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/failingImport.q?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/failingImport.q (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/failingImport.q Wed Jul 22 14:10:12 2009
@@ -1,2 +1,2 @@
-CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE;
+CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/DATE_HIVE_IMPORT' INTO TABLE DATE_HIVE_IMPORT;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/normalImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/normalImport.q?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/normalImport.q (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/normalImport.q Wed Jul 22 14:10:12 2009
@@ -1,2 +1,2 @@
-CREATE TABLE NORMAL_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE;
+CREATE TABLE NORMAL_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/NORMAL_HIVE_IMPORT' INTO TABLE NORMAL_HIVE_IMPORT;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/numericImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/numericImport.q?rev=796732&r1=796731&r2=796732&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/numericImport.q (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/numericImport.q Wed Jul 22 14:10:12 2009
@@ -1,2 +1,2 @@
-CREATE TABLE NUMERIC_HIVE_IMPORT ( DATA_COL0 DOUBLE, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE;
+CREATE TABLE NUMERIC_HIVE_IMPORT ( DATA_COL0 DOUBLE, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/NUMERIC_HIVE_IMPORT' INTO TABLE NUMERIC_HIVE_IMPORT;


