hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tomwh...@apache.org
Subject svn commit: r911234 - in /hadoop/mapreduce/trunk: ./ src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/ src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/ src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/ src/contrib/sqoop/src/tes...
Date Thu, 18 Feb 2010 00:21:59 GMT
Author: tomwhite
Date: Thu Feb 18 00:21:58 2010
New Revision: 911234

URL: http://svn.apache.org/viewvc?rev=911234&view=rev
Log:
MAPREDUCE-1445. Refactor Sqoop tests to support better ConnManager testing. Contributed by Aaron Kimball.

Added:
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLCompatTest.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLUtils.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleCompatTest.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleUtils.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ManagerCompatTestCase.java
Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestColumnTypes.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/ThirdPartyTests.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/BaseSqoopTestCase.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Thu Feb 18 00:21:58 2010
@@ -174,6 +174,9 @@
     MAPREDUCE-1441. Trim whitespace from directory lists pulled from the
     configuration. (Todd Lipcon via cdouglas)
 
+    MAPREDUCE-1445. Refactor Sqoop tests to support better ConnManager testing.
+    (Aaron Kimball via tomwhite)
+
   OPTIMIZATIONS
 
     MAPREDUCE-270. Fix the tasktracker to optionally send an out-of-band

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestColumnTypes.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestColumnTypes.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestColumnTypes.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestColumnTypes.java Thu Feb 18 00:21:58 2010
@@ -20,15 +20,16 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.junit.Test;
 
-import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
+import org.apache.hadoop.sqoop.testutil.ManagerCompatTestCase;
 
 /**
  * Test that each of the different SQL Column types that we support
  * can, in fact, be imported into HDFS. Test that the writable
  * that we expect to work, does.
  *
+ * This uses hsqldb as its test database.
+ *
  * This requires testing:
  * - That we can pull from the database into HDFS:
  *    readFields(ResultSet), toString()
@@ -37,266 +38,25 @@
  * - And optionally, that we can push to the database:
  *    write(PreparedStatement)
  */
-public class TestColumnTypes extends ImportJobTestCase {
+public class TestColumnTypes extends ManagerCompatTestCase {
 
   public static final Log LOG = LogFactory.getLog(TestColumnTypes.class.getName());
 
-  /**
-   * Do a full verification test on the singleton value of a given type.
-   * @param colType  The SQL type to instantiate the column
-   * @param insertVal The SQL text to insert a value into the database
-   * @param returnVal The string representation of the value as extracted from the db
-   */
-  private void verifyType(String colType, String insertVal, String returnVal) {
-    verifyType(colType, insertVal, returnVal, returnVal);
-  }
-
-  /**
-   * Do a full verification test on the singleton value of a given type.
-   * @param colType  The SQL type to instantiate the column
-   * @param insertVal The SQL text to insert a value into the database
-   * @param returnVal The string representation of the value as extracted from the db
-   * @param seqFileVal The string representation of the value as extracted through
-   *        the DBInputFormat, serialized, and injected into a SequenceFile and put
-   *        through toString(). This may be slightly different than what ResultSet.getString()
-   *        returns, which is used by returnVal.
-   */
-  private void verifyType(String colType, String insertVal, String returnVal, String seqFileVal) {
-    createTableForColType(colType, insertVal);
-    verifyReadback(1, returnVal);
-    verifyImport(seqFileVal, null);
-  }
-
-  static final String STRING_VAL_IN = "'this is a short string'";
-  static final String STRING_VAL_OUT = "this is a short string";
-
-  @Test
-  public void testStringCol1() {
-    verifyType("VARCHAR(32)", STRING_VAL_IN, STRING_VAL_OUT);
-  }
-
-  @Test
-  public void testStringCol2() {
-    verifyType("CHAR(32)", STRING_VAL_IN, STRING_VAL_OUT);
-  }
-
-  @Test
-  public void testEmptyStringCol() {
-    verifyType("VARCHAR(32)", "''", "");
-  }
-
-  @Test
-  public void testNullStringCol() {
-    verifyType("VARCHAR(32)", "NULL", null);
-  }
-
-  @Test
-  public void testInt() {
-    verifyType("INTEGER", "42", "42");
-  }
-
-  @Test
-  public void testNullInt() {
-    verifyType("INTEGER", "NULL", null);
-  }
-
-  @Test
-  public void testBit1() {
-    verifyType("BIT", "1", "true");
-  }
-
-  @Test
-  public void testBit2() {
-    verifyType("BIT", "0", "false");
-  }
-
-  @Test
-  public void testBit3() {
-    verifyType("BIT", "false", "false");
-  }
-
-  @Test
-  public void testTinyInt1() {
-    verifyType("TINYINT", "0", "0");
-  }
-
-  @Test
-  public void testTinyInt2() {
-    verifyType("TINYINT", "42", "42");
-  }
-
-  @Test
-  public void testSmallInt1() {
-    verifyType("SMALLINT", "-1024", "-1024");
-  }
-
-  @Test
-  public void testSmallInt2() {
-    verifyType("SMALLINT", "2048", "2048");
-  }
-
-  @Test
-  public void testBigInt1() {
-    verifyType("BIGINT", "10000000000", "10000000000");
-  }
-
-  @Test
-  public void testReal1() {
-    verifyType("REAL", "256", "256.0");
-  }
-
-  @Test
-  public void testReal2() {
-    verifyType("REAL", "256.45", "256.45");
-  }
-
-  @Test
-  public void testFloat1() {
-    verifyType("FLOAT", "256", "256.0");
-  }
-
-  @Test
-  public void testFloat2() {
-    verifyType("FLOAT", "256.45", "256.45");
-  }
-
-  @Test
-  public void testDouble1() {
-    verifyType("DOUBLE", "-256", "-256.0");
-  }
-
-  @Test
-  public void testDouble2() {
-    verifyType("DOUBLE", "256.45", "256.45");
+  @Override
+  protected Log getLogger() {
+    return LOG;
   }
 
-  @Test
-  public void testDate1() {
-    verifyType("DATE", "'2009-1-12'", "2009-01-12");
+  @Override
+  protected String getDbFriendlyName() {
+    return "HSQLDB";
   }
 
-  @Test
-  public void testDate2() {
-    verifyType("DATE", "'2009-01-12'", "2009-01-12");
-  }
-
-  @Test
-  public void testDate3() {
-    verifyType("DATE", "'2009-04-24'", "2009-04-24");
-  }
-
-  @Test
-  public void testTime1() {
-    verifyType("TIME", "'12:24:00'", "12:24:00");
-  }
-
-  @Test
-  public void testTime2() {
-    verifyType("TIME", "'06:24:00'", "06:24:00");
-  }
-
-  @Test
-  public void testTime3() {
-    verifyType("TIME", "'6:24:00'", "06:24:00");
-  }
-
-  @Test
-  public void testTime4() {
-    verifyType("TIME", "'18:24:00'", "18:24:00");
-  }
-
-  @Test
-  public void testTimestamp1() {
-    verifyType("TIMESTAMP", "'2009-04-24 18:24:00'",
-        "2009-04-24 18:24:00.000000000",
-        "2009-04-24 18:24:00.0");
-  }
-
-  @Test
-  public void testTimestamp2() {
-    try {
-    LOG.debug("Beginning testTimestamp2");
-    verifyType("TIMESTAMP", "'2009-04-24 18:24:00.0002'",
-        "2009-04-24 18:24:00.000200000",
-        "2009-04-24 18:24:00.0002");
-    } finally {
-      LOG.debug("End testTimestamp2");
-    }
-  }
-
-  @Test
-  public void testTimestamp3() {
-    try {
-    LOG.debug("Beginning testTimestamp3");
-    verifyType("TIMESTAMP", "null", null);
-    } finally {
-      LOG.debug("End testTimestamp3");
-    }
-  }
-
-  @Test
-  public void testNumeric1() {
-    verifyType("NUMERIC", "1", "1");
-  }
-
-  @Test
-  public void testNumeric2() {
-    verifyType("NUMERIC", "-10", "-10");
-  }
-
-  @Test
-  public void testNumeric3() {
-    verifyType("NUMERIC", "3.14159", "3.14159");
-  }
-
-  @Test
-  public void testNumeric4() {
-    verifyType("NUMERIC", "30000000000000000000000000.14159", "30000000000000000000000000.14159");
-  }
-
-  @Test
-  public void testNumeric5() {
-    verifyType("NUMERIC", "999999999999999999999999999999.14159", "999999999999999999999999999999.14159");
-  }
-
-  @Test
-  public void testNumeric6() {
-    verifyType("NUMERIC", "-999999999999999999999999999999.14159", "-999999999999999999999999999999.14159");
-  }
-
-  @Test
-  public void testDecimal1() {
-    verifyType("DECIMAL", "1", "1");
-  }
-
-  @Test
-  public void testDecimal2() {
-    verifyType("DECIMAL", "-10", "-10");
-  }
-
-  @Test
-  public void testDecimal3() {
-    verifyType("DECIMAL", "3.14159", "3.14159");
-  }
-
-  @Test
-  public void testDecimal4() {
-    verifyType("DECIMAL", "30000000000000000000000000.14159", "30000000000000000000000000.14159");
-  }
-
-  @Test
-  public void testDecimal5() {
-    verifyType("DECIMAL", "999999999999999999999999999999.14159", "999999999999999999999999999999.14159");
-  }
-
-  @Test
-  public void testDecimal6() {
-    verifyType("DECIMAL", "-999999999999999999999999999999.14159", "-999999999999999999999999999999.14159");
-  }
-
-  @Test
-  public void testLongVarChar() {
-    verifyType("LONGVARCHAR", "'this is a long varchar'", "this is a long varchar");
+  @Override
+  protected boolean useHsqldbTestServer() {
+    return true;
   }
 
+  // Don't need to override getConnectString() because the default uses hsqldb.
 }
+

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java Thu Feb 18 00:21:58 2010
@@ -152,5 +152,8 @@
 
     public void importTable(ImportJobContext context) {
     }
+
+    public void release() {
+    }
   }
 }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java Thu Feb 18 00:21:58 2010
@@ -48,7 +48,8 @@
    * Create the argv to pass to Sqoop
    * @return the argv as an array of strings.
    */
-  private String [] getArgv(boolean includeHadoopFlags, String [] colNames, String splitByCol) {
+  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
+      String splitByCol) {
     String columnsString = "";
     for (String col : colNames) {
       columnsString += col + ",";

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java Thu Feb 18 00:21:58 2010
@@ -43,7 +43,8 @@
    * Create the argv to pass to Sqoop
    * @return the argv as an array of strings.
    */
-  private String [] getArgv(boolean includeHadoopFlags, String [] colNames, String splitByCol) {
+  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
+      String splitByCol) {
     String columnsString = "";
     for (String col : colNames) {
       columnsString += col + ",";

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java Thu Feb 18 00:21:58 2010
@@ -46,7 +46,8 @@
    * Create the argv to pass to Sqoop
    * @return the argv as an array of strings.
    */
-  private String [] getArgv(boolean includeHadoopFlags, String [] colNames, String whereClause) {
+  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
+      String whereClause) {
     String columnsString = "";
     for (String col : colNames) {
       columnsString += col + ",";

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/ThirdPartyTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/ThirdPartyTests.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/ThirdPartyTests.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/ThirdPartyTests.java Thu Feb 18 00:21:58 2010
@@ -24,7 +24,9 @@
 
 import org.apache.hadoop.sqoop.manager.LocalMySQLTest;
 import org.apache.hadoop.sqoop.manager.MySQLAuthTest;
+import org.apache.hadoop.sqoop.manager.MySQLCompatTest;
 import org.apache.hadoop.sqoop.manager.OracleManagerTest;
+import org.apache.hadoop.sqoop.manager.OracleCompatTest;
 import org.apache.hadoop.sqoop.manager.PostgresqlTest;
 
 /**
@@ -41,7 +43,9 @@
       + "implementations in Sqoop");
     suite.addTestSuite(LocalMySQLTest.class);
     suite.addTestSuite(MySQLAuthTest.class);
+    suite.addTestSuite(MySQLCompatTest.class);
     suite.addTestSuite(OracleManagerTest.class);
+    suite.addTestSuite(OracleCompatTest.class);
     suite.addTestSuite(PostgresqlTest.class);
 
     return suite;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java Thu Feb 18 00:21:58 2010
@@ -79,7 +79,7 @@
   @Before
   public void setUp() {
     SqoopOptions options = new SqoopOptions(CONNECT_STRING, TABLE_NAME);
-    options.setUsername(getCurrentUser());
+    options.setUsername(MySQLUtils.getCurrentUser());
     manager = new LocalMySQLManager(options);
 
     Connection connection = null;
@@ -135,53 +135,6 @@
     }
   }
 
-  /** @return the current username. */
-  private String getCurrentUser() {
-    // First, check the $USER environment variable.
-    String envUser = System.getenv("USER");
-    if (null != envUser) {
-      return envUser;
-    }
-
-    // Try `whoami`
-    String [] whoamiArgs = new String[1];
-    whoamiArgs[0] = "whoami";
-    Process p = null;
-    BufferedReader r = null;
-    try {
-      p = Runtime.getRuntime().exec(whoamiArgs);
-      InputStream is = p.getInputStream();
-      r = new BufferedReader(new InputStreamReader(is));
-      return r.readLine();
-    } catch (IOException ioe) {
-      LOG.error("IOException reading from `whoami`: " + ioe.toString());
-      return null;
-    } finally {
-      // close our stream.
-      if (null != r) {
-        try {
-          r.close();
-        } catch (IOException ioe) {
-          LOG.warn("IOException closing input stream from `whoami`: " + ioe.toString());
-        }
-      }
-
-      // wait for whoami to exit.
-      while (p != null) {
-        try {
-          int ret = p.waitFor();
-          if (0 != ret) {
-            LOG.error("whoami exited with error status " + ret);
-            // suppress original return value from this method.
-            return null; 
-          }
-        } catch (InterruptedException ie) {
-          continue; // loop around.
-        }
-      }
-    }
-  }
-
   private String [] getArgv(boolean mysqlOutputDelims, boolean isDirect,
       String tableName, String... extraArgs) {
     ArrayList<String> args = new ArrayList<String>();
@@ -198,7 +151,7 @@
       args.add("--direct");
     }
     args.add("--username");
-    args.add(getCurrentUser());
+    args.add(MySQLUtils.getCurrentUser());
     args.add("--where");
     args.add("id > 1");
     args.add("--num-mappers");
@@ -289,7 +242,7 @@
   }
 
   @Test
-  public void testLocalBulkImportWithMysqlQuotes() throws IOException {
+  public void testLocalBulkImportWithMySQLQuotes() throws IOException {
     // mysql quotes all string-based output.
     String [] expectedResults = {
         "2,'Bob','2009-04-20',400,'sales'",
@@ -300,7 +253,7 @@
   }
 
   @Test
-  public void testMysqlJdbcImport() throws IOException {
+  public void testMySQLJdbcImport() throws IOException {
     String [] expectedResults = {
         "2,Bob,2009-04-20,400.0,sales",
         "3,Fred,2009-01-23,15.0,marketing"
@@ -316,7 +269,7 @@
     final String reservedTableName = "TABLE";
     SqoopOptions options = new SqoopOptions(CONNECT_STRING,
         reservedTableName);
-    options.setUsername(getCurrentUser());
+    options.setUsername(MySQLUtils.getCurrentUser());
     ConnManager mgr = new MySQLManager(options);
 
     Connection connection = null;
@@ -363,7 +316,7 @@
     final String tableName = "mysql_escaped_col_table";
     SqoopOptions options = new SqoopOptions(CONNECT_STRING,
         tableName);
-    options.setUsername(getCurrentUser());
+    options.setUsername(MySQLUtils.getCurrentUser());
     ConnManager mgr = new MySQLManager(options);
 
     Connection connection = null;

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLCompatTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLCompatTest.java?rev=911234&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLCompatTest.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLCompatTest.java Thu Feb 18 00:21:58 2010
@@ -0,0 +1,179 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.manager;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.testutil.ManagerCompatTestCase;
+
+/**
+ * Test the basic mysql connection manager with the various column types.
+ */
+public class MySQLCompatTest extends ManagerCompatTestCase {
+
+  public static final Log LOG = LogFactory.getLog(MySQLCompatTest.class.getName());
+
+  @Override
+  protected Log getLogger() {
+    return LOG;
+  }
+
+  @Override
+  protected String getDbFriendlyName() {
+    return "MySQL";
+  }
+
+  @Override
+  protected String getConnectString() {
+    return MySQLUtils.CONNECT_STRING;
+  }
+
+  @Override
+  protected SqoopOptions getSqoopOptions(Configuration conf) {
+    SqoopOptions opts = new SqoopOptions(conf);
+    opts.setUsername(MySQLUtils.getCurrentUser());
+    return opts;
+
+  }
+
+  @Override
+  protected void dropTableIfExists(String table) throws SQLException {
+    Connection conn = getManager().getConnection();
+    PreparedStatement statement = conn.prepareStatement(
+        "DROP TABLE IF EXISTS " + table,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    statement.executeUpdate();
+    statement.close();
+    conn.commit();
+  }
+
+  @Override
+  protected String getLongVarCharType() {
+    return "MEDIUMTEXT";
+  }
+
+  @Override
+  protected String getTimestampType() {
+    // return a nullable timestamp type.
+    return "TIMESTAMP NULL";
+  }
+
+  @Override
+  protected String getTrueBoolDbOutput() {
+    return "1";
+  }
+
+  @Override
+  protected String getFalseBoolDbOutput() {
+    return "0";
+  }
+
+  @Override
+  protected String getRealDbOutput(String realAsInserted) {
+    return realAsInserted;
+  }
+
+  @Override
+  protected String getRealSeqOutput(String realAsInserted) {
+    return withDecimalZero(realAsInserted);
+  }
+
+  @Override
+  protected String getFloatDbOutput(String floatAsInserted) {
+    return floatAsInserted;
+  }
+
+  @Override
+  protected String getFloatSeqOutput(String floatAsInserted) {
+    return withDecimalZero(floatAsInserted);
+  }
+
+  @Override
+  protected String getDoubleDbOutput(String doubleAsInserted) {
+    return doubleAsInserted;
+  }
+
+  @Override
+  protected String getDoubleSeqOutput(String doubleAsInserted) {
+    return withDecimalZero(doubleAsInserted);
+  }
+
+  @Override
+  protected String getTimestampDbOutput(String tsAsInserted) {
+    // We trim timestamps to exactly one tenth of a second.
+    if ("null".equals(tsAsInserted)) {
+      return tsAsInserted;
+    }
+
+    int dotPos = tsAsInserted.indexOf(".");
+    if (-1 == dotPos) {
+      return tsAsInserted + ".0";
+    } else {
+      return tsAsInserted.substring(0, dotPos + 2);
+    }
+  }
+
+  @Override
+  protected String getTimestampSeqOutput(String tsAsInserted) {
+    return getTimestampDbOutput(tsAsInserted);
+  }
+
+  @Override
+  protected String getNumericDbOutput(String numAsInserted) {
+    // We always pad to exactly the number of digits in
+    // getNumericDecPartDigits().
+
+    int totalDecPartSize = getNumericDecPartDigits();
+    int numPad; // number of digits to pad by.
+
+    int dotPos =  numAsInserted.indexOf(".");
+    if (-1 == dotPos) {
+      numAsInserted = numAsInserted + ".";
+      numPad = totalDecPartSize;
+    } else {
+      int existingDecimalSize = numAsInserted.length() - dotPos;
+      numPad = totalDecPartSize - existingDecimalSize;
+    }
+
+    if (numPad < 0) {
+      // We actually have to trim the value.
+      return numAsInserted.substring(0, numAsInserted.length() + numPad + 1);
+    } else {
+      String zeros = "";
+      for (int i = 0; i < numPad; i++) {
+        zeros =  zeros + "0";
+      }
+      return numAsInserted + zeros;
+    }
+  }
+
+  @Override
+  protected String getDecimalDbOutput(String numAsInserted) {
+    return getNumericDbOutput(numAsInserted);
+  }
+}
+

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLUtils.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLUtils.java?rev=911234&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLUtils.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLUtils.java Thu Feb 18 00:21:58 2010
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.manager;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * Utilities for mysql-based tests.
+ */
+public final class MySQLUtils {
+
+  public static final Log LOG = LogFactory.getLog(MySQLUtils.class.getName());
+
+  public static final String HOST_URL = "jdbc:mysql://localhost/";
+
+  public static final String MYSQL_DATABASE_NAME = "sqooptestdb";
+  public static final String TABLE_NAME = "EMPLOYEES_MYSQL";
+  public static final String CONNECT_STRING = HOST_URL + MYSQL_DATABASE_NAME;
+
+  private MySQLUtils() { }
+
+  /** @return the current username. */
+  public static String getCurrentUser() {
+    // First, check the $USER environment variable.
+    String envUser = System.getenv("USER");
+    if (null != envUser) {
+      return envUser;
+    }
+
+    // Try `whoami`
+    String [] whoamiArgs = new String[1];
+    whoamiArgs[0] = "whoami";
+    Process p = null;
+    BufferedReader r = null;
+    try {
+      p = Runtime.getRuntime().exec(whoamiArgs);
+      InputStream is = p.getInputStream();
+      r = new BufferedReader(new InputStreamReader(is));
+      return r.readLine();
+    } catch (IOException ioe) {
+      LOG.error("IOException reading from `whoami`: " + ioe.toString());
+      return null;
+    } finally {
+      // close our stream.
+      if (null != r) {
+        try {
+          r.close();
+        } catch (IOException ioe) {
+          LOG.warn("IOException closing input stream from `whoami`: " + ioe.toString());
+        }
+      }
+
+      // wait for whoami to exit.
+      while (p != null) {
+        try {
+          int ret = p.waitFor();
+          if (0 != ret) {
+            LOG.error("whoami exited with error status " + ret);
+            // suppress original return value from this method.
+            return null; 
+          }
+        } catch (InterruptedException ie) {
+          continue; // loop around.
+        }
+      }
+    }
+  }
+}

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleCompatTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleCompatTest.java?rev=911234&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleCompatTest.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleCompatTest.java Thu Feb 18 00:21:58 2010
@@ -0,0 +1,226 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.manager;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.testutil.ManagerCompatTestCase;
+
+/**
+ * Test the basic Oracle connection manager with the various column types.
+ */
+public class OracleCompatTest extends ManagerCompatTestCase {
+
+  public static final Log LOG = LogFactory.getLog(OracleCompatTest.class.getName());
+
+  @Override
+  protected Log getLogger() {
+    return LOG;
+  }
+
+  @Override
+  protected String getDbFriendlyName() {
+    return "Oracle";
+  }
+
+  @Override
+  protected String getConnectString() {
+    return OracleUtils.CONNECT_STRING;
+  }
+
+  @Override
+  protected SqoopOptions getSqoopOptions(Configuration conf) {
+    SqoopOptions opts = new SqoopOptions(conf);
+    OracleUtils.setOracleAuth(opts);
+    return opts;
+  }
+
+  @Override
+  protected void dropTableIfExists(String table) throws SQLException {
+    OracleUtils.dropTable(table, getManager());
+  }
+
+  @Override
+  protected Path getDataFilePath() {
+    return new Path(getTablePath(), "part-00000");
+  }
+
+  /**
+   * Pad a string with trailing spaces out to the given field width,
+   * mirroring Oracle's CHAR-column padding. Uses a StringBuilder to
+   * avoid quadratic string concatenation.
+   */
+  private String padString(int width, String str) {
+    StringBuilder sb = new StringBuilder(str);
+    while (sb.length() < width) {
+      sb.append(' ');
+    }
+
+    return sb.toString();
+  }
+
+  @Override
+  public void tearDown() {
+    super.tearDown();
+
+    // If we actually ran the test, we'll need to 'cool off' afterwards.
+    if (!skipped) {
+      // Oracle XE will block connections if you create new ones too quickly.
+      // See http://forums.oracle.com/forums/thread.jspa?messageID=1145120
+      LOG.info("Sleeping to wait for Oracle connection cache clear...");
+      try {
+        Thread.sleep(750);
+      } catch (InterruptedException ie) {
+        // Restore the interrupt status rather than silently swallowing it.
+        Thread.currentThread().interrupt();
+      }
+    }
+  }
+
+  @Override
+  protected String getDoubleType() {
+    return "DOUBLE PRECISION";
+  }
+
+  // Oracle does not provide a BOOLEAN type.
+  @Override
+  protected boolean supportsBoolean() {
+    return false;
+  }
+
+  // Oracle does not provide a BIGINT type.
+  @Override
+  protected boolean supportsBigInt() {
+    return false;
+  }
+
+  // Oracle does not provide a TINYINT type.
+  @Override
+  protected boolean supportsTinyInt() {
+    return false;
+  }
+
+  // Oracle does not provide a LONGVARCHAR type.
+  @Override
+  protected boolean supportsLongVarChar() {
+    return false;
+  }
+
+  // Oracle does not provide a TIME type. We test DATE and TIMESTAMP
+  @Override
+  protected boolean supportsTime() {
+    return false;
+  }
+
+  @Override
+  protected String getDateInsertStr(String dateStr) {
+    return "TO_DATE(" + dateStr + ", 'YYYY-MM-DD')";
+  }
+
+  @Override
+  protected String getTimestampInsertStr(String tsStr) {
+    return "TO_TIMESTAMP(" + tsStr + ", 'YYYY-MM-DD HH24:MI:SS.FF')";
+  }
+
+  @Override
+  protected String getDateDbOutput(String asInserted) {
+    // DATE is actually a TIMESTAMP in Oracle; add a time component.
+    return asInserted + " 00:00:00.0";
+  }
+
+  @Override
+  protected String getDateSeqOutput(String asInserted) {
+    return getDateDbOutput(asInserted);
+  }
+
+  @Override
+  protected String getFixedCharDbOut(int fieldWidth, String asInserted) {
+    return padString(fieldWidth, asInserted);
+  }
+
+  @Override
+  protected String getFixedCharSeqOut(int fieldWidth, String asInserted) {
+    return padString(fieldWidth, asInserted);
+  }
+
+  @Override
+  protected String getRealDbOutput(String realAsInserted) {
+    return realAsInserted;
+  }
+
+  @Override
+  protected String getRealSeqOutput(String realAsInserted) {
+    return realAsInserted;
+  }
+
+  @Override
+  protected String getFloatDbOutput(String floatAsInserted) {
+    return floatAsInserted;
+  }
+
+  @Override
+  protected String getFloatSeqOutput(String floatAsInserted) {
+    return floatAsInserted;
+  }
+
+  @Override
+  protected String getDoubleDbOutput(String doubleAsInserted) {
+    return doubleAsInserted;
+  }
+
+  @Override
+  protected String getDoubleSeqOutput(String doubleAsInserted) {
+    return doubleAsInserted;
+  }
+
+  // Disable this test since Oracle isn't ANSI compliant.
+  @Override
+  public void testEmptyStringCol() {
+    LOG.info(
+        "Oracle treats empty strings as null (non-ANSI compliant). Skipping.");
+  }
+
+  // The timestamp output values are in a nonstandard format; we override
+  // the test to more carefully control the expected output; writing a
+  // compliant getTimestampDbOutput() method isn't worth it.
+  @Override
+  public void testTimestamp1() {
+    verifyType(getTimestampType(),
+        getTimestampInsertStr("'2009-04-24 18:24:00'"),
+        "2009-4-24 18:24:0. 0",
+        "2009-04-24 18:24:00.0");
+  }
+
+  @Override
+  public void testTimestamp2() {
+    try {
+      LOG.debug("Beginning testTimestamp2");
+      verifyType(getTimestampType(),
+          getTimestampInsertStr("'2009-04-24 18:24:00.0002'"),
+          "2009-4-24 18:24:0. 200000",
+          "2009-04-24 18:24:00.0002");
+    } finally {
+      LOG.debug("End testTimestamp2");
+    }
+  }
+}
+

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java Thu Feb 18 00:21:58 2010
@@ -73,23 +73,26 @@
 
   public static final Log LOG = LogFactory.getLog(OracleManagerTest.class.getName());
 
-  static final String ORACLE_DATABASE_NAME = "xe"; // Express edition hardcoded name.
   static final String TABLE_NAME = "EMPLOYEES";
-  static final String CONNECT_STRING = "jdbc:oracle:thin:@//localhost/" + ORACLE_DATABASE_NAME;
-  static final String ORACLE_USER_NAME = "SQOOPTEST";
-  static final String ORACLE_USER_PASS = "12345";
 
   // instance variables populated during setUp, used during tests
   private OracleManager manager;
 
   @Before
   public void setUp() {
-    SqoopOptions options = new SqoopOptions(CONNECT_STRING, TABLE_NAME);
-    options.setUsername(ORACLE_USER_NAME);
-    options.setPassword(ORACLE_USER_PASS);
+    SqoopOptions options = new SqoopOptions(OracleUtils.CONNECT_STRING,
+        TABLE_NAME);
+    OracleUtils.setOracleAuth(options);
 
     manager = new OracleManager(options);
 
+    // Drop the existing table, if there is one.
+    try {
+      OracleUtils.dropTable(TABLE_NAME, manager);
+    } catch (SQLException sqlE) {
+      fail("Could not drop table " + TABLE_NAME + ": " + sqlE);
+    }
+
     Connection connection = null;
     Statement st = null;
 
@@ -98,9 +101,7 @@
       connection.setAutoCommit(false);
       st = connection.createStatement();
 
-      // create the database table and populate it with data. 
-      st.executeUpdate("BEGIN EXECUTE IMMEDIATE 'DROP TABLE " + TABLE_NAME + "'; "
-          + "exception when others then null; end;");
+      // create the database table and populate it with data.
       st.executeUpdate("CREATE TABLE " + TABLE_NAME + " ("
           + "id INT NOT NULL, "
           + "name VARCHAR2(24) NOT NULL, "
@@ -157,11 +158,11 @@
     args.add("--warehouse-dir");
     args.add(getWarehouseDir());
     args.add("--connect");
-    args.add(CONNECT_STRING);
+    args.add(OracleUtils.CONNECT_STRING);
     args.add("--username");
-    args.add(ORACLE_USER_NAME);
+    args.add(OracleUtils.ORACLE_USER_NAME);
     args.add("--password");
-    args.add(ORACLE_USER_PASS);
+    args.add(OracleUtils.ORACLE_USER_PASS);
     args.add("--num-mappers");
     args.add("1");
 

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleUtils.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleUtils.java?rev=911234&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleUtils.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleUtils.java Thu Feb 18 00:21:58 2010
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.manager;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.FileInputStream;
+import java.io.File;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
+import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
+import org.apache.hadoop.sqoop.util.FileListing;
+
+/**
+ * Helper methods for Oracle testing: connection constants, credential
+ * setup, and table cleanup shared by the Oracle-based test cases.
+ */
+public final class OracleUtils {
+
+  public static final Log LOG = LogFactory.getLog(OracleUtils.class.getName());
+
+  // Express edition hardcoded name.
+  public static final String ORACLE_DATABASE_NAME = "xe";
+
+  public static final String CONNECT_STRING =
+      "jdbc:oracle:thin:@//localhost/" + ORACLE_DATABASE_NAME;
+  public static final String ORACLE_USER_NAME = "SQOOPTEST";
+  public static final String ORACLE_USER_PASS = "12345";
+
+  // Utility class; not instantiable.
+  private OracleUtils() { }
+
+  /** Set the test Oracle username and password on the given options. */
+  public static void setOracleAuth(SqoopOptions options) {
+    options.setUsername(ORACLE_USER_NAME);
+    options.setPassword(ORACLE_USER_PASS);
+  }
+
+  /**
+   * Drop a table if it exists.
+   *
+   * @param tableName the table to drop.
+   * @param manager the connection manager supplying the JDBC connection.
+   * @throws SQLException if the drop statement fails for a reason other
+   * than the table not existing.
+   */
+  public static void dropTable(String tableName, ConnManager manager)
+      throws SQLException {
+    Connection connection = null;
+    Statement st = null;
+
+    try {
+      connection = manager.getConnection();
+      connection.setAutoCommit(false);
+      st = connection.createStatement();
+
+      // Drop the table; the PL/SQL exception handler swallows the
+      // "table does not exist" error so this is a no-op when absent.
+      st.executeUpdate("BEGIN EXECUTE IMMEDIATE 'DROP TABLE " + tableName + "'; "
+          + "exception when others then null; end;");
+
+      connection.commit();
+    } finally {
+      // The connection belongs to the manager; close only our statement.
+      try {
+        if (null != st) {
+          st.close();
+        }
+      } catch (SQLException sqlE) {
+        LOG.warn("Got SQLException when closing connection: " + sqlE);
+      }
+    }
+  }
+}

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java Thu Feb 18 00:21:58 2010
@@ -170,6 +170,8 @@
           fail("SQL Exception in ResultSet.close(): " + sqlE.toString());
         }
       }
+
+      manager.release();
     }
   }
 
@@ -190,6 +192,8 @@
           fail("SQL Exception in ResultSet.close(): " + sqlE.toString());
         }
       }
+
+      manager.release();
     }
   }
 
@@ -220,6 +224,7 @@
           + " INT NOT NULL PRIMARY KEY, foo INT)",
           ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
       statement.executeUpdate();
+      statement.close();
     } catch (SQLException sqlException) {
       fail("Could not create table with primary key: " + sqlException.toString());
     } finally {

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java Thu Feb 18 00:21:58 2010
@@ -57,14 +57,16 @@
     // Create a table to attempt to import.
     createTableForColType("VARCHAR(32)", "'meep'");
 
+    Configuration conf = new Configuration();
+
     // Make the output dir exist so we know the job will fail via IOException.
     Path outputPath = new Path(new Path(getWarehouseDir()), getTableName());
-    FileSystem fs = FileSystem.getLocal(new Configuration());
+    FileSystem fs = FileSystem.getLocal(conf);
     fs.mkdirs(outputPath);
 
     assertTrue(fs.exists(outputPath));
 
-    String [] argv = getArgv(true, new String [] { "DATA_COL0" });
+    String [] argv = getArgv(true, new String [] { "DATA_COL0" }, conf);
 
     Sqoop importer = new Sqoop();
     try {
@@ -94,11 +96,11 @@
     // Create a table to attempt to import.
     createTableForColType("VARCHAR(32)", "'meep'");
 
-    String [] argv = getArgv(true, new String [] { "DATA_COL0" });
+    Configuration conf = new Configuration();
+    String [] argv = getArgv(true, new String [] { "DATA_COL0" }, conf);
 
     // Use dependency injection to specify a mapper that we know
     // will fail.
-    Configuration conf = new Configuration();
     conf.setClass(DataDrivenImportJob.DATA_DRIVEN_MAPPER_KEY,
         NullDereferenceMapper.class,
         Mapper.class);

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/BaseSqoopTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/BaseSqoopTestCase.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/BaseSqoopTestCase.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/BaseSqoopTestCase.java Thu Feb 18 00:21:58 2010
@@ -34,6 +34,8 @@
 import org.junit.After;
 import org.junit.Before;
 
+import org.apache.hadoop.sqoop.ConnFactory;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 
 import junit.framework.TestCase;
@@ -119,6 +121,37 @@
     tableNum++;
   }
 
+  /**
+   * @return true if we need an in-memory database to run these tests.
+   */
+  protected boolean useHsqldbTestServer() {
+    return true;
+  }
+
+  /**
+   * @return the connect string to use for interacting with the database.
+   * If useHsqldbTestServer is false, you need to override this and provide
+   * a different connect string.
+   */
+  protected String getConnectString() {
+    return HsqldbTestServer.getUrl();
+  }
+
+  /**
+   * @return a Configuration object used to configure tests.
+   */
+  protected Configuration getConf() {
+    return new Configuration();
+  }
+
+  /**
+   * @return a new SqoopOptions customized for this particular test, but one
+   * which has not had any arguments parsed yet.
+   */
+  protected SqoopOptions getSqoopOptions(Configuration conf) {
+    return new SqoopOptions(conf);
+  }
+
   @Before
   public void setUp() {
 
@@ -130,18 +163,31 @@
       LOG.info("Configured log4j with console appender.");
     }
 
-    testServer = new HsqldbTestServer();
-    try {
-      testServer.resetServer();
-    } catch (SQLException sqlE) {
-      LOG.error("Got SQLException: " + sqlE.toString());
-      fail("Got SQLException: " + sqlE.toString());
-    } catch (ClassNotFoundException cnfe) {
-      LOG.error("Could not find class for db driver: " + cnfe.toString());
-      fail("Could not find class for db driver: " + cnfe.toString());
-    }
+    if (useHsqldbTestServer()) {
+      testServer = new HsqldbTestServer();
+      try {
+        testServer.resetServer();
+      } catch (SQLException sqlE) {
+        LOG.error("Got SQLException: " + sqlE.toString());
+        fail("Got SQLException: " + sqlE.toString());
+      } catch (ClassNotFoundException cnfe) {
+        LOG.error("Could not find class for db driver: " + cnfe.toString());
+        fail("Could not find class for db driver: " + cnfe.toString());
+      }
 
-    manager = testServer.getManager();
+      manager = testServer.getManager();
+    } else {
+      Configuration conf = getConf();
+      SqoopOptions opts = getSqoopOptions(conf);
+      opts.setConnectString(getConnectString());
+      opts.setTableName(getTableName());
+      ConnFactory f = new ConnFactory(conf);
+      try {
+        this.manager = f.getManager(opts);
+      } catch (IOException ioe) {
+        fail("IOException instantiating manager: " + ioe);
+      }
+    }
   }
 
   @After
@@ -156,69 +202,111 @@
       LOG.error("Got SQLException: " + sqlE.toString());
       fail("Got SQLException: " + sqlE.toString());
     }
-
   }
 
   static final String BASE_COL_NAME = "DATA_COL";
 
   /**
+   * Drop a table if it already exists in the database.
+   * @param table the name of the table to drop.
+   * @throws SQLException if something goes wrong.
+   */
+  protected void dropTableIfExists(String table) throws SQLException {
+    Connection conn = getManager().getConnection();
+    PreparedStatement statement = conn.prepareStatement(
+        "DROP TABLE " + table + " IF EXISTS",
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    statement.executeUpdate();
+    statement.close();
+    conn.commit();
+  }
+
+  /**
    * Create a table with a set of columns and add a row of values.
    * @param colTypes the types of the columns to make
    * @param vals the SQL text for each value to insert
    */
   protected void createTableWithColTypes(String [] colTypes, String [] vals) {
     Connection conn = null;
+    PreparedStatement statement = null;
+    String createTableStr = null;
+    String columnDefStr = "";
+    String columnListStr = "";
+    String valueListStr = "";
+    String [] myColNames = new String[colTypes.length];
+
     try {
-      conn = getTestServer().getConnection();
-      PreparedStatement statement = conn.prepareStatement(
-          "DROP TABLE " + getTableName() + " IF EXISTS",
-          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-      statement.executeUpdate();
-      statement.close();
-
-      String columnDefStr = "";
-      String columnListStr = "";
-      String valueListStr = "";
-
-      String [] myColNames = new String[colTypes.length];
-
-      for (int i = 0; i < colTypes.length; i++) {
-        String colName = BASE_COL_NAME + Integer.toString(i);
-        columnDefStr += colName + " " + colTypes[i];
-        columnListStr += colName;
-        valueListStr += vals[i];
-        myColNames[i] = colName;
-        if (i < colTypes.length - 1) {
-          columnDefStr += ", ";
-          columnListStr += ", ";
-          valueListStr += ", ";
+      try {
+        dropTableIfExists(getTableName());
+
+        conn = getManager().getConnection();
+
+        for (int i = 0; i < colTypes.length; i++) {
+          String colName = BASE_COL_NAME + Integer.toString(i);
+          columnDefStr += colName + " " + colTypes[i];
+          columnListStr += colName;
+          valueListStr += vals[i];
+          myColNames[i] = colName;
+          if (i < colTypes.length - 1) {
+            columnDefStr += ", ";
+            columnListStr += ", ";
+            valueListStr += ", ";
+          }
+        }
+
+        createTableStr = "CREATE TABLE " + getTableName()
+            + "(" + columnDefStr + ")";
+        LOG.info("Creating table: " + createTableStr);
+        statement = conn.prepareStatement(
+            createTableStr,
+            ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+        statement.executeUpdate();
+      } catch (SQLException sqlException) {
+        fail("Could not create table: " + sqlException.toString());
+      } finally {
+        if (null != statement) {
+          try {
+            statement.close();
+          } catch (SQLException se) {
+          }
+
+          statement = null;
+        }
+      }
+
+      try {
+        String insertValsStr = "INSERT INTO " + getTableName()
+            + "(" + columnListStr + ")"
+            + " VALUES(" + valueListStr + ")";
+        LOG.info("Inserting values: " + insertValsStr);
+        statement = conn.prepareStatement(
+            insertValsStr,
+            ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+        statement.executeUpdate();
+        statement.close();
+      } catch (SQLException sqlException) {
+        fail("Could not create table: " + sqlException.toString());
+      } finally {
+        if (null != statement) {
+          try {
+            statement.close();
+          } catch (SQLException se) {
+          }
+
+          statement = null;
         }
       }
 
-      statement = conn.prepareStatement(
-          "CREATE TABLE " + getTableName() + "(" + columnDefStr + ")",
-          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-      statement.executeUpdate();
-      statement.close();
-
-      statement = conn.prepareStatement(
-          "INSERT INTO " + getTableName() + "(" + columnListStr + ")"
-          + " VALUES(" + valueListStr + ")",
-          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-      statement.executeUpdate();
-      statement.close();
       conn.commit();
       this.colNames = myColNames;
-    } catch (SQLException sqlException) {
-      fail("Could not create table: " + sqlException.toString());
-    } finally {
+    } catch (SQLException se) {
       if (null != conn) {
         try {
           conn.close();
-        } catch (SQLException sqlE) {
-          LOG.warn("Got SQLException during close: " + sqlE.toString());
+        } catch (SQLException connSE) {
         }
       }
+      fail("Could not create table: " + se.toString());
     }
   }
 
@@ -265,6 +353,9 @@
       assertNotNull("Null results from readTable()!", results);
       assertTrue("Expected at least one row returned", results.next());
       String resultVal = results.getString(colNum);
+      LOG.info("Verifying readback from " + getTableName()
+          + ": got value [" + resultVal + "]");
+      LOG.info("Expected value is: [" + expectedVal + "]");
       if (null != expectedVal) {
         assertNotNull("Expected non-null result value", resultVal);
       }
@@ -281,6 +372,9 @@
           fail("Got SQLException in resultset.close(): " + sqlE.toString());
         }
       }
+
+      // Free internal resources after the readTable.
+      getManager().release();
     }
   }
 }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java?rev=911234&r1=911233&r2=911234&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java Thu Feb 18 00:21:58 2010
@@ -21,18 +21,21 @@
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ToolRunner;
-
 import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.Sqoop;
 import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.util.ClassLoaderStack;
 
+import org.junit.Test;
+
 /**
  * Class that implements common methods required for tests which import data
  * from SQL into HDFS and verify correct import.
@@ -46,12 +49,22 @@
   }
 
   /**
+   * @return a list of additional args to pass to the sqoop command line.
+   */
+  protected List<String> getExtraArgs(Configuration conf) {
+    return new ArrayList<String>();
+  }
+
+  /**
    * Create the argv to pass to Sqoop
    * @param includeHadoopFlags if true, then include -D various.settings=values
    * @param colNames the columns to import. If null, all columns are used.
+   * @param conf a Configuration specifying additional properties to use when
+   * determining the arguments.
    * @return the argv as an array of strings.
    */
-  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames) {
+  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
+      Configuration conf) {
     if (null == colNames) {
       colNames = getColNames();
     }
@@ -77,11 +90,13 @@
     args.add("--warehouse-dir");
     args.add(getWarehouseDir());
     args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
+    args.add(getConnectString());
     args.add("--as-sequencefile");
     args.add("--num-mappers");
     args.add("1");
 
+    args.addAll(getExtraArgs(conf));
+
     return args.toArray(new String[0]);
   }
 
@@ -98,11 +113,14 @@
 
     removeTableDir();
 
+    Configuration conf = getConf();
+    SqoopOptions opts = getSqoopOptions(conf);
+
     // run the tool through the normal entry-point.
     int ret;
     try {
-      Sqoop importer = new Sqoop();
-      ret = ToolRunner.run(importer, getArgv(true, importCols));
+      Sqoop importer = new Sqoop(conf, opts);
+      ret = ToolRunner.run(importer, getArgv(true, importCols, conf));
     } catch (Exception e) {
       LOG.error("Got exception running Sqoop: " + e.toString());
       throw new RuntimeException(e);
@@ -111,9 +129,9 @@
     // expect a successful return.
     assertEquals("Failure during job", 0, ret);
 
-    SqoopOptions opts = new SqoopOptions();
+    opts = getSqoopOptions(conf);
     try {
-      opts.parse(getArgv(false, importCols));
+      opts.parse(getArgv(false, importCols, conf));
     } catch (InvalidOptionsException ioe) {
       fail(ioe.toString());
     }
@@ -128,6 +146,7 @@
       assertTrue("Error: " + dataFilePath.toString() + " does not exist", f.exists());
 
       Object readValue = SeqFileReader.getFirstValue(dataFilePath.toString());
+      LOG.info("Read back from sequencefile: " + readValue);
       // add trailing '\n' to expected value since SqoopRecord.toString() encodes the record delim
       if (null == expectedVal) {
         assertEquals("Error validating result from SeqFile", "null\n", readValue.toString());
@@ -153,7 +172,9 @@
     // run the tool through the normal entry-point.
     int ret;
     try {
-      Sqoop importer = new Sqoop();
+      Configuration conf = getConf();
+      SqoopOptions opts = getSqoopOptions(conf);
+      Sqoop importer = new Sqoop(conf, opts);
       ret = ToolRunner.run(importer, argv);
     } catch (Exception e) {
       LOG.error("Got exception running Sqoop: " + e.toString());

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ManagerCompatTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ManagerCompatTestCase.java?rev=911234&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ManagerCompatTestCase.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ManagerCompatTestCase.java Thu Feb 18 00:21:58 2010
@@ -0,0 +1,798 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.testutil;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Locale;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.ToolRunner;
+
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.Sqoop;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.orm.CompilationManager;
+import org.apache.hadoop.sqoop.util.ClassLoaderStack;
+
+import org.junit.Test;
+
+/**
+ * Class that implements common tests that should be applied to all jdbc
+ * drivers that we want to interop with.
+ *
+ * The purpose of these tests is to ensure that if a database supports a
+ * given data type, we can import this data type into Sqoop. The test is
+ * not intended to check whether all data types are supported by all
+ * databases, nor that the representation of a given data type has a canonical
+ * representation after being imported. Some databases may not support certain
+ * data types, and the format of the imported data may vary from database to
+ * database. It is not Sqoop's goal to resolve inter-database differences.
+ * However, if a database provides a particular type, we should verify that
+ * we can import this data in some form into HDFS.
+ *
+ * This test battery subjects a database to a variety of import tasks. Many
+ * adapter methods are provided to allow subclasses to modify the exact type
+ * names injected, expected output values, etc., to account for inter-database
+ * discrepancies.
+ *
+ * Each subclass of this class should test a single ConnManager implementation.
+ * Subclasses must implement all abstract methods of this class. They may
+ * also wish to override several members of the class hierarchy above this.
+ * In particular:
+ *
+ * String getConnectString() -- Return the connect string to use to get the db.
+ * void dropTableIfExists(tableName) -- how to drop a table that may not exist.
+ * void createTableWithColTypes() -- how to create a table with a set of cols.
+ * Configuration getConf() -- specifies config properties specific to a test.
+ * SqoopOptions getSqoopOptions(conf) -- Instantiates the SqoopOptions to use. 
+ * List&lt;String&gt; getExtraArgs() -- specifies extra argv elements.
+ */
+public abstract class ManagerCompatTestCase extends ImportJobTestCase {
+
+  /**
+   * Logger for test progress. Given a default value in the constructor;
+   * replaced with the subclass-specific logger (getLogger()) in setUp().
+   */
+  public Log LOG;
+
+  public ManagerCompatTestCase() {
+    this.LOG = LogFactory.getLog(ManagerCompatTestCase.class.getName());
+  }
+
+  /**
+   * @return the Log object to use for reporting during this test
+   */
+  protected abstract Log getLogger();
+
+  /**
+   * @return a "friendly" name for the database. e.g "mysql" or "oracle".
+   * Used to build the per-database table-name prefix.
+   */
+  protected abstract String getDbFriendlyName();
+
+  /**
+   * Set to true by a test body that skips itself because the database under
+   * test does not support the datatype being exercised; reset to false in
+   * setUp(). Subclasses may consult this flag at tearDown() time.
+   */
+  protected boolean skipped;
+
+  /**
+   * @return a table-name prefix unique to this manager, e.g. "MGR_ORACLE_".
+   */
+  @Override
+  protected String getTablePrefix() {
+    // Use Locale.ROOT so the generated prefix is stable regardless of the
+    // JVM default locale (e.g. the Turkish dotted/dotless-'i' casing rules
+    // would otherwise corrupt names containing 'i').
+    return "MGR_" + getDbFriendlyName().toUpperCase(Locale.ROOT) + "_";
+  }
+
+  /** Compat tests run against a real external database, not hsqldb. */
+  @Override
+  protected boolean useHsqldbTestServer() {
+    // Compat tests, by default, do not use hsqldb.
+    return false;
+  }
+
+  /**
+   * Installs the subclass-specific logger and clears the skipped flag
+   * before delegating to the parent setUp().
+   */
+  @Override
+  public void setUp() {
+    LOG = getLogger();
+    skipped = false;
+    super.setUp();
+  }
+
+  /**
+   * Best-effort cleanup: drop the table the test created before running the
+   * parent teardown. A failed drop is logged but never fails the test.
+   */
+  @Override
+  public void tearDown() {
+    try {
+      // Clean up the database on our way out.
+      dropTableIfExists(getTableName());
+    } catch (SQLException e) {
+      LOG.warn("Error trying to drop table '" + getTableName()
+          + "' on tearDown: " + e);
+    }
+    super.tearDown();
+  }
+
+  //////// These methods indicate whether certain datatypes are supported
+  //////// by the underlying database.
+  //////// Subclasses override the relevant predicate to return false; the
+  //////// corresponding tests then mark themselves skipped and return early.
+
+  /** @return true if the database under test has a BOOLEAN type */
+  protected boolean supportsBoolean() {
+    return true;
+  }
+
+  /** @return true if the database under test has a BIGINT type */
+  protected boolean supportsBigInt() {
+    return true;
+  }
+
+  /** @return true if the database under test has a TINYINT type */
+  protected boolean supportsTinyInt() {
+    return true;
+  }
+
+  /** @return true if the database under test has a LONGVARCHAR type */
+  protected boolean supportsLongVarChar() {
+    return true;
+  }
+
+  /** @return true if the database under test has a TIME type */
+  protected boolean supportsTime() {
+    return true;
+  }
+
+  //////// These methods indicate how to define various datatypes.
+
+  /**
+   * Define a NUMERIC type that can handle 30 digits total, and 5
+   * digits to the right of the decimal point.
+   */
+  protected String getNumericType() {
+    return "NUMERIC(" + getNumericScale() + ", "
+        + getNumericDecPartDigits() + ")";
+  }
+
+  /**
+   * Define a DECIMAL type that can handle 30 digits total, and 5
+   * digits to the right of the decimal point.
+   */
+  protected String getDecimalType() {
+    return "DECIMAL(" + getDecimalScale() + ", "
+        + getDecimalDecPartDigits() + ")";
+  }
+
+  /**
+   * Return the number of digits to use in the integral part of a
+   * NUMERIC type
+   *
+   * NOTE(review): despite the name, this supplies the first argument of
+   * NUMERIC(p, s), i.e. the SQL "precision" (total digits), not the scale.
+   */
+  protected int getNumericScale() {
+    return 30;
+  }
+
+  /**
+   * Return the number of digits to use in the decimal part of a
+   * NUMERIC type (the SQL "scale" — second argument of NUMERIC(p, s)).
+   */
+  protected int getNumericDecPartDigits() {
+    return 5;
+  }
+
+  /**
+   * Return the number of digits to use in the integral part of a
+   * DECIMAL type
+   *
+   * NOTE(review): as with getNumericScale(), this is the SQL precision.
+   */
+  protected int getDecimalScale() {
+    return 30;
+  }
+
+  /**
+   * Return the number of digits to use in the decimal part of a
+   * DECIMAL type (the SQL "scale").
+   */
+  protected int getDecimalDecPartDigits() {
+    return 5;
+  }
+
+  /**
+   * Define a DOUBLE column.
+   */
+  protected String getDoubleType() {
+    return "DOUBLE";
+  }
+
+  /**
+   * Define a LONGVARCHAR type that can handle at least 24 characters.
+   */
+  protected String getLongVarCharType() {
+    return "LONGVARCHAR";
+  }
+
+  /**
+   * Define a TIMESTAMP type that can handle null values.
+   */
+  protected String getTimestampType() {
+    return "TIMESTAMP";
+  }
+
+  //////// These methods indicate how databases respond to various datatypes.
+  //////// Since our comparisons are all string-based, these return strings.
+
+  /** @return How a BOOLEAN column with value TRUE is communicated over JDBC */
+  protected String getTrueBoolDbOutput() {
+    return "true";
+  }
+
+  /** @return How a BOOLEAN column with value TRUE is represented in a seq-file
+   * import. */
+  protected String getTrueBoolSeqOutput() {
+    return "true";
+  }
+
+  /** @return How a BOOLEAN column with value FALSE is communicated over JDBC */
+  protected String getFalseBoolDbOutput() {
+    return "false";
+  }
+
+  /** @return How a BOOLEAN column with value FALSE is represented in a seq-file
+   * import. */
+  protected String getFalseBoolSeqOutput() {
+    return "false";
+  }
+
+  /**
+   * Helper: echo a floating-point literal back unchanged, except that a
+   * string with no decimal point (an integer) gets a trailing ".0" attached.
+   */
+  protected String withDecimalZero(String floatingPointStr) {
+    boolean hasDecimalPoint = floatingPointStr.indexOf('.') >= 0;
+    return hasDecimalPoint ? floatingPointStr : floatingPointStr + ".0";
+  }
+
+  /**
+   * A real value inserted as '40' may be returned as '40', '40.', or '40.0',
+   * etc. Given a string that defines how a real value is inserted, determine
+   * how it is returned.
+   *
+   * @param realAsInserted the string we used in the SQL INSERT statement
+   * @return how the string version of this as returned by the database is
+   * represented.
+   */
+  protected String getRealDbOutput(String realAsInserted) {
+    return withDecimalZero(realAsInserted);
+  }
+
+  /**
+   * @return how a given real value is represented in an imported sequence
+   * file
+   */
+  protected String getRealSeqOutput(String realAsInserted) {
+    return getRealDbOutput(realAsInserted);
+  }
+
+  /**
+   * A float value inserted as '40' may be returned as '40', '40.', or '40.0',
+   * etc. Given a string that defines how a float value is inserted, determine
+   * how it is returned.
+   *
+   * @param floatAsInserted the string we used in the SQL INSERT statement
+   * @return how the string version of this as returned by the database is
+   * represented.
+   */
+  protected String getFloatDbOutput(String floatAsInserted) {
+    return withDecimalZero(floatAsInserted);
+  }
+
+  /**
+   * @return how a given float value is represented in an imported sequence
+   * file; defaults to the JDBC representation.
+   */
+  protected String getFloatSeqOutput(String floatAsInserted) {
+    return getFloatDbOutput(floatAsInserted);
+  }
+
+  /**
+   * A double value inserted as '40' may be returned as '40', '40.', or '40.0',
+   * etc. Given a string that defines how a double value is inserted, determine
+   * how it is returned.
+   *
+   * @param doubleAsInserted the string we used in the SQL INSERT statement
+   * @return how the string version of this as returned by the database is
+   * represented.
+   */
+  protected String getDoubleDbOutput(String doubleAsInserted) {
+    return withDecimalZero(doubleAsInserted);
+  }
+
+  /**
+   * @return how a given double value is represented in an imported sequence
+   * file; defaults to the JDBC representation.
+   */
+  protected String getDoubleSeqOutput(String doubleAsInserted) {
+    return getDoubleDbOutput(doubleAsInserted);
+  }
+
+  /**
+   * Some databases require that we insert dates using a special format.
+   * This takes the canonical string used to insert a DATE into a table,
+   * and specializes it to the SQL dialect used by the database under
+   * test. Default: the canonical string, unchanged.
+   */
+  protected String getDateInsertStr(String insertStr) {
+    return insertStr;
+  }
+
+  /**
+   * Some databases require that we insert times using a special format.
+   * This takes the canonical string used to insert a TIME into a table,
+   * and specializes it to the SQL dialect used by the database under
+   * test. Default: the canonical string, unchanged.
+   */
+  protected String getTimeInsertStr(String insertStr) {
+    return insertStr;
+  }
+
+  /**
+   * Some databases require that we insert timestamps using a special format.
+   * This takes the canonical string used to insert a TIMESTAMP into a table,
+   * and specializes it to the SQL dialect used by the database under
+   * test. Default: the canonical string, unchanged.
+   */
+  protected String getTimestampInsertStr(String insertStr) {
+    return insertStr;
+  }
+
+  /** @return how a DATE is communicated over JDBC for a given inserted value */
+  protected String getDateDbOutput(String dateAsInserted) {
+    return dateAsInserted;
+  }
+
+  /** @return how a DATE appears in a sequence-file import */
+  protected String getDateSeqOutput(String dateAsInserted) {
+    return dateAsInserted;
+  }
+
+  /**
+   * Convert an input timestamp to the string representation of the timestamp
+   * returned by a database select query: the fractional-seconds field is
+   * padded out to nine (nanosecond) digits.
+   *
+   * @param tsAsInserted the input timestamp
+   * @return the string version of this as returned by the database is
+   * represented.
+   */
+  protected String getTimestampDbOutput(String tsAsInserted) {
+    if ("null".equals(tsAsInserted)) {
+      return tsAsInserted;
+    }
+
+    int dotPos = tsAsInserted.indexOf(".");
+    if (-1 == dotPos) {
+      // No dot in the original string; expand to 9 places.
+      return tsAsInserted + ".000000000";
+    } else {
+      // Pad the existing fractional digits out to exactly 9 places.
+      // (length - dotPos - 1) digits are already present after the dot.
+      // The previous formula (length - dotPos) only produced 9 digits
+      // when the input happened to have exactly 4 fractional digits.
+      int numZerosNeeded = 9 - (tsAsInserted.length() - dotPos - 1);
+      StringBuilder sb = new StringBuilder(tsAsInserted);
+      for (int i = 0; i < numZerosNeeded; i++) {
+        sb.append('0');
+      }
+
+      return sb.toString();
+    }
+  }
+
+  /**
+   * Convert an input timestamp to the string representation of the timestamp
+   * returned by a sequencefile-based import.
+   *
+   * A dot-less input gains a single ".0" suffix; any input that already
+   * carries a fractional part (or the literal "null") is returned as-is.
+   *
+   * @param tsAsInserted the input timestamp
+   * @return the string version of this as returned by the database is
+   * represented.
+   */
+  protected String getTimestampSeqOutput(String tsAsInserted) {
+    if ("null".equals(tsAsInserted)) {
+      return tsAsInserted;
+    }
+    boolean hasFraction = tsAsInserted.indexOf(".") != -1;
+    return hasFraction ? tsAsInserted : tsAsInserted + ".0";
+  }
+
+  /** @return how a NUMERIC value is communicated over JDBC */
+  protected String getNumericDbOutput(String numAsInserted) {
+    return numAsInserted;
+  }
+
+  /** @return how a NUMERIC value appears in a sequence-file import */
+  protected String getNumericSeqOutput(String numAsInserted) {
+    return getNumericDbOutput(numAsInserted);
+  }
+
+  /** @return how a DECIMAL value is communicated over JDBC */
+  protected String getDecimalDbOutput(String numAsInserted) {
+    return numAsInserted;
+  }
+
+  /** @return how a DECIMAL value appears in a sequence-file import */
+  protected String getDecimalSeqOutput(String numAsInserted) {
+    return getDecimalDbOutput(numAsInserted);
+  }
+
+  /**
+   * @return how a CHAR(fieldWidth) field is returned by the database
+   * for a given input.
+   */
+  protected String getFixedCharDbOut(int fieldWidth, String asInserted) {
+    return asInserted;
+  }
+
+  /**
+   * @return how a CHAR(fieldWidth) field appears in a sequence-file import
+   * for a given input.
+   */
+  protected String getFixedCharSeqOut(int fieldWidth, String asInserted) {
+    return asInserted;
+  }
+
+  //////// The actual tests occur below here. ////////
+
+  /**
+   * Do a full verification test on the singleton value of a given type.
+   * Convenience overload for when the JDBC and seq-file representations
+   * are expected to be identical.
+   * @param colType  The SQL type to instantiate the column.
+   * @param insertVal The SQL text to insert a value into the database.
+   * @param returnVal The string representation of the value as extracted
+   *        from the db.
+   */
+  protected void verifyType(String colType, String insertVal,
+      String returnVal) {
+    verifyType(colType, insertVal, returnVal, returnVal);
+  }
+
+  /**
+   * Do a full verification test on the singleton value of a given type:
+   * create a one-column table, verify the value reads back over JDBC, then
+   * verify a full Sqoop import to a SequenceFile.
+   * @param colType  The SQL type to instantiate the column.
+   * @param insertVal The SQL text to insert a value into the database.
+   * @param returnVal The string representation of the value as extracted from
+   *        the db.
+   * @param seqFileVal The string representation of the value as extracted
+   *        through the DBInputFormat, serialized, and injected into a
+   *        SequenceFile and put through toString(). This may be slightly
+   *        different than what ResultSet.getString() returns, which is used
+   *        by returnVal.
+   */
+  protected void verifyType(String colType, String insertVal, String returnVal,
+      String seqFileVal) {
+    createTableForColType(colType, insertVal);
+    verifyReadback(1, returnVal);
+    verifyImport(seqFileVal, null);
+  }
+
+  // Canonical short-string fixture shared by the VARCHAR/CHAR tests.
+  static final String STRING_VAL_IN = "'this is a short string'";
+  static final String STRING_VAL_OUT = "this is a short string";
+
+  @Test
+  public void testStringCol1() {
+    verifyType("VARCHAR(32)", STRING_VAL_IN, STRING_VAL_OUT);
+  }
+
+  @Test
+  public void testStringCol2() {
+    verifyType("CHAR(32)", STRING_VAL_IN,
+        getFixedCharDbOut(32, STRING_VAL_OUT),
+        getFixedCharSeqOut(32, STRING_VAL_OUT));
+  }
+
+  @Test
+  public void testEmptyStringCol() {
+    verifyType("VARCHAR(32)", "''", "");
+  }
+
+  @Test
+  public void testNullStringCol() {
+    verifyType("VARCHAR(32)", "NULL", null);
+  }
+
+  @Test
+  public void testInt() {
+    verifyType("INTEGER", "42", "42");
+  }
+
+  @Test
+  public void testNullInt() {
+    verifyType("INTEGER", "NULL", null);
+  }
+
+  // Tests for types that may be unsupported mark themselves skipped and
+  // return early rather than failing.
+
+  @Test
+  public void testBoolean() {
+    if (!supportsBoolean()) {
+      LOG.info("Skipping boolean test (unsupported)");
+      skipped = true;
+      return;
+    }
+    verifyType("BOOLEAN", "1", getTrueBoolDbOutput(), getTrueBoolSeqOutput());
+  }
+
+  @Test
+  public void testBoolean2() {
+    if (!supportsBoolean()) {
+      LOG.info("Skipping boolean test (unsupported)");
+      skipped = true;
+      return;
+    }
+    verifyType("BOOLEAN", "0", getFalseBoolDbOutput(), getFalseBoolSeqOutput());
+  }
+
+  @Test
+  public void testBoolean3() {
+    if (!supportsBoolean()) {
+      LOG.info("Skipping boolean test (unsupported)");
+      skipped = true;
+      return;
+    }
+    verifyType("BOOLEAN", "false", getFalseBoolDbOutput(), getFalseBoolSeqOutput());
+  }
+
+  @Test
+  public void testTinyInt1() {
+    if (!supportsTinyInt()) {
+      LOG.info("Skipping tinyint test (unsupported)");
+      skipped = true;
+      return;
+    }
+    verifyType("TINYINT", "0", "0");
+  }
+
+  @Test
+  public void testTinyInt2() {
+    if (!supportsTinyInt()) {
+      LOG.info("Skipping tinyint test (unsupported)");
+      skipped = true;
+      return;
+    }
+    verifyType("TINYINT", "42", "42");
+  }
+
+  @Test
+  public void testSmallInt1() {
+    verifyType("SMALLINT", "-1024", "-1024");
+  }
+
+  @Test
+  public void testSmallInt2() {
+    verifyType("SMALLINT", "2048", "2048");
+  }
+
+  @Test
+  public void testBigInt1() {
+    if (!supportsBigInt()) {
+      LOG.info("Skipping bigint test (unsupported)");
+      skipped = true;
+      return;
+    }
+    verifyType("BIGINT", "10000000000", "10000000000");
+  }
+
+
+  @Test
+  public void testReal1() {
+    verifyType("REAL", "256", getRealDbOutput("256"), getRealSeqOutput("256"));
+  }
+
+  @Test
+  public void testReal2() {
+    verifyType("REAL", "256.45", getRealDbOutput("256.45"),
+        getRealSeqOutput("256.45"));
+  }
+
+  @Test
+  public void testFloat1() {
+    verifyType("FLOAT", "256", getFloatDbOutput("256"),
+        getFloatSeqOutput("256"));
+  }
+
+  @Test
+  public void testFloat2() {
+    verifyType("FLOAT", "256.5", getFloatDbOutput("256.5"),
+        getFloatSeqOutput("256.5"));
+  }
+
+  @Test
+  public void testDouble1() {
+    verifyType(getDoubleType(), "-256", getDoubleDbOutput("-256"),
+        getDoubleSeqOutput("-256"));
+  }
+
+  @Test
+  public void testDouble2() {
+    verifyType(getDoubleType(), "256.45", getDoubleDbOutput("256.45"),
+        getDoubleSeqOutput("256.45"));
+  }
+
+  // Date tests: testDate1 deliberately inserts a non-zero-padded month
+  // ('2009-1-12') and expects the database to normalize it.
+
+  @Test
+  public void testDate1() {
+    verifyType("DATE", getDateInsertStr("'2009-1-12'"),
+        getDateDbOutput("2009-01-12"),
+        getDateSeqOutput("2009-01-12"));
+  }
+
+  @Test
+  public void testDate2() {
+    verifyType("DATE", getDateInsertStr("'2009-01-12'"),
+        getDateDbOutput("2009-01-12"),
+        getDateSeqOutput("2009-01-12"));
+  }
+
+  @Test
+  public void testDate3() {
+    verifyType("DATE", getDateInsertStr("'2009-04-24'"),
+        getDateDbOutput("2009-04-24"),
+        getDateSeqOutput("2009-04-24"));
+  }
+
+  @Test
+  public void testTime1() {
+    if (!supportsTime()) {
+      LOG.info("Skipping time test (unsupported)");
+      skipped = true;
+      return;
+    }
+    verifyType("TIME", getTimeInsertStr("'12:24:00'"), "12:24:00");
+  }
+
+  @Test
+  public void testTime2() {
+    if (!supportsTime()) {
+      LOG.info("Skipping time test (unsupported)");
+      skipped = true;
+      return;
+    }
+    verifyType("TIME", getTimeInsertStr("'06:24:00'"), "06:24:00");
+  }
+
+  @Test
+  public void testTime3() {
+    if (!supportsTime()) {
+      LOG.info("Skipping time test (unsupported)");
+      skipped = true;
+      return;
+    }
+    // Non-zero-padded hour on insert; expect a padded hour on readback.
+    verifyType("TIME", getTimeInsertStr("'6:24:00'"), "06:24:00");
+  }
+
+  @Test
+  public void testTime4() {
+    if (!supportsTime()) {
+      LOG.info("Skipping time test (unsupported)");
+      skipped = true;
+      return;
+    }
+    verifyType("TIME", getTimeInsertStr("'18:24:00'"), "18:24:00");
+  }
+
+  @Test
+  public void testTimestamp1() {
+    verifyType(getTimestampType(),
+        getTimestampInsertStr("'2009-04-24 18:24:00'"),
+        getTimestampDbOutput("2009-04-24 18:24:00"),
+        getTimestampSeqOutput("2009-04-24 18:24:00"));
+  }
+
+  @Test
+  public void testTimestamp2() {
+    try {
+      LOG.debug("Beginning testTimestamp2");
+      verifyType(getTimestampType(),
+          getTimestampInsertStr("'2009-04-24 18:24:00.0002'"),
+          getTimestampDbOutput("2009-04-24 18:24:00.0002"),
+          getTimestampSeqOutput("2009-04-24 18:24:00.0002"));
+    } finally {
+      LOG.debug("End testTimestamp2");
+    }
+  }
+
+  @Test
+  public void testTimestamp3() {
+    try {
+      LOG.debug("Beginning testTimestamp3");
+      verifyType(getTimestampType(), "null", null);
+    } finally {
+      LOG.debug("End testTimestamp3");
+    }
+  }
+
+  @Test
+  public void testNumeric1() {
+    verifyType(getNumericType(), "1",
+        getNumericDbOutput("1"),
+        getNumericSeqOutput("1"));
+  }
+
+  @Test
+  public void testNumeric2() {
+    verifyType(getNumericType(), "-10",
+        getNumericDbOutput("-10"),
+        getNumericSeqOutput("-10"));
+  }
+
+  @Test
+  public void testNumeric3() {
+    verifyType(getNumericType(), "3.14159",
+        getNumericDbOutput("3.14159"),
+        getNumericSeqOutput("3.14159"));
+  }
+
+  @Test
+  public void testNumeric4() {
+    verifyType(getNumericType(),
+        "3000000000000000000.14159",
+        getNumericDbOutput("3000000000000000000.14159"),
+        getNumericSeqOutput("3000000000000000000.14159"));
+  }
+
+  @Test
+  public void testNumeric5() {
+    verifyType(getNumericType(),
+        "99999999999999999999.14159",
+        getNumericDbOutput("99999999999999999999.14159"),
+        getNumericSeqOutput("99999999999999999999.14159"));
+
+  }
+
+  @Test
+  public void testNumeric6() {
+    verifyType(getNumericType(),
+        "-99999999999999999999.14159",
+        getNumericDbOutput("-99999999999999999999.14159"),
+        getNumericSeqOutput("-99999999999999999999.14159"));
+  }
+
+  @Test
+  public void testDecimal1() {
+    verifyType(getDecimalType(), "1",
+        getDecimalDbOutput("1"),
+        getDecimalSeqOutput("1"));
+  }
+
+  @Test
+  public void testDecimal2() {
+    verifyType(getDecimalType(), "-10",
+        getDecimalDbOutput("-10"),
+        getDecimalSeqOutput("-10"));
+  }
+
+  @Test
+  public void testDecimal3() {
+    verifyType(getDecimalType(), "3.14159",
+        getDecimalDbOutput("3.14159"),
+        getDecimalSeqOutput("3.14159"));
+  }
+
+  @Test
+  public void testDecimal4() {
+    verifyType(getDecimalType(),
+        "3000000000000000000.14159",
+        getDecimalDbOutput("3000000000000000000.14159"),
+        getDecimalSeqOutput("3000000000000000000.14159"));
+  }
+
+  @Test
+  public void testDecimal5() {
+    verifyType(getDecimalType(),
+        "99999999999999999999.14159",
+        getDecimalDbOutput("99999999999999999999.14159"),
+        getDecimalSeqOutput("99999999999999999999.14159"));
+  }
+
+  @Test
+  public void testDecimal6() {
+    verifyType(getDecimalType(),
+        "-99999999999999999999.14159",
+        getDecimalDbOutput("-99999999999999999999.14159"),
+        getDecimalSeqOutput("-99999999999999999999.14159"));
+  }
+
+  @Test
+  public void testLongVarChar() {
+    if (!supportsLongVarChar()) {
+      LOG.info("Skipping long varchar test (unsupported)");
+      skipped = true;
+      return;
+    }
+    verifyType(getLongVarCharType(),
+        "'this is a long varchar'",
+        "this is a long varchar");
+  }
+
+}
+



Mime
View raw message