hadoop-mapreduce-commits mailing list archives

From: tomwh...@apache.org
Subject: svn commit: r903543 - in /hadoop/mapreduce/trunk: ./ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ src/contrib/sqoop/src/test/o...
Date: Wed, 27 Jan 2010 05:29:12 GMT
Author: tomwhite
Date: Wed Jan 27 05:28:23 2010
New Revision: 903543

URL: http://svn.apache.org/viewvc?rev=903543&view=rev
Log:
MAPREDUCE-1395. Sqoop does not check return value of Job.waitForCompletion(). Contributed
by Aaron Kimball.
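
The fix follows a single pattern throughout: Job.waitForCompletion() returns
false when the MapReduce job fails without throwing an exception, so the
caller must check the boolean instead of discarding it. A minimal standalone
sketch of the pattern (illustrative names, not the committed code):

    import java.io.IOException;

    import org.apache.hadoop.mapreduce.Job;

    public final class CheckedJobRunner {
      /**
       * Waits for the job and converts a silent failure (a false
       * return from waitForCompletion) into an exception.
       */
      public static void runChecked(Job job) throws IOException {
        try {
          boolean success = job.waitForCompletion(false);
          if (!success) {
            throw new IOException("MapReduce job failed!");
          }
        } catch (InterruptedException ie) {
          throw new IOException(ie);
        } catch (ClassNotFoundException cnfe) {
          throw new IOException(cnfe);
        }
      }
    }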

Added:
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java
Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJob.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ExportException.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportException.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/MapreduceTests.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Wed Jan 27 05:28:23 2010
@@ -259,6 +259,9 @@
     MAPREDUCE-1394. Sqoop generates incorrect URIs in paths sent to Hive.
     (Aaron Kimball via tomwhite)
 
+    MAPREDUCE-1395. Sqoop does not check return value of Job.waitForCompletion().
+    (Aaron Kimball via tomwhite)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java Wed Jan 27 05:28:23 2010
@@ -63,6 +63,15 @@
   private List<String> generatedJarFiles;
 
   public Sqoop() {
+    init();
+  }
+
+  public Sqoop(Configuration conf) {
+    init();
+    setConf(conf);
+  }
+
+  private void init() {
     generatedJarFiles = new ArrayList<String>();
   }
 

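The new Sqoop(Configuration) constructor exists so callers (the new
TestImportJob below, in particular) can hand Sqoop a preconfigured
Configuration before ToolRunner parses the command line. A hedged usage
sketch (the launcher class is hypothetical; Sqoop and ToolRunner are real):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.sqoop.Sqoop;
    import org.apache.hadoop.util.ToolRunner;

    public final class SqoopLauncher {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Keys set here can reach the tool, because SqoopOptions now
        // copies a supplied Configuration (see the SqoopOptions change
        // below) instead of always building a fresh one.
        Sqoop importer = new Sqoop(conf);
        System.exit(ToolRunner.run(importer, args));
      }
    }
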
Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java Wed Jan 27 05:28:23 2010
@@ -134,7 +134,11 @@
   private String [] extraArgs;
 
   public SqoopOptions() {
-    initDefaults();
+    initDefaults(null);
+  }
+
+  public SqoopOptions(Configuration conf) {
+    initDefaults(conf);
   }
 
   /**
@@ -144,7 +148,7 @@
    * @param table Table to read
    */
   public SqoopOptions(final String connect, final String table) {
-    initDefaults();
+    initDefaults(null);
 
     this.connectString = connect;
     this.tableName = table;
@@ -223,7 +227,7 @@
     return this.tmpDir;
   }
 
-  private void initDefaults() {
+  private void initDefaults(Configuration baseConfiguration) {
     // first, set the true defaults if nothing else happens.
     // default action is to run the full pipeline.
     this.action = ControlAction.FullImport;
@@ -263,7 +267,11 @@
     this.useCompression = false;
     this.directSplitSize = 0;
 
-    this.conf = new Configuration();
+    if (null == baseConfiguration) {
+      this.conf = new Configuration();
+    } else {
+      this.conf = new Configuration(baseConfiguration);
+    }
 
     this.extraArgs = null;
 

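initDefaults() now clones the supplied Configuration via Hadoop's copy
constructor, so the caller's object is never aliased: keys set later through
SqoopOptions do not leak back. A small sketch of the copy semantics (the key
name is made up for illustration):

    import org.apache.hadoop.conf.Configuration;

    public final class ConfCopyDemo {
      public static void main(String[] args) {
        Configuration base = new Configuration();
        base.set("example.key", "original");
        // The copy constructor snapshots the current entries.
        Configuration copy = new Configuration(base);
        copy.set("example.key", "changed");
        System.out.println(base.get("example.key")); // prints "original"
        System.out.println(copy.get("example.key")); // prints "changed"
      }
    }
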
Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java Wed Jan 27 05:28:23 2010
@@ -32,6 +32,7 @@
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
@@ -43,6 +44,7 @@
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.orm.TableClassName;
 import org.apache.hadoop.sqoop.util.ClassLoaderStack;
+import org.apache.hadoop.sqoop.util.ImportException;
 import org.apache.hadoop.sqoop.util.PerfCounters;
 
 /**
@@ -53,12 +55,22 @@
 
   public static final Log LOG = LogFactory.getLog(DataDrivenImportJob.class.getName());
 
-  private SqoopOptions options;
+  private final SqoopOptions options;
+  private final Class<Mapper> mapperClass;
 
+  // For dependency-injection purposes, we can specify a mapper class
+  // to use during tests.
+  public final static String DATA_DRIVEN_MAPPER_KEY =
+      "sqoop.data.driven.mapper.class";
+
+  @SuppressWarnings("unchecked")
   public DataDrivenImportJob(final SqoopOptions opts) {
     this.options = opts;
+    this.mapperClass = (Class<Mapper>) opts.getConf().getClass(
+        DATA_DRIVEN_MAPPER_KEY, null);
   }
 
+
   /**
    * Run an import job to read a table in to HDFS
    *
@@ -66,9 +78,11 @@
    * @param ormJarFile the Jar file to insert into the dcache classpath. (may be null)
    * @param splitByCol the column of the database table to use to split the import
    * @param conf A fresh Hadoop Configuration to use to build an MR job.
+   * @throws IOException if the job encountered an IO problem
+   * @throws ImportException if the job failed unexpectedly or was misconfigured.
    */
   public void runImport(String tableName, String ormJarFile, String splitByCol,
-      Configuration conf) throws IOException {
+      Configuration conf) throws IOException, ImportException {
 
     LOG.info("Beginning data-driven import of " + tableName);
 
@@ -103,7 +117,11 @@
 
       if (options.getFileLayout() == SqoopOptions.FileLayout.TextFile) {
         job.setOutputFormatClass(RawKeyTextOutputFormat.class);
-        job.setMapperClass(TextImportMapper.class);
+        if (null == mapperClass) {
+          job.setMapperClass(TextImportMapper.class);
+        } else {
+          job.setMapperClass(mapperClass);
+        }
         job.setOutputKeyClass(Text.class);
         job.setOutputValueClass(NullWritable.class);
         if (options.shouldUseCompression()) {
@@ -112,7 +130,11 @@
         }
       } else if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
         job.setOutputFormatClass(SequenceFileOutputFormat.class);
-        job.setMapperClass(AutoProgressMapper.class);
+        if (null == mapperClass) {
+          job.setMapperClass(AutoProgressMapper.class);
+        } else {
+          job.setMapperClass(mapperClass);
+        }
         if (options.shouldUseCompression()) {
           SequenceFileOutputFormat.setCompressOutput(job, true);
           SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
@@ -172,11 +194,14 @@
       counters.startClock();
 
       try {
-        job.waitForCompletion(false);
+        boolean success = job.waitForCompletion(false);
         counters.stopClock();
         counters.addBytes(job.getCounters().getGroup("FileSystemCounters")
           .findCounter("HDFS_BYTES_WRITTEN").getValue());
         LOG.info("Transferred " + counters.toString());
+        if (!success) {
+          throw new ImportException("Import job failed!");
+        }
       } catch (InterruptedException ie) {
         throw new IOException(ie);
       } catch (ClassNotFoundException cnfe) {

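The DATA_DRIVEN_MAPPER_KEY hook is plain dependency injection:
Configuration.getClass(key, null) returns null when the key is unset, so
production imports still get TextImportMapper or AutoProgressMapper, while a
test can swap in a deliberately failing mapper. A sketch of the injection
(NoOpMapper is a stand-in; TestImportJob below uses NullDereferenceMapper):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.sqoop.mapreduce.DataDrivenImportJob;

    public final class MapperInjectionDemo {
      // Stand-in mapper for illustration only.
      public static class NoOpMapper
          extends Mapper<Object, Object, Object, Object> {
      }

      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setClass(DataDrivenImportJob.DATA_DRIVEN_MAPPER_KEY,
            NoOpMapper.class, Mapper.class);
        // Any DataDrivenImportJob built from options carrying this conf
        // will now use NoOpMapper for both file layouts.
      }
    }
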
Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJob.java?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJob.java Wed Jan 27 05:28:23 2010
@@ -45,6 +45,7 @@
 import org.apache.hadoop.sqoop.manager.ExportJobContext;
 import org.apache.hadoop.sqoop.orm.TableClassName;
 import org.apache.hadoop.sqoop.util.ClassLoaderStack;
+import org.apache.hadoop.sqoop.util.ExportException;
 
 /**
  * Actually runs a jdbc export job using the ORM files generated by the sqoop.orm package.
@@ -128,8 +129,10 @@
 
   /**
    * Run an export job to dump a table from HDFS to a database
+   * @throws IOException if the export job encounters an IO error
+   * @throws ExportException if the job fails unexpectedly or is misconfigured.
    */
-  public void runExport() throws IOException {
+  public void runExport() throws ExportException, IOException {
 
     SqoopOptions options = context.getOptions();
     Configuration conf = options.getConf();
@@ -191,7 +194,10 @@
       job.setMapOutputValueClass(NullWritable.class);
 
       try {
-        job.waitForCompletion(false);
+        boolean success = job.waitForCompletion(false);
+        if (!success) {
+          throw new ExportException("Export job failed!");
+        }
       } catch (InterruptedException ie) {
         throw new IOException(ie);
       } catch (ClassNotFoundException cnfe) {

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ExportException.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ExportException.java?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ExportException.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ExportException.java Wed Jan 27 05:28:23 2010
@@ -39,4 +39,10 @@
   public ExportException(final String message, final Throwable cause) {
     super(message, cause);
   }
+
+  @Override
+  public String toString() {
+    String msg = getMessage();
+    return (null == msg) ? "ExportException" : msg;
+  }
 }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportException.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportException.java?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportException.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportException.java Wed Jan 27 05:28:23 2010
@@ -41,4 +41,10 @@
   public ImportException(final String message, final Throwable cause) {
     super(message, cause);
   }
+
+  @Override
+  public String toString() {
+    String msg = getMessage();
+    return (null == msg) ? "ImportException" : msg;
+  }
 }

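Both exception classes override toString() the same way: Throwable.toString()
would prefix every message with the fully-qualified class name, which is noise
in output shown to end users, so the override returns the bare message and
falls back to the class name only when the message is null. A quick
demonstration using the (message, cause) constructor from the hunk above:

    import org.apache.hadoop.sqoop.util.ImportException;

    public final class ToStringDemo {
      public static void main(String[] args) {
        // Prints "Import job failed!" rather than
        // "org.apache.hadoop.sqoop.util.ImportException: Import job failed!"
        System.out.println(new ImportException("Import job failed!", null));
        // With a null message, the override prints "ImportException".
        System.out.println(new ImportException(null, null));
      }
    }
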
Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java Wed Jan 27 05:28:23 2010
@@ -43,7 +43,7 @@
    * Create the argv to pass to Sqoop
    * @return the argv as an array of strings.
    */
-  private String [] getArgv(boolean includeHadoopFlags, String [] moreArgs) {
+  protected String [] getArgv(boolean includeHadoopFlags, String [] moreArgs) {
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java Wed Jan 27 05:28:23 2010
@@ -256,52 +256,68 @@
   public void doZeroTimestampTest(int testNum, boolean expectSuccess,
       String connectString) throws IOException, SQLException {
 
-    final String tableName = "mysqlTimestampTable" + Integer.toString(testNum);
+    LOG.info("Beginning zero-timestamp test #" + testNum);
 
-    // Create a table containing a full-zeros timestamp.
-    SqoopOptions options = new SqoopOptions(connectString, tableName);
-    options.setUsername(AUTH_TEST_USER);
-    options.setPassword(AUTH_TEST_PASS);
-
-    manager = new LocalMySQLManager(options);
-
-    Connection connection = null;
-    Statement st = null;
-
-    connection = manager.getConnection();
-    connection.setAutoCommit(false);
-    st = connection.createStatement();
-
-    // create the database table and populate it with data. 
-    st.executeUpdate("DROP TABLE IF EXISTS " + tableName);
-    st.executeUpdate("CREATE TABLE " + tableName + " ("
-        + "id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, "
-        + "ts TIMESTAMP NOT NULL)");
-
-    st.executeUpdate("INSERT INTO " + tableName + " VALUES("
-        + "NULL,'0000-00-00 00:00:00.0')");
-    connection.commit();
-    st.close();
-    connection.close();
-
-    // Run the import.
-    String [] argv = getArgv(true, false, connectString, tableName);
-    runImport(argv);
-
-    // Make sure the result file is there.
-    Path warehousePath = new Path(this.getWarehouseDir());
-    Path tablePath = new Path(warehousePath, tableName);
-    Path filePath = new Path(tablePath, "part-m-00000");
-
-    File f = new File(filePath.toString());
-    if (expectSuccess) {
-      assertTrue("Could not find imported data file", f.exists());
-      BufferedReader r = new BufferedReader(new InputStreamReader(
-          new FileInputStream(f)));
-      assertEquals("1,null", r.readLine());
-      IOUtils.closeStream(r);
-    } else {
-      assertFalse("Imported data when expected failure", f.exists());
+    try {
+      final String tableName = "mysqlTimestampTable" + Integer.toString(testNum);
+
+      // Create a table containing a full-zeros timestamp.
+      SqoopOptions options = new SqoopOptions(connectString, tableName);
+      options.setUsername(AUTH_TEST_USER);
+      options.setPassword(AUTH_TEST_PASS);
+
+      manager = new LocalMySQLManager(options);
+
+      Connection connection = null;
+      Statement st = null;
+
+      connection = manager.getConnection();
+      connection.setAutoCommit(false);
+      st = connection.createStatement();
+
+      // create the database table and populate it with data. 
+      st.executeUpdate("DROP TABLE IF EXISTS " + tableName);
+      st.executeUpdate("CREATE TABLE " + tableName + " ("
+          + "id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, "
+          + "ts TIMESTAMP NOT NULL)");
+
+      st.executeUpdate("INSERT INTO " + tableName + " VALUES("
+          + "NULL,'0000-00-00 00:00:00.0')");
+      connection.commit();
+      st.close();
+      connection.close();
+
+      // Run the import.
+      String [] argv = getArgv(true, false, connectString, tableName);
+      try {
+        runImport(argv);
+      } catch (Exception e) {
+        if (expectSuccess) {
+          // This is unexpected. rethrow.
+          throw new RuntimeException(e);
+        } else {
+          // We expected an error.
+          LOG.info("Got exception running import (expected). msg: " + e);
+        }
+      }
+
+      // Make sure the result file is there.
+      Path warehousePath = new Path(this.getWarehouseDir());
+      Path tablePath = new Path(warehousePath, tableName);
+      Path filePath = new Path(tablePath, "part-m-00000");
+
+      File f = new File(filePath.toString());
+      if (expectSuccess) {
+        assertTrue("Could not find imported data file", f.exists());
+        BufferedReader r = new BufferedReader(new InputStreamReader(
+            new FileInputStream(f)));
+        assertEquals("1,null", r.readLine());
+        IOUtils.closeStream(r);
+      } else {
+        assertFalse("Imported data when expected failure", f.exists());
+      }
+    } finally {
+      LOG.info("Finished zero timestamp test #" + testNum);
     }
   }
 }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/MapreduceTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/MapreduceTests.java?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/MapreduceTests.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/MapreduceTests.java Wed Jan 27 05:28:23 2010
@@ -31,6 +31,7 @@
   public static Test suite() {
     TestSuite suite = new TestSuite("Tests for org.apache.hadoop.sqoop.mapreduce");
     suite.addTestSuite(TestTextImportMapper.class);
+    suite.addTestSuite(TestImportJob.class);
     return suite;
   }
 }

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java?rev=903543&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/mapreduce/TestImportJob.java Wed Jan 27 05:28:23 2010
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.mapreduce;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.junit.Before;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.lib.db.DBWritable;
+import org.apache.hadoop.sqoop.Sqoop;
+import org.apache.hadoop.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
+import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
+import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
+import org.apache.hadoop.util.ToolRunner;
+
+/**
+ * Test aspects of the DataDrivenImportJob class
+ */
+public class TestImportJob extends ImportJobTestCase {
+
+  public void testFailedImportDueToIOException() throws IOException {
+    // Make sure that if a MapReduce job to do the import fails due
+    // to an IOException, we tell the user about it.
+
+    // Create a table to attempt to import.
+    createTableForColType("VARCHAR(32)", "'meep'");
+
+    // Make the output dir exist so we know the job will fail via IOException.
+    Path outputPath = new Path(new Path(getWarehouseDir()), getTableName());
+    FileSystem fs = FileSystem.getLocal(new Configuration());
+    fs.mkdirs(outputPath);
+
+    assertTrue(fs.exists(outputPath));
+
+    String [] argv = getArgv(true, new String [] { "DATA_COL0" });
+
+    Sqoop importer = new Sqoop();
+    try {
+      ToolRunner.run(importer, argv);
+      fail("Expected IOException running this job.");
+    } catch (Exception e) {
+      // In debug mode, IOException is wrapped in RuntimeException.
+      LOG.info("Got exceptional return (expected: ok). msg is: " + e);
+    }
+  }
+
+  // A mapper that is guaranteed to cause the task to fail.
+  public static class NullDereferenceMapper
+      extends AutoProgressMapper<LongWritable, DBWritable, Text, NullWritable> {
+
+    public void map(LongWritable key, DBWritable val, Context c)
+        throws IOException, InterruptedException {
+      String s = null;
+      s.length(); // This will throw a NullPointerException.
+    }
+  }
+
+  public void testFailedImportDueToJobFail() throws IOException {
+    // Test that if the job returns 'false' it still fails and informs
+    // the user.
+
+    // Create a table to attempt to import.
+    createTableForColType("VARCHAR(32)", "'meep'");
+
+    String [] argv = getArgv(true, new String [] { "DATA_COL0" });
+
+    // Use dependency injection to specify a mapper that we know
+    // will fail.
+    Configuration conf = new Configuration();
+    conf.setClass(DataDrivenImportJob.DATA_DRIVEN_MAPPER_KEY,
+        NullDereferenceMapper.class,
+        Mapper.class);
+
+    Sqoop importer = new Sqoop(conf);
+    try {
+      ToolRunner.run(importer, argv);
+      fail("Expected ImportException running this job.");
+    } catch (Exception e) {
+      // In debug mode, ImportException is wrapped in RuntimeException.
+      LOG.info("Got exceptional return (expected: ok). msg is: " + e);
+    }
+  }
+
+}
+

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java?rev=903543&r1=903542&r2=903543&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java Wed Jan 27 05:28:23 2010
@@ -51,7 +51,7 @@
    * @param colNames the columns to import. If null, all columns are used.
    * @return the argv as an array of strings.
    */
-  private String [] getArgv(boolean includeHadoopFlags, String [] colNames) {
+  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames) {
     if (null == colNames) {
       colNames = getColNames();
     }


