hadoop-mapreduce-commits mailing list archives

From tomwh...@apache.org
Subject svn commit: r888144 [2/3] - in /hadoop/mapreduce/trunk: ./ src/contrib/sqoop/doc/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ s...
Date Mon, 07 Dec 2009 21:42:40 GMT
Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java Mon Dec  7 21:42:38 2009
@@ -18,9 +18,11 @@
 
 package org.apache.hadoop.sqoop.manager;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.mapreduce.DataDrivenImportJob;
-import org.apache.hadoop.sqoop.util.ImportError;
+import org.apache.hadoop.sqoop.mapreduce.ExportJob;
+import org.apache.hadoop.sqoop.util.ExportException;
+import org.apache.hadoop.sqoop.util.ImportException;
 import org.apache.hadoop.sqoop.util.ResultSetPrinter;
 
 import java.io.IOException;
@@ -49,14 +51,14 @@
 
   public static final Log LOG = LogFactory.getLog(SqlManager.class.getName());
 
-  protected ImportOptions options;
+  protected SqoopOptions options;
 
   /**
    * Constructs the SqlManager.
    * @param opts the SqoopOptions describing the user's requested actions.
    */
-  public SqlManager(final ImportOptions opts) {
+  public SqlManager(final SqoopOptions opts) {
     this.options = opts;
   }
 
@@ -261,10 +263,10 @@
    * via DataDrivenImportJob to read the table with DataDrivenDBInputFormat.
    */
   public void importTable(ImportJobContext context)
-      throws IOException, ImportError {
+      throws IOException, ImportException {
     String tableName = context.getTableName();
     String jarFile = context.getJarFile();
-    ImportOptions options = context.getOptions();
+    SqoopOptions options = context.getOptions();
     DataDrivenImportJob importer = new DataDrivenImportJob(options);
     String splitCol = options.getSplitByCol();
     if (null == splitCol) {
@@ -274,7 +276,7 @@
 
     if (null == splitCol) {
       // Can't infer a primary key.
-      throw new ImportError("No primary key could be found for table " + tableName
+      throw new ImportException("No primary key could be found for table " + tableName
           + ". Please specify one with --split-by.");
     }
 
@@ -424,4 +426,13 @@
 
     return connection;
   }
+
+  /**
+   * Export data stored in HDFS into a table in a database.
+   */
+  public void exportTable(ExportJobContext context)
+      throws IOException, ExportException {
+    ExportJob exportJob = new ExportJob(context);
+    exportJob.runExport();
+  }
 }
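
The new exportTable() entry point is driven through an ExportJobContext. A
hedged sketch of how a caller might reach it (the ExportJobContext constructor
arguments are assumed from the getTableName(), getJarFile(), and getOptions()
accessors used elsewhere in this commit; the table name and jar path are
hypothetical):

    // Sketch only: assumes ExportJobContext(table, jarFile, options) and that
    // exportTable() is reachable through the manager returned by ConnFactory.
    SqoopOptions options = new SqoopOptions();
    ConnManager mgr = new ConnFactory(options.getConf()).getManager(options);
    ExportJobContext ctxt =
        new ExportJobContext("employees", "/tmp/codegen/employees.jar", options);
    mgr.exportTable(ctxt);  // dispatches to SqlManager.exportTable() above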

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java Mon Dec  7 21:42:38 2009
@@ -42,7 +42,7 @@
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 import org.apache.hadoop.sqoop.ConnFactory;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.orm.TableClassName;
 import org.apache.hadoop.sqoop.util.ClassLoaderStack;
@@ -55,9 +55,9 @@
 
   public static final Log LOG = LogFactory.getLog(ImportJob.class.getName());
 
-  private ImportOptions options;
+  private SqoopOptions options;
 
-  public ImportJob(final ImportOptions opts) {
+  public ImportJob(final SqoopOptions opts) {
     this.options = opts;
   }
 
@@ -100,7 +100,7 @@
         outputPath = new Path(tableName);
       }
 
-      if (options.getFileLayout() == ImportOptions.FileLayout.TextFile) {
+      if (options.getFileLayout() == SqoopOptions.FileLayout.TextFile) {
         job.setOutputFormat(RawKeyTextOutputFormat.class);
         job.setMapperClass(TextImportMapper.class);
         job.setOutputKeyClass(Text.class);
@@ -109,7 +109,7 @@
           FileOutputFormat.setCompressOutput(job, true);
           FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
         }
-      } else if (options.getFileLayout() == ImportOptions.FileLayout.SequenceFile) {
+      } else if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
         job.setOutputFormat(SequenceFileOutputFormat.class);
         if (options.shouldUseCompression()) {
           SequenceFileOutputFormat.setCompressOutput(job, true);

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java Mon Dec  7 21:42:38 2009
@@ -39,7 +39,7 @@
 import org.apache.hadoop.mapreduce.lib.db.DBWritable;
 
 import org.apache.hadoop.sqoop.ConnFactory;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.orm.TableClassName;
 import org.apache.hadoop.sqoop.util.ClassLoaderStack;
@@ -53,9 +53,9 @@
 
   public static final Log LOG = LogFactory.getLog(DataDrivenImportJob.class.getName());
 
-  private ImportOptions options;
+  private SqoopOptions options;
 
-  public DataDrivenImportJob(final ImportOptions opts) {
+  public DataDrivenImportJob(final SqoopOptions opts) {
     this.options = opts;
   }
 
@@ -74,7 +74,8 @@
 
     String tableClassName = new TableClassName(options).getClassForTable(tableName);
 
-    boolean isLocal = "local".equals(conf.get("mapred.job.tracker"));
+    boolean isLocal = "local".equals(conf.get("mapreduce.jobtracker.address"))
+        || "local".equals(conf.get("mapred.job.tracker"));
     ClassLoader prevClassLoader = null;
     if (isLocal) {
       // If we're using the LocalJobRunner, then instead of using the compiled jar file
@@ -100,7 +101,7 @@
         outputPath = new Path(tableName);
       }
 
-      if (options.getFileLayout() == ImportOptions.FileLayout.TextFile) {
+      if (options.getFileLayout() == SqoopOptions.FileLayout.TextFile) {
         job.setOutputFormatClass(RawKeyTextOutputFormat.class);
         job.setMapperClass(TextImportMapper.class);
         job.setOutputKeyClass(Text.class);
@@ -109,7 +110,7 @@
           FileOutputFormat.setCompressOutput(job, true);
           FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
         }
-      } else if (options.getFileLayout() == ImportOptions.FileLayout.SequenceFile) {
+      } else if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
         job.setOutputFormatClass(SequenceFileOutputFormat.class);
         job.setMapperClass(AutoProgressMapper.class);
         if (options.shouldUseCompression()) {
@@ -123,7 +124,7 @@
 
       int numMapTasks = options.getNumMappers();
       if (numMapTasks < 1) {
-        numMapTasks = ImportOptions.DEFAULT_NUM_MAPPERS;
+        numMapTasks = SqoopOptions.DEFAULT_NUM_MAPPERS;
         LOG.warn("Invalid mapper count; using " + numMapTasks + " mappers.");
       }
       job.getConfiguration().setInt("mapred.map.tasks", numMapTasks);
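
The LocalJobRunner check above now consults both the current and the
deprecated job tracker configuration keys. Restated as a self-contained
helper, with the key names taken verbatim from the diff:

    import org.apache.hadoop.conf.Configuration;

    public final class LocalModeCheck {
      private LocalModeCheck() {
      }

      /** Returns true when MapReduce would run through the LocalJobRunner. */
      public static boolean isLocal(Configuration conf) {
        // "mapreduce.jobtracker.address" is the current key name;
        // "mapred.job.tracker" is still read so that configurations written
        // against older releases keep working.
        return "local".equals(conf.get("mapreduce.jobtracker.address"))
            || "local".equals(conf.get("mapred.job.tracker"));
      }
    }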

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJob.java?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJob.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJob.java Mon Dec  7 21:42:38 2009
@@ -0,0 +1,207 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.mapreduce;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
+import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+
+import org.apache.hadoop.sqoop.ConnFactory;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.lib.SqoopRecord;
+import org.apache.hadoop.sqoop.manager.ConnManager;
+import org.apache.hadoop.sqoop.manager.ExportJobContext;
+import org.apache.hadoop.sqoop.orm.TableClassName;
+import org.apache.hadoop.sqoop.util.ClassLoaderStack;
+
+/**
+ * Actually runs a JDBC export job using the ORM files generated by the
+ * sqoop.orm package. Uses DBOutputFormat to write back to the database.
+ */
+public class ExportJob {
+
+  public static final Log LOG = LogFactory.getLog(ExportJob.class.getName());
+
+  public static final String SQOOP_EXPORT_TABLE_CLASS_KEY = "sqoop.export.table.class";
+
+  private ExportJobContext context;
+
+  public ExportJob(final ExportJobContext ctxt) {
+    this.context = ctxt;
+  }
+
+  /**
+   * @return true if p is a SequenceFile, or a directory containing
+   * SequenceFiles.
+   */
+  private boolean isSequenceFiles(Path p) throws IOException {
+    Configuration conf = context.getOptions().getConf();
+    FileSystem fs = p.getFileSystem(conf);
+
+    try {
+      FileStatus stat = fs.getFileStatus(p);
+
+      if (null == stat) {
+        // Couldn't get the item.
+        LOG.warn("Input path " + p + " does not exist");
+        return false;
+      }
+
+      if (stat.isDir()) {
+        FileStatus [] subitems = fs.listStatus(p);
+        if (subitems == null || subitems.length == 0) {
+          LOG.warn("Input path " + p + " contains no files");
+          return false; // empty dir.
+        }
+
+        // Examine the first child entry instead.
+        stat = subitems[0];
+      }
+
+      if (null == stat) {
+        LOG.warn("null FileStatus object in isSequenceFiles(); assuming false.");
+        return false;
+      }
+
+      Path target = stat.getPath();
+      // Test target's header to see if it contains magic numbers indicating it's
+      // a SequenceFile.
+      byte [] header = new byte[3];
+      FSDataInputStream is = null;
+      try {
+        is = fs.open(target);
+        is.readFully(header);
+      } catch (IOException ioe) {
+        // Error reading header or EOF; assume not a SequenceFile.
+        LOG.warn("IOException checking SequenceFile header: " + ioe);
+        return false;
+      } finally {
+        try {
+          if (null != is) {
+            is.close();
+          }
+        } catch (IOException ioe) {
+          // ignore; closing.
+          LOG.warn("IOException closing input stream: " + ioe + "; ignoring.");
+        }
+      }
+
+      // Return true (is a SequenceFile) iff the magic number matches.
+      return header[0] == 'S' && header[1] == 'E' && header[2] == 'Q';
+    } catch (FileNotFoundException fnfe) {
+      LOG.warn("Input path " + p + " does not exist");
+      return false; // doesn't exist!
+    }
+  }
+
+  /**
+   * Run an export job to dump data from HDFS into a database table.
+   */
+  public void runExport() throws IOException {
+
+    SqoopOptions options = context.getOptions();
+    Configuration conf = options.getConf();
+    String tableName = context.getTableName();
+    String tableClassName = new TableClassName(options).getClassForTable(tableName);
+    String ormJarFile = context.getJarFile();
+
+    LOG.info("Beginning export of " + tableName);
+
+    boolean isLocal = "local".equals(conf.get("mapreduce.jobtracker.address"))
+        || "local".equals(conf.get("mapred.job.tracker"));
+    ClassLoader prevClassLoader = null;
+    if (isLocal) {
+      // If we're using the LocalJobRunner, then instead of using the compiled jar file
+      // as the job source, we're running in the current thread. Push on another classloader
+      // that loads from that jar in addition to everything currently on the classpath.
+      prevClassLoader = ClassLoaderStack.addJarFile(ormJarFile, tableClassName);
+    }
+
+    try {
+      Job job = new Job(conf);
+
+      // Set the external jar to use for the job.
+      job.getConfiguration().set("mapred.jar", ormJarFile);
+
+      Path inputPath = new Path(context.getOptions().getExportDir());
+      inputPath = inputPath.makeQualified(FileSystem.get(conf));
+
+      if (isSequenceFiles(inputPath)) {
+        job.setInputFormatClass(SequenceFileInputFormat.class);
+        job.setMapperClass(SequenceFileExportMapper.class);
+      } else {
+        job.setInputFormatClass(TextInputFormat.class);
+        job.setMapperClass(TextExportMapper.class);
+      }
+
+      FileInputFormat.addInputPath(job, inputPath);
+      job.setNumReduceTasks(0);
+
+      ConnManager mgr = new ConnFactory(conf).getManager(options);
+      String username = options.getUsername();
+      if (null == username || username.length() == 0) {
+        DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(),
+            options.getConnectString());
+      } else {
+        DBConfiguration.configureDB(job.getConfiguration(), mgr.getDriverClass(),
+            options.getConnectString(), username, options.getPassword());
+      }
+
+      String [] colNames = options.getColumns();
+      if (null == colNames) {
+        colNames = mgr.getColumnNames(tableName);
+      }
+      DBOutputFormat.setOutput(job, tableName, colNames);
+
+      job.setOutputFormatClass(DBOutputFormat.class);
+      job.getConfiguration().set(SQOOP_EXPORT_TABLE_CLASS_KEY, tableClassName);
+      job.setMapOutputKeyClass(SqoopRecord.class);
+      job.setMapOutputValueClass(NullWritable.class);
+
+      try {
+        job.waitForCompletion(false);
+      } catch (InterruptedException ie) {
+        throw new IOException(ie);
+      } catch (ClassNotFoundException cnfe) {
+        throw new IOException(cnfe);
+      }
+    } finally {
+      if (isLocal && null != prevClassLoader) {
+        // unload the special classloader for this jar.
+        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+      }
+    }
+  }
+}
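
isSequenceFiles() above keys off the three-byte "SEQ" magic number that opens
every SequenceFile. The same test, reduced to a minimal standalone sketch over
a local file (no FileSystem plumbing; same assume-false policy on error):

    import java.io.DataInputStream;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;

    public final class SeqMagic {
      /** Returns true iff f begins with the 'S', 'E', 'Q' magic bytes. */
      public static boolean looksLikeSequenceFile(File f) {
        byte [] header = new byte[3];
        DataInputStream in = null;
        try {
          in = new DataInputStream(new FileInputStream(f));
          in.readFully(header);
        } catch (IOException ioe) {
          return false; // too short or unreadable; assume not a SequenceFile.
        } finally {
          if (null != in) {
            try {
              in.close();
            } catch (IOException ioe) {
              // ignore; closing.
            }
          }
        }
        return header[0] == 'S' && header[1] == 'E' && header[2] == 'Q';
      }
    }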

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileExportMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileExportMapper.java?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileExportMapper.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileExportMapper.java Mon Dec  7 21:42:38 2009
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.Mapper.Context;
+
+import org.apache.hadoop.sqoop.lib.SqoopRecord;
+
+/**
+ * Reads a SqoopRecord from the SequenceFile in which it's packed and emits
+ * that DBWritable to the DBOutputFormat for writeback to the database.
+ */
+public class SequenceFileExportMapper
+    extends AutoProgressMapper<LongWritable, SqoopRecord, SqoopRecord, NullWritable> {
+
+  public SequenceFileExportMapper() {
+  }
+
+  public void map(LongWritable key, SqoopRecord val, Context context)
+      throws IOException, InterruptedException {
+    context.write(val, NullWritable.get());
+  }
+}

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/TextExportMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/TextExportMapper.java?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/TextExportMapper.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/TextExportMapper.java Mon Dec  7 21:42:38 2009
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Mapper.Context;
+import org.apache.hadoop.mapreduce.lib.db.DBWritable;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import org.apache.hadoop.sqoop.lib.RecordParser;
+import org.apache.hadoop.sqoop.lib.SqoopRecord;
+
+/**
+ * Converts an input record from a string representation to a parsed Sqoop record
+ * and emits that DBWritable to the DBOutputFormat for writeback to the database.
+ */
+public class TextExportMapper
+    extends AutoProgressMapper<LongWritable, Text, SqoopRecord, NullWritable> {
+
+  private SqoopRecord recordImpl;
+
+  public TextExportMapper() {
+  }
+
+  protected void setup(Context context) throws IOException, InterruptedException {
+    super.setup(context);
+
+    Configuration conf = context.getConfiguration();
+
+    // Instantiate a copy of the user's class to hold and parse the record.
+    String recordClassName = conf.get(ExportJob.SQOOP_EXPORT_TABLE_CLASS_KEY);
+    if (null == recordClassName) {
+      throw new IOException("Export table class name ("
+          + ExportJob.SQOOP_EXPORT_TABLE_CLASS_KEY
+          + ") is not set!");
+    }
+
+    try {
+      Class cls = Class.forName(recordClassName, true,
+          Thread.currentThread().getContextClassLoader());
+      recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
+    } catch (ClassNotFoundException cnfe) {
+      throw new IOException(cnfe);
+    }
+
+    if (null == recordImpl) {
+      throw new IOException("Could not instantiate object of type " + recordClassName);
+    }
+  }
+
+  public void map(LongWritable key, Text val, Context context)
+      throws IOException, InterruptedException {
+    try {
+      recordImpl.parse(val);
+      context.write(recordImpl, NullWritable.get());
+    } catch (RecordParser.ParseError pe) {
+      throw new IOException("Could not parse record: " + val, pe);
+    }
+  }
+}
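
What parse() does is defined entirely by the generated class named in
sqoop.export.table.class. Purely as a hypothetical illustration (this class is
not part of the commit), a record for the two-column test schema
(id INT, msg VARCHAR) might reduce to something like the following; the real
generated code also implements the Writable and DBWritable plumbing and honors
the configured field delimiters:

    // Hypothetical sketch only; not generated code from this commit.
    public class TwoColumnRecordSketch {
      private int id;
      private String msg;

      /** Split one tab-delimited input line into the two test columns. */
      public void parse(CharSequence line) {
        String [] fields = line.toString().trim().split("\t");
        this.id = Integer.parseInt(fields[0]);
        this.msg = fields[1];
      }
    }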

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java Mon Dec  7 21:42:38 2009
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.sqoop.orm;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.manager.SqlManager;
 import org.apache.hadoop.sqoop.lib.BigDecimalSerializer;
@@ -57,7 +57,7 @@
    */
   public static final int CLASS_WRITER_VERSION = 2;
 
-  private ImportOptions options;
+  private SqoopOptions options;
   private ConnManager connManager;
   private String tableName;
   private CompilationManager compileManager;
@@ -68,7 +68,7 @@
    * @param connMgr the connection manager used to describe the table.
    * @param table the name of the table to read.
    */
-  public ClassWriter(final ImportOptions opts, final ConnManager connMgr,
+  public ClassWriter(final SqoopOptions opts, final ConnManager connMgr,
       final String table, final CompilationManager compMgr) {
     this.options = opts;
     this.connManager = connMgr;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java Mon Dec  7 21:42:38 2009
@@ -36,7 +36,7 @@
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.JobConf;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.util.FileListing;
 
 /**
@@ -53,10 +53,10 @@
 
   public static final Log LOG = LogFactory.getLog(CompilationManager.class.getName());
 
-  private ImportOptions options;
+  private SqoopOptions options;
   private List<String> sources;
 
-  public CompilationManager(final ImportOptions opts) {
+  public CompilationManager(final SqoopOptions opts) {
     options = opts;
     sources = new ArrayList<String>();
   }
@@ -209,8 +209,6 @@
     // read the file into a buffer, and write it to the jar file.
     for (File entry : dirEntries) {
       if (!entry.isDirectory()) {
-        LOG.debug("Considering entry: " + entry);
-
         // chomp off the portion of the full path that is shared
         // with the base directory where class files were put;
         // we only record the subdir parts in the zip entry.

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java Mon Dec  7 21:42:38 2009
@@ -18,23 +18,23 @@
 
 package org.apache.hadoop.sqoop.orm;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 /**
  * Reconciles the table name being imported with the class naming information
- * specified in ImportOptions to determine the actual package and class name
+ * specified in SqoopOptions to determine the actual package and class name
  * to use for a table.
  */
 public class TableClassName {
 
   public static final Log LOG = LogFactory.getLog(TableClassName.class.getName());
 
-  private final ImportOptions options;
+  private final SqoopOptions options;
 
-  public TableClassName(final ImportOptions opts) {
+  public TableClassName(final SqoopOptions opts) {
     if (null == opts) {
       throw new NullPointerException("Cannot instantiate a TableClassName on null options.");
     } else {

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/DirectImportUtils.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/DirectImportUtils.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/DirectImportUtils.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/DirectImportUtils.java Mon Dec  7 21:42:38 2009
@@ -29,7 +29,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.io.SplittingOutputStream;
 import org.apache.hadoop.sqoop.io.SplittableBufferedWriter;
 import org.apache.hadoop.util.Shell;
@@ -70,7 +70,7 @@
    * returned stream.
    */
   public static SplittableBufferedWriter createHdfsSink(Configuration conf,
-      ImportOptions options, String tableName) throws IOException {
+      SqoopOptions options, String tableName) throws IOException {
 
     FileSystem fs = FileSystem.get(conf);
     String warehouseDir = options.getWarehouseDir();

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ExportException.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ExportException.java?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ExportException.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ExportException.java Mon Dec  7 21:42:38 2009
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.util;
+
+/**
+ * General error during export process.
+ */
+@SuppressWarnings("serial")
+public class ExportException extends Exception {
+
+  public ExportException() {
+    super("ExportException");
+  }
+
+  public ExportException(final String message) {
+    super(message);
+  }
+
+  public ExportException(final Throwable cause) {
+    super(cause);
+  }
+
+  public ExportException(final String message, final Throwable cause) {
+    super(message, cause);
+  }
+}

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportException.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportException.java?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportException.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportException.java Mon Dec  7 21:42:38 2009
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.util;
+
+/**
+ * General error during import process.
+ */
+@SuppressWarnings("serial")
+public class ImportException extends Exception {
+
+  public ImportException() {
+    super("ImportException");
+  }
+
+  public ImportException(final String message) {
+    super(message);
+  }
+
+  public ImportException(final Throwable cause) {
+    super(cause);
+  }
+
+  public ImportException(final String message, final Throwable cause) {
+    super(message, cause);
+  }
+}

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java Mon Dec  7 21:42:38 2009
@@ -47,6 +47,7 @@
     suite.addTestSuite(TestSqlManager.class);
     suite.addTestSuite(TestClassWriter.class);
     suite.addTestSuite(TestColumnTypes.class);
+    suite.addTestSuite(TestExport.class);
     suite.addTestSuite(TestMultiCols.class);
     suite.addTestSuite(TestMultiMaps.class);
     suite.addTestSuite(TestSplitBy.class);
@@ -54,7 +55,7 @@
     suite.addTestSuite(TestHiveImport.class);
     suite.addTestSuite(TestRecordParser.class);
     suite.addTestSuite(TestFieldFormatter.class);
-    suite.addTestSuite(TestImportOptions.class);
+    suite.addTestSuite(TestSqoopOptions.class);
     suite.addTestSuite(TestParseMethods.class);
     suite.addTestSuite(TestConnFactory.class);
     suite.addTestSuite(TestSplittableBufferedWriter.class);

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java Mon Dec  7 21:42:38 2009
@@ -41,7 +41,7 @@
     conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY, AlwaysDummyFactory.class.getName());
 
     ConnFactory factory = new ConnFactory(conf);
-    ConnManager manager = factory.getManager(new ImportOptions());
+    ConnManager manager = factory.getManager(new SqoopOptions());
     assertNotNull("No manager returned", manager);
     assertTrue("Expected a DummyManager", manager instanceof DummyManager);
   }
@@ -52,7 +52,7 @@
 
     ConnFactory factory = new ConnFactory(conf);
     try {
-      ConnManager manager = factory.getManager(new ImportOptions());
+      ConnManager manager = factory.getManager(new SqoopOptions());
       fail("factory.getManager() expected to throw IOException");
     } catch (IOException ioe) {
       // Expected this. Test passes.
@@ -69,7 +69,7 @@
     conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY, classNames);
 
     ConnFactory factory = new ConnFactory(conf);
-    ConnManager manager = factory.getManager(new ImportOptions());
+    ConnManager manager = factory.getManager(new SqoopOptions());
     assertNotNull("No manager returned", manager);
     assertTrue("Expected a DummyManager", manager instanceof DummyManager);
   }
@@ -77,14 +77,14 @@
   ////// mock classes used for test cases above //////
 
   public static class AlwaysDummyFactory extends ManagerFactory {
-    public ConnManager accept(ImportOptions opts) {
+    public ConnManager accept(SqoopOptions opts) {
       // Always return a new DummyManager
       return new DummyManager();
     }
   }
 
   public static class EmptyFactory extends ManagerFactory {
-    public ConnManager accept(ImportOptions opts) {
+    public ConnManager accept(SqoopOptions opts) {
       // Never instantiate a proper ConnManager;
       return null;
     }

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestExport.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestExport.java?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestExport.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestExport.java Mon Dec  7 21:42:38 2009
@@ -0,0 +1,539 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.compress.CodecPool;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
+import org.apache.hadoop.io.compress.Decompressor;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import org.apache.hadoop.sqoop.lib.RecordParser;
+import org.apache.hadoop.sqoop.lib.SqoopRecord;
+import org.apache.hadoop.sqoop.testutil.ExportJobTestCase;
+import org.apache.hadoop.sqoop.util.ClassLoaderStack;
+
+import org.junit.Before;
+
+/**
+ * Test that we can export data from HDFS into databases.
+ */
+public class TestExport extends ExportJobTestCase {
+
+  @Before
+  public void setUp() {
+    // start the server
+    super.setUp();
+
+    // throw away any existing data that might be in the database.
+    try {
+      this.getTestServer().dropExistingSchema();
+    } catch (SQLException sqlE) {
+      fail(sqlE.toString());
+    }
+  }
+
+  private String getRecordLine(int recordNum, ColumnGenerator... extraCols) {
+    String idStr = Integer.toString(recordNum);
+    StringBuilder sb = new StringBuilder();
+
+    sb.append(idStr);
+    sb.append("\t");
+    sb.append(getMsgPrefix());
+    sb.append(idStr);
+    for (ColumnGenerator gen : extraCols) {
+      sb.append("\t");
+      sb.append(gen.getExportText(recordNum));
+    }
+    sb.append("\n");
+
+    return sb.toString();
+  }
+
+  /** When generating data for export tests, each column is generated
+      according to a ColumnGenerator. Methods exist for determining
+      what to put into text strings in the files to export, as well
+      as what the string representation of the column as returned by
+      the database should look like.
+    */
+  interface ColumnGenerator {
+    /** for a row with id rowNum, what should we write into that
+        line of the text file to export?
+      */
+    public String getExportText(int rowNum);
+
+    /** for a row with id rowNum, what should the database return
+        for the given column's value?
+      */
+    public String getVerifyText(int rowNum);
+
+    /** Return the column type to put in the CREATE TABLE statement */
+    public String getType();
+  }
+
+  /**
+   * Create a data file that gets exported to the db
+   * @param fileNum the number of the file (for multi-file export)
+   * @param numRecords how many records to write to the file.
+   * @param gzip is true if the file should be gzipped.
+   */
+  private void createTextFile(int fileNum, int numRecords, boolean gzip,
+      ColumnGenerator... extraCols) throws IOException {
+    int startId = fileNum * numRecords;
+
+    String ext = ".txt";
+    if (gzip) {
+      ext = ext + ".gz";
+    }
+    Path tablePath = getTablePath();
+    Path filePath = new Path(tablePath, "part" + fileNum + ext);
+
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    FileSystem fs = FileSystem.get(conf);
+    fs.mkdirs(tablePath);
+    OutputStream os = fs.create(filePath);
+    if (gzip) {
+      CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
+      CompressionCodec codec = ccf.getCodec(filePath);
+      os = codec.createOutputStream(os);
+    }
+    BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));
+    for (int i = 0; i < numRecords; i++) {
+      w.write(getRecordLine(startId + i, extraCols));
+    }
+    w.close();
+    os.close();
+
+    if (gzip) {
+      verifyCompressedFile(filePath, numRecords);
+    }
+  }
+
+  private void verifyCompressedFile(Path f, int expectedNumLines) throws IOException {
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    FileSystem fs = FileSystem.get(conf);
+    InputStream is = fs.open(f);
+    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
+    CompressionCodec codec = ccf.getCodec(f);
+    LOG.info("gzip check codec is " + codec);
+    Decompressor decompressor = CodecPool.getDecompressor(codec);
+    if (null == decompressor) {
+      LOG.info("Verifying gzip sanity with null decompressor");
+    } else {
+      LOG.info("Verifying gzip sanity with decompressor: " + decompressor.toString());
+    }
+    is = codec.createInputStream(is, decompressor);
+    BufferedReader r = new BufferedReader(new InputStreamReader(is));
+    int numLines = 0;
+    while (true) {
+      String ln = r.readLine();
+      if (ln == null) {
+        break;
+      }
+      numLines++;
+    }
+
+    r.close();
+    assertEquals("Did not read back correct number of lines",
+        expectedNumLines, numLines);
+    LOG.info("gzip sanity check returned " + numLines + " lines; ok.");
+  }
+
+  /**
+   * Create a data file in SequenceFile format that gets exported to the db
+   * @param fileNum the number of the file (for multi-file export).
+   * @param numRecords how many records to write to the file.
+   * @param className the table class name to instantiate and populate
+   *          for each record.
+   */
+  private void createSequenceFile(int fileNum, int numRecords, String className)
+      throws IOException {
+
+    try {
+      // Instantiate the value record object via reflection. 
+      Class cls = Class.forName(className, true,
+          Thread.currentThread().getContextClassLoader());
+      SqoopRecord record = (SqoopRecord) ReflectionUtils.newInstance(cls, new Configuration());
+
+      // Create the SequenceFile.
+      Configuration conf = new Configuration();
+      conf.set("fs.default.name", "file:///");
+      FileSystem fs = FileSystem.get(conf);
+      Path tablePath = getTablePath();
+      Path filePath = new Path(tablePath, "part" + fileNum);
+      fs.mkdirs(tablePath);
+      SequenceFile.Writer w =
+          SequenceFile.createWriter(fs, conf, filePath, LongWritable.class, cls);
+
+      // Now write the data.
+      int startId = fileNum * numRecords;
+      for (int i = 0; i < numRecords; i++) {
+        record.parse(getRecordLine(startId + i));
+        w.append(new LongWritable(startId + i), record);
+      }
+
+      w.close();
+    } catch (ClassNotFoundException cnfe) {
+      throw new IOException(cnfe);
+    } catch (RecordParser.ParseError pe) {
+      throw new IOException(pe);
+    }
+  }
+
+  /** Return the column name for a column index.
+   *  Each table contains two columns named 'id' and 'msg', and then an
+   *  arbitrary number of additional columns defined by ColumnGenerators.
+   *  These columns are referenced by idx 0, 1, 2...
+   *  @param idx the index of the ColumnGenerator in the array passed to
+   *   createTable().
+   *  @return the name of the column
+   */
+  protected String forIdx(int idx) {
+    return "col" + idx;
+  }
+
+  /** Create the table definition to export to, removing any prior table.
+      By specifying ColumnGenerator arguments, you can add extra columns
+      to the table of arbitrary type.
+   */
+  public void createTable(ColumnGenerator... extraColumns) throws SQLException {
+    Connection conn = getTestServer().getConnection();
+    PreparedStatement statement = conn.prepareStatement(
+        "DROP TABLE " + getTableName() + " IF EXISTS",
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    statement.executeUpdate();
+    conn.commit();
+    statement.close();
+
+    StringBuilder sb = new StringBuilder();
+    sb.append("CREATE TABLE ");
+    sb.append(getTableName());
+    sb.append(" (id INT NOT NULL PRIMARY KEY, msg VARCHAR(64)");
+    int colNum = 0;
+    for (ColumnGenerator gen : extraColumns) {
+      sb.append(", " + forIdx(colNum++) + " " + gen.getType());
+    }
+    sb.append(")");
+
+    statement = conn.prepareStatement(sb.toString(),
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    statement.executeUpdate();
+    conn.commit();
+    statement.close();
+  }
+
+  /** Remove an existing table directory from the filesystem. */
+  private void removeTablePath() throws IOException {
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    FileSystem fs = FileSystem.get(conf);
+    fs.delete(getTablePath(), true);
+  }
+
+  /** Verify that on a given row, a column has a given value.
+   * @param id the id column specifying the row to test.
+   */
+  private void assertColValForRowId(int id, String colName, String expectedVal)
+      throws SQLException {
+    Connection conn = getTestServer().getConnection();
+    LOG.info("Verifying column " + colName + " has value " + expectedVal);
+
+    PreparedStatement statement = conn.prepareStatement(
+        "SELECT " + colName + " FROM " + getTableName() + " WHERE id = " + id,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    ResultSet rs = statement.executeQuery();
+    rs.next();
+
+    String actualVal = rs.getString(1);
+    rs.close();
+    statement.close();
+
+    assertEquals("Got unexpected column value", expectedVal, actualVal);
+  }
+
+  /** Verify that for the max and min values of the 'id' column, the values
+      for a given column meet the expected values.
+   */
+  private void assertColMinAndMax(String colName, ColumnGenerator generator)
+      throws SQLException {
+    int minId = getMinRowId();
+    int maxId = getMaxRowId();
+
+    LOG.info("Checking min/max for column " + colName + " with type " + generator.getType());
+
+    String expectedMin = generator.getVerifyText(minId);
+    String expectedMax = generator.getVerifyText(maxId);
+
+    assertColValForRowId(minId, colName, expectedMin);
+    assertColValForRowId(maxId, colName, expectedMax);
+  }
+
+  /** Export 10 rows; make sure they load into the database correctly. */
+  public void testTextExport() throws IOException, SQLException {
+
+    final int TOTAL_RECORDS = 10;
+
+    createTextFile(0, TOTAL_RECORDS, false);
+    createTable();
+    runExport(getArgv(true));
+    verifyExport(TOTAL_RECORDS);
+  }
+
+  /** Export 10 rows from gzipped text files. */
+  public void testGzipExport() throws IOException, SQLException {
+
+    LOG.info("Beginning gzip export test");
+
+    final int TOTAL_RECORDS = 10;
+
+    createTextFile(0, TOTAL_RECORDS, true);
+    createTable();
+    runExport(getArgv(true));
+    verifyExport(TOTAL_RECORDS);
+    LOG.info("Complete gzip export test");
+  }
+
+  /** Run 2 mappers; make sure all records load into the database correctly. */
+  public void testMultiMapTextExport() throws IOException, SQLException {
+
+    final int RECORDS_PER_MAP = 10;
+    final int NUM_FILES = 2;
+
+    for (int f = 0; f < NUM_FILES; f++) {
+      createTextFile(f, RECORDS_PER_MAP, false);
+    }
+
+    createTable();
+    runExport(getArgv(true));
+    verifyExport(RECORDS_PER_MAP * NUM_FILES);
+  }
+
+  /** Export some rows from a SequenceFile; make sure they load into the database correctly. */
+  public void testSequenceFileExport() throws IOException, SQLException {
+
+    final int TOTAL_RECORDS = 10;
+
+    // First, generate class and jar files that represent the table we're exporting to.
+    LOG.info("Creating initial schema for SeqFile test");
+    createTable();
+    LOG.info("Generating code..."); 
+    List<String> generatedJars = runExport(getArgv(true, "--generate-only"));
+
+    // Now, wipe the created table so we can export on top of it again.
+    LOG.info("Resetting schema and data...");
+    createTable();
+
+    // Wipe the directory we use when creating files to export to ensure
+    // it's ready for new SequenceFiles.
+    removeTablePath();
+
+    assertNotNull(generatedJars);
+    assertEquals("Expected 1 generated jar file", 1, generatedJars.size());
+    String jarFileName = generatedJars.get(0);
+    // Sqoop generates jars named "foo.jar"; by default, this should contain a
+    // class named 'foo'. Extract the class name.
+    Path jarPath = new Path(jarFileName);
+    String jarBaseName = jarPath.getName();
+    assertTrue(jarBaseName.endsWith(".jar"));
+    assertTrue(jarBaseName.length() > ".jar".length());
+    String className = jarBaseName.substring(0, jarBaseName.length() - ".jar".length());
+
+    LOG.info("Using jar filename: " + jarFileName);
+    LOG.info("Using class name: " + className);
+
+    ClassLoader prevClassLoader = null;
+
+    try {
+      if (null != jarFileName) {
+        prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, className);
+      }
+
+      // Now use this class and jar name to create a sequence file.
+      LOG.info("Writing data to SequenceFiles");
+      createSequenceFile(0, TOTAL_RECORDS, className);
+
+      // Now run and verify the export.
+      LOG.info("Exporting SequenceFile-based data");
+      runExport(getArgv(true, "--class-name", className, "--jar-file", jarFileName));
+      verifyExport(TOTAL_RECORDS);
+    } finally {
+      if (null != prevClassLoader) {
+        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+      }
+    }
+  }
+
+  public void testIntCol() throws IOException, SQLException {
+    final int TOTAL_RECORDS = 10;
+
+    // generate a column equivalent to rownum.
+    ColumnGenerator gen = new ColumnGenerator() {
+      public String getExportText(int rowNum) {
+        return "" + rowNum;
+      }
+      public String getVerifyText(int rowNum) {
+        return "" + rowNum;
+      }
+      public String getType() {
+        return "INTEGER";
+      }
+    };
+
+    createTextFile(0, TOTAL_RECORDS, false, gen);
+    createTable(gen);
+    runExport(getArgv(true));
+    verifyExport(TOTAL_RECORDS);
+    assertColMinAndMax(forIdx(0), gen);
+  }
+
+  public void testBigIntCol() throws IOException, SQLException {
+    final int TOTAL_RECORDS = 10;
+
+    // generate a column that won't fit in a normal int.
+    ColumnGenerator gen = new ColumnGenerator() {
+      public String getExportText(int rowNum) {
+        long val = (long) rowNum * 1000000000;
+        return "" + val;
+      }
+      public String getVerifyText(int rowNum) {
+        long val = (long) rowNum * 1000000000;
+        return "" + val;
+      }
+      public String getType() {
+        return "BIGINT";
+      }
+    };
+
+    createTextFile(0, TOTAL_RECORDS, false, gen);
+    createTable(gen);
+    runExport(getArgv(true));
+    verifyExport(TOTAL_RECORDS);
+    assertColMinAndMax(forIdx(0), gen);
+  }
+
+  private String pad(int n) {
+    if (n <= 9) {
+      return "0" + n;
+    } else {
+      return String.valueOf(n);
+    }
+  }
+
+  public void testDatesAndTimes() throws IOException, SQLException {
+    final int TOTAL_RECORDS = 10;
+
+    ColumnGenerator genDate = new ColumnGenerator() {
+      public String getExportText(int rowNum) {
+        int day = rowNum + 1;
+        return "2009-10-" + day;
+      }
+      public String getVerifyText(int rowNum) {
+        int day = rowNum + 1;
+        return "2009-10-" + pad(day);
+      }
+      public String getType() {
+        return "DATE";
+      }
+    };
+
+    ColumnGenerator genTime = new ColumnGenerator() {
+      public String getExportText(int rowNum) {
+        return "10:01:" + rowNum;
+      }
+      public String getVerifyText(int rowNum) {
+        return "10:01:" + pad(rowNum);
+      }
+      public String getType() {
+        return "TIME";
+      }
+    };
+
+    createTextFile(0, TOTAL_RECORDS, false, genDate, genTime);
+    createTable(genDate, genTime);
+    runExport(getArgv(true));
+    verifyExport(TOTAL_RECORDS);
+    assertColMinAndMax(forIdx(0), genDate);
+    assertColMinAndMax(forIdx(1), genTime);
+  }
+
+  public void testNumericTypes() throws IOException, SQLException {
+    final int TOTAL_RECORDS = 10;
+
+    // Check floating point values
+    ColumnGenerator genFloat = new ColumnGenerator() {
+      public String getExportText(int rowNum) {
+        double v = 3.141 * (double) rowNum;
+        return "" + v;
+      }
+      public String getVerifyText(int rowNum) {
+        double v = 3.141 * (double) rowNum;
+        return "" + v;
+      }
+      public String getType() {
+        return "FLOAT";
+      }
+    };
+
+    // Check precise decimal placement. The first of ten
+    // rows will be 2.7181; the last of ten rows will be
+    // 2.71810.
+    ColumnGenerator genNumeric = new ColumnGenerator() {
+      public String getExportText(int rowNum) {
+        int digit = rowNum + 1;
+        return "2.718" + digit;
+      }
+      public String getVerifyText(int rowNum) {
+        int digit = rowNum + 1;
+        return "2.718" + digit;
+      }
+      public String getType() {
+        return "NUMERIC";
+      }
+    };
+
+    createTextFile(0, TOTAL_RECORDS, false, genFloat, genNumeric);
+    createTable(genFloat, genNumeric);
+    runExport(getArgv(true));
+    verifyExport(TOTAL_RECORDS);
+    assertColMinAndMax(forIdx(0), genFloat);
+    assertColMinAndMax(forIdx(1), genNumeric);
+  }
+}
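
The DATE and TIME generators above show the one subtlety of ColumnGenerator:
the text written to the export file and the text the database returns need not
match byte for byte (here, zero-padding). For a type where they do match, a
hedged sketch of one more case in the same pattern as testIntCol():

      public void testVarCharCol() throws IOException, SQLException {
        final int TOTAL_RECORDS = 10;

        // A VARCHAR value round-trips unchanged, so the export text and the
        // verification text are identical.
        ColumnGenerator gen = new ColumnGenerator() {
          public String getExportText(int rowNum) {
            return "row-" + rowNum;
          }
          public String getVerifyText(int rowNum) {
            return "row-" + rowNum;
          }
          public String getType() {
            return "VARCHAR(32)";
          }
        };

        createTextFile(0, TOTAL_RECORDS, false, gen);
        createTable(gen);
        runExport(getArgv(true));
        verifyExport(TOTAL_RECORDS);
        assertColMinAndMax(forIdx(0), gen);
      }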

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java Mon Dec  7 21:42:38 2009
@@ -31,7 +31,7 @@
 import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
@@ -119,7 +119,7 @@
     String [] argv = getArgv(true, columns, splitByCol);
     runImport(argv);
     try {
-      ImportOptions opts = new ImportOptions();
+      SqoopOptions opts = new SqoopOptions();
       opts.parse(getArgv(false, columns, splitByCol));
 
       CompilationManager compileMgr = new CompilationManager(opts);

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java Mon Dec  7 21:42:38 2009
@@ -26,7 +26,7 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
@@ -98,7 +98,7 @@
     String [] argv = getArgv(true, columns, splitByCol);
     runImport(argv);
     try {
-      ImportOptions opts = new ImportOptions();
+      SqoopOptions opts = new SqoopOptions();
       opts.parse(getArgv(false, columns, splitByCol));
 
       CompilationManager compileMgr = new CompilationManager(opts);

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSqoopOptions.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSqoopOptions.java?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSqoopOptions.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSqoopOptions.java Mon Dec  7 21:42:38 2009
@@ -0,0 +1,228 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop;
+
+import junit.framework.TestCase;
+
+
+/**
+ * Test aspects of the SqoopOptions class
+ */
+public class TestSqoopOptions extends TestCase {
+
+  // tests for the toChar() parser
+  public void testNormalChar() throws SqoopOptions.InvalidOptionsException {
+    assertEquals('a', SqoopOptions.toChar("a"));
+  }
+
+  public void testEmptyString() throws SqoopOptions.InvalidOptionsException {
+    try {
+      SqoopOptions.toChar("");
+      fail("Expected exception");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expect this.
+    }
+  }
+
+  public void testNullString() throws SqoopOptions.InvalidOptionsException {
+    try {
+      SqoopOptions.toChar(null);
+      fail("Expected exception");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expect this.
+    }
+  }
+
+  public void testTooLong() throws SqoopOptions.InvalidOptionsException {
+    // Should just use the first character and log a warning.
+    assertEquals('x', SqoopOptions.toChar("xyz"));
+  }
+
+  public void testHexChar1() throws SqoopOptions.InvalidOptionsException {
+    assertEquals(0xF, SqoopOptions.toChar("\\0xf"));
+  }
+
+  public void testHexChar2() throws SqoopOptions.InvalidOptionsException {
+    assertEquals(0xF, SqoopOptions.toChar("\\0xF"));
+  }
+
+  public void testHexChar3() throws SqoopOptions.InvalidOptionsException {
+    assertEquals(0xF0, SqoopOptions.toChar("\\0xf0"));
+  }
+
+  public void testHexChar4() throws SqoopOptions.InvalidOptionsException {
+    assertEquals(0xF0, SqoopOptions.toChar("\\0Xf0"));
+  }
+
+  public void testEscapeChar1() throws SqoopOptions.InvalidOptionsException {
+    assertEquals('\n', SqoopOptions.toChar("\\n"));
+  }
+
+  public void testEscapeChar2() throws SqoopOptions.InvalidOptionsException {
+    assertEquals('\\', SqoopOptions.toChar("\\\\"));
+  }
+
+  public void testEscapeChar3() throws SqoopOptions.InvalidOptionsException {
+    assertEquals('\\', SqoopOptions.toChar("\\"));
+  }
+
+  public void testUnknownEscape1() throws SqoopOptions.InvalidOptionsException {
+    try {
+      SqoopOptions.toChar("\\Q");
+      fail("Expected exception");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expect this.
+    }
+  }
+
+  public void testUnknownEscape2() throws SqoopOptions.InvalidOptionsException {
+    try {
+      SqoopOptions.toChar("\\nn");
+      fail("Expected exception");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expect this.
+    }
+  }
+
+  public void testEscapeNul1() throws SqoopOptions.InvalidOptionsException {
+    assertEquals('\000', SqoopOptions.toChar("\\0"));
+  }
+
+  public void testEscapeNul2() throws SqoopOptions.InvalidOptionsException {
+    assertEquals('\000', SqoopOptions.toChar("\\00"));
+  }
+
+  public void testEscapeNul3() throws SqoopOptions.InvalidOptionsException {
+    assertEquals('\000', SqoopOptions.toChar("\\0000"));
+  }
+
+  public void testEscapeNul4() throws SqoopOptions.InvalidOptionsException {
+    assertEquals('\000', SqoopOptions.toChar("\\0x0"));
+  }
+
+  public void testOctalChar1() throws SqoopOptions.InvalidOptionsException {
+    assertEquals(04, SqoopOptions.toChar("\\04"));
+  }
+
+  public void testOctalChar2() throws SqoopOptions.InvalidOptionsException {
+    assertEquals(045, SqoopOptions.toChar("\\045"));
+  }
+
+  public void testErrOctalChar() throws SqoopOptions.InvalidOptionsException {
+    try {
+      SqoopOptions.toChar("\\095");
+      fail("Expected exception");
+    } catch (NumberFormatException nfe) {
+      // expected.
+    }
+  }
+
+  public void testErrHexChar() throws SqoopOptions.InvalidOptionsException {
+    try {
+      SqoopOptions.toChar("\\0x9K5");
+      fail("Expected exception");
+    } catch (NumberFormatException nfe) {
+      // expected.
+    }
+  }
+
+  // test that setting output delimiters also sets input delimiters
+  public void testDelimitersInherit() throws SqoopOptions.InvalidOptionsException {
+    String [] args = {
+        "--fields-terminated-by",
+        "|"
+    };
+
+    SqoopOptions opts = new SqoopOptions();
+    opts.parse(args);
+    assertEquals('|', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  // test that input delimiters can be set independently of output delimiters
+  public void testDelimOverride1() throws SqoopOptions.InvalidOptionsException {
+    String [] args = {
+        "--fields-terminated-by",
+        "|",
+        "--input-fields-terminated-by",
+        "*"
+    };
+
+    SqoopOptions opts = new SqoopOptions();
+    opts.parse(args);
+    assertEquals('*', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  // test that the order in which delims are specified doesn't matter
+  public void testDelimOverride2() throws SqoopOptions.InvalidOptionsException {
+    String [] args = {
+        "--input-fields-terminated-by",
+        "*",
+        "--fields-terminated-by",
+        "|"
+    };
+
+    SqoopOptions opts = new SqoopOptions();
+    opts.parse(args);
+    assertEquals('*', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  public void testBadNumMappers1() {
+    String [] args = {
+      "--num-mappers",
+      "x"
+    };
+
+    try {
+      SqoopOptions opts = new SqoopOptions();
+      opts.parse(args);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testBadNumMappers2() {
+    String [] args = {
+      "-m",
+      "x"
+    };
+
+    try {
+      SqoopOptions opts = new SqoopOptions();
+      opts.parse(args);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testGoodNumMappers() throws SqoopOptions.InvalidOptionsException {
+    String [] args = {
+      "-m",
+      "4"
+    };
+
+    SqoopOptions opts = new SqoopOptions();
+    opts.parse(args);
+    assertEquals(4, opts.getNumMappers());
+  }
+}
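
Taken together, these tests document the toChar() contract: a single literal character is returned as-is; C-style escapes ("\\n", "\\\\"), octal ("\\045"), and hex ("\\0xf") forms are decoded; and malformed input raises SqoopOptions.InvalidOptionsException. A short sketch of the accepted forms, restating only behavior asserted above:

    // Each call may throw SqoopOptions.InvalidOptionsException.
    char pipe = SqoopOptions.toChar("|");      // literal: '|'
    char nl   = SqoopOptions.toChar("\\n");    // C-style escape: '\n'
    char pct  = SqoopOptions.toChar("\\045");  // octal 045 == '%'
    char hex  = SqoopOptions.toChar("\\0xf");  // hex 0xF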

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java Mon Dec  7 21:42:38 2009
@@ -26,7 +26,7 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
@@ -103,7 +103,7 @@
     String [] argv = getArgv(true, columns, whereClause);
     runImport(argv);
     try {
-      ImportOptions opts = new ImportOptions();
+      SqoopOptions opts = new SqoopOptions();
       opts.parse(getArgv(false, columns, whereClause));
 
       CompilationManager compileMgr = new CompilationManager(opts);

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java Mon Dec  7 21:42:38 2009
@@ -27,7 +27,7 @@
 
 import org.apache.hadoop.fs.Path;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
@@ -71,11 +71,11 @@
     return args.toArray(new String[0]);
   }
 
-  private ImportOptions getImportOptions(String [] extraArgs) {
-    ImportOptions opts = new ImportOptions();
+  private SqoopOptions getSqoopOptions(String [] extraArgs) {
+    SqoopOptions opts = new SqoopOptions();
     try {
       opts.parse(getArgv(false, extraArgs));
-    } catch (ImportOptions.InvalidOptionsException ioe) {
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
       fail("Invalid options: " + ioe.toString());
     }
 
@@ -91,7 +91,7 @@
     
     // set up our mock hive shell to compare our generated script
     // against the correct expected one.
-    ImportOptions options = getImportOptions(extraArgs);
+    SqoopOptions options = getSqoopOptions(extraArgs);
     String hiveHome = options.getHiveHome();
     assertNotNull("hive.home was not set", hiveHome);
     Path testDataPath = new Path(new Path(hiveHome), "scripts/" + verificationScript);

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java Mon Dec  7 21:42:38 2009
@@ -37,7 +37,7 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.util.FileListing;
 
@@ -77,7 +77,7 @@
 
   @Before
   public void setUp() {
-    ImportOptions options = new ImportOptions(CONNECT_STRING, TABLE_NAME);
+    SqoopOptions options = new SqoopOptions(CONNECT_STRING, TABLE_NAME);
     options.setUsername(getCurrentUser());
     manager = new LocalMySQLManager(options);
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java Mon Dec  7 21:42:38 2009
@@ -39,7 +39,7 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 
@@ -79,7 +79,7 @@
 
   @Before
   public void setUp() {
-    ImportOptions options = new ImportOptions(AUTH_CONNECT_STRING, AUTH_TABLE_NAME);
+    SqoopOptions options = new SqoopOptions(AUTH_CONNECT_STRING, AUTH_TABLE_NAME);
     options.setUsername(AUTH_TEST_USER);
     options.setPassword(AUTH_TEST_PASS);
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java Mon Dec  7 21:42:38 2009
@@ -39,7 +39,7 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.util.FileListing;
@@ -78,7 +78,7 @@
 
   @Before
   public void setUp() {
-    ImportOptions options = new ImportOptions(CONNECT_STRING, TABLE_NAME);
+    SqoopOptions options = new SqoopOptions(CONNECT_STRING, TABLE_NAME);
     options.setUsername(ORACLE_USER_NAME);
     options.setPassword(ORACLE_USER_PASS);
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java Mon Dec  7 21:42:38 2009
@@ -35,7 +35,7 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.util.FileListing;
 
@@ -84,7 +84,7 @@
   public void setUp() {
     LOG.debug("Setting up another postgresql test...");
 
-    ImportOptions options = new ImportOptions(CONNECT_STRING, TABLE_NAME);
+    SqoopOptions options = new SqoopOptions(CONNECT_STRING, TABLE_NAME);
     options.setUsername(DATABASE_USER);
 
     ConnManager manager = null;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java Mon Dec  7 21:42:38 2009
@@ -34,8 +34,8 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import org.apache.hadoop.sqoop.ImportOptions;
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.testutil.DirUtil;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
@@ -53,7 +53,7 @@
   // instance variables populated during setUp, used during tests
   private HsqldbTestServer testServer;
   private ConnManager manager;
-  private ImportOptions options;
+  private SqoopOptions options;
 
   @Before
   public void setUp() {
@@ -71,7 +71,7 @@
     }
 
     manager = testServer.getManager();
-    options = testServer.getImportOptions();
+    options = testServer.getSqoopOptions();
 
     // sanity check: make sure we're in a tmp dir before we blow anything away.
     assertTrue("Test generates code in non-tmp dir!",

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java Mon Dec  7 21:42:38 2009
@@ -34,8 +34,8 @@
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.apache.hadoop.sqoop.ImportOptions;
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.mapred.RawKeyTextOutputFormat;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
@@ -99,7 +99,7 @@
         encloseRequired);
     runImport(argv);
     try {
-      ImportOptions opts = new ImportOptions();
+      SqoopOptions opts = new SqoopOptions();
 
       String tableClassName = getTableName();
 

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/BaseSqoopTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/BaseSqoopTestCase.java?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/BaseSqoopTestCase.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/BaseSqoopTestCase.java Mon Dec  7 21:42:38 2009
@@ -0,0 +1,286 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.testutil;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.log4j.BasicConfigurator;
+import org.junit.After;
+import org.junit.Before;
+
+import org.apache.hadoop.sqoop.manager.ConnManager;
+
+import junit.framework.TestCase;
+
+/**
+ * Class that implements common methods required for tests
+ */
+public class BaseSqoopTestCase extends TestCase {
+
+  public static final Log LOG = LogFactory.getLog(BaseSqoopTestCase.class.getName());
+
+  /** Base directory for all temporary data */
+  public static final String TEMP_BASE_DIR;
+
+  /** Where to import table data to in the local filesystem for testing */
+  public static final String LOCAL_WAREHOUSE_DIR;
+
+  // Initializer for the above
+  static {
+    String tmpDir = System.getProperty("test.build.data", "/tmp/");
+    if (!tmpDir.endsWith(File.separator)) {
+      tmpDir = tmpDir + File.separator;
+    }
+
+    TEMP_BASE_DIR = tmpDir;
+    LOCAL_WAREHOUSE_DIR = TEMP_BASE_DIR + "sqoop/warehouse";
+  }
+
+  // Used if a test manually sets the table name to be used.
+  private String curTableName;
+
+  protected void setCurTableName(String curName) {
+    this.curTableName = curName;
+  }
+
+  /**
+   * Because of how classloading works, we don't want to name all the
+   * tables the same thing -- the tests would silently reuse the Java
+   * class that was classloaded for an earlier table. So we use this
+   * counter to uniquify table names.
+   */
+  private static int tableNum = 0;
+
+  /** When creating sequentially-identified tables, this is the
+   *  prefix applied to the table names.
+   */
+  protected String getTablePrefix() {
+    return "SQOOP_TABLE_";
+  }
+
+  protected String getTableName() {
+    if (null != curTableName) {
+      return curTableName;
+    } else {
+      return getTablePrefix() + Integer.toString(tableNum);
+    }
+  }
+
+  protected String getWarehouseDir() {
+    return LOCAL_WAREHOUSE_DIR;
+  }
+
+  private String [] colNames;
+  protected String [] getColNames() {
+    return colNames;
+  }
+
+  protected HsqldbTestServer getTestServer() {
+    return testServer;
+  }
+
+  protected ConnManager getManager() {
+    return manager;
+  }
+
+  // instance variables populated during setUp, used during tests
+  private HsqldbTestServer testServer;
+  private ConnManager manager;
+
+  private static boolean isLog4jConfigured = false;
+
+  protected void incrementTableNum() {
+    tableNum++;
+  }
+
+  @Before
+  public void setUp() {
+
+    incrementTableNum();
+
+    if (!isLog4jConfigured) {
+      BasicConfigurator.configure();
+      isLog4jConfigured = true;
+      LOG.info("Configured log4j with console appender.");
+    }
+
+    testServer = new HsqldbTestServer();
+    try {
+      testServer.resetServer();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    } catch (ClassNotFoundException cnfe) {
+      LOG.error("Could not find class for db driver: " + cnfe.toString());
+      fail("Could not find class for db driver: " + cnfe.toString());
+    }
+
+    manager = testServer.getManager();
+  }
+
+  @After
+  public void tearDown() {
+    setCurTableName(null); // clear user-override table name.
+
+    try {
+      if (null != manager) {
+        manager.close();
+      }
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    }
+
+  }
+
+  static final String BASE_COL_NAME = "DATA_COL";
+
+  /**
+   * Create a table with a set of columns and add a row of values.
+   * @param colTypes the types of the columns to make
+   * @param vals the SQL text for each value to insert
+   */
+  protected void createTableWithColTypes(String [] colTypes, String [] vals) {
+    Connection conn = null;
+    try {
+      conn = getTestServer().getConnection();
+      PreparedStatement statement = conn.prepareStatement(
+          "DROP TABLE " + getTableName() + " IF EXISTS",
+          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+      statement.executeUpdate();
+      statement.close();
+
+      String columnDefStr = "";
+      String columnListStr = "";
+      String valueListStr = "";
+
+      String [] myColNames = new String[colTypes.length];
+
+      for (int i = 0; i < colTypes.length; i++) {
+        String colName = BASE_COL_NAME + Integer.toString(i);
+        columnDefStr += colName + " " + colTypes[i];
+        columnListStr += colName;
+        valueListStr += vals[i];
+        myColNames[i] = colName;
+        if (i < colTypes.length - 1) {
+          columnDefStr += ", ";
+          columnListStr += ", ";
+          valueListStr += ", ";
+        }
+      }
+
+      statement = conn.prepareStatement(
+          "CREATE TABLE " + getTableName() + "(" + columnDefStr + ")",
+          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+      statement.executeUpdate();
+      statement.close();
+
+      statement = conn.prepareStatement(
+          "INSERT INTO " + getTableName() + "(" + columnListStr + ")"
+          + " VALUES(" + valueListStr + ")",
+          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+      statement.executeUpdate();
+      statement.close();
+      conn.commit();
+      this.colNames = myColNames;
+    } catch (SQLException sqlException) {
+      fail("Could not create table: " + sqlException.toString());
+    } finally {
+      if (null != conn) {
+        try {
+          conn.close();
+        } catch (SQLException sqlE) {
+          LOG.warn("Got SQLException during close: " + sqlE.toString());
+        }
+      }
+    }
+  }
+
+  /**
+   * Create a table with a single column and put a data element in it.
+   * @param colType the type of the column to create
+   * @param val the value to insert (reformatted as a string)
+   */
+  protected void createTableForColType(String colType, String val) {
+    String [] types = { colType };
+    String [] vals = { val };
+
+    createTableWithColTypes(types, vals);
+  }
+
+  protected Path getTablePath() {
+    Path warehousePath = new Path(getWarehouseDir());
+    Path tablePath = new Path(warehousePath, getTableName());
+    return tablePath;
+  }
+
+  protected Path getDataFilePath() {
+    return new Path(getTablePath(), "part-m-00000");
+  }
+
+  protected void removeTableDir() {
+    File tableDirFile = new File(getTablePath().toString());
+    if (tableDirFile.exists()) {
+      // Remove the directory where the table will be imported to,
+      // prior to running the MapReduce job.
+      if (!DirUtil.deleteDir(tableDirFile)) {
+        LOG.warn("Could not delete table directory: " + tableDirFile.getAbsolutePath());
+      }
+    }
+  }
+
+  /**
+   * Verify that the single-column, single-row result can be read back from the db.
+   */
+  protected void verifyReadback(int colNum, String expectedVal) {
+    ResultSet results = null;
+    try {
+      results = getManager().readTable(getTableName(), getColNames());
+      assertNotNull("Null results from readTable()!", results);
+      assertTrue("Expected at least one row returned", results.next());
+      String resultVal = results.getString(colNum);
+      if (null != expectedVal) {
+        assertNotNull("Expected non-null result value", resultVal);
+      }
+
+      assertEquals("Error reading inserted value back from db", expectedVal, resultVal);
+      assertFalse("Expected at most one row returned", results.next());
+    } catch (SQLException sqlE) {
+      fail("Got SQLException: " + sqlE.toString());
+    } finally {
+      if (null != results) {
+        try {
+          results.close();
+        } catch (SQLException sqlE) {
+          fail("Got SQLException in resultset.close(): " + sqlE.toString());
+        }
+      }
+    }
+  }
+}
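
For reference, a hypothetical subclass (not part of this commit) showing how the helpers above compose: create a one-column table, then verify the inserted value through the ConnManager:

    package org.apache.hadoop.sqoop.testutil;

    // Hypothetical test, not part of this commit: exercises the
    // helpers defined in BaseSqoopTestCase above.
    public class ExampleReadbackTest extends BaseSqoopTestCase {
      public void testIntReadback() {
        // createTableForColType() builds SQOOP_TABLE_<n> with a single
        // INTEGER column and inserts the literal 42.
        createTableForColType("INTEGER", "42");
        // verifyReadback() reads the row back via ConnManager.readTable();
        // JDBC result-set columns are 1-indexed.
        verifyReadback(1, "42");
      }
    }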


