hadoop-mapreduce-commits mailing list archives

From: ste...@apache.org
Subject: svn commit: r903227 [6/16] - in /hadoop/mapreduce/branches/MAPREDUCE-233: ./ .eclipse.templates/ conf/ ivy/ src/benchmarks/gridmix/ src/benchmarks/gridmix/javasort/ src/benchmarks/gridmix/maxent/ src/benchmarks/gridmix/monsterQuery/ src/benchmarks/grid...
Date: Tue, 26 Jan 2010 14:03:09 GMT
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java Tue Jan 26 14:02:53 2010
@@ -28,8 +28,8 @@
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.sqoop.ImportOptions;
-import org.apache.hadoop.sqoop.util.ImportError;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.util.ImportException;
 
 /**
  * Manages connections to Postgresql databases
@@ -46,11 +46,11 @@
   // set to true after we warn the user that we can use direct fastpath.
   private static boolean warningPrinted = false;
 
-  public PostgresqlManager(final ImportOptions opts) {
+  public PostgresqlManager(final SqoopOptions opts) {
     super(DRIVER_CLASS, opts);
   }
 
-  protected PostgresqlManager(final ImportOptions opts, boolean ignored) {
+  protected PostgresqlManager(final SqoopOptions opts, boolean ignored) {
     // constructor used by subclasses to avoid the --direct warning.
     super(DRIVER_CLASS, opts);
   }
@@ -67,12 +67,12 @@
   @Override
   protected String getColNamesQuery(String tableName) {
     // Use LIMIT to return fast
-    return "SELECT t.* FROM " + tableName + " AS t LIMIT 1";
+    return "SELECT t.* FROM " + escapeTableName(tableName) + " AS t LIMIT 1";
   }
 
   @Override
   public void importTable(ImportJobContext context)
-        throws IOException, ImportError {
+        throws IOException, ImportException {
 
     // The user probably should have requested --direct to invoke pg_dump.
     // Display a warning informing them of this fact.
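
The escapeTableName() call introduced above guards the generated SQL against table names that collide with reserved words or contain unusual characters. The manager's actual implementation is not shown in this diff; a minimal sketch of what a standard-SQL escape might look like (double-quote delimiting per the SQL standard; the method body here is an assumption):

    // Hypothetical sketch; the real ConnManager method may differ.
    protected String escapeTableName(String tableName) {
      if (null == tableName) {
        return null;
      }
      // Standard SQL delimits identifiers with double quotes; an embedded
      // quote is doubled to keep it inside the identifier.
      return "\"" + tableName.replace("\"", "\"\"") + "\"";
    }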

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java Tue Jan 26 14:02:53 2010
@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.sqoop.manager;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.hive.HiveTypes;
 import org.apache.hadoop.sqoop.mapreduce.DataDrivenImportJob;
-import org.apache.hadoop.sqoop.util.ImportError;
+import org.apache.hadoop.sqoop.mapreduce.ExportJob;
+import org.apache.hadoop.sqoop.util.ExportException;
+import org.apache.hadoop.sqoop.util.ImportException;
 import org.apache.hadoop.sqoop.util.ResultSetPrinter;
 
 import java.io.IOException;
@@ -49,14 +52,14 @@
 
   public static final Log LOG = LogFactory.getLog(SqlManager.class.getName());
 
-  protected ImportOptions options;
+  protected SqoopOptions options;
 
   /**
    * Constructs the SqlManager
    * @param opts
    * @param specificMgr
    */
-  public SqlManager(final ImportOptions opts) {
+  public SqlManager(final SqoopOptions opts) {
     this.options = opts;
   }
 
@@ -65,7 +68,8 @@
    * be tuned per-database, but the main extraction loop is still inheritable.
    */
   protected String getColNamesQuery(String tableName) {
-    return "SELECT t.* FROM " + tableName + " AS t";
+    // Add a WHERE clause that matches no rows, so we fetch column
+    // metadata without loading a big table.
+    return "SELECT t.* FROM " + escapeTableName(tableName) + " AS t WHERE 1=0";
   }
 
   @Override
@@ -167,13 +171,13 @@
       if (!first) {
         sb.append(", ");
       }
-      sb.append(col);
+      sb.append(escapeColName(col));
       first = false;
     }
     sb.append(" FROM ");
-    sb.append(tableName);
+    sb.append(escapeTableName(tableName));
     sb.append(" AS ");   // needed for hsqldb; doesn't hurt anyone else.
-    sb.append(tableName);
+    sb.append(escapeTableName(tableName));
 
     return execute(sb.toString());
   }
@@ -261,10 +265,10 @@
    * via DataDrivenImportJob to read the table with DataDrivenDBInputFormat.
    */
   public void importTable(ImportJobContext context)
-      throws IOException, ImportError {
+      throws IOException, ImportException {
     String tableName = context.getTableName();
     String jarFile = context.getJarFile();
-    ImportOptions options = context.getOptions();
+    SqoopOptions options = context.getOptions();
     DataDrivenImportJob importer = new DataDrivenImportJob(options);
     String splitCol = options.getSplitByCol();
     if (null == splitCol) {
@@ -274,7 +278,7 @@
 
     if (null == splitCol) {
       // Can't infer a primary key.
-      throw new ImportError("No primary key could be found for table " + tableName
+      throw new ImportException("No primary key could be found for table " + tableName
           + ". Please specify one with --split-by.");
     }
 
@@ -305,7 +309,7 @@
    * @param sqlType
    * @return the name of a Java type to hold the sql datatype, or null if none.
    */
-  public static String toJavaType(int sqlType) {
+  public String toJavaType(int sqlType) {
     // mappings from http://java.sun.com/j2se/1.3/docs/guide/jdbc/getstart/mapping.html
     if (sqlType == Types.INTEGER) {
       return "Integer";
@@ -344,10 +348,19 @@
     } else {
       // TODO(aaron): Support BINARY, VARBINARY, LONGVARBINARY, DISTINCT, CLOB, BLOB, ARRAY,
       // STRUCT, REF, JAVA_OBJECT.
+      // Subclasses may override this method to map database-specific types.
       return null;
     }
   }
 
+  /**
+   * Resolve a database-specific SQL type to a Hive data type.
+   * @param sqlType     sql type
+   * @return            hive type
+   */
+  public String toHiveType(int sqlType) {
+    return HiveTypes.toHiveType(sqlType);
+  }
 
   public void close() throws SQLException {
   }
@@ -424,4 +437,13 @@
 
     return connection;
   }
+
+  /**
+   * Export data stored in HDFS into a table in a database
+   */
+  public void exportTable(ExportJobContext context)
+      throws IOException, ExportException {
+    ExportJob exportJob = new ExportJob(context);
+    exportJob.runExport();
+  }
 }
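
The rewritten getColNamesQuery() above uses a common JDBC idiom: a WHERE 1=0 predicate matches no rows, yet the driver still populates ResultSetMetaData, so column names and types come back without scanning the table. A self-contained sketch of that idiom against an in-memory HSQLDB instance (the table and URL are placeholders; assumes the hsqldb driver is on the classpath):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.ResultSetMetaData;
    import java.sql.Statement;

    public class ColNamesProbe {
      public static void main(String[] args) throws Exception {
        Connection conn = DriverManager.getConnection("jdbc:hsqldb:mem:demo");
        Statement st = conn.createStatement();
        st.executeUpdate("CREATE TABLE employees (id INT, name VARCHAR(32))");

        // Matches no rows, but still returns full column metadata.
        ResultSet rs = st.executeQuery(
            "SELECT t.* FROM employees AS t WHERE 1=0");
        ResultSetMetaData md = rs.getMetaData();
        for (int i = 1; i <= md.getColumnCount(); i++) {
          System.out.println(md.getColumnName(i) + " : " + md.getColumnType(i));
        }

        rs.close();
        st.close();
        conn.close();
      }
    }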

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java Tue Jan 26 14:02:53 2010
@@ -42,7 +42,7 @@
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 import org.apache.hadoop.sqoop.ConnFactory;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.orm.TableClassName;
 import org.apache.hadoop.sqoop.util.ClassLoaderStack;
@@ -55,9 +55,9 @@
 
   public static final Log LOG = LogFactory.getLog(ImportJob.class.getName());
 
-  private ImportOptions options;
+  private SqoopOptions options;
 
-  public ImportJob(final ImportOptions opts) {
+  public ImportJob(final SqoopOptions opts) {
     this.options = opts;
   }
 
@@ -100,7 +100,7 @@
         outputPath = new Path(tableName);
       }
 
-      if (options.getFileLayout() == ImportOptions.FileLayout.TextFile) {
+      if (options.getFileLayout() == SqoopOptions.FileLayout.TextFile) {
         job.setOutputFormat(RawKeyTextOutputFormat.class);
         job.setMapperClass(TextImportMapper.class);
         job.setOutputKeyClass(Text.class);
@@ -109,7 +109,7 @@
           FileOutputFormat.setCompressOutput(job, true);
           FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
         }
-      } else if (options.getFileLayout() == ImportOptions.FileLayout.SequenceFile) {
+      } else if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
         job.setOutputFormat(SequenceFileOutputFormat.class);
         if (options.shouldUseCompression()) {
           SequenceFileOutputFormat.setCompressOutput(job, true);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java Tue Jan 26 14:02:53 2010
@@ -39,7 +39,7 @@
 import org.apache.hadoop.mapreduce.lib.db.DBWritable;
 
 import org.apache.hadoop.sqoop.ConnFactory;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.orm.TableClassName;
 import org.apache.hadoop.sqoop.util.ClassLoaderStack;
@@ -53,9 +53,9 @@
 
   public static final Log LOG = LogFactory.getLog(DataDrivenImportJob.class.getName());
 
-  private ImportOptions options;
+  private SqoopOptions options;
 
-  public DataDrivenImportJob(final ImportOptions opts) {
+  public DataDrivenImportJob(final SqoopOptions opts) {
     this.options = opts;
   }
 
@@ -74,7 +74,8 @@
 
     String tableClassName = new TableClassName(options).getClassForTable(tableName);
 
-    boolean isLocal = "local".equals(conf.get("mapred.job.tracker"));
+    boolean isLocal = "local".equals(conf.get("mapreduce.jobtracker.address"))
+        || "local".equals(conf.get("mapred.job.tracker"));
     ClassLoader prevClassLoader = null;
     if (isLocal) {
       // If we're using the LocalJobRunner, then instead of using the compiled jar file
@@ -100,7 +101,7 @@
         outputPath = new Path(tableName);
       }
 
-      if (options.getFileLayout() == ImportOptions.FileLayout.TextFile) {
+      if (options.getFileLayout() == SqoopOptions.FileLayout.TextFile) {
         job.setOutputFormatClass(RawKeyTextOutputFormat.class);
         job.setMapperClass(TextImportMapper.class);
         job.setOutputKeyClass(Text.class);
@@ -109,7 +110,7 @@
           FileOutputFormat.setCompressOutput(job, true);
           FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
         }
-      } else if (options.getFileLayout() == ImportOptions.FileLayout.SequenceFile) {
+      } else if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
         job.setOutputFormatClass(SequenceFileOutputFormat.class);
         job.setMapperClass(AutoProgressMapper.class);
         if (options.shouldUseCompression()) {
@@ -123,7 +124,7 @@
 
       int numMapTasks = options.getNumMappers();
       if (numMapTasks < 1) {
-        numMapTasks = ImportOptions.DEFAULT_NUM_MAPPERS;
+        numMapTasks = SqoopOptions.DEFAULT_NUM_MAPPERS;
         LOG.warn("Invalid mapper count; using " + numMapTasks + " mappers.");
       }
       job.getConfiguration().setInt("mapred.map.tasks", numMapTasks);
@@ -148,14 +149,23 @@
         colNames = mgr.getColumnNames(tableName);
       }
 
+      String [] sqlColNames = null;
+      if (null != colNames) {
+        sqlColNames = new String[colNames.length];
+        for (int i = 0; i < colNames.length; i++) {
+          sqlColNames[i] = mgr.escapeColName(colNames[i]);
+        }
+      }
+
       // It's ok if the where clause is null in DBInputFormat.setInput.
       String whereClause = options.getWhereClause();
 
       // We can't set the class properly in here, because we may not have the
       // jar loaded in this JVM. So we start by calling setInput() with DBWritable,
       // and then overriding the string manually.
-      DataDrivenDBInputFormat.setInput(job, DBWritable.class, tableName, whereClause,
-          splitByCol, colNames);
+      DataDrivenDBInputFormat.setInput(job, DBWritable.class,
+          mgr.escapeTableName(tableName), whereClause,
+          mgr.escapeColName(splitByCol), sqlColNames);
       job.getConfiguration().set(DBConfiguration.INPUT_CLASS_PROPERTY, tableClassName);
 
       PerfCounters counters = new PerfCounters();
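
Two changes in this file are worth calling out. The isLocal test now consults both the post-rename configuration key and its deprecated predecessor, so jobs configured under either naming scheme still take the LocalJobRunner code path; and the split-by and data columns are passed through the manager's escape methods before reaching DataDrivenDBInputFormat. A minimal sketch of the both-keys probe in isolation (key names are the ones in the diff):

    import org.apache.hadoop.conf.Configuration;

    public class LocalRunnerCheck {
      // True when the job would run under the LocalJobRunner, whether the
      // configuration uses the new key or the deprecated one.
      static boolean isLocalJobTracker(Configuration conf) {
        return "local".equals(conf.get("mapreduce.jobtracker.address"))
            || "local".equals(conf.get("mapred.job.tracker"));
      }

      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.set("mapred.job.tracker", "local");
        System.out.println(isLocalJobTracker(conf));  // prints true
      }
    }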

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java Tue Jan 26 14:02:53 2010
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.sqoop.orm;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.manager.SqlManager;
 import org.apache.hadoop.sqoop.lib.BigDecimalSerializer;
@@ -33,6 +33,7 @@
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.util.HashSet;
 import java.util.Map;
 
 import org.apache.commons.logging.Log;
@@ -40,14 +41,57 @@
 
 /**
  * Creates an ORM class to represent a table from a database
- *
- * 
- *
  */
 public class ClassWriter {
 
   public static final Log LOG = LogFactory.getLog(ClassWriter.class.getName());
 
+  // The following are keywords and cannot be used for class, method, or field
+  // names.
+  public static final HashSet<String> JAVA_RESERVED_WORDS;
+
+  static {
+    JAVA_RESERVED_WORDS = new HashSet<String>();
+
+    JAVA_RESERVED_WORDS.add("abstract");
+    JAVA_RESERVED_WORDS.add("else");
+    JAVA_RESERVED_WORDS.add("int");
+    JAVA_RESERVED_WORDS.add("strictfp");
+    JAVA_RESERVED_WORDS.add("assert");
+    JAVA_RESERVED_WORDS.add("enum");
+    JAVA_RESERVED_WORDS.add("interface");
+    JAVA_RESERVED_WORDS.add("super");
+    JAVA_RESERVED_WORDS.add("boolean");
+    JAVA_RESERVED_WORDS.add("extends");
+    JAVA_RESERVED_WORDS.add("long");
+    JAVA_RESERVED_WORDS.add("switch");
+    JAVA_RESERVED_WORDS.add("break");
+    JAVA_RESERVED_WORDS.add("false");
+    JAVA_RESERVED_WORDS.add("native");
+    JAVA_RESERVED_WORDS.add("synchronized");
+    JAVA_RESERVED_WORDS.add("byte");
+    JAVA_RESERVED_WORDS.add("final");
+    JAVA_RESERVED_WORDS.add("new");
+    JAVA_RESERVED_WORDS.add("this");
+    JAVA_RESERVED_WORDS.add("case");
+    JAVA_RESERVED_WORDS.add("finally");
+    JAVA_RESERVED_WORDS.add("null");
+    JAVA_RESERVED_WORDS.add("throw");
+    JAVA_RESERVED_WORDS.add("catch");
+    JAVA_RESERVED_WORDS.add("float");
+    JAVA_RESERVED_WORDS.add("package");
+    JAVA_RESERVED_WORDS.add("throws");
+    JAVA_RESERVED_WORDS.add("char");
+    JAVA_RESERVED_WORDS.add("for");
+    JAVA_RESERVED_WORDS.add("private");
+    JAVA_RESERVED_WORDS.add("transient");
+    JAVA_RESERVED_WORDS.add("class");
+    JAVA_RESERVED_WORDS.add("goto");
+    JAVA_RESERVED_WORDS.add("protected");
+    JAVA_RESERVED_WORDS.add("true");
+    JAVA_RESERVED_WORDS.add("const");
+  }
+
   /**
    * This version number is injected into all generated Java classes to denote
    * which version of the ClassWriter's output format was used to generate the
@@ -57,7 +101,7 @@
    */
   public static final int CLASS_WRITER_VERSION = 2;
 
-  private ImportOptions options;
+  private SqoopOptions options;
   private ConnManager connManager;
   private String tableName;
   private CompilationManager compileManager;
@@ -68,7 +112,7 @@
    * @param connMgr the connection manager used to describe the table.
    * @param table the name of the table to read.
    */
-  public ClassWriter(final ImportOptions opts, final ConnManager connMgr,
+  public ClassWriter(final SqoopOptions opts, final ConnManager connMgr,
       final String table, final CompilationManager compMgr) {
     this.options = opts;
     this.connManager = connMgr;
@@ -76,6 +120,87 @@
     this.compileManager = compMgr;
   }
 
+  /**
+   * Given some character that can't be in an identifier,
+   * try to map it to a string that can.
+   *
+   * @param c a character that can't be in a Java identifier
+   * @return a string of characters that can, or null if there's
+   * no good translation.
+   */
+  static String getIdentifierStrForChar(char c) {
+    if (Character.isJavaIdentifierPart(c)) {
+      return "" + c;
+    } else if (Character.isWhitespace(c)) {
+      // Eliminate whitespace.
+      return null;
+    } else {
+      // All other characters map to underscore.
+      return "_";
+    }
+  }
+
+  /**
+   * @param word a word to test.
+   * @return true if 'word' is a reserved word in the Java language.
+   */
+  private static boolean isReservedWord(String word) {
+    return JAVA_RESERVED_WORDS.contains(word);
+  }
+
+  /**
+   * Coerce a candidate name for an identifier into one which will
+   * definitely compile.
+   *
+   * Ensures that the returned identifier matches [A-Za-z_][A-Za-z0-9_]*
+   * and is not a reserved word.
+   *
+   * @param candidate A string we want to use as an identifier
+   * @return A string naming an identifier which compiles and is
+   *   similar to the candidate.
+   */
+  public static String toIdentifier(String candidate) {
+    StringBuilder sb = new StringBuilder();
+    boolean first = true;
+    for (char c : candidate.toCharArray()) {
+      if (Character.isJavaIdentifierStart(c) && first) {
+        // Ok for this to be the first character of the identifier.
+        sb.append(c);
+        first = false;
+      } else if (Character.isJavaIdentifierPart(c) && !first) {
+        // Ok for this character to be in the output identifier.
+        sb.append(c);
+      } else {
+        // We have a character in the original that can't be
+        // part of this identifier we're building.
+        // If it's just not allowed to be the first char, add a leading '_'.
+        // If we have a reasonable translation (e.g., '-' -> '_'), do that.
+        // Otherwise, drop it.
+        if (first && Character.isJavaIdentifierPart(c)
+            && !Character.isJavaIdentifierStart(c)) {
+          sb.append("_");
+          sb.append(c);
+          first = false;
+        } else {
+          // Try to map this to a different character or string.
+      // If we can't, just give up.
+          String translated = getIdentifierStrForChar(c);
+          if (null != translated) {
+            sb.append(translated);
+            first = false;
+          }
+        }
+      }
+    }
+
+    String output = sb.toString();
+    if (isReservedWord(output)) {
+      // e.g., 'class' -> '_class';
+      return "_" + output;
+    }
+
+    return output;
+  }
 
   /**
    * @param javaType
@@ -251,7 +376,7 @@
 
     for (String col : colNames) {
       int sqlType = columnTypes.get(col);
-      String javaType = SqlManager.toJavaType(sqlType);
+      String javaType = connManager.toJavaType(sqlType);
       if (null == javaType) {
         LOG.error("Cannot resolve SQL type " + sqlType);
         continue;
@@ -281,7 +406,7 @@
       fieldNum++;
 
       int sqlType = columnTypes.get(col);
-      String javaType = SqlManager.toJavaType(sqlType);
+      String javaType = connManager.toJavaType(sqlType);
       if (null == javaType) {
         LOG.error("No Java type for SQL type " + sqlType);
         continue;
@@ -318,7 +443,7 @@
       fieldNum++;
 
       int sqlType = columnTypes.get(col);
-      String javaType = SqlManager.toJavaType(sqlType);
+      String javaType = connManager.toJavaType(sqlType);
       if (null == javaType) {
         LOG.error("No Java type for SQL type " + sqlType);
         continue;
@@ -351,7 +476,7 @@
 
     for (String col : colNames) {
       int sqlType = columnTypes.get(col);
-      String javaType = SqlManager.toJavaType(sqlType);
+      String javaType = connManager.toJavaType(sqlType);
       if (null == javaType) {
         LOG.error("No Java type for SQL type " + sqlType);
         continue;
@@ -407,7 +532,7 @@
     boolean first = true;
     for (String col : colNames) {
       int sqlType = columnTypes.get(col);
-      String javaType = SqlManager.toJavaType(sqlType);
+      String javaType = connManager.toJavaType(sqlType);
       if (null == javaType) {
         LOG.error("No Java type for SQL type " + sqlType);
         continue;
@@ -472,7 +597,7 @@
   private void parseColumn(String colName, int colType, StringBuilder sb) {
     // assume that we have __it and __cur_str vars, based on __loadFromFields() code.
     sb.append("    __cur_str = __it.next();\n");
-    String javaType = SqlManager.toJavaType(colType);
+    String javaType = connManager.toJavaType(colType);
 
     parseNullVal(colName, sb);
     if (javaType.equals("String")) {
@@ -565,7 +690,7 @@
 
     for (String col : colNames) {
       int sqlType = columnTypes.get(col);
-      String javaType = SqlManager.toJavaType(sqlType);
+      String javaType = connManager.toJavaType(sqlType);
       if (null == javaType) {
         LOG.error("No Java type for SQL type " + sqlType);
         continue;
@@ -593,8 +718,21 @@
       colNames = connManager.getColumnNames(tableName);
     }
 
+    // Translate all the column names into names that are safe to
+    // use as identifiers.
+    String [] cleanedColNames = new String[colNames.length];
+    for (int i = 0; i < colNames.length; i++) {
+      String col = colNames[i];
+      String identifier = toIdentifier(col);
+      cleanedColNames[i] = identifier;
+
+      // make sure the col->type mapping holds for the 
+      // new identifier name, too.
+      columnTypes.put(identifier, columnTypes.get(col));
+    }
+
     // Generate the Java code
-    StringBuilder sb = generateClassForColumns(columnTypes, colNames);
+    StringBuilder sb = generateClassForColumns(columnTypes, cleanedColNames);
 
     // Write this out to a file.
     String codeOutDir = options.getCodeOutputDir();
@@ -605,16 +743,18 @@
     String sourceFilename = className.replace('.', File.separatorChar) + ".java";
     String filename = codeOutDir + sourceFilename;
 
-    LOG.debug("Writing source file: " + filename);
-    LOG.debug("Table name: " + tableName);
-    StringBuilder sbColTypes = new StringBuilder();
-    for (String col : colNames) {
-      Integer colType = columnTypes.get(col);
-      sbColTypes.append(col + ":" + colType + ", ");
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Writing source file: " + filename);
+      LOG.debug("Table name: " + tableName);
+      StringBuilder sbColTypes = new StringBuilder();
+      for (String col : colNames) {
+        Integer colType = columnTypes.get(col);
+        sbColTypes.append(col + ":" + colType + ", ");
+      }
+      String colTypeStr = sbColTypes.toString();
+      LOG.debug("Columns: " + colTypeStr);
+      LOG.debug("sourceFilename is " + sourceFilename);
     }
-    String colTypeStr = sbColTypes.toString();
-    LOG.debug("Columns: " + colTypeStr);
-    LOG.debug("sourceFilename is " + sourceFilename);
 
     compileManager.addSourceFile(sourceFilename);
 

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java Tue Jan 26 14:02:53 2010
@@ -31,12 +31,17 @@
 import java.util.jar.JarOutputStream;
 import java.util.zip.ZipEntry;
 
+import javax.tools.JavaCompiler;
+import javax.tools.JavaFileObject;
+import javax.tools.StandardJavaFileManager;
+import javax.tools.ToolProvider;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.JobConf;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.util.FileListing;
 
 /**
@@ -53,10 +58,10 @@
 
   public static final Log LOG = LogFactory.getLog(CompilationManager.class.getName());
 
-  private ImportOptions options;
+  private SqoopOptions options;
   private List<String> sources;
 
-  public CompilationManager(final ImportOptions opts) {
+  public CompilationManager(final SqoopOptions opts) {
     options = opts;
     sources = new ArrayList<String>();
   }
@@ -109,9 +114,14 @@
 
     // ensure that the jar output dir exists.
     String jarOutDir = options.getJarOutputDir();
-    boolean mkdirSuccess = new File(jarOutDir).mkdirs();
-    if (!mkdirSuccess) {
-      LOG.debug("Warning: Could not make directories for " + jarOutDir);
+    File jarOutDirObj = new File(jarOutDir);
+    if (!jarOutDirObj.exists()) {
+      boolean mkdirSuccess = jarOutDirObj.mkdirs();
+      if (!mkdirSuccess) {
+        LOG.debug("Warning: Could not make directories for " + jarOutDir);
+      }
+    } else if (LOG.isDebugEnabled()) {
+      LOG.debug("Found existing " + jarOutDir);
     }
 
     // find hadoop-*-core.jar for classpath.
@@ -141,8 +151,12 @@
 
     String curClasspath = System.getProperty("java.class.path");
 
+    String srcOutDir = new File(options.getCodeOutputDir()).getAbsolutePath();
+    if (!srcOutDir.endsWith(File.separator)) {
+      srcOutDir = srcOutDir + File.separator;
+    }
+
     args.add("-sourcepath");
-    String srcOutDir = options.getCodeOutputDir();
     args.add(srcOutDir);
 
     args.add("-d");
@@ -151,21 +165,36 @@
     args.add("-classpath");
     args.add(curClasspath + File.pathSeparator + coreJar + sqoopJar);
 
-    // add all the source files
+    JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
+    StandardJavaFileManager fileManager =
+        compiler.getStandardFileManager(null, null, null);
+
+    ArrayList<String> srcFileNames = new ArrayList<String>();
     for (String srcfile : sources) {
-      args.add(srcOutDir + srcfile);
+      srcFileNames.add(srcOutDir + srcfile);
+      LOG.debug("Adding source file: " + srcOutDir + srcfile);
     }
 
-    StringBuilder sb = new StringBuilder();
-    for (String arg : args) {
-      sb.append(arg + " ");
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Invoking javac with args:");
+      for (String arg : args) {
+        LOG.debug("  " + arg);
+      }
     }
 
-    // NOTE(aaron): Usage is at http://java.sun.com/j2se/1.5.0/docs/tooldocs/solaris/javac.html
-    LOG.debug("Invoking javac with args: " + sb.toString());
-    int javacRet = com.sun.tools.javac.Main.compile(args.toArray(new String[0]));
-    if (javacRet != 0) {
-      throw new IOException("javac exited with status " + javacRet);
+    Iterable<? extends JavaFileObject> srcFileObjs =
+        fileManager.getJavaFileObjectsFromStrings(srcFileNames);
+    JavaCompiler.CompilationTask task = compiler.getTask(
+        null, // Write to stderr
+        fileManager,
+        null, // No special diagnostic handling
+        args,
+        null, // Compile all classes in the source compilation units
+        srcFileObjs);
+
+    boolean result = task.call();
+    if (!result) {
+      throw new IOException("Error returned by javac");
     }
   }
 
@@ -209,8 +238,6 @@
     // read the file into a buffer, and write it to the jar file.
     for (File entry : dirEntries) {
       if (!entry.isDirectory()) {
-        LOG.debug("Considering entry: " + entry);
-
         // chomp off the portion of the full path that is shared
         // with the base directory where class files were put;
         // we only record the subdir parts in the zip entry.
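
The compilation hunk above swaps the direct call into com.sun.tools.javac.Main for the standard javax.tools compiler API (JSR 199), which reports success through the task's boolean return value rather than an exit code and avoids depending on tools.jar internals. A stripped-down sketch of that invocation pattern (paths and options are placeholders; a JDK rather than a bare JRE must be running, or getSystemJavaCompiler() returns null):

    import java.util.Arrays;
    import javax.tools.JavaCompiler;
    import javax.tools.JavaFileObject;
    import javax.tools.StandardJavaFileManager;
    import javax.tools.ToolProvider;

    public class CompileSketch {
      public static void main(String[] args) throws Exception {
        JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
        StandardJavaFileManager fileManager =
            compiler.getStandardFileManager(null, null, null);

        Iterable<? extends JavaFileObject> units =
            fileManager.getJavaFileObjectsFromStrings(
                Arrays.asList("/tmp/generated/Demo.java"));  // placeholder

        JavaCompiler.CompilationTask task = compiler.getTask(
            null,                                 // diagnostics to stderr
            fileManager,
            null,                                 // default diagnostic listener
            Arrays.asList("-d", "/tmp/classes"),  // placeholder javac options
            null,                                 // compile all given units
            units);

        // call() returns true on success, replacing javac's exit code.
        System.out.println(task.call() ? "compiled" : "failed");
        fileManager.close();
      }
    }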

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java Tue Jan 26 14:02:53 2010
@@ -18,23 +18,23 @@
 
 package org.apache.hadoop.sqoop.orm;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 /**
  * Reconciles the table name being imported with the class naming information
- * specified in ImportOptions to determine the actual package and class name
+ * specified in SqoopOptions to determine the actual package and class name
  * to use for a table.
  */
 public class TableClassName {
 
   public static final Log LOG = LogFactory.getLog(TableClassName.class.getName());
 
-  private final ImportOptions options;
+  private final SqoopOptions options;
 
-  public TableClassName(final ImportOptions opts) {
+  public TableClassName(final SqoopOptions opts) {
     if (null == opts) {
       throw new NullPointerException("Cannot instantiate a TableClassName on null options.");
     } else {
@@ -90,7 +90,8 @@
     }
 
     // no specific class; no specific package.
-    return tableName;
+    // Just make sure it's a legal identifier.
+    return ClassWriter.toIdentifier(tableName);
   }
 
   /**

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/DirectImportUtils.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/DirectImportUtils.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/DirectImportUtils.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/DirectImportUtils.java Tue Jan 26 14:02:53 2010
@@ -29,7 +29,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.io.SplittingOutputStream;
 import org.apache.hadoop.sqoop.io.SplittableBufferedWriter;
 import org.apache.hadoop.util.Shell;
@@ -70,7 +70,7 @@
    * returned stream.
    */
   public static SplittableBufferedWriter createHdfsSink(Configuration conf,
-      ImportOptions options, String tableName) throws IOException {
+      SqoopOptions options, String tableName) throws IOException {
 
     FileSystem fs = FileSystem.get(conf);
     String warehouseDir = options.getWarehouseDir();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java Tue Jan 26 14:02:53 2010
@@ -19,6 +19,7 @@
 package org.apache.hadoop.sqoop;
 
 import org.apache.hadoop.sqoop.hive.TestHiveImport;
+import org.apache.hadoop.sqoop.hive.TestTableDefWriter;
 import org.apache.hadoop.sqoop.io.TestSplittableBufferedWriter;
 import org.apache.hadoop.sqoop.lib.TestFieldFormatter;
 import org.apache.hadoop.sqoop.lib.TestRecordParser;
@@ -47,6 +48,7 @@
     suite.addTestSuite(TestSqlManager.class);
     suite.addTestSuite(TestClassWriter.class);
     suite.addTestSuite(TestColumnTypes.class);
+    suite.addTestSuite(TestExport.class);
     suite.addTestSuite(TestMultiCols.class);
     suite.addTestSuite(TestMultiMaps.class);
     suite.addTestSuite(TestSplitBy.class);
@@ -54,10 +56,11 @@
     suite.addTestSuite(TestHiveImport.class);
     suite.addTestSuite(TestRecordParser.class);
     suite.addTestSuite(TestFieldFormatter.class);
-    suite.addTestSuite(TestImportOptions.class);
+    suite.addTestSuite(TestSqoopOptions.class);
     suite.addTestSuite(TestParseMethods.class);
     suite.addTestSuite(TestConnFactory.class);
     suite.addTestSuite(TestSplittableBufferedWriter.class);
+    suite.addTestSuite(TestTableDefWriter.class);
     suite.addTest(MapreduceTests.suite());
 
     return suite;

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java Tue Jan 26 14:02:53 2010
@@ -58,6 +58,8 @@
     args.add(HsqldbTestServer.getUrl());
     args.add("--num-mappers");
     args.add("1");
+    args.add("--escaped-by");
+    args.add("\\");
 
     return args.toArray(new String[0]);
   }
@@ -86,9 +88,18 @@
     // create two tables.
     this.expectedStrings.add("A winner");
     this.expectedStrings.add("is you!");
+    this.expectedStrings.add(null);
 
+    int i = 0;
     for (String expectedStr: this.expectedStrings) {
-      this.createTableForColType("VARCHAR(32) PRIMARY KEY", "'" + expectedStr + "'");
+      String wrappedStr = null;
+      if (expectedStr != null) {
+        wrappedStr = "'" + expectedStr + "'";
+      }
+
+      String [] types = { "INT NOT NULL PRIMARY KEY", "VARCHAR(32)" };
+      String [] vals = { Integer.toString(i++) , wrappedStr };
+      this.createTableWithColTypes(types, vals);
       this.tableNames.add(this.getTableName());
       this.removeTableDir();
       incrementTableNum();
@@ -100,13 +111,15 @@
     runImport(argv);
 
     Path warehousePath = new Path(this.getWarehouseDir());
+    int i = 0;
     for (String tableName : this.tableNames) {
       Path tablePath = new Path(warehousePath, tableName);
       Path filePath = new Path(tablePath, "part-m-00000");
 
       // dequeue the expected value for this table. This
       // list has the same order as the tableNames list.
-      String expectedVal = this.expectedStrings.get(0);
+      String expectedVal = Integer.toString(i++) + ","
+          + this.expectedStrings.get(0);
       this.expectedStrings.remove(0);
 
       BufferedReader reader = new BufferedReader(

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java Tue Jan 26 14:02:53 2010
@@ -41,7 +41,7 @@
     conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY, AlwaysDummyFactory.class.getName());
 
     ConnFactory factory = new ConnFactory(conf);
-    ConnManager manager = factory.getManager(new ImportOptions());
+    ConnManager manager = factory.getManager(new SqoopOptions());
     assertNotNull("No manager returned", manager);
     assertTrue("Expected a DummyManager", manager instanceof DummyManager);
   }
@@ -52,7 +52,7 @@
 
     ConnFactory factory = new ConnFactory(conf);
     try {
-      ConnManager manager = factory.getManager(new ImportOptions());
+      ConnManager manager = factory.getManager(new SqoopOptions());
       fail("factory.getManager() expected to throw IOException");
     } catch (IOException ioe) {
       // Expected this. Test passes.
@@ -69,7 +69,7 @@
     conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY, classNames);
 
     ConnFactory factory = new ConnFactory(conf);
-    ConnManager manager = factory.getManager(new ImportOptions());
+    ConnManager manager = factory.getManager(new SqoopOptions());
     assertNotNull("No manager returned", manager);
     assertTrue("Expected a DummyManager", manager instanceof DummyManager);
   }
@@ -77,14 +77,14 @@
   ////// mock classes used for test cases above //////
 
   public static class AlwaysDummyFactory extends ManagerFactory {
-    public ConnManager accept(ImportOptions opts) {
+    public ConnManager accept(SqoopOptions opts) {
       // Always return a new DummyManager
       return new DummyManager();
     }
   }
 
   public static class EmptyFactory extends ManagerFactory {
-    public ConnManager accept(ImportOptions opts) {
+    public ConnManager accept(SqoopOptions opts) {
       // Never instantiate a proper ConnManager;
       return null;
     }
@@ -113,6 +113,24 @@
       return null;
     }
 
+    /**
+    * Default implementation
+    * @param sqlType     sql data type
+    * @return            java data type
+    */
+    public String toJavaType(int sqlType) {
+      return null;
+    }
+
+    /**
+    * Default implementation
+    * @param sqlType     sql data type
+    * @return            hive data type
+    */
+    public String toHiveType(int sqlType) {
+      return null;
+    }
+
     public Map<String, Integer> getColumnTypes(String tableName) {
       return null;
     }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java Tue Jan 26 14:02:53 2010
@@ -31,7 +31,7 @@
 import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
@@ -119,7 +119,7 @@
     String [] argv = getArgv(true, columns, splitByCol);
     runImport(argv);
     try {
-      ImportOptions opts = new ImportOptions();
+      SqoopOptions opts = new SqoopOptions();
       opts.parse(getArgv(false, columns, splitByCol));
 
       CompilationManager compileMgr = new CompilationManager(opts);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java Tue Jan 26 14:02:53 2010
@@ -26,7 +26,7 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
@@ -98,7 +98,7 @@
     String [] argv = getArgv(true, columns, splitByCol);
     runImport(argv);
     try {
-      ImportOptions opts = new ImportOptions();
+      SqoopOptions opts = new SqoopOptions();
       opts.parse(getArgv(false, columns, splitByCol));
 
       CompilationManager compileMgr = new CompilationManager(opts);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java Tue Jan 26 14:02:53 2010
@@ -26,7 +26,7 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
@@ -103,7 +103,7 @@
     String [] argv = getArgv(true, columns, whereClause);
     runImport(argv);
     try {
-      ImportOptions opts = new ImportOptions();
+      SqoopOptions opts = new SqoopOptions();
       opts.parse(getArgv(false, columns, whereClause));
 
       CompilationManager compileMgr = new CompilationManager(opts);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java Tue Jan 26 14:02:53 2010
@@ -27,7 +27,7 @@
 
 import org.apache.hadoop.fs.Path;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
@@ -71,11 +71,11 @@
     return args.toArray(new String[0]);
   }
 
-  private ImportOptions getImportOptions(String [] extraArgs) {
-    ImportOptions opts = new ImportOptions();
+  private SqoopOptions getSqoopOptions(String [] extraArgs) {
+    SqoopOptions opts = new SqoopOptions();
     try {
       opts.parse(getArgv(false, extraArgs));
-    } catch (ImportOptions.InvalidOptionsException ioe) {
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
       fail("Invalid options: " + ioe.toString());
     }
 
@@ -91,7 +91,7 @@
     
     // set up our mock hive shell to compare our generated script
     // against the correct expected one.
-    ImportOptions options = getImportOptions(extraArgs);
+    SqoopOptions options = getSqoopOptions(extraArgs);
     String hiveHome = options.getHiveHome();
     assertNotNull("hive.home was not set", hiveHome);
     Path testDataPath = new Path(new Path(hiveHome), "scripts/" + verificationScript);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/lib/TestFieldFormatter.java Tue Jan 26 14:02:53 2010
@@ -37,6 +37,10 @@
   public void testNullArgs() {
     String result = FieldFormatter.escapeAndEnclose("", null, null, null, false);
     assertEquals("", result);
+
+    char [] encloseFor = { '\"' };
+    assertNull(FieldFormatter.escapeAndEnclose(null, "\\", "\"", encloseFor,
+        false));
   }
 
   public void testBasicStr() {

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java Tue Jan 26 14:02:53 2010
@@ -37,7 +37,8 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.util.FileListing;
 
@@ -77,7 +78,7 @@
 
   @Before
   public void setUp() {
-    ImportOptions options = new ImportOptions(CONNECT_STRING, TABLE_NAME);
+    SqoopOptions options = new SqoopOptions(CONNECT_STRING, TABLE_NAME);
     options.setUsername(getCurrentUser());
     manager = new LocalMySQLManager(options);
 
@@ -181,19 +182,21 @@
     }
   }
 
-  private String [] getArgv(boolean mysqlOutputDelims, String... extraArgs) {
+  private String [] getArgv(boolean mysqlOutputDelims, boolean isDirect,
+      String tableName, String... extraArgs) {
     ArrayList<String> args = new ArrayList<String>();
 
-    args.add("-D");
-    args.add("fs.default.name=file:///");
+    CommonArgs.addHadoopFlags(args);
 
     args.add("--table");
-    args.add(TABLE_NAME);
+    args.add(tableName);
     args.add("--warehouse-dir");
     args.add(getWarehouseDir());
     args.add("--connect");
     args.add(CONNECT_STRING);
-    args.add("--direct");
+    if (isDirect) {
+      args.add("--direct");
+    }
     args.add("--username");
     args.add(getCurrentUser());
     args.add("--where");
@@ -214,12 +217,19 @@
     return args.toArray(new String[0]);
   }
 
-  private void doLocalBulkImport(boolean mysqlOutputDelims,
-      String [] expectedResults, String [] extraArgs) throws IOException {
+  private void doImport(boolean mysqlOutputDelims, boolean isDirect,
+      String tableName, String [] expectedResults, String [] extraArgs)
+      throws IOException {
 
     Path warehousePath = new Path(this.getWarehouseDir());
-    Path tablePath = new Path(warehousePath, TABLE_NAME);
-    Path filePath = new Path(tablePath, "data-00000");
+    Path tablePath = new Path(warehousePath, tableName);
+
+    Path filePath;
+    if (isDirect) {
+      filePath = new Path(tablePath, "data-00000");
+    } else {
+      filePath = new Path(tablePath, "part-m-00000");
+    }
 
     File tableFile = new File(tablePath.toString());
     if (tableFile.exists() && tableFile.isDirectory()) {
@@ -227,7 +237,7 @@
       FileListing.recursiveDeleteDir(tableFile);
     }
 
-    String [] argv = getArgv(mysqlOutputDelims, extraArgs);
+    String [] argv = getArgv(mysqlOutputDelims, isDirect, tableName, extraArgs);
     try {
       runImport(argv);
     } catch (IOException ioe) {
@@ -237,7 +247,7 @@
     }
 
     File f = new File(filePath.toString());
-    assertTrue("Could not find imported data file", f.exists());
+    assertTrue("Could not find imported data file: " + f, f.exists());
     BufferedReader r = null;
     try {
       // Read through the file and make sure it's all there.
@@ -262,7 +272,7 @@
         "3,Fred,2009-01-23,15,marketing"
     };
 
-    doLocalBulkImport(false, expectedResults, null);
+    doImport(false, true, TABLE_NAME, expectedResults, null);
   }
 
   @Test
@@ -275,7 +285,7 @@
 
     String [] extraArgs = { "-", "--lock-tables" };
 
-    doLocalBulkImport(false, expectedResults, extraArgs);
+    doImport(false, true, TABLE_NAME, expectedResults, extraArgs);
   }
 
   @Test
@@ -286,6 +296,110 @@
         "3,'Fred','2009-01-23',15,'marketing'"
     };
 
-    doLocalBulkImport(true, expectedResults, null);
+    doImport(true, true, TABLE_NAME, expectedResults, null);
+  }
+
+  @Test
+  public void testMysqlJdbcImport() throws IOException {
+    String [] expectedResults = {
+        "2,Bob,2009-04-20,400.0,sales",
+        "3,Fred,2009-01-23,15.0,marketing"
+    };
+
+    doImport(false, false, TABLE_NAME, expectedResults, null);
+  }
+
+  @Test
+  public void testJdbcEscapedTableName() throws Exception {
+    // Test a JDBC-based import of a table whose name is
+    // a reserved sql keyword (and is thus `quoted`)
+    final String reservedTableName = "TABLE";
+    SqoopOptions options = new SqoopOptions(CONNECT_STRING,
+        reservedTableName);
+    options.setUsername(getCurrentUser());
+    ConnManager mgr = new MySQLManager(options);
+
+    Connection connection = null;
+    Statement st = null;
+
+    try {
+      connection = mgr.getConnection();
+      connection.setAutoCommit(false);
+      st = connection.createStatement();
+
+      // create the database table and populate it with data.
+      st.executeUpdate("DROP TABLE IF EXISTS `" + reservedTableName + "`");
+      st.executeUpdate("CREATE TABLE `" + reservedTableName + "` ("
+          + "id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, "
+          + "name VARCHAR(24) NOT NULL, "
+          + "start_date DATE, "
+          + "salary FLOAT, "
+          + "dept VARCHAR(32))");
+
+      st.executeUpdate("INSERT INTO `" + reservedTableName + "` VALUES("
+          + "2,'Aaron','2009-05-14',1000000.00,'engineering')");
+      connection.commit();
+    } finally {
+      if (null != st) {
+        st.close();
+      }
+
+      if (null != connection) {
+        connection.close();
+      }
+    }
+
+    String [] expectedResults = {
+        "2,Aaron,2009-05-14,1000000.0,engineering"
+    };
+
+    doImport(false, false, reservedTableName, expectedResults, null);
+  }
+
+  @Test
+  public void testJdbcEscapedColumnName() throws Exception {
+    // Test a JDBC-based import of a table with a column whose name is
+    // a reserved sql keyword (and is thus `quoted`)
+    final String tableName = "mysql_escaped_col_table";
+    SqoopOptions options = new SqoopOptions(CONNECT_STRING,
+        tableName);
+    options.setUsername(getCurrentUser());
+    ConnManager mgr = new MySQLManager(options);
+
+    Connection connection = null;
+    Statement st = null;
+
+    try {
+      connection = mgr.getConnection();
+      connection.setAutoCommit(false);
+      st = connection.createStatement();
+
+      // create the database table and populate it with data.
+      st.executeUpdate("DROP TABLE IF EXISTS " + tableName);
+      st.executeUpdate("CREATE TABLE " + tableName + " ("
+          + "id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, "
+          + "`table` VARCHAR(24) NOT NULL, "
+          + "`CREATE` DATE, "
+          + "salary FLOAT, "
+          + "dept VARCHAR(32))");
+
+      st.executeUpdate("INSERT INTO " + tableName + " VALUES("
+          + "2,'Aaron','2009-05-14',1000000.00,'engineering')");
+      connection.commit();
+    } finally {
+      if (null != st) {
+        st.close();
+      }
+
+      if (null != connection) {
+        connection.close();
+      }
+    }
+
+    String [] expectedResults = {
+        "2,Aaron,2009-05-14,1000000.0,engineering"
+    };
+
+    doImport(false, false, tableName, expectedResults, null);
   }
 }
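
The two new tests above import tables and columns named after SQL keywords, which MySQL accepts only when the identifiers are backtick-quoted. A sketch of the kind of escaping the manager's escapeTableName()/escapeColName() methods would need to perform for MySQL (the doubling rule is MySQL's documented identifier quoting; the body here is an assumption, not the shipped implementation):

    // Hypothetical sketch; the real MySQLManager method may differ.
    public String escapeIdentifier(String identifier) {
      if (null == identifier) {
        return null;
      }
      // MySQL quotes identifiers with backticks; an embedded backtick
      // is doubled to keep it inside the identifier.
      return "`" + identifier.replace("`", "``") + "`";
    }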

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java Tue Jan 26 14:02:53 2010
@@ -39,7 +39,7 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 
@@ -79,7 +79,7 @@
 
   @Before
   public void setUp() {
-    ImportOptions options = new ImportOptions(AUTH_CONNECT_STRING, AUTH_TABLE_NAME);
+    SqoopOptions options = new SqoopOptions(AUTH_CONNECT_STRING, AUTH_TABLE_NAME);
     options.setUsername(AUTH_TEST_USER);
     options.setPassword(AUTH_TEST_PASS);
 
@@ -131,7 +131,8 @@
     }
   }
 
-  private String [] getArgv(boolean includeHadoopFlags) {
+  private String [] getArgv(boolean includeHadoopFlags,
+      boolean useDirect, String connectString, String tableName) {
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {
@@ -139,12 +140,14 @@
     }
 
     args.add("--table");
-    args.add(AUTH_TABLE_NAME);
+    args.add(tableName);
     args.add("--warehouse-dir");
     args.add(getWarehouseDir());
     args.add("--connect");
-    args.add(AUTH_CONNECT_STRING);
-    args.add("--direct");
+    args.add(connectString);
+    if (useDirect) {
+      args.add("--direct");
+    }
     args.add("--username");
     args.add(AUTH_TEST_USER);
     args.add("--password");
@@ -162,7 +165,7 @@
    */
   @Test
   public void testAuthAccess() {
-    String [] argv = getArgv(true);
+    String [] argv = getArgv(true, true, AUTH_CONNECT_STRING, AUTH_TABLE_NAME);
     try {
       runImport(argv);
     } catch (IOException ioe) {
@@ -190,4 +193,115 @@
       IOUtils.closeStream(r);
     }
   }
+
+  @Test
+  public void testZeroTimestamp() throws IOException, SQLException {
+    // MySQL TIMESTAMP columns can hold the all-zeros value
+    // '0000-00-00 00:00:00', which java.sql.Timestamp cannot
+    // represent. MySQLManager adds settings to the connect string
+    // that configure the driver's handling of zero-valued
+    // timestamps. Check that each of these connect-string
+    // modifications succeeds.
+
+    try {
+      // A connect string with a null 'query' component.
+      doZeroTimestampTest(0, true, AUTH_CONNECT_STRING);
+
+      // A connect string with a zero-length query component.
+      doZeroTimestampTest(1, true, AUTH_CONNECT_STRING + "?");
+
+      // A connect string with another argument already present.
+      doZeroTimestampTest(2, true, AUTH_CONNECT_STRING + "?connectTimeout=0");
+      doZeroTimestampTest(3, true, AUTH_CONNECT_STRING + "?connectTimeout=0&");
+
+      // A connect string with the zero-timestamp behavior already
+      // configured.
+      doZeroTimestampTest(4, true, AUTH_CONNECT_STRING
+          + "?zeroDateTimeBehavior=convertToNull");
+
+      // And finally, behavior already configured in such a way as to
+      // cause the timestamp import to fail.
+      doZeroTimestampTest(5, false, AUTH_CONNECT_STRING
+          + "?zeroDateTimeBehavior=exception");
+    } finally {
+      // Clean up our mess on the way out.
+      dropTimestampTables();
+    }
+  }
+
+  private void dropTimestampTables() throws SQLException {
+    SqoopOptions options = new SqoopOptions(AUTH_CONNECT_STRING, null);
+    options.setUsername(AUTH_TEST_USER);
+    options.setPassword(AUTH_TEST_PASS);
+
+    manager = new LocalMySQLManager(options);
+
+    Connection connection = null;
+    Statement st = null;
+
+    connection = manager.getConnection();
+    connection.setAutoCommit(false);
+    st = connection.createStatement();
+
+    st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable0");
+    st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable1");
+    st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable2");
+    st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable3");
+    st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable4");
+    st.executeUpdate("DROP TABLE IF EXISTS mysqlTimestampTable5");
+    connection.commit();
+    st.close();
+    connection.close();
+  }
+
+  public void doZeroTimestampTest(int testNum, boolean expectSuccess,
+      String connectString) throws IOException, SQLException {
+
+    final String tableName = "mysqlTimestampTable" + Integer.toString(testNum);
+
+    // Create a table containing a full-zeros timestamp.
+    SqoopOptions options = new SqoopOptions(connectString, tableName);
+    options.setUsername(AUTH_TEST_USER);
+    options.setPassword(AUTH_TEST_PASS);
+
+    manager = new LocalMySQLManager(options);
+
+    Connection connection = null;
+    Statement st = null;
+
+    connection = manager.getConnection();
+    connection.setAutoCommit(false);
+    st = connection.createStatement();
+
+    // create the database table and populate it with data. 
+    st.executeUpdate("DROP TABLE IF EXISTS " + tableName);
+    st.executeUpdate("CREATE TABLE " + tableName + " ("
+        + "id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, "
+        + "ts TIMESTAMP NOT NULL)");
+
+    st.executeUpdate("INSERT INTO " + tableName + " VALUES("
+        + "NULL,'0000-00-00 00:00:00.0')");
+    connection.commit();
+    st.close();
+    connection.close();
+
+    // Run the import.
+    String [] argv = getArgv(true, false, connectString, tableName);
+    runImport(argv);
+
+    // Make sure the result file is there.
+    Path warehousePath = new Path(this.getWarehouseDir());
+    Path tablePath = new Path(warehousePath, tableName);
+    Path filePath = new Path(tablePath, "part-m-00000");
+
+    File f = new File(filePath.toString());
+    if (expectSuccess) {
+      assertTrue("Could not find imported data file", f.exists());
+      BufferedReader r = new BufferedReader(new InputStreamReader(
+          new FileInputStream(f)));
+      assertEquals("1,null", r.readLine());
+      IOUtils.closeStream(r);
+    } else {
+      assertFalse("Imported data when expected failure", f.exists());
+    }
+  }
 }

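The testZeroTimestamp cases above enumerate the connect-string shapes the manager must handle when it injects the driver's zeroDateTimeBehavior property. A rough sketch of that URL handling, under the assumption (hypothetical helper and example URL) that a single method performs it:

    // Sketch: append zeroDateTimeBehavior=convertToNull to a JDBC
    // connect string unless the caller already chose a behavior.
    // Covers the shapes the test enumerates: no '?', a bare '?',
    // existing arguments, and a trailing '&'.
    public final class ConnectStringSketch {
      private ConnectStringSketch() { }

      public static String withZeroDateTimeBehavior(String connectStr) {
        if (connectStr.contains("zeroDateTimeBehavior")) {
          // The user configured it (e.g., 'exception'); leave it alone.
          return connectStr;
        }
        String sep;
        if (!connectStr.contains("?")) {
          sep = "?";
        } else if (connectStr.endsWith("?") || connectStr.endsWith("&")) {
          sep = "";
        } else {
          sep = "&";
        }
        return connectStr + sep + "zeroDateTimeBehavior=convertToNull";
      }

      public static void main(String[] args) {
        String base = "jdbc:mysql://localhost/sqooptestdb"; // example URL
        System.out.println(withZeroDateTimeBehavior(base));
        System.out.println(withZeroDateTimeBehavior(base + "?connectTimeout=0"));
      }
    }

Leaving an existing zeroDateTimeBehavior setting untouched is what makes test case 5 meaningful: a user-supplied 'exception' value still reaches the driver, so the import fails as the test expects.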
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java Tue Jan 26 14:02:53 2010
@@ -27,7 +27,13 @@
 import java.sql.Connection;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Date;
+import java.util.Calendar;
+import java.util.TimeZone;
 import java.util.ArrayList;
+import java.text.ParseException;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
 
 import junit.framework.TestCase;
 
@@ -39,7 +45,7 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.util.FileListing;
@@ -78,7 +84,7 @@
 
   @Before
   public void setUp() {
-    ImportOptions options = new ImportOptions(CONNECT_STRING, TABLE_NAME);
+    SqoopOptions options = new SqoopOptions(CONNECT_STRING, TABLE_NAME);
     options.setUsername(ORACLE_USER_NAME);
     options.setPassword(ORACLE_USER_PASS);
 
@@ -101,14 +107,16 @@
           + "start_date DATE, "
           + "salary FLOAT, "
           + "dept VARCHAR2(32), "
+          + "timestamp_tz TIMESTAMP WITH TIME ZONE, "
+          + "timestamp_ltz TIMESTAMP WITH LOCAL TIME ZONE, "
           + "PRIMARY KEY (id))");
 
       st.executeUpdate("INSERT INTO " + TABLE_NAME + " VALUES("
-          + "1,'Aaron',to_date('2009-05-14','yyyy-mm-dd'),1000000.00,'engineering')");
+          + "1,'Aaron',to_date('2009-05-14','yyyy-mm-dd'),1000000.00,'engineering','29-DEC-09 12.00.00.000000000 PM','29-DEC-09 12.00.00.000000000 PM')");
       st.executeUpdate("INSERT INTO " + TABLE_NAME + " VALUES("
-          + "2,'Bob',to_date('2009-04-20','yyyy-mm-dd'),400.00,'sales')");
+          + "2,'Bob',to_date('2009-04-20','yyyy-mm-dd'),400.00,'sales','30-DEC-09 12.00.00.000000000 PM','30-DEC-09 12.00.00.000000000 PM')");
       st.executeUpdate("INSERT INTO " + TABLE_NAME + " VALUES("
-          + "3,'Fred',to_date('2009-01-23','yyyy-mm-dd'),15.00,'marketing')");
+          + "3,'Fred',to_date('2009-01-23','yyyy-mm-dd'),15.00,'marketing','31-DEC-09 12.00.00.000000000 PM','31-DEC-09 12.00.00.000000000 PM')");
       connection.commit();
     } catch (SQLException sqlE) {
       LOG.error("Encountered SQL Exception: " + sqlE);
@@ -180,7 +188,7 @@
       ioe.printStackTrace();
       fail(ioe.toString());
     }
 
     File f = new File(filePath.toString());
     assertTrue("Could not find imported data file", f.exists());
     BufferedReader r = null;
@@ -188,7 +196,7 @@
       // Read through the file and make sure it's all there.
       r = new BufferedReader(new InputStreamReader(new FileInputStream(f)));
       for (String expectedLine : expectedResults) {
-        assertEquals(expectedLine, r.readLine());
+        compareRecords(expectedLine, r.readLine());
       }
     } catch (IOException ioe) {
       LOG.error("Got IOException verifying results: " + ioe.toString());
@@ -208,11 +216,80 @@
     // a strict DATE type. Thus we include HH:MM:SS.mmmmm below.
     // See http://www.oracle.com/technology/tech/java/sqlj_jdbc/htdocs/jdbc_faq.html#08_01
     String [] expectedResults = {
-        "1,Aaron,2009-05-14 00:00:00.0,1000000,engineering",
-        "2,Bob,2009-04-20 00:00:00.0,400,sales",
-        "3,Fred,2009-01-23 00:00:00.0,15,marketing"
+        "1,Aaron,2009-05-14 00:00:00.0,1000000,engineering,2009-12-29 12:00:00.0,2009-12-29 12:00:00.0",
+        "2,Bob,2009-04-20 00:00:00.0,400,sales,2009-12-30 12:00:00.0,2009-12-30 12:00:00.0",
+        "3,Fred,2009-01-23 00:00:00.0,15,marketing,2009-12-31 12:00:00.0,2009-12-31 12:00:00.0"
     };
 
     runOracleTest(expectedResults);
   }
+
+  /**
+   * Compare an expected line with a received line, treating the two
+   * timestamp fields as equal when they differ only by the local
+   * timezone offset.
+   * @param expectedLine    the expected line
+   * @param receivedLine    the line read back from the imported file
+   * @throws IOException if the lines do not match
+   */
+  private void compareRecords(String expectedLine, String receivedLine) throws IOException {
+    // A missing line can only match another missing line.
+    if (expectedLine == null || receivedLine == null) {
+      if (expectedLine != receivedLine) {
+        throw new IOException("Expected:<" + expectedLine
+            + "> but was:<" + receivedLine + ">");
+      }
+      return;
+    }
+
+    // check if lines are equal
+    if (expectedLine.equals(receivedLine)) {
+      return;
+    }
+
+    // check that both lines contain the expected number of fields
+    // (five scalar columns plus the two timestamp columns)
+    String [] expectedValues = expectedLine.split(",");
+    String [] receivedValues = receivedLine.split(",");
+    if (expectedValues.length != 7 || receivedValues.length != 7) {
+      LOG.error("Number of expected fields did not match number of received fields");
+      throw new IOException("Number of expected fields did not match number of received fields");
+    }
+
+    // check first 5 values
+    boolean mismatch = false;
+    for (int i = 0; !mismatch && i < 5; i++) {
+      mismatch = !expectedValues[i].equals(receivedValues[i]);
+    }
+    if (mismatch) {
+      throw new IOException("Expected:<" + expectedLine + "> but was:<" + receivedLine + ">");
+    }
+
+    Date expectedDate = null;
+    Date receivedDate = null;
+    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S");
+    int offset = TimeZone.getDefault().getOffset(System.currentTimeMillis()) / 3600000;
+    for (int i = 5; i < 7; i++) {
+      // parse expected timestamp
+      try {
+        expectedDate = df.parse(expectedValues[i]);
+      } catch (ParseException ex) {
+        LOG.error("Could not parse expected timestamp: " + expectedValues[i]);
+        throw new IOException("Could not parse expected timestamp: " + expectedValues[i]);
+      }
+
+      // parse received timestamp
+      try {
+        receivedDate = df.parse(receivedValues[i]);
+      } catch (ParseException ex) {
+        LOG.error("Could not parse received timestamp: " + receivedValues[i]);
+        throw new IOException("Could not parse received timestamp: " + receivedValues[i]);
+      }
+
+      // compare two timestamps considering timezone offset
+      Calendar expectedCal = Calendar.getInstance();
+      expectedCal.setTime(expectedDate);
+      expectedCal.add(Calendar.HOUR, offset);
+
+      Calendar receivedCal = Calendar.getInstance();
+      receivedCal.setTime(receivedDate);
+
+      if (!expectedCal.equals(receivedCal)) {
+        throw new IOException("Expected:<" + expectedLine + "> but was:<" + receivedLine + ">, while timezone offset is: " + offset);
+      }
+    }
+  }
 }

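compareRecords above tolerates the shift that Oracle's TIMESTAMP WITH (LOCAL) TIME ZONE columns pick up from the session time zone by adding the JVM's UTC offset to the expected value. A self-contained illustration of that arithmetic (the demo class is hypothetical):

    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.TimeZone;

    // Demo of the adjustment compareRecords performs: shift the
    // expected timestamp by the JVM's current UTC offset (whole
    // hours) before comparing it to the value read back.
    public class OffsetDemo {
      public static void main(String[] args) throws Exception {
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S");
        int offsetHours = TimeZone.getDefault()
            .getOffset(System.currentTimeMillis()) / 3600000;

        Calendar expected = Calendar.getInstance();
        expected.setTime(df.parse("2009-12-29 12:00:00.0"));
        expected.add(Calendar.HOUR, offsetHours);
        System.out.println("adjusted expected: " + expected.getTime());
      }
    }

One caveat worth noting: getOffset() returns milliseconds and includes any DST correction, so the integer division by 3600000 silently assumes a whole-hour zone; a half-hour offset such as UTC+05:30 would defeat the comparison.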
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java Tue Jan 26 14:02:53 2010
@@ -35,7 +35,8 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.util.FileListing;
 
@@ -84,7 +85,7 @@
   public void setUp() {
     LOG.debug("Setting up another postgresql test...");
 
-    ImportOptions options = new ImportOptions(CONNECT_STRING, TABLE_NAME);
+    SqoopOptions options = new SqoopOptions(CONNECT_STRING, TABLE_NAME);
     options.setUsername(DATABASE_USER);
 
     ConnManager manager = null;
@@ -150,8 +151,7 @@
   private String [] getArgv(boolean isDirect) {
     ArrayList<String> args = new ArrayList<String>();
 
-    args.add("-D");
-    args.add("fs.default.name=file:///");
+    CommonArgs.addHadoopFlags(args);
 
     args.add("--table");
     args.add(TABLE_NAME);

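The PostgresqlTest hunk above swaps the hand-rolled '-D fs.default.name=file:///' arguments for the shared CommonArgs.addHadoopFlags helper. A plausible sketch of that helper, reconstructed from the flags the old code set (the committed CommonArgs may add further flags):

    import java.util.ArrayList;
    import java.util.List;

    // Sketch of a CommonArgs-style helper: centralize the Hadoop -D
    // flags each manager test previously added by hand. The flag shown
    // is the one the old PostgresqlTest code set.
    public final class CommonArgsSketch {
      private CommonArgsSketch() { }

      public static void addHadoopFlags(List<String> args) {
        args.add("-D");
        args.add("fs.default.name=file:///");
      }

      public static void main(String[] args) {
        List<String> argv = new ArrayList<String>();
        addHadoopFlags(argv);
        System.out.println(argv); // [-D, fs.default.name=file:///]
      }
    }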
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java Tue Jan 26 14:02:53 2010
@@ -21,6 +21,8 @@
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
+import java.sql.Connection;
+import java.sql.Statement;
 import java.sql.SQLException;
 import java.util.Enumeration;
 import java.util.jar.JarEntry;
@@ -34,8 +36,8 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import org.apache.hadoop.sqoop.ImportOptions;
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.testutil.DirUtil;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
@@ -53,7 +55,7 @@
   // instance variables populated during setUp, used during tests
   private HsqldbTestServer testServer;
   private ConnManager manager;
-  private ImportOptions options;
+  private SqoopOptions options;
 
   @Before
   public void setUp() {
@@ -71,7 +73,7 @@
     }
 
     manager = testServer.getManager();
-    options = testServer.getImportOptions();
+    options = testServer.getSqoopOptions();
 
     // sanity check: make sure we're in a tmp dir before we blow anything away.
     assertTrue("Test generates code in non-tmp dir!",
@@ -267,5 +269,50 @@
 
     runGenerationTest(argv, OVERRIDE_PACKAGE_NAME + "." + HsqldbTestServer.getTableName());
   }
+
+
+  // Test the SQL identifier -> Java identifier conversion.
+  @Test
+  public void testIdentifierConversion() {
+    assertNull(ClassWriter.getIdentifierStrForChar(' '));
+    assertNull(ClassWriter.getIdentifierStrForChar('\t'));
+    assertNull(ClassWriter.getIdentifierStrForChar('\r'));
+    assertNull(ClassWriter.getIdentifierStrForChar('\n'));
+    assertEquals("x", ClassWriter.getIdentifierStrForChar('x'));
+    assertEquals("_", ClassWriter.getIdentifierStrForChar('-'));
+    assertEquals("_", ClassWriter.getIdentifierStrForChar('_'));
+
+    assertEquals("foo", ClassWriter.toIdentifier("foo"));
+    assertEquals("_class", ClassWriter.toIdentifier("class"));
+    assertEquals("_class", ClassWriter.toIdentifier("cla ss"));
+    assertEquals("_int", ClassWriter.toIdentifier("int"));
+    assertEquals("thisismanywords", ClassWriter.toIdentifier("this is many words"));
+    assertEquals("_9isLegalInSql", ClassWriter.toIdentifier("9isLegalInSql"));
+    assertEquals("___", ClassWriter.toIdentifier("___"));
+  }
+
+  @Test
+  public void testWeirdColumnNames() throws SQLException {
+    // Recreate the table with column names that aren't legal Java identifiers.
+    String tableName = HsqldbTestServer.getTableName();
+    Connection connection = testServer.getConnection();
+    Statement st = connection.createStatement();
+    st.executeUpdate("DROP TABLE " + tableName + " IF EXISTS");
+    st.executeUpdate("CREATE TABLE " + tableName + " (class INT, \"9field\" INT)");
+    st.executeUpdate("INSERT INTO " + tableName + " VALUES(42, 41)");
+    connection.commit();
+    connection.close();
+
+    String [] argv = {
+        "--bindir",
+        JAR_GEN_DIR,
+        "--outdir",
+        CODE_GEN_DIR,
+        "--package-name",
+        OVERRIDE_PACKAGE_NAME
+    };
+
+    runGenerationTest(argv, OVERRIDE_PACKAGE_NAME + "." + HsqldbTestServer.getTableName());
+  }
 }
 

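The assertions in testIdentifierConversion above pin down the SQL-to-Java identifier rules: whitespace is dropped, other illegal characters become underscores, and a result that starts with a digit or collides with a Java keyword gains a leading underscore. A sketch that satisfies exactly those assertions (a hypothetical reimplementation; the real code lives in ClassWriter and may differ):

    // Sketch of the SQL-to-Java identifier mapping described by the
    // assertions in testIdentifierConversion.
    public final class IdentifierSketch {
      private IdentifierSketch() { }

      public static String getIdentifierStrForChar(char c) {
        if (Character.isJavaIdentifierPart(c)) {
          return String.valueOf(c);
        } else if (Character.isWhitespace(c)) {
          return null; // whitespace is dropped entirely.
        } else {
          return "_";  // everything else maps to an underscore.
        }
      }

      public static String toIdentifier(String candidate) {
        StringBuilder sb = new StringBuilder();
        for (char c : candidate.toCharArray()) {
          String s = getIdentifierStrForChar(c);
          if (s != null) {
            sb.append(s);
          }
        }
        String out = sb.toString();
        // A leading digit or a reserved word is not a legal identifier.
        if (out.length() == 0 || Character.isDigit(out.charAt(0))
            || isJavaKeyword(out)) {
          out = "_" + out;
        }
        return out;
      }

      private static boolean isJavaKeyword(String s) {
        // Abbreviated keyword check for the sketch; the real list is longer.
        return "class".equals(s) || "int".equals(s);
      }
    }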
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java Tue Jan 26 14:02:53 2010
@@ -34,8 +34,8 @@
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.apache.hadoop.sqoop.ImportOptions;
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.mapred.RawKeyTextOutputFormat;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.testutil.CommonArgs;
@@ -99,7 +99,7 @@
         encloseRequired);
     runImport(argv);
     try {
-      ImportOptions opts = new ImportOptions();
+      SqoopOptions opts = new SqoopOptions();
 
       String tableClassName = getTableName();
 

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/HsqldbTestServer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/HsqldbTestServer.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/HsqldbTestServer.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/HsqldbTestServer.java Tue Jan 26 14:02:53 2010
@@ -29,7 +29,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.hsqldb.Server;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.manager.HsqldbManager;
 
@@ -226,13 +226,13 @@
     populateData();
   }
 
-  public ImportOptions getImportOptions() {
-    return new ImportOptions(HsqldbTestServer.getUrl(),
+  public SqoopOptions getSqoopOptions() {
+    return new SqoopOptions(HsqldbTestServer.getUrl(),
         HsqldbTestServer.getTableName());
   }
 
   public ConnManager getManager() {
-    return new HsqldbManager(getImportOptions());
+    return new HsqldbManager(getSqoopOptions());
   }
 
 

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java Tue Jan 26 14:02:53 2010
@@ -20,250 +20,29 @@
 
 import java.io.File;
 import java.io.IOException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
 import java.util.ArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.BasicConfigurator;
-import org.junit.After;
-import org.junit.Before;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.Sqoop;
-import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
-import org.apache.hadoop.sqoop.manager.ConnManager;
+import org.apache.hadoop.sqoop.SqoopOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
 import org.apache.hadoop.sqoop.util.ClassLoaderStack;
 
-import junit.framework.TestCase;
-
 /**
  * Class that implements common methods required for tests which import data
  * from SQL into HDFS and verify correct import.
  */
-public class ImportJobTestCase extends TestCase {
+public class ImportJobTestCase extends BaseSqoopTestCase {
 
   public static final Log LOG = LogFactory.getLog(ImportJobTestCase.class.getName());
 
-  /** Base directory for all temporary data */
-  public static final String TEMP_BASE_DIR;
-
-  /** Where to import table data to in the local filesystem for testing */
-  public static final String LOCAL_WAREHOUSE_DIR;
-
-  // Initializer for the above
-  static {
-    String tmpDir = System.getProperty("test.build.data", "/tmp/");
-    if (!tmpDir.endsWith(File.separator)) {
-      tmpDir = tmpDir + File.separator;
-    }
-
-    TEMP_BASE_DIR = tmpDir;
-    LOCAL_WAREHOUSE_DIR = TEMP_BASE_DIR + "sqoop/warehouse";
-  }
-
-  // Used if a test manually sets the table name to be used.
-  private String curTableName;
-
-  protected void setCurTableName(String curName) {
-    this.curTableName = curName;
-  }
-
-  /**
-   * Because of how classloading works, we don't actually want to name
-   * all the tables the same thing -- they'll actually just use the same
-   * implementation of the Java class that was classloaded before. So we
-   * use this counter to uniquify table names.
-   */
-  private static int tableNum = 0;
-
-  /** the name of a table that we'll populate with items for each test. */
-  static final String TABLE_NAME = "IMPORT_TABLE_";
-
-  protected String getTableName() {
-    if (null != curTableName) {
-      return curTableName;
-    } else {
-      return TABLE_NAME + Integer.toString(tableNum);
-    }
-  }
-
-  protected String getWarehouseDir() {
-    return LOCAL_WAREHOUSE_DIR;
-  }
-
-  private String [] colNames;
-  protected String [] getColNames() {
-    return colNames;
-  }
-
-  protected HsqldbTestServer getTestServer() {
-    return testServer;
-  }
-
-  protected ConnManager getManager() {
-    return manager;
-  }
-
-  // instance variables populated during setUp, used during tests
-  private HsqldbTestServer testServer;
-  private ConnManager manager;
-
-  private static boolean isLog4jConfigured = false;
-
-  protected void incrementTableNum() {
-    tableNum++;
-  }
-
-  @Before
-  public void setUp() {
-
-    incrementTableNum();
-
-    if (!isLog4jConfigured) {
-      BasicConfigurator.configure();
-      isLog4jConfigured = true;
-      LOG.info("Configured log4j with console appender.");
-    }
-
-    testServer = new HsqldbTestServer();
-    try {
-      testServer.resetServer();
-    } catch (SQLException sqlE) {
-      LOG.error("Got SQLException: " + sqlE.toString());
-      fail("Got SQLException: " + sqlE.toString());
-    } catch (ClassNotFoundException cnfe) {
-      LOG.error("Could not find class for db driver: " + cnfe.toString());
-      fail("Could not find class for db driver: " + cnfe.toString());
-    }
-
-    manager = testServer.getManager();
-  }
-
-  @After
-  public void tearDown() {
-    setCurTableName(null); // clear user-override table name.
-
-    try {
-      if (null != manager) {
-        manager.close();
-      }
-    } catch (SQLException sqlE) {
-      LOG.error("Got SQLException: " + sqlE.toString());
-      fail("Got SQLException: " + sqlE.toString());
-    }
-
-  }
-
-  static final String BASE_COL_NAME = "DATA_COL";
-
-  /**
-   * Create a table with a set of columns and add a row of values.
-   * @param colTypes the types of the columns to make
-   * @param vals the SQL text for each value to insert
-   */
-  protected void createTableWithColTypes(String [] colTypes, String [] vals) {
-    Connection conn = null;
-    try {
-      conn = getTestServer().getConnection();
-      PreparedStatement statement = conn.prepareStatement(
-          "DROP TABLE " + getTableName() + " IF EXISTS",
-          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-      statement.executeUpdate();
-      statement.close();
-
-      String columnDefStr = "";
-      String columnListStr = "";
-      String valueListStr = "";
-
-      String [] myColNames = new String[colTypes.length];
-
-      for (int i = 0; i < colTypes.length; i++) {
-        String colName = BASE_COL_NAME + Integer.toString(i);
-        columnDefStr += colName + " " + colTypes[i];
-        columnListStr += colName;
-        valueListStr += vals[i];
-        myColNames[i] = colName;
-        if (i < colTypes.length - 1) {
-          columnDefStr += ", ";
-          columnListStr += ", ";
-          valueListStr += ", ";
-        }
-      }
-
-      statement = conn.prepareStatement(
-          "CREATE TABLE " + getTableName() + "(" + columnDefStr + ")",
-          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-      statement.executeUpdate();
-      statement.close();
-
-      statement = conn.prepareStatement(
-          "INSERT INTO " + getTableName() + "(" + columnListStr + ")"
-          + " VALUES(" + valueListStr + ")",
-          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-      statement.executeUpdate();
-      statement.close();
-      conn.commit();
-      this.colNames = myColNames;
-    } catch (SQLException sqlException) {
-      fail("Could not create table: " + sqlException.toString());
-    } finally {
-      if (null != conn) {
-        try {
-          conn.close();
-        } catch (SQLException sqlE) {
-          LOG.warn("Got SQLException during close: " + sqlE.toString());
-        }
-      }
-    }
-  }
-
-  /**
-   * Create a table with a single column and put a data element in it.
-   * @param colType the type of the column to create
-   * @param val the value to insert (reformatted as a string)
-   */
-  protected void createTableForColType(String colType, String val) {
-    String [] types = { colType };
-    String [] vals = { val };
-
-    createTableWithColTypes(types, vals);
-  }
-
-  /**
-   * verify that the single-column single-row result can be read back from the db.
-   *
-   */
-  protected void verifyReadback(int colNum, String expectedVal) {
-    ResultSet results = null;
-    try {
-      results = getManager().readTable(getTableName(), getColNames());
-      assertNotNull("Null results from readTable()!", results);
-      assertTrue("Expected at least one row returned", results.next());
-      String resultVal = results.getString(colNum);
-      if (null != expectedVal) {
-        assertNotNull("Expected non-null result value", resultVal);
-      }
-
-      assertEquals("Error reading inserted value back from db", expectedVal, resultVal);
-      assertFalse("Expected at most one row returned", results.next());
-    } catch (SQLException sqlE) {
-      fail("Got SQLException: " + sqlE.toString());
-    } finally {
-      if (null != results) {
-        try {
-          results.close();
-        } catch (SQLException sqlE) {
-          fail("Got SQLException in resultset.close(): " + sqlE.toString());
-        }
-      }
-    }
+  protected String getTablePrefix() {
+    return "IMPORT_TABLE_";
   }
 
   /**
@@ -306,27 +85,6 @@
     return args.toArray(new String[0]);
   }
 
-  protected Path getTablePath() {
-    Path warehousePath = new Path(getWarehouseDir());
-    Path tablePath = new Path(warehousePath, getTableName());
-    return tablePath;
-  }
-
-  protected Path getDataFilePath() {
-    return new Path(getTablePath(), "part-m-00000");
-  }
-
-  protected void removeTableDir() {
-    File tableDirFile = new File(getTablePath().toString());
-    if (tableDirFile.exists()) {
-      // Remove the director where the table will be imported to,
-      // prior to running the MapReduce job.
-      if (!DirUtil.deleteDir(tableDirFile)) {
-        LOG.warn("Could not delete table directory: " + tableDirFile.getAbsolutePath());
-      }
-    }
-  }
-
   /**
    * Do a MapReduce-based import of the table and verify that the results
    * were imported as expected. (tests readFields(ResultSet) and toString())
@@ -353,7 +111,7 @@
     // expect a successful return.
     assertEquals("Failure during job", 0, ret);
 
-    ImportOptions opts = new ImportOptions();
+    SqoopOptions opts = new SqoopOptions();
     try {
       opts.parse(getArgv(false, importCols));
     } catch (InvalidOptionsException ioe) {

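Most of ImportJobTestCase above moves into the new BaseSqoopTestCase, so subclass tests keep calling the same helpers. For orientation, a hypothetical subclass test driving createTableWithColTypes and verifyReadback as they appear in the removed code (assuming both keep their signatures after the move):

    import org.junit.Test;

    // Hypothetical subclass test, shown only to illustrate how the
    // relocated helpers are driven.
    public class ExampleImportTest extends ImportJobTestCase {
      @Test
      public void testReadSingleRow() {
        // Creates DATA_COL0 INTEGER = 42 and DATA_COL1 VARCHAR = 'meep'.
        createTableWithColTypes(
            new String[] { "INTEGER", "VARCHAR(32)" },
            new String[] { "42", "'meep'" });
        // Column indexes are 1-based, per JDBC.
        verifyReadback(2, "meep");
      }
    }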
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q Tue Jan 26 14:02:53 2010
@@ -1,2 +1,2 @@
-CREATE TABLE CUSTOM_DELIM_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\054' LINES TERMINATED BY '\0174' STORED AS TEXTFILE;
+CREATE TABLE CUSTOM_DELIM_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\054' LINES TERMINATED BY '\174' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/CUSTOM_DELIM_IMPORT' INTO TABLE CUSTOM_DELIM_IMPORT;

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/dateImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/dateImport.q?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/dateImport.q (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/dateImport.q Tue Jan 26 14:02:53 2010
@@ -1,2 +1,2 @@
-CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
+CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\001' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/DATE_HIVE_IMPORT' INTO TABLE DATE_HIVE_IMPORT;

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/failingImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/failingImport.q?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/failingImport.q (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/failingImport.q Tue Jan 26 14:02:53 2010
@@ -1,2 +1,2 @@
-CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
+CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\001' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/DATE_HIVE_IMPORT' INTO TABLE DATE_HIVE_IMPORT;

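The Hive script fixes above replace ambiguous octal escapes with unambiguous forms: under Java-style escape rules, '\0174' parses as the two characters '\017' and '4', while '\174' is the single pipe character and '\001' is Ctrl-A, Hive's default field terminator. A small demo of the parsing difference (assuming Hive's delimiter strings follow Java-style octal escapes):

    // Why the corrected forms matter: octal 174 is '|' and octal 001
    // is Ctrl-A, while a four-digit sequence splits into two characters.
    public class EscapeDemo {
      public static void main(String[] args) {
        System.out.println((int) '\174');      // 124, i.e. '|'
        System.out.println((int) '\001');      // 1, i.e. Ctrl-A
        System.out.println("\0174".length());  // 2: '\017' then '4'
      }
    }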

