hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tomwh...@apache.org
Subject svn commit: r896265 - in /hadoop/mapreduce/trunk: ./ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/ src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/ src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/ src/contrib/sqoop/testdata/hive/s...
Date Tue, 05 Jan 2010 23:18:40 GMT
Author: tomwhite
Date: Tue Jan  5 23:18:32 2010
New Revision: 896265

URL: http://svn.apache.org/viewvc?rev=896265&view=rev
Log:
MAPREDUCE-1310. CREATE TABLE statements for Hive do not correctly specify delimiters. Contributed
by Aaron Kimball.

Added:
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestTableDefWriter.java
Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q
    hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/dateImport.q
    hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/failingImport.q
    hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/normalImport.q
    hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/numericImport.q

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=896265&r1=896264&r2=896265&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Tue Jan  5 23:18:32 2010
@@ -204,6 +204,9 @@
     MAPREDUCE-1155. Streaming tests swallow exceptions.
     (Todd Lipcon via tomwhite)
 
+    MAPREDUCE-1310. CREATE TABLE statements for Hive do not correctly specify
+    delimiters. (Aaron Kimball via tomwhite)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java?rev=896265&r1=896264&r2=896265&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java
Tue Jan  5 23:18:32 2010
@@ -115,10 +115,10 @@
       sb.append("COMMENT 'Imported by sqoop on " + curDateStr + "' ");
     }
 
-    sb.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\0");
-    sb.append(Integer.toOctalString((int) options.getOutputFieldDelim()));
-    sb.append("' LINES TERMINATED BY '\\0");
-    sb.append(Integer.toOctalString((int) options.getOutputRecordDelim()));
+    sb.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY '");
+    sb.append(getHiveOctalCharCode((int) options.getOutputFieldDelim()));
+    sb.append("' LINES TERMINATED BY '");
+    sb.append(getHiveOctalCharCode((int) options.getOutputRecordDelim()));
     sb.append("' STORED AS TEXTFILE");
 
     LOG.debug("Create statement: " + sb.toString());
@@ -170,5 +170,28 @@
     LOG.debug("Load statement: " + sb.toString());
     return sb.toString();
   }
+
+  /**
+   * Return a string identifying the character to use as a delimiter
+   * in Hive, in octal representation.
+   * Hive can specify delimiter characters in the form '\ooo' where
+   * ooo is a three-digit octal number between 000 and 177. Values
+   * may not be truncated ('\12' is wrong; '\012' is ok) nor may they
+   * be zero-prefixed (e.g., '\0177' is wrong).
+   *
+   * @param charNum the character to use as a delimiter
+   * @return a string of the form "\ooo" where ooo is an octal number
+   * in [000, 177].
+   * @throws IllegalArgumentException if charNum > 0177.
+   */
+  static String getHiveOctalCharCode(int charNum)
+      throws IllegalArgumentException {
+    if (charNum > 0177) {
+      throw new IllegalArgumentException(
+          "Character " + charNum + " is an out-of-range delimiter");
+    }
+
+    return String.format("\\%03o", charNum);
+  }
 }
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java?rev=896265&r1=896264&r2=896265&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java
Tue Jan  5 23:18:32 2010
@@ -19,6 +19,7 @@
 package org.apache.hadoop.sqoop;
 
 import org.apache.hadoop.sqoop.hive.TestHiveImport;
+import org.apache.hadoop.sqoop.hive.TestTableDefWriter;
 import org.apache.hadoop.sqoop.io.TestSplittableBufferedWriter;
 import org.apache.hadoop.sqoop.lib.TestFieldFormatter;
 import org.apache.hadoop.sqoop.lib.TestRecordParser;
@@ -59,6 +60,7 @@
     suite.addTestSuite(TestParseMethods.class);
     suite.addTestSuite(TestConnFactory.class);
     suite.addTestSuite(TestSplittableBufferedWriter.class);
+    suite.addTestSuite(TestTableDefWriter.class);
     suite.addTest(MapreduceTests.suite());
 
     return suite;

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestTableDefWriter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestTableDefWriter.java?rev=896265&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestTableDefWriter.java
(added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestTableDefWriter.java
Tue Jan  5 23:18:32 2010
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.hive;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.Test;
+
+import junit.framework.TestCase;
+
+/**
+ * Test Hive DDL statement generation.
+ */
+public class TestTableDefWriter extends TestCase {
+
+  public static final Log LOG = LogFactory.getLog(TestHiveImport.class.getName());
+
+
+  // Test getHiveOctalCharCode and expect an IllegalArgumentException.
+  private void expectExceptionInCharCode(int charCode) {
+    try {
+      TableDefWriter.getHiveOctalCharCode(charCode);
+      fail("Expected IllegalArgumentException");
+    } catch (IllegalArgumentException iae) {
+      // Expected; ok.
+    }
+  }
+
+  public void testHiveOctalCharCode() {
+    assertEquals("\\000", TableDefWriter.getHiveOctalCharCode(0));
+    assertEquals("\\001", TableDefWriter.getHiveOctalCharCode(1));
+    assertEquals("\\012", TableDefWriter.getHiveOctalCharCode((int) '\n'));
+    assertEquals("\\177", TableDefWriter.getHiveOctalCharCode(0177));
+
+    expectExceptionInCharCode(4096);
+    expectExceptionInCharCode(0200);
+    expectExceptionInCharCode(254);
+  }
+}

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q?rev=896265&r1=896264&r2=896265&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/customDelimImport.q Tue
Jan  5 23:18:32 2010
@@ -1,2 +1,2 @@
-CREATE TABLE CUSTOM_DELIM_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW
FORMAT DELIMITED FIELDS TERMINATED BY '\054' LINES TERMINATED BY '\0174' STORED AS TEXTFILE;
+CREATE TABLE CUSTOM_DELIM_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW
FORMAT DELIMITED FIELDS TERMINATED BY '\054' LINES TERMINATED BY '\174' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/CUSTOM_DELIM_IMPORT' INTO TABLE CUSTOM_DELIM_IMPORT;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/dateImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/dateImport.q?rev=896265&r1=896264&r2=896265&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/dateImport.q (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/dateImport.q Tue Jan  5
23:18:32 2010
@@ -1,2 +1,2 @@
-CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
+CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\001' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/DATE_HIVE_IMPORT' INTO TABLE DATE_HIVE_IMPORT;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/failingImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/failingImport.q?rev=896265&r1=896264&r2=896265&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/failingImport.q (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/failingImport.q Tue Jan
 5 23:18:32 2010
@@ -1,2 +1,2 @@
-CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
+CREATE TABLE DATE_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 STRING) ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\001' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/DATE_HIVE_IMPORT' INTO TABLE DATE_HIVE_IMPORT;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/normalImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/normalImport.q?rev=896265&r1=896264&r2=896265&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/normalImport.q (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/normalImport.q Tue Jan
 5 23:18:32 2010
@@ -1,2 +1,2 @@
-CREATE TABLE NORMAL_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW
FORMAT DELIMITED FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
+CREATE TABLE NORMAL_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW
FORMAT DELIMITED FIELDS TERMINATED BY '\001' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/NORMAL_HIVE_IMPORT' INTO TABLE NORMAL_HIVE_IMPORT;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/numericImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/numericImport.q?rev=896265&r1=896264&r2=896265&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/numericImport.q (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/testdata/hive/scripts/numericImport.q Tue Jan
 5 23:18:32 2010
@@ -1,2 +1,2 @@
-CREATE TABLE NUMERIC_HIVE_IMPORT ( DATA_COL0 DOUBLE, DATA_COL1 STRING) ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
+CREATE TABLE NUMERIC_HIVE_IMPORT ( DATA_COL0 DOUBLE, DATA_COL1 STRING) ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\001' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/NUMERIC_HIVE_IMPORT' INTO TABLE NUMERIC_HIVE_IMPORT;



Mime
View raw message