hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From br...@apache.org
Subject svn commit: r1610452 [1/2] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/ itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/ itests/custom-se...
Date Mon, 14 Jul 2014 16:01:12 GMT
Author: brock
Date: Mon Jul 14 16:01:11 2014
New Revision: 1610452

URL: http://svn.apache.org/r1610452
Log:
HIVE-5976 - Decouple input formats from STORED as keywords (David Chen via Brock)

Added:
    hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomTextSerDe.java
    hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomTextStorageFormatDescriptor.java
    hive/trunk/itests/custom-serde/src/main/resources/
    hive/trunk/itests/custom-serde/src/main/resources/META-INF/
    hive/trunk/itests/custom-serde/src/main/resources/META-INF/services/
    hive/trunk/itests/custom-serde/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/AbstractStorageFormatDescriptor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/ORCFileStorageFormatDescriptor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/ParquetFileStorageFormatDescriptor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileStorageFormatDescriptor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileStorageFormatDescriptor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatDescriptor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/TextFileStorageFormatDescriptor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java
    hive/trunk/ql/src/main/resources/META-INF/
    hive/trunk/ql/src/main/resources/META-INF/services/
    hive/trunk/ql/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestStorageFormatDescriptor.java
    hive/trunk/ql/src/test/queries/clientpositive/storage_format_descriptor.q
    hive/trunk/ql/src/test/results/clientpositive/storage_format_descriptor.q.out
Modified:
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/conf/hive-default.xml.template
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/test/results/clientnegative/fileformat_bad_class.q.out
    hive/trunk/ql/src/test/results/clientnegative/genericFileFormat.q.out
    hive/trunk/ql/src/test/results/clientpositive/create_union_table.q.out
    hive/trunk/ql/src/test/results/clientpositive/ctas.q.out
    hive/trunk/ql/src/test/results/clientpositive/ctas_colname.q.out
    hive/trunk/ql/src/test/results/clientpositive/ctas_uses_database_location.q.out
    hive/trunk/ql/src/test/results/clientpositive/groupby_duplicate_key.q.out
    hive/trunk/ql/src/test/results/clientpositive/input15.q.out
    hive/trunk/ql/src/test/results/clientpositive/inputddl1.q.out
    hive/trunk/ql/src/test/results/clientpositive/inputddl2.q.out
    hive/trunk/ql/src/test/results/clientpositive/inputddl3.q.out
    hive/trunk/ql/src/test/results/clientpositive/merge3.q.out
    hive/trunk/ql/src/test/results/clientpositive/nonmr_fetch.q.out
    hive/trunk/ql/src/test/results/clientpositive/nullformat.q.out
    hive/trunk/ql/src/test/results/clientpositive/nullformatCTAS.q.out
    hive/trunk/ql/src/test/results/clientpositive/parallel_orderby.q.out
    hive/trunk/ql/src/test/results/clientpositive/skewjoin_noskew.q.out
    hive/trunk/ql/src/test/results/clientpositive/smb_mapjoin9.q.out
    hive/trunk/ql/src/test/results/clientpositive/temp_table.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/ctas.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/tez_dml.q.out
    hive/trunk/ql/src/test/results/clientpositive/union25.q.out
    hive/trunk/ql/src/test/results/clientpositive/union_top_level.q.out

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Mon Jul 14 16:01:11 2014
@@ -681,6 +681,10 @@ public class HiveConf extends Configurat
         "org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe",
         "The default SerDe Hive will use for the RCFile format"),
 
+    HIVEDEFAULTSERDE("hive.default.serde",
+        "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe",
+        "The default SerDe Hive will use for storage formats that do not specify a SerDe."),
+
     SERDESUSINGMETASTOREFORSCHEMA("hive.serdes.using.metastore.for.schema",
         "org.apache.hadoop.hive.ql.io.orc.OrcSerde,org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe," +
         "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe,org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe," +

Modified: hive/trunk/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml.template?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/conf/hive-default.xml.template (original)
+++ hive/trunk/conf/hive-default.xml.template Mon Jul 14 16:01:11 2014
@@ -1,7 +1,5 @@
 <?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!--
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?><!--
    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements.  See the NOTICE file distributed with
    this work for additional information regarding copyright ownership.
@@ -16,8 +14,7 @@
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
--->
-<configuration>
+--><configuration>
   <!-- WARNING!!! This file is auto generated for documentation purposes ONLY! -->
   <!-- WARNING!!! Any changes you make to this file will be ignored by Hive.   -->
   <!-- WARNING!!! You must make your changes in hive-site.xml instead.         -->
@@ -1091,6 +1088,11 @@
     <description>The default SerDe Hive will use for the RCFile format</description>
   </property>
   <property>
+    <key>hive.default.serde</key>
+    <value>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</value>
+    <description>The default SerDe Hive will use for storage formats that do not specify a SerDe.</description>
+  </property>
+  <property>
     <key>hive.serdes.using.metastore.for.schema</key>
     <value>org.apache.hadoop.hive.ql.io.orc.OrcSerde,org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe,org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe,org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe,org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe,org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe</value>
     <description>SerDes retriving schema from metastore. This an internal parameter. Check with the hive dev. team</description>

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java Mon Jul 14 16:01:11 2014
@@ -30,8 +30,6 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.DDLTask;
 import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
-import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
@@ -41,6 +39,7 @@ import org.apache.hadoop.hive.ql.parse.B
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.StorageFormat;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hive.hcatalog.common.HCatConstants;
@@ -66,14 +65,20 @@ final class CreateTableHook extends HCat
     // Analyze and create tbl properties object
     int numCh = ast.getChildCount();
 
-    String inputFormat = null, outputFormat = null;
     tableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast
       .getChild(0));
     boolean likeTable = false;
+    StorageFormat format = new StorageFormat(context.getConf());
 
     for (int num = 1; num < numCh; num++) {
       ASTNode child = (ASTNode) ast.getChild(num);
-
+      if (format.fillStorageFormat(child)) {
+        if (org.apache.commons.lang.StringUtils
+            .isNotEmpty(format.getStorageHandler())) {
+            return ast;
+        }
+        continue;
+      }
       switch (child.getToken().getType()) {
 
       case HiveParser.TOK_QUERY: // CTAS
@@ -84,17 +89,6 @@ final class CreateTableHook extends HCat
       case HiveParser.TOK_TABLEBUCKETS:
         break;
 
-      case HiveParser.TOK_TBLSEQUENCEFILE:
-        inputFormat = HCatConstants.SEQUENCEFILE_INPUT;
-        outputFormat = HCatConstants.SEQUENCEFILE_OUTPUT;
-        break;
-
-      case HiveParser.TOK_TBLTEXTFILE:
-        inputFormat = org.apache.hadoop.mapred.TextInputFormat.class.getName();
-        outputFormat = org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat.class.getName();
-
-        break;
-
       case HiveParser.TOK_LIKETABLE:
         likeTable = true;
         break;
@@ -125,43 +119,14 @@ final class CreateTableHook extends HCat
           }
         }
         break;
-
-      case HiveParser.TOK_STORAGEHANDLER:
-        String storageHandler = BaseSemanticAnalyzer
-          .unescapeSQLString(child.getChild(0).getText());
-        if (org.apache.commons.lang.StringUtils
-          .isNotEmpty(storageHandler)) {
-          return ast;
-        }
-
-        break;
-
-      case HiveParser.TOK_TABLEFILEFORMAT:
-        if (child.getChildCount() < 2) {
-          throw new SemanticException(
-            "Incomplete specification of File Format. " +
-              "You must provide InputFormat, OutputFormat.");
-        }
-        inputFormat = BaseSemanticAnalyzer.unescapeSQLString(child
-          .getChild(0).getText());
-        outputFormat = BaseSemanticAnalyzer.unescapeSQLString(child
-          .getChild(1).getText());
-        break;
-
-      case HiveParser.TOK_TBLRCFILE:
-        inputFormat = RCFileInputFormat.class.getName();
-        outputFormat = RCFileOutputFormat.class.getName();
-        break;
-
       }
     }
 
-    if (!likeTable && (inputFormat == null || outputFormat == null)) {
+    if (!likeTable && (format.getInputFormat() == null || format.getOutputFormat() == null)) {
       throw new SemanticException(
         "STORED AS specification is either incomplete or incorrect.");
     }
 
-
     return ast;
   }
 

Added: hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomTextSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomTextSerDe.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomTextSerDe.java (added)
+++ hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomTextSerDe.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2;
+
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+
+public class CustomTextSerDe extends LazySimpleSerDe {
+  public CustomTextSerDe() throws SerDeException {
+  }
+}

Added: hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomTextStorageFormatDescriptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomTextStorageFormatDescriptor.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomTextStorageFormatDescriptor.java (added)
+++ hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomTextStorageFormatDescriptor.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2;
+
+import java.util.Set;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.TextFileStorageFormatDescriptor;
+
+import com.google.common.collect.ImmutableSet;
+
+public class CustomTextStorageFormatDescriptor extends TextFileStorageFormatDescriptor {
+
+  @Override
+  public Set<String> getNames() {
+    return ImmutableSet.of(IOConstants.CUSTOM_TEXT_SERDE);
+  }
+
+  @Override
+  public String getSerde() {
+    return CustomTextSerDe.class.getName();
+  }
+}

Added: hive/trunk/itests/custom-serde/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor
URL: http://svn.apache.org/viewvc/hive/trunk/itests/custom-serde/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor?rev=1610452&view=auto
==============================================================================
--- hive/trunk/itests/custom-serde/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor (added)
+++ hive/trunk/itests/custom-serde/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor Mon Jul 14 16:01:11 2014
@@ -0,0 +1 @@
+org.apache.hadoop.hive.serde2.CustomTextStorageFormatDescriptor

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/AbstractStorageFormatDescriptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/AbstractStorageFormatDescriptor.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/AbstractStorageFormatDescriptor.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/AbstractStorageFormatDescriptor.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+
+public abstract class AbstractStorageFormatDescriptor implements StorageFormatDescriptor {
+
+  @Override
+  public String getSerde() {
+    return null;
+  }
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java Mon Jul 14 16:01:11 2014
@@ -17,11 +17,32 @@
  */
 package org.apache.hadoop.hive.ql.io;
 
+import org.apache.hadoop.mapred.TextInputFormat;
+
+import com.google.common.annotations.VisibleForTesting;
+
 public final class IOConstants {
   public static final String COLUMNS = "columns";
   public static final String COLUMNS_TYPES = "columns.types";
   public static final String MAPRED_TASK_ID = "mapred.task.id";
 
+  public static final String TEXTFILE = "TEXTFILE";
+  public static final String SEQUENCEFILE = "SEQUENCEFILE";
+  public static final String RCFILE = "RCFILE";
+  public static final String ORC = "ORC";
+  public static final String ORCFILE = "ORCFILE";
+  public static final String PARQUET = "PARQUET";
+  public static final String PARQUETFILE = "PARQUETFILE";
+
+  @VisibleForTesting
+  public static final String CUSTOM_TEXT_SERDE = "CustomTextSerde";
+
+  public static final String TEXTFILE_INPUT = TextInputFormat.class
+      .getName();
+  @SuppressWarnings("deprecation")
+  public static final String TEXTFILE_OUTPUT = IgnoreKeyTextOutputFormat.class
+      .getName();
+
   private IOConstants() {
     // prevent instantiation
   }

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/ORCFileStorageFormatDescriptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/ORCFileStorageFormatDescriptor.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/ORCFileStorageFormatDescriptor.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/ORCFileStorageFormatDescriptor.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.util.Set;
+
+import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
+import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
+import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
+
+import com.google.common.collect.ImmutableSet;
+
+public class ORCFileStorageFormatDescriptor extends AbstractStorageFormatDescriptor {
+  @Override
+  public Set<String> getNames() {
+    return ImmutableSet.of(IOConstants.ORCFILE, IOConstants.ORC);
+  }
+  @Override
+  public String getInputFormat() {
+    return OrcInputFormat.class.getName();
+  }
+  @Override
+  public String getOutputFormat() {
+    return OrcOutputFormat.class.getName();
+  }
+  @Override
+  public String getSerde() {
+    return OrcSerde.class.getName();
+  }
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/ParquetFileStorageFormatDescriptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/ParquetFileStorageFormatDescriptor.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/ParquetFileStorageFormatDescriptor.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/ParquetFileStorageFormatDescriptor.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.util.Set;
+
+import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
+import org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat;
+import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
+
+import com.google.common.collect.ImmutableSet;
+
+public class ParquetFileStorageFormatDescriptor extends AbstractStorageFormatDescriptor {
+  @Override
+  public Set<String> getNames() {
+    return ImmutableSet.of(IOConstants.PARQUETFILE, IOConstants.PARQUET);
+  }
+  @Override
+  public String getInputFormat() {
+    return MapredParquetInputFormat.class.getName();
+  }
+  @Override
+  public String getOutputFormat() {
+    return MapredParquetOutputFormat.class.getName();
+  }
+  @Override
+  public String getSerde() {
+    return ParquetHiveSerDe.class.getName();
+  }
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileStorageFormatDescriptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileStorageFormatDescriptor.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileStorageFormatDescriptor.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileStorageFormatDescriptor.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.util.Set;
+
+import com.google.common.collect.ImmutableSet;
+
+public class RCFileStorageFormatDescriptor extends AbstractStorageFormatDescriptor {
+  @Override
+  public Set<String> getNames() {
+    return ImmutableSet.of(IOConstants.RCFILE);
+  }
+  @Override
+  public String getInputFormat() {
+    return RCFileInputFormat.class.getName();
+  }
+  @Override
+  public String getOutputFormat() {
+    return RCFileOutputFormat.class.getName();
+  }
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileStorageFormatDescriptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileStorageFormatDescriptor.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileStorageFormatDescriptor.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileStorageFormatDescriptor.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.util.Set;
+
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+
+import com.google.common.collect.ImmutableSet;
+
+public class SequenceFileStorageFormatDescriptor extends AbstractStorageFormatDescriptor {
+  @Override
+  public Set<String> getNames() {
+    return ImmutableSet.of(IOConstants.SEQUENCEFILE);
+  }
+  @Override
+  public String getInputFormat() {
+    return SequenceFileInputFormat.class.getName();
+  }
+  @Override
+  public String getOutputFormat() {
+    return SequenceFileOutputFormat.class.getName();
+  }
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatDescriptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatDescriptor.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatDescriptor.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatDescriptor.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.util.Set;
+
+import javax.annotation.Nullable;
+
+/**
+ * Subclasses represent a storage format for the
+ * CREATE TABLE ... STORED AS ... command. Subclasses are
+ * found via the ServiceLoader facility.
+ */
+public interface StorageFormatDescriptor {
+  /**
+   * Return the set of names this storage format is known as.
+   */
+  Set<String> getNames();
+  /**
+   * Return the name of the input format as a string
+   */
+  String getInputFormat();
+  /**
+   * Return the name of the output format as a string
+   */
+  String getOutputFormat();
+  /**
+   * Return the name of the serde as a string or null
+   */
+  @Nullable String getSerde();
+
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.ServiceLoader;
+
+import javax.annotation.Nullable;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import com.google.common.collect.ImmutableMap;
+
+public class StorageFormatFactory {
+  private static final Log LOG = LogFactory.getLog(StorageFormatFactory.class);
+
+  private final Map<String, StorageFormatDescriptor> storageFormats;
+
+  public StorageFormatFactory() {
+    Map<String, StorageFormatDescriptor> localStorageFormats =
+        new HashMap<String, StorageFormatDescriptor>();
+    for (StorageFormatDescriptor descriptor : ServiceLoader.load(StorageFormatDescriptor.class)) {
+      for (String name : descriptor.getNames()) {
+        name = name.trim().toUpperCase();
+        StorageFormatDescriptor oldDescriptor  = localStorageFormats.put(name, descriptor);
+        if (oldDescriptor != null) {
+          String msg = "Storage Format Descriptor conflict at name '" + name + "', " +
+              "the descriptor " + descriptor + " is overriding " + oldDescriptor;
+          LOG.warn(msg);
+        }
+      }
+    }
+    this.storageFormats = ImmutableMap.copyOf(localStorageFormats);
+  }
+
+  public @Nullable StorageFormatDescriptor get(String name) {
+    name = name.trim().toUpperCase();
+    return storageFormats.get(name);
+  }
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/TextFileStorageFormatDescriptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/TextFileStorageFormatDescriptor.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/TextFileStorageFormatDescriptor.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/TextFileStorageFormatDescriptor.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.util.Set;
+
+import com.google.common.collect.ImmutableSet;
+
/**
 * Storage format descriptor for plain text files, i.e.
 * {@code STORED AS TEXTFILE}. The input/output format class names come
 * from {@link IOConstants}; the serde (if any) is whatever
 * AbstractStorageFormatDescriptor provides — not visible here, so
 * presumably the default serde applies (verify in the base class).
 */
public class TextFileStorageFormatDescriptor extends AbstractStorageFormatDescriptor {
  // Registered under the single name TEXTFILE.
  @Override
  public Set<String> getNames() {
    return ImmutableSet.of(IOConstants.TEXTFILE);
  }
  // Input format class name for text files, from IOConstants.
  @Override
  public String getInputFormat() {
    return IOConstants.TEXTFILE_INPUT;
  }
  // Output format class name for text files, from IOConstants.
  @Override
  public String getOutputFormat() {
    return IOConstants.TEXTFILE_OUTPUT;
  }
}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Mon Jul 14 16:01:11 2014
@@ -50,15 +50,6 @@ import org.apache.hadoop.hive.ql.exec.Ta
 import org.apache.hadoop.hive.ql.hooks.LineageInfo;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
-import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
-import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
-import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
-import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
-import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
-import org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat;
-import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -77,9 +68,6 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.mapred.SequenceFileInputFormat;
-import org.apache.hadoop.mapred.SequenceFileOutputFormat;
-import org.apache.hadoop.mapred.TextInputFormat;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -87,7 +75,6 @@ import com.google.common.annotations.Vis
  * BaseSemanticAnalyzer.
  *
  */
-@SuppressWarnings("deprecation")
 public abstract class BaseSemanticAnalyzer {
   private static final Log STATIC_LOG = LogFactory.getLog(BaseSemanticAnalyzer.class.getName());
   protected final Hive db;
@@ -119,28 +106,6 @@ public abstract class BaseSemanticAnalyz
   protected TableAccessInfo tableAccessInfo;
   protected ColumnAccessInfo columnAccessInfo;
 
-  protected static final String TEXTFILE_INPUT = TextInputFormat.class
-      .getName();
-  protected static final String TEXTFILE_OUTPUT = IgnoreKeyTextOutputFormat.class
-      .getName();
-  protected static final String SEQUENCEFILE_INPUT = SequenceFileInputFormat.class
-      .getName();
-  protected static final String SEQUENCEFILE_OUTPUT = SequenceFileOutputFormat.class
-      .getName();
-  protected static final String RCFILE_INPUT = RCFileInputFormat.class
-      .getName();
-  protected static final String RCFILE_OUTPUT = RCFileOutputFormat.class
-      .getName();
-  protected static final String ORCFILE_INPUT = OrcInputFormat.class
-      .getName();
-  protected static final String ORCFILE_OUTPUT = OrcOutputFormat.class
-      .getName();
-  protected static final String ORCFILE_SERDE = OrcSerde.class
-      .getName();
-
-  protected static final String PARQUETFILE_INPUT = MapredParquetInputFormat.class.getName();
-  protected static final String PARQUETFILE_OUTPUT = MapredParquetOutputFormat.class.getName();
-  protected static final String PARQUETFILE_SERDE = ParquetHiveSerDe.class.getName();
 
   public boolean skipAuthorization() {
     return false;
@@ -154,7 +119,7 @@ public abstract class BaseSemanticAnalyz
     String lineDelim = null;
     String nullFormat = null;
 
-    protected void analyzeRowFormat(AnalyzeCreateCommonVars shared, ASTNode child) throws SemanticException {
+    protected void analyzeRowFormat(ASTNode child) throws SemanticException {
       child = (ASTNode) child.getChild(0);
       int numChildRowFormat = child.getChildCount();
       for (int numC = 0; numC < numChildRowFormat; numC++) {
@@ -190,93 +155,7 @@ public abstract class BaseSemanticAnalyz
                     .getText());
           break;
         default:
-          assert false;
-        }
-      }
-    }
-  }
-
-  class AnalyzeCreateCommonVars {
-    String serde = null;
-    Map<String, String> serdeProps = new HashMap<String, String>();
-  }
-
-  class StorageFormat {
-    String inputFormat = null;
-    String outputFormat = null;
-    String storageHandler = null;
-
-    protected boolean fillStorageFormat(ASTNode child, AnalyzeCreateCommonVars shared) {
-      boolean storageFormat = false;
-      switch(child.getToken().getType()) {
-      case HiveParser.TOK_TBLSEQUENCEFILE:
-        inputFormat = SEQUENCEFILE_INPUT;
-        outputFormat = SEQUENCEFILE_OUTPUT;
-        storageFormat = true;
-        break;
-      case HiveParser.TOK_TBLTEXTFILE:
-        inputFormat = TEXTFILE_INPUT;
-        outputFormat = TEXTFILE_OUTPUT;
-        storageFormat = true;
-        break;
-      case HiveParser.TOK_TBLRCFILE:
-        inputFormat = RCFILE_INPUT;
-        outputFormat = RCFILE_OUTPUT;
-        if (shared.serde == null) {
-          shared.serde = conf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
-        }
-        storageFormat = true;
-        break;
-      case HiveParser.TOK_TBLORCFILE:
-        inputFormat = ORCFILE_INPUT;
-        outputFormat = ORCFILE_OUTPUT;
-        shared.serde = ORCFILE_SERDE;
-        storageFormat = true;
-        break;
-      case HiveParser.TOK_TBLPARQUETFILE:
-        inputFormat = PARQUETFILE_INPUT;
-        outputFormat = PARQUETFILE_OUTPUT;
-        shared.serde = PARQUETFILE_SERDE;
-        storageFormat = true;
-        break;
-      case HiveParser.TOK_TABLEFILEFORMAT:
-        inputFormat = unescapeSQLString(child.getChild(0).getText());
-        outputFormat = unescapeSQLString(child.getChild(1).getText());
-        storageFormat = true;
-        break;
-      case HiveParser.TOK_STORAGEHANDLER:
-        storageHandler = unescapeSQLString(child.getChild(0).getText());
-        if (child.getChildCount() == 2) {
-          readProps(
-            (ASTNode) (child.getChild(1).getChild(0)),
-            shared.serdeProps);
-        }
-        storageFormat = true;
-        break;
-      }
-      return storageFormat;
-    }
-
-    protected void fillDefaultStorageFormat(AnalyzeCreateCommonVars shared) {
-      if ((inputFormat == null) && (storageHandler == null)) {
-        if ("SequenceFile".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
-          inputFormat = SEQUENCEFILE_INPUT;
-          outputFormat = SEQUENCEFILE_OUTPUT;
-        } else if ("RCFile".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
-          inputFormat = RCFILE_INPUT;
-          outputFormat = RCFILE_OUTPUT;
-          shared.serde = conf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
-        } else if ("ORC".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
-          inputFormat = ORCFILE_INPUT;
-          outputFormat = ORCFILE_OUTPUT;
-          shared.serde = ORCFILE_SERDE;
-        } else if ("PARQUET".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
-          inputFormat = PARQUETFILE_INPUT;
-          outputFormat = PARQUETFILE_OUTPUT;
-          shared.serde = PARQUETFILE_SERDE;
-        } else {
-          inputFormat = TEXTFILE_INPUT;
-          outputFormat = TEXTFILE_OUTPUT;
+          throw new AssertionError("Unkown Token: " + rowChild);
         }
       }
     }
@@ -607,13 +486,6 @@ public abstract class BaseSemanticAnalyz
     return getColumns(ast, true);
   }
 
-  protected void handleGenericFileFormat(ASTNode node) throws SemanticException{
-
-  ASTNode child = (ASTNode)node.getChild(0);
-  throw new SemanticException("Unrecognized file format in STORED AS clause:"+
-         " "+ (child == null ? "" : child.getText()));
-  }
-
   /**
    * Get the list of FieldSchema out of the ASTNode.
    */
@@ -966,7 +838,7 @@ public abstract class BaseSemanticAnalyz
    * @throws HiveException
    */
   public final boolean isValidPrefixSpec(Table tTable, Map<String, String> spec)
- throws HiveException {
+      throws HiveException {
 
     // TODO - types need to be checked.
     List<FieldSchema> partCols = tTable.getPartitionKeys();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon Jul 14 16:01:11 2014
@@ -1036,17 +1036,16 @@ public class DDLSemanticAnalyzer extends
     String indexComment = null;
 
     RowFormatParams rowFormatParams = new RowFormatParams();
-    StorageFormat storageFormat = new StorageFormat();
-    AnalyzeCreateCommonVars shared = new AnalyzeCreateCommonVars();
+    StorageFormat storageFormat = new StorageFormat(conf);
 
     for (int idx = 4; idx < ast.getChildCount(); idx++) {
       ASTNode child = (ASTNode) ast.getChild(idx);
-      if (storageFormat.fillStorageFormat(child, shared)) {
+      if (storageFormat.fillStorageFormat(child)) {
         continue;
       }
       switch (child.getToken().getType()) {
       case HiveParser.TOK_TABLEROWFORMAT:
-        rowFormatParams.analyzeRowFormat(shared, child);
+        rowFormatParams.analyzeRowFormat(child);
         break;
       case HiveParser.TOK_CREATEINDEX_INDEXTBLNAME:
         ASTNode ch = (ASTNode) child.getChild(0);
@@ -1067,10 +1066,10 @@ public class DDLSemanticAnalyzer extends
         break;
       case HiveParser.TOK_TABLESERIALIZER:
         child = (ASTNode) child.getChild(0);
-        shared.serde = unescapeSQLString(child.getChild(0).getText());
+        storageFormat.setSerde(unescapeSQLString(child.getChild(0).getText()));
         if (child.getChildCount() == 2) {
           readProps((ASTNode) (child.getChild(1).getChild(0)),
-              shared.serdeProps);
+              storageFormat.getSerdeProps());
         }
         break;
       case HiveParser.TOK_INDEXCOMMENT:
@@ -1079,14 +1078,14 @@ public class DDLSemanticAnalyzer extends
       }
     }
 
-    storageFormat.fillDefaultStorageFormat(shared);
+    storageFormat.fillDefaultStorageFormat();
 
 
     CreateIndexDesc crtIndexDesc = new CreateIndexDesc(tableName, indexName,
-        indexedCols, indexTableName, deferredRebuild, storageFormat.inputFormat,
-        storageFormat.outputFormat,
-        storageFormat.storageHandler, typeName, location, idxProps, tblProps,
-        shared.serde, shared.serdeProps, rowFormatParams.collItemDelim,
+        indexedCols, indexTableName, deferredRebuild, storageFormat.getInputFormat(),
+        storageFormat.getOutputFormat(),
+        storageFormat.getStorageHandler(), typeName, location, idxProps, tblProps,
+        storageFormat.getSerde(), storageFormat.getSerdeProps(), rowFormatParams.collItemDelim,
         rowFormatParams.fieldDelim, rowFormatParams.fieldEscape,
         rowFormatParams.lineDelim, rowFormatParams.mapKeyDelim, indexComment);
     Task<?> createIndex =
@@ -1321,69 +1320,15 @@ public class DDLSemanticAnalyzer extends
       HashMap<String, String> partSpec)
       throws SemanticException {
 
-    String inputFormat = null;
-    String outputFormat = null;
-    String storageHandler = null;
-    String serde = null;
+    StorageFormat format = new StorageFormat(conf);
     ASTNode child = (ASTNode) ast.getChild(0);
 
-    switch (child.getToken().getType()) {
-    case HiveParser.TOK_TABLEFILEFORMAT:
-      inputFormat = unescapeSQLString(((ASTNode) child.getChild(0)).getToken()
-          .getText());
-      outputFormat = unescapeSQLString(((ASTNode) child.getChild(1)).getToken()
-          .getText());
-      serde = unescapeSQLString(((ASTNode) child.getChild(2)).getToken()
-          .getText());
-      try {
-        Class.forName(inputFormat);
-        Class.forName(outputFormat);
-        Class.forName(serde);
-      } catch (ClassNotFoundException e) {
-        throw new SemanticException(e);
-      }
-      break;
-    case HiveParser.TOK_STORAGEHANDLER:
-      storageHandler =
-          unescapeSQLString(((ASTNode) child.getChild(1)).getToken().getText());
-      try {
-        Class.forName(storageHandler);
-      } catch (ClassNotFoundException e) {
-        throw new SemanticException(e);
-      }
-      break;
-    case HiveParser.TOK_TBLSEQUENCEFILE:
-      inputFormat = SEQUENCEFILE_INPUT;
-      outputFormat = SEQUENCEFILE_OUTPUT;
-      serde = org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName();
-      break;
-    case HiveParser.TOK_TBLTEXTFILE:
-      inputFormat = TEXTFILE_INPUT;
-      outputFormat = TEXTFILE_OUTPUT;
-      serde = org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName();
-      break;
-    case HiveParser.TOK_TBLRCFILE:
-      inputFormat = RCFILE_INPUT;
-      outputFormat = RCFILE_OUTPUT;
-      serde = conf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
-      break;
-    case HiveParser.TOK_TBLORCFILE:
-      inputFormat = ORCFILE_INPUT;
-      outputFormat = ORCFILE_OUTPUT;
-      serde = ORCFILE_SERDE;
-      break;
-    case HiveParser.TOK_TBLPARQUETFILE:
-      inputFormat = PARQUETFILE_INPUT;
-      outputFormat = PARQUETFILE_OUTPUT;
-      serde = PARQUETFILE_SERDE;
-      break;
-    case HiveParser.TOK_FILEFORMAT_GENERIC:
-      handleGenericFileFormat(child);
-      break;
+    if (!format.fillStorageFormat(child)) {
+      throw new AssertionError("Unknown token " + child.getText());
     }
 
-    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, inputFormat,
-        outputFormat, serde, storageHandler, partSpec);
+    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, format.getInputFormat(),
+        format.getOutputFormat(), format.getSerde(), format.getStorageHandler(), partSpec);
 
     addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g Mon Jul 14 16:01:11 2014
@@ -151,11 +151,6 @@ KW_KEY_TYPE: '$KEY$';
 KW_LINES: 'LINES';
 KW_STORED: 'STORED';
 KW_FILEFORMAT: 'FILEFORMAT';
-KW_SEQUENCEFILE: 'SEQUENCEFILE';
-KW_TEXTFILE: 'TEXTFILE';
-KW_RCFILE: 'RCFILE';
-KW_ORCFILE: 'ORC';
-KW_PARQUETFILE: 'PARQUET';
 KW_INPUTFORMAT: 'INPUTFORMAT';
 KW_OUTPUTFORMAT: 'OUTPUTFORMAT';
 KW_INPUTDRIVER: 'INPUTDRIVER';

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Mon Jul 14 16:01:11 2014
@@ -183,11 +183,6 @@ TOK_TABLEROWFORMATCOLLITEMS;
 TOK_TABLEROWFORMATMAPKEYS;
 TOK_TABLEROWFORMATLINES;
 TOK_TABLEROWFORMATNULL;
-TOK_TBLORCFILE;
-TOK_TBLPARQUETFILE;
-TOK_TBLSEQUENCEFILE;
-TOK_TBLTEXTFILE;
-TOK_TBLRCFILE;
 TOK_TABLEFILEFORMAT;
 TOK_FILEFORMAT_GENERIC;
 TOK_OFFLINE;
@@ -1267,12 +1262,7 @@ alterStatementSuffixCompact
 fileFormat
 @init { pushMsg("file format specification", state); }
 @after { popMsg(state); }
-    : KW_SEQUENCEFILE  -> ^(TOK_TBLSEQUENCEFILE)
-    | KW_TEXTFILE  -> ^(TOK_TBLTEXTFILE)
-    | KW_RCFILE  -> ^(TOK_TBLRCFILE)
-    | KW_ORCFILE -> ^(TOK_TBLORCFILE)
-    | KW_PARQUETFILE -> ^(TOK_TBLPARQUETFILE)
-    | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral KW_SERDE serdeCls=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
+    : KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral KW_SERDE serdeCls=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
       -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $serdeCls $inDriver? $outDriver?)
     | genericSpec=identifier -> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
     ;
@@ -1818,12 +1808,7 @@ tableFileFormat
 @init { pushMsg("table file format specification", state); }
 @after { popMsg(state); }
     :
-      KW_STORED KW_AS KW_SEQUENCEFILE  -> TOK_TBLSEQUENCEFILE
-      | KW_STORED KW_AS KW_TEXTFILE  -> TOK_TBLTEXTFILE
-      | KW_STORED KW_AS KW_RCFILE  -> TOK_TBLRCFILE
-      | KW_STORED KW_AS KW_ORCFILE -> TOK_TBLORCFILE
-      | KW_STORED KW_AS KW_PARQUETFILE -> TOK_TBLPARQUETFILE
-      | KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
+      KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
       -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
       | KW_STORED KW_BY storageHandler=StringLiteral
          (KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g Mon Jul 14 16:01:11 2014
@@ -538,5 +538,5 @@ functionIdentifier
 
 nonReserved
     :
-    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | 
 KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_PARQUETFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE |
  KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE | KW_AUTHORIZATION
+    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | 
 KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_AN
 ALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE | KW_AUTHORIZATION
     ;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java Mon Jul 14 16:01:11 2014
@@ -22,12 +22,11 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -216,4 +215,18 @@ public final class ParseUtils {
       return TypeInfoFactory.getDecimalTypeInfo(precision, scale);
   }
 
+  public static String ensureClassExists(String className)
+      throws SemanticException {
+    if (className == null) {
+      return null;
+    }
+    try {
+      Class.forName(className, true, JavaUtils.getClassLoader());
+    } catch (ClassNotFoundException e) {
+      throw new SemanticException("Cannot find class '" + className + "'", e);
+    }
+    return className;
+  }
+
+
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Mon Jul 14 16:01:11 2014
@@ -1182,10 +1182,6 @@ public class SemanticAnalyzer extends Ba
     return phase1Result;
   }
 
-  private void getMetaData(QBExpr qbexpr) throws SemanticException {
-    getMetaData(qbexpr, null);
-  }
-
   private void getMetaData(QBExpr qbexpr, ReadEntity parentInput)
       throws SemanticException {
     if (qbexpr.getOpcode() == QBExpr.Opcode.NULLOP) {
@@ -1361,8 +1357,7 @@ public class SemanticAnalyzer extends Ba
       }
 
       RowFormatParams rowFormatParams = new RowFormatParams();
-      AnalyzeCreateCommonVars shared = new AnalyzeCreateCommonVars();
-      StorageFormat storageFormat = new StorageFormat();
+      StorageFormat storageFormat = new StorageFormat(conf);
 
       LOG.info("Get metadata for destination tables");
       // Go over all the destination structures and populate the related
@@ -1457,10 +1452,16 @@ public class SemanticAnalyzer extends Ba
           int numCh = ast.getChildCount();
           for (int num = 1; num < numCh ; num++){
             ASTNode child = (ASTNode) ast.getChild(num);
-            if (ast.getChild(num) != null){
+            if (child != null) {
+              if (storageFormat.fillStorageFormat(child)) {
+                localDirectoryDesc.setOutputFormat(storageFormat.getOutputFormat());
+                localDirectoryDesc.setSerName(storageFormat.getSerde());
+                localDirectoryDescIsSet = true;
+                continue;
+              }
               switch (child.getToken().getType()) {
                 case HiveParser.TOK_TABLEROWFORMAT:
-                  rowFormatParams.analyzeRowFormat(shared, child);
+                  rowFormatParams.analyzeRowFormat(child);
                   localDirectoryDesc.setFieldDelim(rowFormatParams.fieldDelim);
                   localDirectoryDesc.setLineDelim(rowFormatParams.lineDelim);
                   localDirectoryDesc.setCollItemDelim(rowFormatParams.collItemDelim);
@@ -1471,18 +1472,8 @@ public class SemanticAnalyzer extends Ba
                   break;
                 case HiveParser.TOK_TABLESERIALIZER:
                   ASTNode serdeChild = (ASTNode) child.getChild(0);
-                  shared.serde = unescapeSQLString(serdeChild.getChild(0).getText());
-                  localDirectoryDesc.setSerName(shared.serde);
-                  localDirectoryDescIsSet=true;
-                  break;
-                case HiveParser.TOK_TBLSEQUENCEFILE:
-                case HiveParser.TOK_TBLTEXTFILE:
-                case HiveParser.TOK_TBLRCFILE:
-                case HiveParser.TOK_TBLORCFILE:
-                case HiveParser.TOK_TABLEFILEFORMAT:
-                  storageFormat.fillStorageFormat(child, shared);
-                  localDirectoryDesc.setOutputFormat(storageFormat.outputFormat);
-                  localDirectoryDesc.setSerName(shared.serde);
+                  storageFormat.setSerde(unescapeSQLString(serdeChild.getChild(0).getText()));
+                  localDirectoryDesc.setSerName(storageFormat.getSerde());
                   localDirectoryDescIsSet=true;
                   break;
               }
@@ -10031,8 +10022,7 @@ public class SemanticAnalyzer extends Ba
     boolean storedAsDirs = false;
 
     RowFormatParams rowFormatParams = new RowFormatParams();
-    StorageFormat storageFormat = new StorageFormat();
-    AnalyzeCreateCommonVars shared = new AnalyzeCreateCommonVars();
+    StorageFormat storageFormat = new StorageFormat(conf);
 
     LOG.info("Creating table " + tableName + " position="
         + ast.getCharPositionInLine());
@@ -10046,7 +10036,7 @@ public class SemanticAnalyzer extends Ba
      */
     for (int num = 1; num < numCh; num++) {
       ASTNode child = (ASTNode) ast.getChild(num);
-      if (storageFormat.fillStorageFormat(child, shared)) {
+      if (storageFormat.fillStorageFormat(child)) {
         continue;
       }
       switch (child.getToken().getType()) {
@@ -10118,7 +10108,7 @@ public class SemanticAnalyzer extends Ba
         }
         break;
       case HiveParser.TOK_TABLEROWFORMAT:
-        rowFormatParams.analyzeRowFormat(shared, child);
+        rowFormatParams.analyzeRowFormat(child);
         break;
       case HiveParser.TOK_TABLELOCATION:
         location = unescapeSQLString(child.getChild(0).getText());
@@ -10130,16 +10120,12 @@ public class SemanticAnalyzer extends Ba
         break;
       case HiveParser.TOK_TABLESERIALIZER:
         child = (ASTNode) child.getChild(0);
-        shared.serde = unescapeSQLString(child.getChild(0).getText());
+        storageFormat.setSerde(unescapeSQLString(child.getChild(0).getText()));
         if (child.getChildCount() == 2) {
           readProps((ASTNode) (child.getChild(1).getChild(0)),
-              shared.serdeProps);
+              storageFormat.getSerdeProps());
         }
         break;
-
-      case HiveParser.TOK_FILEFORMAT_GENERIC:
-        handleGenericFileFormat(child);
-        break;
       case HiveParser.TOK_TABLESKEWED:
         /**
          * Throw an error if the user tries to use the DDL with
@@ -10160,9 +10146,9 @@ public class SemanticAnalyzer extends Ba
       }
     }
 
-    storageFormat.fillDefaultStorageFormat(shared);
+    storageFormat.fillDefaultStorageFormat();
 
-    if ((command_type == CTAS) && (storageFormat.storageHandler != null)) {
+    if ((command_type == CTAS) && (storageFormat.getStorageHandler() != null)) {
       throw new SemanticException(ErrorMsg.CREATE_NON_NATIVE_AS.getMsg());
     }
 
@@ -10174,7 +10160,8 @@ public class SemanticAnalyzer extends Ba
           return null;
         }
       } catch (HiveException e) {
-        e.printStackTrace();
+        // should not occur since second parameter to getTableWithQN is false
+        throw new IllegalStateException("Unexpected Exception thrown: " + e.getMessage(), e);
       }
     }
 
@@ -10215,8 +10202,8 @@ public class SemanticAnalyzer extends Ba
           rowFormatParams.fieldEscape,
           rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
           comment,
-          storageFormat.inputFormat, storageFormat.outputFormat, location, shared.serde,
-          storageFormat.storageHandler, shared.serdeProps, tblProps, ifNotExists, skewedColNames,
+          storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(),
+          storageFormat.getStorageHandler(), storageFormat.getSerdeProps(), tblProps, ifNotExists, skewedColNames,
           skewedValues);
       crtTblDesc.setStoredAsSubDirectories(storedAsDirs);
       crtTblDesc.setNullFormat(rowFormatParams.nullFormat);
@@ -10240,8 +10227,9 @@ public class SemanticAnalyzer extends Ba
         }
       }
       CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(tableName, isExt, isTemporary,
-          storageFormat.inputFormat, storageFormat.outputFormat, location,
-          shared.serde, shared.serdeProps, tblProps, ifNotExists, likeTableName);
+          storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location,
+          storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists,
+          likeTableName);
       SessionState.get().setCommandType(HiveOperation.CREATETABLE);
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
           crtTblLikeDesc), conf));
@@ -10265,10 +10253,10 @@ public class SemanticAnalyzer extends Ba
           bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
           rowFormatParams.fieldEscape,
           rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
-          comment, storageFormat.inputFormat,
-          storageFormat.outputFormat, location, shared.serde, storageFormat.storageHandler,
-          shared.serdeProps,
-          tblProps, ifNotExists, skewedColNames, skewedValues);
+          comment, storageFormat.getInputFormat(),
+          storageFormat.getOutputFormat(), location, storageFormat.getSerde(),
+          storageFormat.getStorageHandler(), storageFormat.getSerdeProps(), tblProps, ifNotExists,
+          skewedColNames, skewedValues);
       crtTblDesc.setStoredAsSubDirectories(storedAsDirs);
       crtTblDesc.setNullFormat(rowFormatParams.nullFormat);
       qb.setTableDesc(crtTblDesc);

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse;
+
+import static org.apache.hadoop.hive.ql.parse.ParseUtils.ensureClassExists;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.StorageFormatDescriptor;
+import org.apache.hadoop.hive.ql.io.StorageFormatFactory;
+
+public class StorageFormat {
+  private static final StorageFormatFactory storageFormatFactory = new StorageFormatFactory();
+  private final Configuration conf;
+  private String inputFormat;
+  private String outputFormat;
+  private String storageHandler;
+  private String serde;
+  private final Map<String, String> serdeProps;
+
+  public StorageFormat(Configuration conf) {
+    this.conf = conf;
+    this.serdeProps = new HashMap<String, String>();
+  }
+
+  /**
+   * Returns true if the passed token was a storage format token
+   * and thus was processed accordingly.
+   */
+  public boolean fillStorageFormat(ASTNode child) throws SemanticException {
+    switch (child.getToken().getType()) {
+    case HiveParser.TOK_TABLEFILEFORMAT:
+      if (child.getChildCount() < 2) {
+        throw new SemanticException(
+          "Incomplete specification of File Format. " +
+            "You must provide both InputFormat and OutputFormat.");
+      }
+      inputFormat = ensureClassExists(BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText()));
+      outputFormat = ensureClassExists(BaseSemanticAnalyzer.unescapeSQLString(child.getChild(1).getText()));
+      if (child.getChildCount() == 3) {
+        serde = ensureClassExists(BaseSemanticAnalyzer.unescapeSQLString(child.getChild(2).getText()));
+      }
+      break;
+    case HiveParser.TOK_STORAGEHANDLER:
+      storageHandler = ensureClassExists(BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText()));
+      if (child.getChildCount() == 2) {
+        BaseSemanticAnalyzer.readProps(
+          (ASTNode) (child.getChild(1).getChild(0)),
+          serdeProps);
+      }
+      break;
+    case HiveParser.TOK_FILEFORMAT_GENERIC:
+      ASTNode grandChild = (ASTNode)child.getChild(0);
+      String name = (grandChild == null ? "" : grandChild.getText()).trim().toUpperCase();
+      processStorageFormat(name);
+      break;
+    default:
+      // token was not a storage format token
+      return false;
+    }
+    return true;
+  }
+
+  private void processStorageFormat(String name) throws SemanticException {
+    if (name.isEmpty()) {
+      throw new SemanticException("File format in STORED AS clause cannot be empty");
+    }
+    StorageFormatDescriptor descriptor = storageFormatFactory.get(name);
+    if (descriptor == null) {
+      throw new SemanticException("Unrecognized file format in STORED AS clause:" +
+          " '" + name + "'");
+    }
+    inputFormat = ensureClassExists(descriptor.getInputFormat());
+    outputFormat = ensureClassExists(descriptor.getOutputFormat());
+    if (serde == null) {
+      serde = ensureClassExists(descriptor.getSerde());
+    }
+    if (serde == null) {
+      // RCFile supports a configurable SerDe
+      if (name.equalsIgnoreCase(IOConstants.RCFILE)) {
+        serde = ensureClassExists(HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE));
+      } else {
+        serde = ensureClassExists(HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTSERDE));
+      }
+    }
+  }
+
+  protected void fillDefaultStorageFormat() throws SemanticException {
+    if ((inputFormat == null) && (storageHandler == null)) {
+      String defaultFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT);
+      if (StringUtils.isBlank(defaultFormat)) {
+        inputFormat = IOConstants.TEXTFILE_INPUT;
+        outputFormat = IOConstants.TEXTFILE_OUTPUT;
+      } else {
+        processStorageFormat(defaultFormat);
+        if (defaultFormat.equalsIgnoreCase(IOConstants.RCFILE)) {
+          serde = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
+        }
+      }
+    }
+  }
+
+  public void setSerde(String serde) {
+    this.serde = serde;
+  }
+
+  public String getInputFormat() {
+    return inputFormat;
+  }
+
+  public String getOutputFormat() {
+    return outputFormat;
+  }
+
+  public String getStorageHandler() {
+    return storageHandler;
+  }
+
+  public String getSerde() {
+    return serde;
+  }
+
+  public Map<String, String> getSerdeProps() {
+    return serdeProps;
+  }
+}

Added: hive/trunk/ql/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor (added)
+++ hive/trunk/ql/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor Mon Jul 14 16:01:11 2014
@@ -0,0 +1,5 @@
+org.apache.hadoop.hive.ql.io.TextFileStorageFormatDescriptor
+org.apache.hadoop.hive.ql.io.SequenceFileStorageFormatDescriptor
+org.apache.hadoop.hive.ql.io.RCFileStorageFormatDescriptor
+org.apache.hadoop.hive.ql.io.ORCFileStorageFormatDescriptor
+org.apache.hadoop.hive.ql.io.ParquetFileStorageFormatDescriptor

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestStorageFormatDescriptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestStorageFormatDescriptor.java?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestStorageFormatDescriptor.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestStorageFormatDescriptor.java Mon Jul 14 16:01:11 2014
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import junit.framework.Assert;
+
+import org.junit.Test;
+
+import com.google.common.collect.Sets;
+
+public class TestStorageFormatDescriptor {
+
+  @Test
+  public void testNames() {
+    Assert.assertEquals(Sets.newHashSet(IOConstants.TEXTFILE),
+        (new TextFileStorageFormatDescriptor()).getNames());
+    Assert.assertEquals(Sets.newHashSet(IOConstants.SEQUENCEFILE),
+        (new SequenceFileStorageFormatDescriptor()).getNames());
+    Assert.assertEquals(Sets.newHashSet(IOConstants.RCFILE),
+        (new RCFileStorageFormatDescriptor()).getNames());
+    Assert.assertEquals(Sets.newHashSet(IOConstants.ORC, IOConstants.ORCFILE),
+        (new ORCFileStorageFormatDescriptor()).getNames());
+    Assert.assertEquals(Sets.newHashSet(IOConstants.PARQUET, IOConstants.PARQUETFILE),
+        (new ParquetFileStorageFormatDescriptor()).getNames());
+  }
+}

Added: hive/trunk/ql/src/test/queries/clientpositive/storage_format_descriptor.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/storage_format_descriptor.q?rev=1610452&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/storage_format_descriptor.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/storage_format_descriptor.q Mon Jul 14 16:01:11 2014
@@ -0,0 +1,5 @@
+DROP TABLE stored_as_custom_text_serde;
+CREATE TABLE stored_as_custom_text_serde(key string, value string) STORED AS customtextserde;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE stored_as_custom_text_serde;
+SELECT * FROM stored_as_custom_text_serde ORDER BY key, value;
+DROP TABLE stored_as_custom_text_serde;

Modified: hive/trunk/ql/src/test/results/clientnegative/fileformat_bad_class.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/fileformat_bad_class.q.out?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/fileformat_bad_class.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/fileformat_bad_class.q.out Mon Jul 14 16:01:11 2014
@@ -1 +1 @@
-FAILED: SemanticException [Error 10055]: Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat
+FAILED: SemanticException Cannot find class 'ClassDoesNotExist'

Modified: hive/trunk/ql/src/test/results/clientnegative/genericFileFormat.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/genericFileFormat.q.out?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/genericFileFormat.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/genericFileFormat.q.out Mon Jul 14 16:01:11 2014
@@ -1 +1 @@
-FAILED: SemanticException Unrecognized file format in STORED AS clause: foo
+FAILED: SemanticException Unrecognized file format in STORED AS clause: 'FOO'

Modified: hive/trunk/ql/src/test/results/clientpositive/create_union_table.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/create_union_table.q.out?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/create_union_table.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/create_union_table.q.out Mon Jul 14 16:01:11 2014
@@ -14,6 +14,7 @@ STAGE PLANS:
           columns: mydata uniontype<int,double,array<string>,struct<a:int,b:string>>, strct struct<a:int,b:string,c:string>
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: abc
 
 PREHOOK: query: create table abc(mydata uniontype<int,double,array<string>,struct<a:int,b:string>>,

Modified: hive/trunk/ql/src/test/results/clientpositive/ctas.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ctas.q.out?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ctas.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/ctas.q.out Mon Jul 14 16:01:11 2014
@@ -95,6 +95,7 @@ STAGE PLANS:
           columns: k string, value string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: nzhang_CTAS1
 
   Stage: Stage-3
@@ -239,6 +240,7 @@ STAGE PLANS:
           columns: key string, value string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: nzhang_ctas2
 
   Stage: Stage-3
@@ -594,6 +596,7 @@ STAGE PLANS:
           field delimiter: ,
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: nzhang_ctas4
 
   Stage: Stage-3
@@ -677,7 +680,8 @@ TOK_CREATETABLE
             ','
          TOK_TABLEROWFORMATLINES
             '\012'
-   TOK_TBLTEXTFILE
+   TOK_FILEFORMAT_GENERIC
+      textfile
    TOK_QUERY
       TOK_FROM
          TOK_TABREF
@@ -891,6 +895,7 @@ STAGE PLANS:
           line delimiter: 
 
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: nzhang_ctas5
 
   Stage: Stage-3

Modified: hive/trunk/ql/src/test/results/clientpositive/ctas_colname.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ctas_colname.q.out?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ctas_colname.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/ctas_colname.q.out Mon Jul 14 16:01:11 2014
@@ -71,6 +71,7 @@ STAGE PLANS:
           columns: _col0 double, _col1 bigint, _c1 double, _c2 bigint
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: summary
 
   Stage: Stage-2
@@ -187,6 +188,7 @@ STAGE PLANS:
           columns: key string, value string, rr int
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: x4
 
   Stage: Stage-2
@@ -351,6 +353,7 @@ STAGE PLANS:
           columns: key string, value string, lead1 string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: x5
 
   Stage: Stage-3
@@ -493,6 +496,7 @@ STAGE PLANS:
           columns: _col0 string, _c1 string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: x6
 
   Stage: Stage-2
@@ -614,6 +618,7 @@ STAGE PLANS:
           columns: _col0 string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: x7
 
   Stage: Stage-2
@@ -1045,6 +1050,7 @@ STAGE PLANS:
           columns: _col0 string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: x8
 
   Stage: Stage-2
@@ -1178,6 +1184,7 @@ STAGE PLANS:
           columns: _c0 string, key string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: x9
 
   Stage: Stage-2

Modified: hive/trunk/ql/src/test/results/clientpositive/ctas_uses_database_location.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ctas_uses_database_location.q.out?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ctas_uses_database_location.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/ctas_uses_database_location.q.out Mon Jul 14 16:01:11 2014
@@ -74,6 +74,7 @@ STAGE PLANS:
           columns: key string, value string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: table_db1
 
   Stage: Stage-2

Modified: hive/trunk/ql/src/test/results/clientpositive/groupby_duplicate_key.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/groupby_duplicate_key.q.out?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/groupby_duplicate_key.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/groupby_duplicate_key.q.out Mon Jul 14 16:01:11 2014
@@ -139,6 +139,7 @@ STAGE PLANS:
           columns: key string, dummy1 string, dummy2 string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: dummy
 
   Stage: Stage-2

Modified: hive/trunk/ql/src/test/results/clientpositive/input15.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/input15.q.out?rev=1610452&r1=1610451&r2=1610452&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/input15.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/input15.q.out Mon Jul 14 16:01:11 2014
@@ -15,6 +15,7 @@ STAGE PLANS:
           field delimiter: 	
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: TEST15
 
 PREHOOK: query: CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE



Mime
View raw message