hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From khorg...@apache.org
Subject hive git commit: HIVE-8838 : Support Parquet through HCatalog (Adam Szita, reviewed by Sergio Peña, Aihua Xu & Sushanth Sowmyan)
Date Wed, 12 Jul 2017 18:10:58 GMT
Repository: hive
Updated Branches:
  refs/heads/master b6c15bc72 -> fadfcc67c


HIVE-8838 : Support Parquet through HCatalog (Adam Szita, reviewed by Sergio Peña, Aihua
Xu & Sushanth Sowmyan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fadfcc67
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fadfcc67
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fadfcc67

Branch: refs/heads/master
Commit: fadfcc67c98a019a63f36a138b3c333b80d1e074
Parents: b6c15bc
Author: Sushanth Sowmyan <khorgath@gmail.com>
Authored: Wed Jul 12 11:09:25 2017 -0700
Committer: Sushanth Sowmyan <khorgath@gmail.com>
Committed: Wed Jul 12 11:10:55 2017 -0700

----------------------------------------------------------------------
 .../mapreduce/FileRecordWriterContainer.java    |   7 +-
 .../hive/hcatalog/mapreduce/SpecialCases.java   |  27 +++
 .../pig/TestHCatLoaderComplexSchema.java        |   3 -
 .../hcatalog/pig/TestHCatLoaderEncryption.java  |   9 +-
 .../hive/hcatalog/pig/TestHCatStorerMulti.java  |  10 +-
 .../hcatalog/pig/TestParquetHCatLoader.java     |  49 ------
 .../hcatalog/pig/TestParquetHCatStorer.java     | 167 -------------------
 .../io/parquet/MapredParquetOutputFormat.java   |  11 +-
 .../ql/io/parquet/serde/ParquetTableUtils.java  |  22 +++
 .../write/ParquetRecordWriterWrapper.java       |  24 +++
 .../parquet/TestMapredParquetOutputFormat.java  |  10 --
 11 files changed, 89 insertions(+), 250 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
index b2abc5f..d547a2b 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
@@ -27,6 +27,7 @@ import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.io.parquet.write.ParquetRecordWriterWrapper;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -117,10 +118,10 @@ abstract class FileRecordWriterContainer extends RecordWriterContainer {
       value.remove(colToDel);
     }
 
-    // The key given by user is ignored
     try {
-      localWriter.write(NullWritable.get(),
-          localSerDe.serialize(value.getAll(), localObjectInspector));
+      // The key given by user is ignored - in case of Parquet we need to supply null
+      Object keyToWrite = localWriter instanceof ParquetRecordWriterWrapper ? null : NullWritable.get();
+      localWriter.write(keyToWrite, localSerDe.serialize(value.getAll(), localObjectInspector));
     } catch (SerDeException e) {
       throw new IOException("Failed to serialize object", e);
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
index 60af5c0..6d82ef9 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
@@ -24,6 +24,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
 import org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat;
+import org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat;
+import org.apache.hadoop.hive.ql.io.parquet.convert.HiveSchemaConverter;
+import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetTableUtils;
+import org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriteSupport;
 import org.apache.orc.OrcConf;
 import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
 import org.apache.hadoop.hive.serde2.avro.AvroSerDe;
@@ -38,6 +42,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
+import com.google.common.collect.Maps;
+
 /**
  * This class is a place to put all the code associated with
  * Special cases. If there is a corner case required to make
@@ -120,6 +126,27 @@ public class SpecialCases {
       }
 
 
+    } else if (ofclass == MapredParquetOutputFormat.class) {
+      //Handle table properties
+      Properties tblProperties = new Properties();
+      Map<String, String> tableProps = jobInfo.getTableInfo().getTable().getParameters();
+      for (String key : tableProps.keySet()) {
+        if (ParquetTableUtils.isParquetProperty(key)) {
+          tblProperties.put(key, tableProps.get(key));
+        }
+      }
+      
+      //Handle table schema
+      List<String> colNames = jobInfo.getOutputSchema().getFieldNames();
+      List<TypeInfo> colTypes = new ArrayList<TypeInfo>();
+      for (HCatFieldSchema field : jobInfo.getOutputSchema().getFields()){
+        colTypes.add(TypeInfoUtils.getTypeInfoFromTypeString(field.getTypeString()));
+      }
+      String parquetSchema = HiveSchemaConverter.convert(colNames, colTypes).toString();
+      jobProperties.put(DataWritableWriteSupport.PARQUET_HIVE_SCHEMA, parquetSchema);
+
+      jobProperties.putAll(Maps.fromProperties(tblProperties));
+
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
index 4c686fe..ea9cdda 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
@@ -77,9 +77,6 @@ public class TestHCatLoaderComplexSchema {
           add("testMapNullKey");
         }});
         put(IOConstants.PARQUETFILE, new HashSet<String>() {{
-          add("testSyntheticComplexSchema");
-          add("testTupleInBagInTupleInBag");
-          add("testMapWithComplexData");
           add("testMapNullKey");
         }});
       }};

http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index ad11eab..903578b 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -101,14 +101,7 @@ public class TestHCatLoaderEncryption {
   private static List<HCatRecord> readRecords = new ArrayList<HCatRecord>();
 
   private static final Map<String, Set<String>> DISABLED_STORAGE_FORMATS =
-      new HashMap<String, Set<String>>() {{
-        put(IOConstants.PARQUETFILE, new HashSet<String>() {{
-          add("testReadDataBasic");
-          add("testReadPartitionedBasic");
-          add("testProjectionsBasic");
-          add("testReadDataFromEncryptedHiveTable");
-        }});
-      }};
+      new HashMap<String, Set<String>>();
 
   private String storageFormat;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
index 918332d..40ea923 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
@@ -24,7 +24,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
@@ -33,7 +32,6 @@ import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -67,13 +65,7 @@ public class TestHCatStorerMulti {
   private static Map<Integer, Pair<Integer, String>> basicInputData;
 
   private static final Map<String, Set<String>> DISABLED_STORAGE_FORMATS =
-      new HashMap<String, Set<String>>() {{
-        put(IOConstants.PARQUETFILE, new HashSet<String>() {{
-          add("testStoreBasicTable");
-          add("testStorePartitionedTable");
-          add("testStoreTableMulti");
-        }});
-      }};
+      new HashMap<String, Set<String>>();
 
   private final String storageFormat;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
index 6cd3821..082a6c2 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
@@ -33,53 +33,4 @@ public class TestParquetHCatLoader extends AbstractHCatLoaderTest {
   String getStorageFormat() {
     return IOConstants.PARQUET;
   }
-
-  @Override
-  @Test
-  @Ignore("Temporarily disable until fixed")
-  public void testReadDataBasic() throws IOException {
-    super.testReadDataBasic();
-  }
-
-  @Override
-  @Test
-  @Ignore("Temporarily disable until fixed")
-  public void testReadPartitionedBasic() throws IOException, CommandNeedRetryException {
-    super.testReadPartitionedBasic();
-  }
-
-  @Override
-  @Test
-  @Ignore("Temporarily disable until fixed")
-  public void testProjectionsBasic() throws IOException {
-    super.testProjectionsBasic();
-  }
-
-  /**
-   * Tests the failure case caused by HIVE-10752
-   * @throws Exception
-   */
-  @Override
-  @Test
-  @Ignore("Temporarily disable until fixed")
-  public void testColumnarStorePushdown2() throws Exception {
-    super.testColumnarStorePushdown2();
-  }
-
-  @Override
-  @Test
-  @Ignore("Temporarily disable until fixed")
-  public void testReadMissingPartitionBasicNeg() throws IOException, CommandNeedRetryException {
-    super.testReadMissingPartitionBasicNeg();
-  }
-
-  /**
-   * Test if we can read a date partitioned table
-   */
-  @Override
-  @Test
-  @Ignore("Temporarily disable until fixed")
-  public void testDatePartitionPushUp() throws Exception {
-    super.testDatePartitionPushUp();
-  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatStorer.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatStorer.java
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatStorer.java
index 6dfdc04..1f67e21 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatStorer.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatStorer.java
@@ -18,12 +18,8 @@
  */
 package org.apache.hive.hcatalog.pig;
 
-import java.io.IOException;
 
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.io.IOConstants;
-import org.junit.Ignore;
-import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -34,167 +30,4 @@ public class TestParquetHCatStorer extends AbstractHCatStorerTest {
   String getStorageFormat() {
     return IOConstants.PARQUETFILE;
   }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testBagNStruct() throws IOException, CommandNeedRetryException {
-    super.testBagNStruct();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testDateCharTypes() throws Exception {
-    super.testDateCharTypes();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException,
-      CommandNeedRetryException {
-    super.testDynamicPartitioningMultiPartColsInDataNoSpec();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException,
-      CommandNeedRetryException {
-    super.testDynamicPartitioningMultiPartColsInDataPartialSpec();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testMultiPartColsInData() throws Exception {
-    super.testMultiPartColsInData();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testPartColsInData() throws IOException, CommandNeedRetryException {
-    super.testPartColsInData();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException {
-    super.testStoreFuncAllSimpleTypes();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testStoreFuncSimple() throws IOException, CommandNeedRetryException {
-    super.testStoreFuncSimple();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testStoreInPartiitonedTbl() throws Exception {
-    super.testStoreInPartiitonedTbl();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testStoreMultiTables() throws IOException, CommandNeedRetryException {
-    super.testStoreMultiTables();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException {
-    super.testStoreWithNoCtorArgs();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException {
-    super.testStoreWithNoSchema();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteChar() throws Exception {
-    super.testWriteChar();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteDate() throws Exception {
-    super.testWriteDate();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteDate2() throws Exception {
-    super.testWriteDate2();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteDate3() throws Exception {
-    super.testWriteDate3();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteDecimal() throws Exception {
-    super.testWriteDecimal();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteDecimalX() throws Exception {
-    super.testWriteDecimalX();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteDecimalXY() throws Exception {
-    super.testWriteDecimalXY();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteSmallint() throws Exception {
-    super.testWriteSmallint();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteTimestamp() throws Exception {
-    super.testWriteTimestamp();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteTinyint() throws Exception {
-    super.testWriteTinyint();
-  }
-
-  @Test
-  @Override
-  @Ignore("Temporarily disable until fixed")
-  public void testWriteVarchar() throws Exception {
-    super.testWriteVarchar();
-  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
index 379a913..dd8247c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
@@ -68,6 +68,15 @@ public class MapredParquetOutputFormat extends FileOutputFormat<NullWritable, Pa
     realOutputFormat.checkOutputSpecs(ShimLoader.getHadoopShims().getHCatShim().createJobContext(job, null));
   }
 
+  /**
+   *
+   * @param ignored Unused parameter
+   * @param job JobConf - expecting mandatory parameter PARQUET_HIVE_SCHEMA
+   * @param name Path to write to
+   * @param progress Progress
+   * @return
+   * @throws IOException
+   */
   @Override
   public RecordWriter<NullWritable, ParquetHiveRecord> getRecordWriter(
       final FileSystem ignored,
@@ -75,7 +84,7 @@ public class MapredParquetOutputFormat extends FileOutputFormat<NullWritable, Pa
       final String name,
       final Progressable progress
       ) throws IOException {
-    throw new RuntimeException("Should never be used");
+    return new ParquetRecordWriterWrapper(realOutputFormat, job, name, progress);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetTableUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetTableUtils.java
b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetTableUtils.java
new file mode 100644
index 0000000..cb3b16c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetTableUtils.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.io.parquet.serde;
+
+public class ParquetTableUtils {
+
+    public static boolean isParquetProperty(String key) {
+        return key.startsWith("parquet.");
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
index c021daf..af9393e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
@@ -14,12 +14,15 @@
 package org.apache.hadoop.hive.ql.io.parquet.write;
 
 import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
 import java.util.Properties;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetTableUtils;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
@@ -72,6 +75,27 @@ public class ParquetRecordWriterWrapper implements RecordWriter<NullWritable, Pa
     }
   }
 
+  public ParquetRecordWriterWrapper(
+          final ParquetOutputFormat<ParquetHiveRecord> realOutputFormat,
+          final JobConf jobConf,
+          final String name,
+          final Progressable progress) throws IOException {
+    this(realOutputFormat, jobConf, name, progress, getParquetProperties(jobConf));
+  }
+
+  private static Properties getParquetProperties(JobConf jobConf) {
+    Properties tblProperties = new Properties();
+    Iterator<Map.Entry<String, String>> it = jobConf.iterator();
+    while (it.hasNext()) {
+      Map.Entry<String, String> entry = it.next();
+      if (ParquetTableUtils.isParquetProperty(entry.getKey())) {
+        tblProperties.put(entry.getKey(), entry.getValue());
+      }
+    }
+    return tblProperties;
+  }
+
+
   private void initializeSerProperties(JobContext job, Properties tableProperties) {
     String blockSize = tableProperties.getProperty(ParquetOutputFormat.BLOCK_SIZE);
     Configuration conf = ContextUtil.getConfiguration(job);

http://git-wip-us.apache.org/repos/asf/hive/blob/fadfcc67/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java
b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java
index ec85b5d..a9086ba 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java
@@ -44,16 +44,6 @@ public class TestMapredParquetOutputFormat {
     new MapredParquetOutputFormat((ParquetOutputFormat<ParquetHiveRecord>) mock(ParquetOutputFormat.class));
   }
 
-  @Test
-  public void testGetRecordWriterThrowsException() {
-    try {
-      new MapredParquetOutputFormat().getRecordWriter(null, null, null, null);
-      fail("should throw runtime exception.");
-    } catch (Exception e) {
-      assertEquals("Should never be used", e.getMessage());
-    }
-  }
-
   @SuppressWarnings("unchecked")
   @Test
   public void testGetHiveRecordWriter() throws IOException {


Mime
View raw message