incubator-hcatalog-commits mailing list archives

From ga...@apache.org
Subject svn commit: r1307163 - in /incubator/hcatalog/branches/branch-0.4: CHANGES.txt src/java/org/apache/hcatalog/common/HCatUtil.java src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java
Date Thu, 29 Mar 2012 23:25:48 GMT
Author: gates
Date: Thu Mar 29 23:25:48 2012
New Revision: 1307163

URL: http://svn.apache.org/viewvc?rev=1307163&view=rev
Log:
HCATALOG-348 HCatUtil::getHiveConf(Configuration) ignores passed-in conf while creating HiveConf
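For context: before this patch, getHiveConf built the HiveConf with the no-arg
constructor, so any settings already present on the caller's Configuration (for
example an overridden metastore URI) were silently dropped. A minimal sketch of
the difference between the two constructors, assuming the Hive and Hadoop jars
are on the classpath; the class name and property value here are illustrative
only, not part of this commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;

    public class HiveConfCtorDemo {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            // Hypothetical caller-supplied setting.
            conf.set("hive.metastore.uris", "thrift://example.host:9083");

            // Old behavior: the no-arg constructor loads only the defaults and
            // hive-site.xml, so the caller's setting is lost.
            HiveConf dropped = new HiveConf();
            System.out.println("no-arg ctor:  " + dropped.get("hive.metastore.uris"));

            // New behavior: the (Configuration, Class) constructor copies the
            // caller's properties into the new HiveConf.
            HiveConf honored = new HiveConf(conf, HiveConfCtorDemo.class);
            System.out.println("copying ctor: " + honored.get("hive.metastore.uris"));
        }
    }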

Added:
    incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java
Modified:
    incubator/hcatalog/branches/branch-0.4/CHANGES.txt
    incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/common/HCatUtil.java

Modified: incubator/hcatalog/branches/branch-0.4/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/CHANGES.txt?rev=1307163&r1=1307162&r2=1307163&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/CHANGES.txt (original)
+++ incubator/hcatalog/branches/branch-0.4/CHANGES.txt Thu Mar 29 23:25:48 2012
@@ -92,6 +92,8 @@ Release 0.4.0 - Unreleased
   OPTIMIZATIONS
 
   BUG FIXES
+  HCAT-348 HCatUtil::getHiveConf(Configuration) ignores passed-in conf while creating HiveConf (gates)
+
   HCAT-312 Hcatalog rpms change the ownership of /usr/lib64 to hcat user (gkesavan via gates)
 
   HCAT-301 Reading from a table created with upper case table name throws error. (rohini via toffer)

Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/common/HCatUtil.java?rev=1307163&r1=1307162&r2=1307163&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/common/HCatUtil.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/common/HCatUtil.java Thu Mar 29 23:25:48 2012
@@ -627,7 +627,7 @@ public class HCatUtil {
     public static HiveConf getHiveConf(Configuration conf) 
       throws IOException {
 
-      HiveConf hiveConf = new HiveConf();
+      HiveConf hiveConf = new HiveConf(conf, HCatUtil.class);
 
       //copy the hive conf into the job conf and restore it
       //in the backend context

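From a caller's perspective, the one-line change above means settings applied
to the job Configuration before calling HCatUtil.getHiveConf now survive into
the returned HiveConf. A hedged sketch of that effect, assuming the HCatalog
jars are on the classpath (the URI is made up):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hcatalog.common.HCatUtil;

    public class GetHiveConfDemo {
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            // Illustrative override on the job conf.
            conf.set("hive.metastore.uris", "thrift://metastore.example:9083");
            HiveConf hiveConf = HCatUtil.getHiveConf(conf);
            // With HCATALOG-348 applied this prints the URI set above rather
            // than whatever hive-site.xml provides.
            System.out.println(hiveConf.get("hive.metastore.uris"));
        }
    }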
Added: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java?rev=1307163&view=auto
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java (added)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java Thu Mar 29 23:25:48 2012
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.mapreduce;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hcatalog.HcatTestUtils;
+import org.apache.hcatalog.MiniCluster;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.hcatalog.data.DefaultHCatRecord;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.pig.ExecType;
+import org.apache.pig.PigServer;
+import org.apache.pig.data.Tuple;
+import org.junit.Test;
+
+public class TestPassProperties {
+  private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
+      "/build/test/data/" + TestSequenceFileReadWrite.class.getCanonicalName();
+  private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
+  private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
+
+    private static MiniCluster cluster = MiniCluster.buildCluster();
+    private static Driver driver;
+    private static String[] input;
+    private static HiveConf hiveConf;
+    private static final String basicFile = "/tmp/basic.input.data";
+
+    public void Initialize() throws Exception {
+        hiveConf = new HiveConf(this.getClass());
+        hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+        hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+        hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+        hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
+        driver = new Driver(hiveConf);
+        SessionState.start(new CliSessionState(hiveConf));
+
+        new File(TEST_WAREHOUSE_DIR).mkdirs();
+
+        int numRows = 3;
+        input = new String[numRows];
+        for (int i = 0; i < numRows; i++) {
+            String col1 = "a" + i;
+            String col2 = "b" + i;
+            input[i] = i + "," + col1 + "," + col2;
+        }
+        MiniCluster.deleteFile(cluster, basicFile);
+        MiniCluster.createInputFile(cluster, basicFile, input);
+    }
+
+    @Test
+    public void testSequenceTableWriteReadMR() throws Exception{
+        Initialize();
+        String createTable = "CREATE TABLE bad_props_table(a0 int, a1 String, a2 String) STORED AS SEQUENCEFILE";
+        driver.run("drop table bad_props_table");
+        int retCode1 = driver.run(createTable).getResponseCode();
+        assertTrue(retCode1 == 0);
+
+        boolean caughtException = false;
+        try {
+          Configuration conf = new Configuration();
+          conf.set("hive.metastore.uris", "thrift://no.such.machine:10888");
+          conf.set("hive.metastore.local", "false");
+          Job job = new Job(conf, "Write-hcat-seq-table");
+          job.setJarByClass(TestSequenceFileReadWrite.class);
+  
+          job.setMapperClass(Map.class);
+          job.setOutputKeyClass(NullWritable.class);
+          job.setOutputValueClass(DefaultHCatRecord.class);
+          job.setInputFormatClass(TextInputFormat.class);
+          TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
+  
+          HCatOutputFormat.setOutput(job, OutputJobInfo.create(
+                  MetaStoreUtils.DEFAULT_DATABASE_NAME, "bad_props_table", null));
+          job.setOutputFormatClass(HCatOutputFormat.class);
+          HCatOutputFormat.setSchema(job, getSchema());
+          job.setNumReduceTasks(0);
+          assertTrue(job.waitForCompletion(true));
+          new FileOutputCommitterContainer(job, null).cleanupJob(job);
+        } catch (Exception e) {
+            caughtException = true;
+            assertTrue(e.getMessage().contains(
+              "Could not connect to meta store using any of the URIs provided"));
+        }
+        assertTrue(caughtException);
+    }
+    
+    public static class Map extends Mapper<LongWritable, Text, NullWritable, DefaultHCatRecord>{
+
+      public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
+          String[] cols = value.toString().split(",");
+          DefaultHCatRecord record = new DefaultHCatRecord(3);
+          record.set(0,Integer.parseInt(cols[0]));
+          record.set(1,cols[1]);
+          record.set(2,cols[2]);
+          context.write(NullWritable.get(), record);
+      }
+    }
+
+  private HCatSchema getSchema() throws HCatException {
+      HCatSchema schema = new HCatSchema(new ArrayList<HCatFieldSchema>());
+      schema.append(new HCatFieldSchema("a0", HCatFieldSchema.Type.INT,
+              ""));
+      schema.append(new HCatFieldSchema("a1",
+              HCatFieldSchema.Type.STRING, ""));
+      schema.append(new HCatFieldSchema("a2",
+              HCatFieldSchema.Type.STRING, ""));
+      return schema;
+  }
+
+
+
+}
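Note how the new test exercises the fix indirectly: it sets hive.metastore.uris
to an unreachable host on the job Configuration, runs a small MapReduce job
through HCatOutputFormat, and asserts that the job fails with "Could not
connect to meta store using any of the URIs provided". That failure can only
occur if the caller's Configuration actually reaches the HiveConf used on the
HCatalog side, which is exactly what this patch guarantees.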


