incubator-hcatalog-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From khorg...@apache.org
Subject svn commit: r1151295 [2/2] - in /incubator/hcatalog/trunk/src: java/org/apache/hcatalog/common/ java/org/apache/hcatalog/mapreduce/ java/org/apache/hcatalog/pig/ java/org/apache/hcatalog/rcfile/ test/org/apache/hcatalog/listener/ test/org/apache/hcatal...
Date Tue, 26 Jul 2011 23:39:28 GMT
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java?rev=1151295&r1=1151294&r2=1151295&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java Tue Jul 26 23:39:25 2011
@@ -17,10 +17,6 @@
  */
 package org.apache.hcatalog.pig;
 
-import java.io.IOException;
-import java.util.List;
-import java.util.Properties;
-
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Table;
@@ -31,7 +27,7 @@ import org.apache.hcatalog.common.HCatUt
 import org.apache.hcatalog.data.Pair;
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hcatalog.mapreduce.HCatTableInfo;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
 import org.apache.pig.Expression;
 import org.apache.pig.Expression.BinaryExpression;
 import org.apache.pig.LoadFunc;
@@ -39,6 +35,10 @@ import org.apache.pig.PigException;
 import org.apache.pig.ResourceSchema;
 import org.apache.pig.impl.util.UDFContext;
 
+import java.io.IOException;
+import java.util.List;
+import java.util.Properties;
+
 /**
  * Pig {@link LoadFunc} to read data from HCat
  */
@@ -82,14 +82,12 @@ public class HCatLoader extends HCatBase
     // in the hadoop front end mapred.task.id property will not be set in
     // the Configuration
     if (!HCatUtil.checkJobContextIfRunningFromBackend(job)){
-
-      HCatInputFormat.setInput(job, HCatTableInfo.getInputTableInfo(
-              hcatServerUri!=null ? hcatServerUri :
-                  (hcatServerUri = PigHCatUtil.getHCatServerUri(job)),
-              PigHCatUtil.getHCatServerPrincipal(job),
-              dbName,
-              tableName,
-              getPartitionFilterString()));
+      HCatInputFormat.setInput(job,
+                                            InputJobInfo.create(dbName,
+                                                                         tableName,
+                                                                         getPartitionFilterString(),
+                                                                         hcatServerUri != null ? hcatServerUri : (hcatServerUri = PigHCatUtil.getHCatServerUri(job)),
+                                                                         PigHCatUtil.getHCatServerPrincipal(job)));
     }
 
     // Need to also push projections by calling setOutputSchema on

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatStorer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatStorer.java?rev=1151295&r1=1151294&r2=1151295&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatStorer.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatStorer.java Tue Jul 26 23:39:25 2011
@@ -18,9 +18,6 @@
 
 package org.apache.hcatalog.pig;
 
-import java.io.IOException;
-import java.util.Properties;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputFormat;
@@ -30,7 +27,7 @@ import org.apache.hcatalog.common.HCatUt
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.mapreduce.HCatOutputCommitter;
 import org.apache.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hcatalog.mapreduce.HCatTableInfo;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
 import org.apache.pig.PigException;
 import org.apache.pig.ResourceSchema;
 import org.apache.pig.impl.logicalLayer.FrontendException;
@@ -38,6 +35,9 @@ import org.apache.pig.impl.logicalLayer.
 import org.apache.pig.impl.util.ObjectSerializer;
 import org.apache.pig.impl.util.UDFContext;
 
+import java.io.IOException;
+import java.util.Properties;
+
 /**
  * HCatStorer.
  *
@@ -72,13 +72,20 @@ public class HCatStorer extends HCatBase
     Properties p = UDFContext.getUDFContext().getUDFProperties(this.getClass(), new String[]{sign});
 
     String[] userStr = location.split("\\.");
-    HCatTableInfo tblInfo;
+    OutputJobInfo outputJobInfo;
+
     if(userStr.length == 2) {
-      tblInfo = HCatTableInfo.getOutputTableInfo(PigHCatUtil.getHCatServerUri(job),
-          PigHCatUtil.getHCatServerPrincipal(job), userStr[0],userStr[1],partitions);
+      outputJobInfo = OutputJobInfo.create(userStr[0],
+                                                             userStr[1],
+                                                             partitions,
+                                                             PigHCatUtil.getHCatServerUri(job),
+                                                             PigHCatUtil.getHCatServerPrincipal(job));
     } else {
-      tblInfo = HCatTableInfo.getOutputTableInfo(PigHCatUtil.getHCatServerUri(job),
-          PigHCatUtil.getHCatServerPrincipal(job), null,userStr[0],partitions);
+      outputJobInfo = OutputJobInfo.create(null,
+                                                             userStr[0],
+                                                             partitions,
+                                                             PigHCatUtil.getHCatServerUri(job),
+                                                             PigHCatUtil.getHCatServerPrincipal(job));
     }
 
 
@@ -94,7 +101,7 @@ public class HCatStorer extends HCatBase
         throw new FrontendException("Schema for data cannot be determined.", PigHCatUtil.PIG_EXCEPTION_CODE);
       }
       try{
-        HCatOutputFormat.setOutput(job, tblInfo);
+        HCatOutputFormat.setOutput(job, outputJobInfo);
       } catch(HCatException he) {
           // pass the message to the user - essentially something about the table
           // information passed to HCatOutputFormat was not right

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/rcfile/RCFileInputDriver.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/rcfile/RCFileInputDriver.java?rev=1151295&r1=1151294&r2=1151295&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/rcfile/RCFileInputDriver.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/rcfile/RCFileInputDriver.java Tue Jul 26 23:39:25 2011
@@ -17,16 +17,6 @@
  */
 package org.apache.hcatalog.rcfile;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Map.Entry;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -38,12 +28,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarStruct;
-import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StructField;
-import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.*;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -56,6 +41,10 @@ import org.apache.hcatalog.data.schema.H
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.mapreduce.HCatInputStorageDriver;
 
+import java.io.IOException;
+import java.util.*;
+import java.util.Map.Entry;
+
 public class RCFileInputDriver extends HCatInputStorageDriver{
 
 

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/listener/TestNotificationListener.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/listener/TestNotificationListener.java?rev=1151295&r1=1151294&r2=1151295&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/listener/TestNotificationListener.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/listener/TestNotificationListener.java Tue Jul 26 23:39:25 2011
@@ -18,45 +18,25 @@
 
 package org.apache.hcatalog.listener;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import javax.jms.Connection;
-import javax.jms.ConnectionFactory;
-import javax.jms.Destination;
-import javax.jms.JMSException;
-import javax.jms.MapMessage;
-import javax.jms.Message;
-import javax.jms.MessageConsumer;
-import javax.jms.MessageListener;
-import javax.jms.ObjectMessage;
-import javax.jms.Session;
-
+import junit.framework.TestCase;
 import org.apache.activemq.ActiveMQConnectionFactory;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.PartitionEventType;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.UnknownDBException;
-import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
-import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hcatalog.common.HCatConstants;
 import org.apache.thrift.TException;
 
-import junit.framework.TestCase;
+import javax.jms.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
 
 public class TestNotificationListener extends TestCase implements MessageListener{
 
@@ -101,7 +81,7 @@ public class TestNotificationListener ex
 		super.tearDown();
 	}
 
-	public void testAMQListener() throws MetaException, TException, UnknownTableException, NoSuchObjectException,

+	public void testAMQListener() throws MetaException, TException, UnknownTableException, NoSuchObjectException,
 	CommandNeedRetryException, UnknownDBException, InvalidPartitionException, UnknownPartitionException{
 		driver.run("create database mydb");
 		driver.run("use mydb");

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java?rev=1151295&r1=1151294&r2=1151295&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java Tue Jul 26 23:39:25 2011
@@ -263,8 +263,8 @@ public abstract class HCatMapReduceTest 
 
     job.setOutputFormatClass(HCatOutputFormat.class);
 
-    HCatTableInfo outputInfo = HCatTableInfo.getOutputTableInfo(thriftUri, null, dbName, tableName, partitionValues);
-    HCatOutputFormat.setOutput(job, outputInfo);
+    OutputJobInfo outputJobInfo = OutputJobInfo.create(dbName, tableName, partitionValues, thriftUri, null);
+    HCatOutputFormat.setOutput(job, outputJobInfo);
 
     job.setMapOutputKeyClass(BytesWritable.class);
     job.setMapOutputValueClass(DefaultHCatRecord.class);
@@ -300,9 +300,8 @@ public abstract class HCatMapReduceTest 
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);
 
-    HCatTableInfo inputInfo = HCatTableInfo.getInputTableInfo(
-          thriftUri, null, dbName, tableName, filter);
-    HCatInputFormat.setInput(job, inputInfo);
+    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,filter,thriftUri,null);
+    HCatInputFormat.setInput(job, inputJobInfo);
 
     job.setMapOutputKeyClass(BytesWritable.class);
     job.setMapOutputValueClass(Text.class);
@@ -333,8 +332,8 @@ public abstract class HCatMapReduceTest 
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);
 
-    HCatTableInfo inputInfo = HCatTableInfo.getInputTableInfo(thriftUri, null, dbName, tableName);
-    HCatInputFormat.setInput(job, inputInfo);
+    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,null,thriftUri,null);
+    HCatInputFormat.setInput(job, inputJobInfo);
 
     return HCatInputFormat.getTableSchema(job);
   }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java?rev=1151295&r1=1151294&r2=1151295&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java Tue Jul 26 23:39:25 2011
@@ -41,12 +41,6 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hcatalog.common.HCatConstants;
-import org.apache.hcatalog.mapreduce.HCatOutputCommitter;
-import org.apache.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hcatalog.mapreduce.HCatTableInfo;
-import org.apache.hcatalog.mapreduce.InitializeInput;
-import org.apache.hcatalog.mapreduce.OutputJobInfo;
-import org.apache.hcatalog.mapreduce.StorerInfo;
 import org.apache.hcatalog.rcfile.RCFileOutputDriver;
 
 public class TestHCatOutputFormat extends TestCase {
@@ -142,20 +136,19 @@ public class TestHCatOutputFormat extend
     Map<String, String> partitionValues = new HashMap<String, String>();
     partitionValues.put("colname", "p1");
     //null server url means local mode
-    HCatTableInfo info = HCatTableInfo.getOutputTableInfo(null, null, dbName, tblName, partitionValues);
+    OutputJobInfo info = OutputJobInfo.create(dbName, tblName, partitionValues, null, null);
 
     HCatOutputFormat.setOutput(job, info);
     OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(job);
 
     assertNotNull(jobInfo.getTableInfo());
-    assertEquals(1, jobInfo.getTableInfo().getPartitionValues().size());
-    assertEquals("p1", jobInfo.getTableInfo().getPartitionValues().get("colname"));
-    assertEquals(1, jobInfo.getTableSchema().getFields().size());
-    assertEquals("colname", jobInfo.getTableSchema().getFields().get(0).getName());
+    assertEquals(1, jobInfo.getPartitionValues().size());
+    assertEquals("p1", jobInfo.getPartitionValues().get("colname"));
+    assertEquals(1, jobInfo.getTableInfo().getDataColumns().getFields().size());
+    assertEquals("colname", jobInfo.getTableInfo().getDataColumns().getFields().get(0).getName());
 
-    StorerInfo storer = jobInfo.getStorerInfo();
+    StorerInfo storer = jobInfo.getTableInfo().getStorerInfo();
     assertEquals(RCFileOutputDriver.class.getName(), storer.getOutputSDClass());
-
     publishTest(job);
   }
 



Mime
View raw message