incubator-hcatalog-commits mailing list archives

From macy...@apache.org
Subject svn commit: r1104614 [1/2] - in /incubator/hcatalog/trunk: bin/ src/java/org/apache/hcatalog/cli/SemanticAnalysis/ src/java/org/apache/hcatalog/common/ src/java/org/apache/hcatalog/data/ src/java/org/apache/hcatalog/data/schema/ src/java/org/apache/hca...
Date Tue, 17 May 2011 22:50:51 GMT
Author: macyang
Date: Tue May 17 22:50:49 2011
New Revision: 1104614

URL: http://svn.apache.org/viewvc?rev=1104614&view=rev
Log:
HCAT-15: committed https://issues.apache.org/jira/secure/attachment/12479413/HCAT-15.patch (Jakob via Mac)

Modified:
    incubator/hcatalog/trunk/bin/hcat.sh
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/ErrorType.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatException.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatArrayBag.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordable.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/schema/HCatSchema.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseInputFormat.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximInputFormat.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximOutputCommitter.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximOutputFormat.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatInputStorageDriver.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputStorageDriver.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatRecordWriter.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatSplit.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/JobInfo.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/PartInfo.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatBaseLoader.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatBaseStorer.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatStorer.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/PigHCatUtil.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/drivers/LoadFuncBasedInputDriver.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/rcfile/RCFileInputDriver.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestPermsGrp.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestUseDatabase.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatNonPartitioned.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatPartitioned.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/pig/TestHCatStorer.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/pig/TestPigStorageDriver.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/rcfile/TestRCFileOutputStorageDriver.java

Modified: incubator/hcatalog/trunk/bin/hcat.sh
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/bin/hcat.sh?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/bin/hcat.sh (original)
+++ incubator/hcatalog/trunk/bin/hcat.sh Tue May 17 22:50:49 2011
@@ -16,11 +16,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-HOWL_DIR=`dirname "$0"`
+HCAT_DIR=`dirname "$0"`
 
-HOWL_JAR_LOC=`find . -name "hcatalog*.jar"`
+HCAT_JAR_LOC=`find . -name "hcatalog*.jar"`
 
-HADOOP_CLASSPATH=$HADOOP_CLASSPATH:${HOWL_JAR_LOC}:../lib/commons-cli-2.0-SNAPSHOT.jar:../build/cli/hive-cli-0.7.0.jar:../ql/lib/antlr-runtime-3.0.1.jar
+HADOOP_CLASSPATH=$HADOOP_CLASSPATH:${HCAT_JAR_LOC}:../lib/commons-cli-2.0-SNAPSHOT.jar:../build/cli/hive-cli-0.7.0.jar:../ql/lib/antlr-runtime-3.0.1.jar
 
 export HADOOP_CLASSPATH=$HADOOP_CLASSPATH
 
@@ -32,7 +32,7 @@ HADOOP_OPTS="$HADOOP_OPTS -Dhive.metasto
 
 export HADOOP_OPTS=$HADOOP_OPTS
 
-exec $HADOOP_HOME/bin/hadoop jar  ${HOWL_JAR_LOC} org.apache.hcatalog.cli.HCatCli "$@"
+exec $HADOOP_HOME/bin/hadoop jar  ${HCAT_JAR_LOC} org.apache.hcatalog.cli.HCatCli "$@"
 
 # Above is the recommended way to launch the hcatalog cli. If it doesn't work, you can try the following:
-# java -Dhive.metastore.uris=thrift://localhost:9083 -cp ../lib/commons-logging-1.0.4.jar:../build/hadoopcore/hadoop-0.20.0/hadoop-0.20.0-core.jar:../lib/commons-cli-2.0-SNAPSHOT.jar:../build/cli/hive-cli-0.7.0.jar:../ql/lib/antlr-runtime-3.0.1.jar:$HOWL_JAR org.apache.hcatalog.cli.HCatCli "$@"
+# java -Dhive.metastore.uris=thrift://localhost:9083 -cp ../lib/commons-logging-1.0.4.jar:../build/hadoopcore/hadoop-0.20.0/hadoop-0.20.0-core.jar:../lib/commons-cli-2.0-SNAPSHOT.jar:../build/cli/hive-cli-0.7.0.jar:../ql/lib/antlr-runtime-3.0.1.jar:$HCAT_JAR_LOC org.apache.hcatalog.cli.HCatCli "$@"

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java Tue May 17 22:50:49 2011
@@ -87,16 +87,16 @@ public class AlterTableFileFormatHook ex
       List<Task<? extends Serializable>> rootTasks) throws SemanticException {
 
     Map<String,String> partSpec = ((DDLWork)rootTasks.get(rootTasks.size()-1).getWork()).getAlterTblDesc().getPartSpec();
-    Map<String, String> howlProps = new HashMap<String, String>(2);
-    howlProps.put(HCatConstants.HCAT_ISD_CLASS, inDriver);
-    howlProps.put(HCatConstants.HCAT_OSD_CLASS, outDriver);
+    Map<String, String> hcatProps = new HashMap<String, String>(2);
+    hcatProps.put(HCatConstants.HCAT_ISD_CLASS, inDriver);
+    hcatProps.put(HCatConstants.HCAT_OSD_CLASS, outDriver);
 
     try {
       Hive db = context.getHive();
       Table tbl = db.getTable(tableName);
       if(partSpec == null){
         // File format is for table; not for partition.
-        tbl.getTTable().getParameters().putAll(howlProps);
+        tbl.getTTable().getParameters().putAll(hcatProps);
         db.alterTable(tableName, tbl);
       }else{
         Partition part = db.getPartition(tbl,partSpec,false);
@@ -104,7 +104,7 @@ public class AlterTableFileFormatHook ex
         if(partParams == null){
           partParams = new HashMap<String, String>();
         }
-        partParams.putAll(howlProps);
+        partParams.putAll(hcatProps);
         part.getTPartition().setParameters(partParams);
         db.alterPartition(tableName, part);
       }

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java Tue May 17 22:50:49 2011
@@ -100,7 +100,7 @@ final class CreateTableHook  extends Abs
 //            throw new SemanticException(he);
 //          }
 //          if(!(tblProps.containsKey(InitializeInput.HOWL_ISD_CLASS) && tblProps.containsKey(InitializeInput.HOWL_OSD_CLASS))){
-//            throw new SemanticException("Operation not supported. Table "+likeTableName+" should have been created through Howl. Seems like its not.");
+//            throw new SemanticException("Operation not supported. Table "+likeTableName+" should have been created through HCat. Seems like it's not.");
 //          }
 //          return ast;
         }
@@ -168,7 +168,7 @@ final class CreateTableHook  extends Abs
 
     if(desc == null){
      // Desc will be null if it's CREATE TABLE LIKE. Desc will be contained
-      // in CreateTableLikeDesc. Currently, Howl disallows CTLT in pre-hook.
+      // in CreateTableLikeDesc. Currently, HCat disallows CTLT in pre-hook.
       // So, desc can never be null.
       return;
     }

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java Tue May 17 22:50:49 2011
@@ -54,7 +54,7 @@ public class HCatSemanticAnalyzer extend
     this.ast = ast;
     switch (ast.getToken().getType()) {
 
-    // Howl wants to intercept following tokens and special-handle them.
+    // HCat wants to intercept following tokens and special-handle them.
     case HiveParser.TOK_CREATETABLE:
       hook = new CreateTableHook();
       return hook.preAnalyze(context, ast);
@@ -63,13 +63,13 @@ public class HCatSemanticAnalyzer extend
       hook = new CreateDatabaseHook();
       return hook.preAnalyze(context, ast);
 
-    // DML commands used in Howl where we use the same implementation as default Hive.
+    // DML commands used in HCat where we use the same implementation as default Hive.
     case HiveParser.TOK_SHOWDATABASES:
     case HiveParser.TOK_DROPDATABASE:
     case HiveParser.TOK_SWITCHDATABASE:
       return ast;
 
-    // Howl will allow these operations to be performed since they are DDL statements.
+    // HCat will allow these operations to be performed since they are DDL statements.
     case HiveParser.TOK_DROPTABLE:
     case HiveParser.TOK_DESCTABLE:
     case HiveParser.TOK_ALTERTABLE_ADDCOLS:

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/ErrorType.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/ErrorType.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/ErrorType.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/ErrorType.java Tue May 17 22:50:49 2011
@@ -18,21 +18,21 @@
 package org.apache.hcatalog.common;
 
 /**
- * Enum type representing the various errors throws by Howl.
+ * Enum type representing the various errors thrown by HCat.
  */
 public enum ErrorType {
 
-    /* Howl Input Format related errors 1000 - 1999 */
+    /* HCat Input Format related errors 1000 - 1999 */
     ERROR_DB_INIT                       (1000, "Error initializing database session"),
     ERROR_EXCEED_MAXPART                (1001, "Query result exceeded maximum number of partitions allowed"),
 
 
-    /* Howl Output Format related errors 2000 - 2999 */
+    /* HCat Output Format related errors 2000 - 2999 */
     ERROR_INVALID_TABLE                 (2000, "Table specified does not exist"),
     ERROR_SET_OUTPUT                    (2001, "Error setting output information"),
     ERROR_DUPLICATE_PARTITION           (2002, "Partition already present with given partition key values"),
     ERROR_NON_EMPTY_TABLE               (2003, "Non-partitioned table already contains data"),
-    ERROR_NOT_INITIALIZED               (2004, "HowlOutputFormat not initialized, setOutput has to be called"),
+    ERROR_NOT_INITIALIZED               (2004, "HCatOutputFormat not initialized, setOutput has to be called"),
     ERROR_INIT_STORAGE_DRIVER           (2005, "Error initializing output storage driver instance"),
     ERROR_PUBLISHING_PARTITION          (2006, "Error adding partition to metastore"),
     ERROR_SCHEMA_COLUMN_MISMATCH        (2007, "Invalid column position in partition schema"),
@@ -48,7 +48,7 @@ public enum ErrorType {
 
     /* Miscellaneous errors, range 9000 - 9998 */
     ERROR_UNIMPLEMENTED                 (9000, "Functionality currently unimplemented"),
-    ERROR_INTERNAL_EXCEPTION            (9001, "Exception occurred while processing Howl request");
+    ERROR_INTERNAL_EXCEPTION            (9001, "Exception occurred while processing HCat request");
 
     /** The error code. */
     private int errorCode;

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java Tue May 17 22:50:49 2011
@@ -56,7 +56,7 @@ public final class HCatConstants {
 
   // IMPORTANT IMPORTANT IMPORTANT!!!!!
   //The keys used to store info into the job Configuration.
-  //If any new keys are added, the HowlStorer needs to be updated. The HowlStorer
+  //If any new keys are added, the HCatStorer needs to be updated. The HCatStorer
   //updates the job configuration in the backend to insert these keys to avoid
   //having to call setOutput from the backend (which would cause a metastore call
   //from the map jobs)

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatException.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatException.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatException.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatException.java Tue May 17 22:50:49 2011
@@ -20,7 +20,7 @@ package org.apache.hcatalog.common;
 import java.io.IOException;
 
 /**
- * Class representing exceptions thrown by Howl.
+ * Class representing exceptions thrown by HCat.
  */
 public class HCatException extends IOException {
 
@@ -30,7 +30,7 @@ public class HCatException extends IOExc
   private final ErrorType errorType;
 
   /**
-   * Instantiates a new howl exception.
+   * Instantiates a new hcat exception.
    * @param errorType the error type
    */
   public HCatException(ErrorType errorType) {
@@ -39,7 +39,7 @@ public class HCatException extends IOExc
 
 
   /**
-   * Instantiates a new howl exception.
+   * Instantiates a new hcat exception.
    * @param errorType the error type
    * @param cause the cause
    */
@@ -48,7 +48,7 @@ public class HCatException extends IOExc
   }
 
   /**
-   * Instantiates a new howl exception.
+   * Instantiates a new hcat exception.
    * @param errorType the error type
    * @param extraMessage extra messages to add to the message string
    */
@@ -57,7 +57,7 @@ public class HCatException extends IOExc
   }
 
   /**
-   * Instantiates a new howl exception.
+   * Instantiates a new hcat exception.
    * @param errorType the error type
    * @param extraMessage extra messages to add to the message string
    * @param cause the cause
@@ -74,7 +74,7 @@ public class HCatException extends IOExc
   //TODO : remove default error type constructors after all exceptions
   //are changed to use error types
   /**
-   * Instantiates a new howl exception.
+   * Instantiates a new hcat exception.
    * @param message the error message
    */
   public HCatException(String message) {
@@ -82,7 +82,7 @@ public class HCatException extends IOExc
   }
 
   /**
-   * Instantiates a new howl exception.
+   * Instantiates a new hcat exception.
    * @param message the error message
    * @param cause the cause
    */
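
For illustration, a minimal sketch (not part of this commit) of raising one of these typed errors from a caller; the guard method and table name are hypothetical, but the (ErrorType, extraMessage) constructor is the one documented above:

    import org.apache.hcatalog.common.ErrorType;
    import org.apache.hcatalog.common.HCatException;

    public class ErrorSketch {
      // Hypothetical guard: fails with ErrorType.ERROR_INVALID_TABLE (code 2000,
      // "Table specified does not exist"), appending the table name to the message.
      static void requireTableExists(Object table, String tableName) throws HCatException {
        if (table == null) {
          throw new HCatException(ErrorType.ERROR_INVALID_TABLE, "table: " + tableName);
        }
      }
    }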

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java Tue May 17 22:50:49 2011
@@ -131,12 +131,12 @@ public class HCatUtil {
         return schema;
   }
 
-  public static List<FieldSchema> getFieldSchemaList(List<HCatFieldSchema> howlFields) {
-      if(howlFields == null) {
+  public static List<FieldSchema> getFieldSchemaList(List<HCatFieldSchema> hcatFields) {
+      if(hcatFields == null) {
           return null;
       } else {
           List<FieldSchema> result = new ArrayList<FieldSchema>();
-          for(HCatFieldSchema f: howlFields) {
+          for(HCatFieldSchema f: hcatFields) {
               result.add(HCatSchemaUtils.getFieldSchema(f));
           }
           return result;
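
A small usage sketch of the renamed helper (the wrapper method is hypothetical; getFieldSchemaList itself is null-safe, as shown above):

    import java.util.List;
    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hcatalog.common.HCatUtil;
    import org.apache.hcatalog.data.schema.HCatFieldSchema;

    class SchemaConversionSketch {
      // Converts HCat field schemas to Hive metastore FieldSchema objects,
      // returning null when the input list is null.
      static List<FieldSchema> toHiveFields(List<HCatFieldSchema> hcatFields) {
        return HCatUtil.getFieldSchemaList(hcatFields);
      }
    }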

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatArrayBag.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatArrayBag.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatArrayBag.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatArrayBag.java Tue May 17 22:50:49 2011
@@ -38,11 +38,11 @@ public class HCatArrayBag<T> implements 
   DataBag convertedBag = null;
 //  List<Tuple> tupleList = null;
 
-  public class HowlArrayBagIterator implements Iterator<Tuple> {
+  public class HCatArrayBagIterator implements Iterator<Tuple> {
 
     Iterator<T> iter = null;
 
-    public HowlArrayBagIterator(List<T> rawItemList) {
+    public HCatArrayBagIterator(List<T> rawItemList) {
       iter = rawItemList.iterator();
     }
 
@@ -123,7 +123,7 @@ public class HCatArrayBag<T> implements 
     if (convertedBag != null){
       return convertedBag.iterator();
     }else{
-      return new HowlArrayBagIterator(rawItemList);
+      return new HCatArrayBagIterator(rawItemList);
     }
   }
 

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java Tue May 17 22:50:49 2011
@@ -27,7 +27,7 @@ import org.apache.hcatalog.data.schema.H
 /**
  * Abstract class exposing get and set semantics for basic record usage.
  * Note :
- *   HowlRecord is designed only to be used as in-memory representation only.
+ *   HCatRecord is designed to be used as an in-memory representation only.
  *   Don't use it to store data on the physical device.
  */
 public abstract class HCatRecord implements HCatRecordable {

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordable.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordable.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordable.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordable.java Tue May 17 22:50:49 2011
@@ -22,7 +22,7 @@ import java.util.List;
 import org.apache.hadoop.io.WritableComparable;
 
 /**
- * Interface that determines whether we can implement a HowlRecord on top of it
+ * Interface that determines whether we can implement an HCatRecord on top of it
  */
 public interface HCatRecordable extends WritableComparable<Object> {
 
@@ -34,7 +34,7 @@ public interface HCatRecordable extends 
   Object get(int fieldNum);
 
   /**
-   * Gets all the fields of the howl record.
+   * Gets all the fields of the hcat record.
    * @return the list of fields
    */
   List<Object> getAll();
@@ -47,7 +47,7 @@ public interface HCatRecordable extends 
   void set(int fieldNum, Object value);
 
   /**
-   * Gets the size of the howl record.
+   * Gets the size of the hcat record.
    * @return the size
    */
   int size();

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/schema/HCatSchema.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/schema/HCatSchema.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/schema/HCatSchema.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/schema/HCatSchema.java Tue May 17 22:50:49 2011
@@ -110,15 +110,15 @@ public class HCatSchema implements Seria
       return fieldSchemas.size();
     }
 
-    public void remove(final HCatFieldSchema howlFieldSchema) throws HCatException {
+    public void remove(final HCatFieldSchema hcatFieldSchema) throws HCatException {
 
-      if(!fieldSchemas.contains(howlFieldSchema)){
-        throw new HCatException("Attempt to delete a non-existent column from Howl Schema: "+ howlFieldSchema);
+      if(!fieldSchemas.contains(hcatFieldSchema)){
+        throw new HCatException("Attempt to delete a non-existent column from HCat Schema: "+ hcatFieldSchema);
       }
 
-      fieldSchemas.remove(howlFieldSchema);
-      fieldPositionMap.remove(howlFieldSchema);
-      fieldNames.remove(howlFieldSchema.getName());
+      fieldSchemas.remove(hcatFieldSchema);
+      fieldPositionMap.remove(hcatFieldSchema);
+      fieldNames.remove(hcatFieldSchema.getName());
     }
 
     @Override
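
As a usage sketch (the guard method is hypothetical), remove() throws rather than silently ignoring a missing column, so optional deletion can be written as:

    import org.apache.hcatalog.common.HCatException;
    import org.apache.hcatalog.data.schema.HCatFieldSchema;
    import org.apache.hcatalog.data.schema.HCatSchema;

    class SchemaEditSketch {
      static void dropColumnIfPresent(HCatSchema schema, HCatFieldSchema column) {
        try {
          // removes the field from the schema list, the position map, and the name set
          schema.remove(column);
        } catch (HCatException e) {
          // the column was not part of this schema; nothing to drop
        }
      }
    }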

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java Tue May 17 22:50:49 2011
@@ -209,13 +209,13 @@ public class HCatSchemaUtils {
         return outerSchema.get(0).getStructSubSchema();
     }
 
-    public static FieldSchema getFieldSchema(HCatFieldSchema howlFieldSchema){
-        return new FieldSchema(howlFieldSchema.getName(),howlFieldSchema.getTypeString(),howlFieldSchema.getComment());
+    public static FieldSchema getFieldSchema(HCatFieldSchema hcatFieldSchema){
+        return new FieldSchema(hcatFieldSchema.getName(),hcatFieldSchema.getTypeString(),hcatFieldSchema.getComment());
     }
 
-    public static List<FieldSchema> getFieldSchemas(List<HCatFieldSchema> howlFieldSchemas){
+    public static List<FieldSchema> getFieldSchemas(List<HCatFieldSchema> hcatFieldSchemas){
         List<FieldSchema> lfs = new ArrayList<FieldSchema>();
-        for (HCatFieldSchema hfs : howlFieldSchemas){
+        for (HCatFieldSchema hfs : hcatFieldSchemas){
             lfs.add(getFieldSchema(hfs));
         }
         return lfs;

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseInputFormat.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseInputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseInputFormat.java Tue May 17 22:50:49 2011
@@ -37,11 +37,11 @@ import org.apache.hcatalog.data.schema.H
 public abstract class HCatBaseInputFormat extends InputFormat<WritableComparable, HCatRecord> {
   
   /**
-   * get the schema for the HowlRecord data returned by HowlInputFormat.
+   * Get the schema for the HCatRecord data returned by HCatInputFormat.
    * 
    * @param job
    *          the job object
-   * @param howlSchema
+   * @param hcatSchema
    *          the schema to use as the consolidated schema
    * @throws IllegalArgumentException
    */
@@ -55,7 +55,7 @@ public abstract class HCatBaseInputForma
   }
   
   /**
-   * Set the schema for the HowlRecord data returned by HowlInputFormat.
+   * Set the schema for the HCatRecord data returned by HCatInputFormat.
    * @param job the job object
    * @param hcatSchema the schema to use as the consolidated schema
    */
@@ -68,7 +68,7 @@ public abstract class HCatBaseInputForma
    * Logically split the set of input files for the job. Returns the
    * underlying InputFormat's splits
    * @param jobContext the job context object
-   * @return the splits, an HowlInputSplit wrapper over the storage
+   * @return the splits, an HCatInputSplit wrapper over the storage
    *         driver InputSplits
    * @throws IOException or InterruptedException
    */
@@ -127,11 +127,11 @@ public abstract class HCatBaseInputForma
   /**
    * Create the RecordReader for the given InputSplit. Returns the underlying
    * RecordReader if the required operations are supported and schema matches
-   * with HowlTable schema. Returns an HowlRecordReader if operations need to
-   * be implemented in Howl.
+   * with HCatTable schema. Returns an HCatRecordReader if operations need to
+   * be implemented in HCat.
    * @param split the split
    * @param taskContext the task attempt context
-   * @return the record reader instance, either an HowlRecordReader(later) or
+   * @return the record reader instance, either an HCatRecordReader(later) or
    *         the underlying storage driver's RecordReader
    * @throws IOException or InterruptedException
    */
@@ -139,12 +139,12 @@ public abstract class HCatBaseInputForma
   public RecordReader<WritableComparable, HCatRecord> createRecordReader(InputSplit split,
       TaskAttemptContext taskContext) throws IOException, InterruptedException {
 
-    HCatSplit howlSplit = (HCatSplit) split;
-    PartInfo partitionInfo = howlSplit.getPartitionInfo();
+    HCatSplit hcatSplit = (HCatSplit) split;
+    PartInfo partitionInfo = hcatSplit.getPartitionInfo();
 
     //If running through a Pig job, the JobInfo will not be available in the
-    //backend process context (since HowlLoader works on a copy of the JobContext and does
-    //not call HowlInputFormat.setInput in the backend process).
+    //backend process context (since HCatLoader works on a copy of the JobContext and does
+    //not call HCatInputFormat.setInput in the backend process).
     //So this function should NOT attempt to read the JobInfo.
 
     HCatInputStorageDriver storageDriver;
@@ -155,26 +155,26 @@ public abstract class HCatBaseInputForma
     }
 
     //Pass all required information to the storage driver
-    initStorageDriver(storageDriver, taskContext, partitionInfo, howlSplit.getTableSchema());
+    initStorageDriver(storageDriver, taskContext, partitionInfo, hcatSplit.getTableSchema());
 
     //Get the input format for the storage driver
     InputFormat inputFormat =
       storageDriver.getInputFormat(partitionInfo.getInputStorageDriverProperties());
 
-    //Create the underlying input formats record record and an Howl wrapper
+    //Create the underlying input format's record reader and an HCat wrapper
     RecordReader recordReader =
-      inputFormat.createRecordReader(howlSplit.getBaseSplit(), taskContext);
+      inputFormat.createRecordReader(hcatSplit.getBaseSplit(), taskContext);
 
     return new HCatRecordReader(storageDriver,recordReader);
   }
 
   /**
-   * Gets the HowlTable schema for the table specified in the HowlInputFormat.setInput call
-   * on the specified job context. This information is available only after HowlInputFormat.setInput
+   * Gets the HCatTable schema for the table specified in the HCatInputFormat.setInput call
+   * on the specified job context. This information is available only after HCatInputFormat.setInput
    * has been called for a JobContext.
    * @param context the context
    * @return the table schema
-   * @throws Exception if HowlInputFromat.setInput has not been called for the current context
+   * @throws Exception if HCatInputFormat.setInput has not been called for the current context
    */
   public static HCatSchema getTableSchema(JobContext context) throws Exception {
     JobInfo jobInfo = getJobInfo(context);
@@ -184,7 +184,7 @@ public abstract class HCatBaseInputForma
   /**
    * Gets the JobInfo object by reading the Configuration and deserializing
    * the string. If JobInfo is not present in the configuration, throws an
-   * exception since that means HowlInputFormat.setInput has not been called.
+   * exception since that means HCatInputFormat.setInput has not been called.
    * @param jobContext the job context
    * @return the JobInfo object
    * @throws Exception the exception
@@ -192,7 +192,7 @@ public abstract class HCatBaseInputForma
   private static JobInfo getJobInfo(JobContext jobContext) throws Exception {
     String jobString = jobContext.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO);
     if( jobString == null ) {
-      throw new Exception("job information not found in JobContext. HowlInputFormat.setInput() not called?");
+      throw new Exception("job information not found in JobContext. HCatInputFormat.setInput() not called?");
     }
 
     return (JobInfo) HCatUtil.deserialize(jobString);
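
For orientation, a hedged read-side job setup; setInput and setOutputSchema are referenced in the comments and javadoc above but their exact signatures sit outside these hunks, so the calls below are assumptions:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hcatalog.data.schema.HCatSchema;
    import org.apache.hcatalog.mapreduce.HCatInputFormat;
    import org.apache.hcatalog.mapreduce.HCatTableInfo;

    class ReadJobSketch {
      static Job configure(Configuration conf, HCatTableInfo inputInfo) throws Exception {
        Job job = new Job(conf, "hcat-read");
        // Assumed call: serializes a JobInfo into the job configuration under
        // HCAT_KEY_JOB_INFO; getTableSchema fails until this has happened.
        HCatInputFormat.setInput(job, inputInfo);
        HCatSchema tableSchema = HCatInputFormat.getTableSchema(job);
        // Assumed call: asks for all table columns back as the consolidated
        // HCatRecord schema; nulls are filled in for columns a partition lacks.
        HCatInputFormat.setOutputSchema(job, tableSchema);
        job.setInputFormatClass(HCatInputFormat.class);
        return job;
      }
    }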

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java Tue May 17 22:50:49 2011
@@ -37,11 +37,11 @@ import org.apache.hcatalog.data.schema.H
 public abstract class HCatBaseOutputFormat extends OutputFormat<WritableComparable<?>, HCatRecord> {
 
   /**
-   * Gets the table schema for the table specified in the HowlOutputFormat.setOutput call
+   * Gets the table schema for the table specified in the HCatOutputFormat.setOutput call
    * on the specified job context.
    * @param context the context
    * @return the table schema
-   * @throws IOException if HowlOutputFromat.setOutput has not been called for the passed context
+   * @throws IOException if HCatOutputFormat.setOutput has not been called for the passed context
    */
   public static HCatSchema getTableSchema(JobContext context) throws IOException {
       OutputJobInfo jobInfo = getJobInfo(context);
@@ -76,9 +76,9 @@ public abstract class HCatBaseOutputForm
   }
 
   /**
-   * Gets the HowlOuputJobInfo object by reading the Configuration and deserializing
+   * Gets the HCatOutputJobInfo object by reading the Configuration and deserializing
    * the string. If JobInfo is not present in the configuration, throws an
-   * exception since that means HowlOutputFormat.setOutput has not been called.
+   * exception since that means HCatOutputFormat.setOutput has not been called.
    * @param jobContext the job context
    * @return the OutputJobInfo object
    * @throws IOException the IO exception

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximInputFormat.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximInputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximInputFormat.java Tue May 17 22:50:49 2011
@@ -39,7 +39,7 @@ import org.apache.hcatalog.common.HCatUt
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.data.schema.HCatSchemaUtils;
 
-/** The InputFormat to use to read data from Howl */
+/** The InputFormat to use to read data from HCat */
 public class HCatEximInputFormat extends HCatBaseInputFormat {
 
   /**
@@ -52,7 +52,7 @@ public class HCatEximInputFormat extends
    *          the job object
    * @param inputInfo
    *          the table input info
-   * @return two howl schemas, for the table columns and the partition keys
+   * @return two hcat schemas, for the table columns and the partition keys
    * @throws IOException
    *           the exception in communicating with the metadata server
    */
@@ -92,20 +92,20 @@ public class HCatEximInputFormat extends
         }else{
           throw new IOException("No input storage driver classname found, cannot read partition");
         }
-        Properties howlProperties = new Properties();
+        Properties hcatProperties = new Properties();
         for (String key : parameters.keySet()){
           if (key.startsWith(InitializeInput.HCAT_KEY_PREFIX)){
-            howlProperties.put(key, parameters.get(key));
+            hcatProperties.put(key, parameters.get(key));
           }
         }
-        PartInfo partInfo = new PartInfo(schema, inputStorageDriverClass,  location + "/data", howlProperties);
+        PartInfo partInfo = new PartInfo(schema, inputStorageDriverClass,  location + "/data", hcatProperties);
         partInfoList.add(partInfo);
       }
-      JobInfo howlJobInfo = new JobInfo(inputInfo,
+      JobInfo hcatJobInfo = new JobInfo(inputInfo,
           HCatUtil.getTableSchemaWithPtnCols(table), partInfoList);
       job.getConfiguration().set(
           HCatConstants.HCAT_KEY_JOB_INFO,
-          HCatUtil.serialize(howlJobInfo));
+          HCatUtil.serialize(hcatJobInfo));
       List<HCatSchema> rv = new ArrayList<HCatSchema>(2);
       rv.add(HCatSchemaUtils.getHCatSchema(table.getSd().getCols()));
       rv.add(HCatSchemaUtils.getHCatSchema(partCols));
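
The return contract above (two schemas: table columns first, then partition keys) suggests a caller shaped roughly like this; the setInput name and parameter list sit outside this hunk, so they are assumptions taken from the javadoc:

    import java.util.List;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hcatalog.data.schema.HCatSchema;
    import org.apache.hcatalog.mapreduce.HCatEximInputFormat;
    import org.apache.hcatalog.mapreduce.HCatTableInfo;

    class EximReadSketch {
      static void inspect(Job job, HCatTableInfo inputInfo) throws Exception {
        // Assumed method name; the javadoc documents a job object, a table
        // input info, and a two-schema return value.
        List<HCatSchema> schemas = HCatEximInputFormat.setInput(job, inputInfo);
        HCatSchema tableColumns  = schemas.get(0);  // column schema of the table
        HCatSchema partitionKeys = schemas.get(1);  // partition key schema
      }
    }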

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximOutputCommitter.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximOutputCommitter.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximOutputCommitter.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximOutputCommitter.java Tue May 17 22:50:49 2011
@@ -53,7 +53,7 @@ public class HCatEximOutputCommitter ext
 
   @Override
   public void cleanupJob(JobContext jobContext) throws IOException {
-    LOG.info("HowlEximOutputCommitter.cleanup invoked; m.o.d : " +
+    LOG.info("HCatEximOutputCommitter.cleanup invoked; m.o.d : " +
         jobContext.getConfiguration().get("mapred.output.dir"));
     if (baseCommitter != null) {
       LOG.info("baseCommitter.class = " + baseCommitter.getClass().getName());

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximOutputFormat.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximOutputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatEximOutputFormat.java Tue May 17 22:50:49 2011
@@ -54,10 +54,10 @@ import org.apache.hcatalog.rcfile.RCFile
 import org.apache.hcatalog.rcfile.RCFileOutputDriver;
 
 /**
- * The OutputFormat to use to write data to Howl without a howl server. This can then
- * be imported into a howl instance, or used with a HowlEximInputFormat. As in
- * HowlOutputFormat, the key value is ignored and
- * and should be given as null. The value is the HowlRecord to write.
+ * The OutputFormat to use to write data to HCat without an HCat server. This can then
+ * be imported into an HCat instance, or used with an HCatEximInputFormat. As in
+ * HCatOutputFormat, the key value is ignored
+ * and should be given as null. The value is the HCatRecord to write.
  */
 public class HCatEximOutputFormat extends HCatBaseOutputFormat {
 

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java Tue May 17 22:50:49 2011
@@ -22,7 +22,7 @@ import java.io.IOException;
 
 import org.apache.hadoop.mapreduce.Job;
 
-/** The InputFormat to use to read data from Howl */
+/** The InputFormat to use to read data from HCat */
 public class HCatInputFormat extends HCatBaseInputFormat {
 
   /**

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatInputStorageDriver.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatInputStorageDriver.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatInputStorageDriver.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatInputStorageDriver.java Tue May 17 22:50:49 2011
@@ -34,8 +34,8 @@ import org.apache.hadoop.util.StringUtil
 import org.apache.hcatalog.data.HCatRecord;
 import org.apache.hcatalog.data.schema.HCatSchema;
 
-/** The abstract class to be implemented by underlying storage drivers to enable data access from Howl through
- *  HowlInputFormat.
+/** The abstract class to be implemented by underlying storage drivers to enable data access from HCat through
+ *  HCatInputFormat.
  */
 public abstract class HCatInputStorageDriver {
 
@@ -48,15 +48,15 @@ public abstract class HCatInputStorageDr
    * @param properties the properties containing parameters required for initialization of InputFormat
    * @return the InputFormat instance
    */
-  public abstract InputFormat<? extends WritableComparable, ? extends Writable> getInputFormat(Properties howlProperties);
+  public abstract InputFormat<? extends WritableComparable, ? extends Writable> getInputFormat(Properties hcatProperties);
 
 
   /**
-   * Converts to HowlRecord format usable by HowlInputFormat to convert to required valuetype.
+   * Converts to HCatRecord format usable by HCatInputFormat to convert to the required value type.
    * Implementers of StorageDriver should look to overwriting this function so as to convert their
-   * value type to HowlRecord. Default implementation is provided for StorageDriver implementations
-   * on top of an underlying InputFormat that already uses HowlRecord as a tuple
-   * @param value the underlying value to convert to HowlRecord
+   * value type to HCatRecord. Default implementation is provided for StorageDriver implementations
+   * on top of an underlying InputFormat that already uses HCatRecord as a tuple
+   * @param value the underlying value to convert to HCatRecord
    */
   public abstract HCatRecord convertToHCatRecord(WritableComparable baseKey, Writable baseValue) throws IOException;
 
@@ -126,29 +126,29 @@ public abstract class HCatInputStorageDr
   }
 
   /**
-   * Set the schema of the data as originally published in Howl. The storage driver might validate that this matches with
-   * the schema it has (like Zebra) or it will use this to create a HowlRecord matching the output schema.
+   * Set the schema of the data as originally published in HCat. The storage driver might validate that this matches
+   * the schema it has (like Zebra) or it will use this to create an HCatRecord matching the output schema.
    * @param jobContext the job context object
-   * @param howlSchema the schema published in Howl for this data
+   * @param hcatSchema the schema published in HCat for this data
    * @param instantiationState
    * @throws IOException Signals that an I/O exception has occurred.
    */
-  public abstract void setOriginalSchema(JobContext jobContext, HCatSchema howlSchema) throws IOException;
+  public abstract void setOriginalSchema(JobContext jobContext, HCatSchema hcatSchema) throws IOException;
 
   /**
-   * Set the consolidated schema for the HowlRecord data returned by the storage driver. All tuples returned by the RecordReader should
+   * Set the consolidated schema for the HCatRecord data returned by the storage driver. All tuples returned by the RecordReader should
    * have this schema. Nulls should be inserted for columns not present in the data.
    * @param jobContext the job context object
-   * @param howlSchema the schema to use as the consolidated schema
+   * @param hcatSchema the schema to use as the consolidated schema
    * @throws IOException Signals that an I/O exception has occurred.
    */
-  public abstract void setOutputSchema(JobContext jobContext, HCatSchema howlSchema) throws IOException;
+  public abstract void setOutputSchema(JobContext jobContext, HCatSchema hcatSchema) throws IOException;
 
   /**
    * Sets the partition key values for the current partition. The storage driver is passed this so that the storage
-   * driver can add the partition key values to the output HowlRecord if the partition key values are not present on disk.
+   * driver can add the partition key values to the output HCatRecord if the partition key values are not present on disk.
    * @param jobContext the job context object
-   * @param partitionValues the partition values having a map with partition key name as key and the HowlKeyValue as value
+   * @param partitionValues the partition values having a map with partition key name as key and the HCatKeyValue as value
    * @param instantiationState
    * @throws IOException Signals that an I/O exception has occurred.
    */
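
Taken together, the abstract methods above imply a driver skeleton along these lines. A hedged sketch: the underlying InputFormat and the conversion logic are placeholders, setPartitionValues' signature is inferred from its javadoc, and the class is left abstract in case further abstract members sit outside these hunks:

    import java.io.IOException;
    import java.util.Map;
    import java.util.Properties;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapreduce.InputFormat;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
    import org.apache.hcatalog.data.HCatRecord;
    import org.apache.hcatalog.data.schema.HCatSchema;
    import org.apache.hcatalog.mapreduce.HCatInputStorageDriver;

    public abstract class SketchInputDriver extends HCatInputStorageDriver {
      private HCatSchema originalSchema;   // schema as published in HCat
      private HCatSchema outputSchema;     // consolidated schema for returned records
      private Map<String, String> partitionValues;

      @Override
      public InputFormat<? extends WritableComparable, ? extends Writable>
          getInputFormat(Properties hcatProperties) {
        return new TextInputFormat();      // placeholder underlying format
      }

      @Override
      public HCatRecord convertToHCatRecord(WritableComparable baseKey, Writable baseValue)
          throws IOException {
        // A real driver parses baseValue into an HCatRecord laid out per outputSchema,
        // inserting nulls (and partition key values) for columns absent from the data.
        throw new IOException("conversion not implemented in this sketch");
      }

      @Override
      public void setOriginalSchema(JobContext jobContext, HCatSchema hcatSchema) {
        this.originalSchema = hcatSchema;
      }

      @Override
      public void setOutputSchema(JobContext jobContext, HCatSchema hcatSchema) {
        this.outputSchema = hcatSchema;
      }

      // Assumed signature, taken from the javadoc above (partition key name -> value).
      public void setPartitionValues(JobContext jobContext, Map<String, String> partitionValues) {
        this.partitionValues = partitionValues;
      }
    }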

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java Tue May 17 22:50:49 2011
@@ -62,8 +62,8 @@ import org.apache.hcatalog.data.HCatReco
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.thrift.TException;
 
-/** The OutputFormat to use to write data to Howl. The key value is ignored and
- * and should be given as null. The value is the HowlRecord to write.*/
+/** The OutputFormat to use to write data to HCat. The key value is ignored
+ * and should be given as null. The value is the HCatRecord to write.*/
 public class HCatOutputFormat extends HCatBaseOutputFormat {
 
     /** The directory under which data is initially written for a non partitioned table */
@@ -147,13 +147,13 @@ public class HCatOutputFormat extends HC
 
         if(UserGroupInformation.isSecurityEnabled()){
           UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-          // check if oozie has set up a howl deleg. token - if so use it
+          // check if oozie has set up a hcat deleg. token - if so use it
           TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector();
           // TODO: will oozie use a "service" called "oozie" - then instead of
           // new Text() do new Text("oozie") below - if this change is made also
           // remember to do:
           //  job.getConfiguration().set(HCAT_KEY_TOKEN_SIGNATURE, "oozie");
-          // Also change code in HowlOutputCommitter.cleanupJob() to cancel the
+          // Also change code in HCatOutputCommitter.cleanupJob() to cancel the
           // token only if token.service is not "oozie" - remove the condition of
           // HCAT_KEY_TOKEN_SIGNATURE != null in that code.
           Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
@@ -165,9 +165,9 @@ public class HCatOutputFormat extends HC
           } else {
 
             // we did not get token set up by oozie, let's get them ourselves here.
-            // we essentially get a token per unique Output HowlTableInfo - this is
+            // we essentially get a token per unique Output HCatTableInfo - this is
             // done because through Pig, setOutput() method is called multiple times
-            // We want to only get the token once per unique output HowlTableInfo -
+            // We want to only get the token once per unique output HCatTableInfo -
             // we cannot just get one token since in multi-query case (> 1 store in 1 job)
             // or the case when a single pig script results in > 1 jobs, the single
             // token will get cancelled by the output committer and the subsequent
@@ -178,9 +178,9 @@ public class HCatOutputFormat extends HC
             // cancel.
             String tokenSignature = getTokenSignature(outputInfo);
             if(tokenMap.get(tokenSignature) == null) {
-              // get delegation tokens from howl server and store them into the "job"
-              // These will be used in the HowlOutputCommitter to publish partitions to
-              // howl
+              // get delegation tokens from hcat server and store them into the "job"
+              // These will be used in the HCatOutputCommitter to publish partitions to
+              // hcat
               // when the JobTracker in Hadoop MapReduce starts supporting renewal of 
               // arbitrary tokens, the renewer should be the principal of the JobTracker
               String tokenStrForm = client.getDelegationToken(ugi.getUserName());
@@ -211,7 +211,7 @@ public class HCatOutputFormat extends HC
     }
 
 
-    // a signature string to associate with a HowlTableInfo - essentially
+    // a signature string to associate with a HCatTableInfo - essentially
     // a concatenation of dbname, tablename and partition keyvalues.
     private static String getTokenSignature(HCatTableInfo outputInfo) {
       StringBuilder result = new StringBuilder("");
@@ -312,7 +312,7 @@ public class HCatOutputFormat extends HC
       try{
         fs.setOwner(workFile, null, tblPathStat.getGroup());
       } catch(AccessControlException ace){
-        // log the messages before ignoring. Currently, logging is not built in Howl.
+        // log the messages before ignoring. Currently, logging is not built in HCat.
       }
       return rw;
     }
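
For orientation, a hedged write-side counterpart; setOutput and setSchema are referenced throughout this diff (the ERROR_NOT_INITIALIZED message above, the HCatRecordWriter check below) but their signatures are not shown here, so the calls are assumptions:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hcatalog.data.schema.HCatSchema;
    import org.apache.hcatalog.mapreduce.HCatOutputFormat;
    import org.apache.hcatalog.mapreduce.HCatTableInfo;

    class WriteJobSketch {
      static Job configure(Configuration conf, HCatTableInfo outputInfo, HCatSchema schema)
          throws Exception {
        Job job = new Job(conf, "hcat-write");
        // Assumed call order: setOutput first (it contacts the metastore and, under
        // security, sets up the delegation token discussed above) ...
        HCatOutputFormat.setOutput(job, outputInfo);
        // ... then setSchema before any records are written, or HCatRecordWriter
        // fails with "It seems that setSchema() is not called on HCatOutputFormat".
        HCatOutputFormat.setSchema(job, schema);
        job.setOutputFormatClass(HCatOutputFormat.class);
        return job;
      }
    }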

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputStorageDriver.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputStorageDriver.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputStorageDriver.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputStorageDriver.java Tue May 17 22:50:49 2011
@@ -35,18 +35,18 @@ import org.apache.hcatalog.data.HCatReco
 import org.apache.hcatalog.data.schema.HCatSchema;
 
 
-/** The abstract class to be implemented by underlying storage drivers to enable data access from Howl through
- *  HowlOutputFormat.
+/** The abstract class to be implemented by underlying storage drivers to enable data access from HCat through
+ *  HCatOutputFormat.
  */
 public abstract class HCatOutputStorageDriver {
 
   /**
    * Initialize the storage driver with specified properties, default implementation does nothing.
    * @param context the job context object
-   * @param howlProperties the properties for the storage driver
+   * @param hcatProperties the properties for the storage driver
    * @throws IOException Signals that an I/O exception has occurred.
    */
-    public void initialize(JobContext context, Properties howlProperties) throws IOException {
+    public void initialize(JobContext context, Properties hcatProperties) throws IOException {
     }
 
     /**
@@ -81,17 +81,17 @@ public abstract class HCatOutputStorageD
     public abstract void setPartitionValues(JobContext jobContext, Map<String, String> partitionValues) throws IOException;
 
     /**
-     * Generate the key for the underlying outputformat. The value given to HowlOutputFormat is passed as the
-     * argument. The key given to HowlOutputFormat is ignored..
-     * @param value the value given to HowlOutputFormat
+     * Generate the key for the underlying outputformat. The value given to HCatOutputFormat is passed as the
+     * argument. The key given to HCatOutputFormat is ignored.
+     * @param value the value given to HCatOutputFormat
      * @return a key instance
      * @throws IOException Signals that an I/O exception has occurred.
      */
     public abstract WritableComparable<?> generateKey(HCatRecord value) throws IOException;
 
     /**
-     * Convert the given HowlRecord value to the actual value type.
-     * @param value the HowlRecord value to convert
+     * Convert the given HCatRecord value to the actual value type.
+     * @param value the HCatRecord value to convert
      * @return a value instance
      * @throws IOException Signals that an I/O exception has occurred.
      */
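
Mirroring the read side, the visible output-driver contract sketches out as below; the value-conversion method name and return type are inferred from its javadoc, and the class is left abstract since further abstract members may sit outside these hunks:

    import java.io.IOException;
    import java.util.Map;
    import java.util.Properties;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hcatalog.data.HCatRecord;
    import org.apache.hcatalog.mapreduce.HCatOutputStorageDriver;

    public abstract class SketchOutputDriver extends HCatOutputStorageDriver {
      @Override
      public void initialize(JobContext context, Properties hcatProperties) throws IOException {
        super.initialize(context, hcatProperties);  // default implementation does nothing
      }

      @Override
      public void setPartitionValues(JobContext jobContext, Map<String, String> partitionValues) {
        // a real driver records these so they reach partition-aware storage
      }

      @Override
      public WritableComparable<?> generateKey(HCatRecord value) {
        return NullWritable.get();  // the key handed to HCatOutputFormat is ignored anyway
      }

      // Assumed counterpart of convertToHCatRecord: the name and Writable return
      // type come from the javadoc above, not from a visible signature.
      public Writable convertValue(HCatRecord value) throws IOException {
        return new Text(value.toString());  // placeholder serialization
      }
    }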

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java Tue May 17 22:50:49 2011
@@ -26,8 +26,8 @@ import org.apache.hadoop.mapreduce.Recor
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hcatalog.data.HCatRecord;
 
-/** The Howl wrapper for the underlying RecordReader, this ensures that the initialize on
- * the underlying record reader is done with the underlying split, not with HowlSplit.
+/** The HCat wrapper for the underlying RecordReader; this ensures that initialize on
+ * the underlying record reader is done with the underlying split, not with HCatSplit.
  */
 class HCatRecordReader extends RecordReader<WritableComparable, HCatRecord> {
 
@@ -38,7 +38,7 @@ class HCatRecordReader extends RecordRea
     private final HCatInputStorageDriver storageDriver;
 
     /**
-     * Instantiates a new howl record reader.
+     * Instantiates a new hcat record reader.
      * @param baseRecordReader the base record reader
      */
     public HCatRecordReader(HCatInputStorageDriver storageDriver, RecordReader<? extends WritableComparable, ? extends Writable> baseRecordReader) {

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatRecordWriter.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatRecordWriter.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatRecordWriter.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatRecordWriter.java Tue May 17 22:50:49 2011
@@ -49,7 +49,7 @@ public class HCatRecordWriter extends Re
 
       if(partColsToDel == null){
         throw new HCatException("It seems that setSchema() is not called on " +
-        		"HowlOutputFormat. Please make sure that method is called.");
+        		"HCatOutputFormat. Please make sure that method is called.");
       }
 
       this.storageDriver = HCatOutputFormat.getOutputDriverInstance(context, jobInfo);

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatSplit.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatSplit.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatSplit.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatSplit.java Tue May 17 22:50:49 2011
@@ -28,7 +28,7 @@ import org.apache.hadoop.mapreduce.Input
 import org.apache.hcatalog.common.HCatUtil;
 import org.apache.hcatalog.data.schema.HCatSchema;
 
-/** The HowlSplit wrapper around the InputSplit returned by the underlying InputFormat */
+/** The HCatSplit wrapper around the InputSplit returned by the underlying InputFormat */
 class HCatSplit extends InputSplit implements Writable {
 
     /** The partition info for the split. */
@@ -37,16 +37,16 @@ class HCatSplit extends InputSplit imple
     /** The split returned by the underlying InputFormat split. */
     private InputSplit baseSplit;
 
-    /** The schema for the HowlTable */
+    /** The schema for the HCatTable */
     private HCatSchema tableSchema;
     /**
-     * Instantiates a new howl split.
+     * Instantiates a new hcat split.
      */
     public HCatSplit() {
     }
 
     /**
-     * Instantiates a new howl split.
+     * Instantiates a new hcat split.
      *
      * @param partitionInfo the partition info
      * @param baseSplit the base split

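Because the wrapper implements Writable, it also has to round-trip the wrapped split. A common shape for that, sketched here rather than quoted from the class, is to record the concrete split class name and then delegate:

    public void write(DataOutput output) throws IOException {
      output.writeUTF(baseSplit.getClass().getName());  // concrete class first
      ((Writable) baseSplit).write(output);             // then its own fields
    }

    public void readFields(DataInput input) throws IOException {
      String className = input.readUTF();
      try {
        baseSplit = (InputSplit) Class.forName(className).newInstance();
        ((Writable) baseSplit).readFields(input);
      } catch (Exception e) {
        throw new IOException("Could not recreate base split " + className, e);
      }
    }
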
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java Tue May 17 22:50:49 2011
@@ -25,8 +25,8 @@ import org.apache.hadoop.hive.metastore.
 
 /**
  *
- * HCatTableInfo - class to communicate table information to {@link HowlInputFormat}
- * and {@link HowlOutputFormat}
+ * HCatTableInfo - class to communicate table information to {@link HCatInputFormat}
+ * and {@link HCatOutputFormat}
  *
  */
 public class HCatTableInfo implements Serializable {
@@ -44,9 +44,9 @@ public class HCatTableInfo implements Se
   /** The Metadata server uri */
   private final String serverUri;
 
-  /** If the howl server is configured to work with hadoop security, this
+  /** If the hcat server is configured to work with hadoop security, this
    * variable will hold the principal name of the server - this will be used
-   * in the authentication to the howl server using kerberos
+   * in the authentication to the hcat server using kerberos
    */
   private final String serverKerberosPrincipal;
 
@@ -67,13 +67,13 @@ public class HCatTableInfo implements Se
   private Map<String, String> partitionValues;
 
   /**
-   * Initializes a new HCatTableInfo instance to be used with {@link HowlInputFormat}
+   * Initializes a new HCatTableInfo instance to be used with {@link HCatInputFormat}
    * for reading data from a table.
    * @param serverUri the Metadata server uri
-   * @param serverKerberosPrincipal If the howl server is configured to
+   * @param serverKerberosPrincipal If the hcat server is configured to
    * work with hadoop security, the kerberos principal name of the server - else null
    * The principal name should be of the form:
-   * <servicename>/_HOST@<realm> like "howl/_HOST@myrealm.com"
+   * <servicename>/_HOST@<realm> like "hcat/_HOST@myrealm.com"
    * The special string _HOST will be replaced automatically with the correct host name
    * @param dbName the db name
    * @param tableName the table name
@@ -86,13 +86,13 @@ public class HCatTableInfo implements Se
   }
 
   /**
-   * Initializes a new HCatTableInfo instance to be used with {@link HowlInputFormat}
+   * Initializes a new HCatTableInfo instance to be used with {@link HCatInputFormat}
    * for reading data from a table.
    * @param serverUri the Metadata server uri
-   * @param serverKerberosPrincipal If the howl server is configured to
+   * @param serverKerberosPrincipal If the hcat server is configured to
    * work with hadoop security, the kerberos principal name of the server - else null
    * The principal name should be of the form:
-   * <servicename>/_HOST@<realm> like "howl/_HOST@myrealm.com"
+   * <servicename>/_HOST@<realm> like "hcat/_HOST@myrealm.com"
    * The special string _HOST will be replaced automatically with the correct host name
    * @param dbName the db name
    * @param tableName the table name
@@ -115,13 +115,13 @@ public class HCatTableInfo implements Se
       this.filter = filter;
   }
   /**
-   * Initializes a new HCatTableInfo instance to be used with {@link HowlOutputFormat}
+   * Initializes a new HCatTableInfo instance to be used with {@link HCatOutputFormat}
   * for writing data to a table.
    * @param serverUri the Metadata server uri
-   * @param serverKerberosPrincipal If the howl server is configured to
+   * @param serverKerberosPrincipal If the hcat server is configured to
    * work with hadoop security, the kerberos principal name of the server - else null
    * The principal name should be of the form:
-   * <servicename>/_HOST@<realm> like "howl/_HOST@myrealm.com"
+   * <servicename>/_HOST@<realm> like "hcat/_HOST@myrealm.com"
    * The special string _HOST will be replaced automatically with the correct host name
    * @param dbName the db name
    * @param tableName the table name

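Tying the renamed pieces together, the read-side client code built on this class looks like the sketch below. The metastore URI, database, table, and filter values are illustrative; the principal argument may be null when security is off:

    HCatTableInfo inputInfo = HCatTableInfo.getInputTableInfo(
        "thrift://metastore.example.com:9083",  // serverUri (illustrative)
        "hcat/_HOST@EXAMPLE.COM",               // server principal, or null
        "default",                              // dbName
        "web_logs",                             // tableName (illustrative)
        null);                                  // partition filter string, if any
    HCatInputFormat.setInput(job, inputInfo);
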
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java Tue May 17 22:50:49 2011
@@ -68,7 +68,7 @@ public class InitializeInput {
    * Set the input to use for the Job. This queries the metadata server with the specified partition predicates,
   * gets the matching partitions, and puts the information in the configuration object.
    * @param job the job object
-   * @param inputInfo the howl table input info
+   * @param inputInfo the hcat table input info
    * @throws Exception
    */
   public static void setInput(Job job, HCatTableInfo inputInfo) throws Exception {
@@ -111,12 +111,12 @@ public class InitializeInput {
         partInfoList.add(partInfo);
       }
 
-      JobInfo howlJobInfo = new JobInfo(inputInfo, tableSchema, partInfoList);
-      inputInfo.setJobInfo(howlJobInfo);
+      JobInfo hcatJobInfo = new JobInfo(inputInfo, tableSchema, partInfoList);
+      inputInfo.setJobInfo(hcatJobInfo);
 
       job.getConfiguration().set(
           HCatConstants.HCAT_KEY_JOB_INFO,
-          HCatUtil.serialize(howlJobInfo)
+          HCatUtil.serialize(hcatJobInfo)
       );
     } finally {
       if (client != null ) {
@@ -149,7 +149,7 @@ public class InitializeInput {
   static PartInfo extractPartInfo(StorageDescriptor sd, Map<String,String> parameters) throws IOException{
     HCatSchema schema = HCatUtil.extractSchemaFromStorageDescriptor(sd);
     String inputStorageDriverClass = null;
-    Properties howlProperties = new Properties();
+    Properties hcatProperties = new Properties();
     if (parameters.containsKey(HCatConstants.HCAT_ISD_CLASS)){
       inputStorageDriverClass = parameters.get(HCatConstants.HCAT_ISD_CLASS);
     }else{
@@ -162,10 +162,10 @@ public class InitializeInput {
     }
     for (String key : parameters.keySet()){
       if (key.startsWith(HCAT_KEY_PREFIX)){
-        howlProperties.put(key, parameters.get(key));
+        hcatProperties.put(key, parameters.get(key));
       }
     }
-    return new PartInfo(schema,inputStorageDriverClass,  sd.getLocation(), howlProperties);
+    return new PartInfo(schema,inputStorageDriverClass,  sd.getLocation(), hcatProperties);
   }
 
 
@@ -195,14 +195,14 @@ public class InitializeInput {
       }
     }
 
-    Properties howlProperties = new Properties();
+    Properties hcatProperties = new Properties();
     for (String key : properties.keySet()){
       if (key.startsWith(HCAT_KEY_PREFIX)){
-        howlProperties.put(key, properties.get(key));
+        hcatProperties.put(key, properties.get(key));
       }
     }
 
-    return new StorerInfo(inputSDClass, outputSDClass, howlProperties);
+    return new StorerInfo(inputSDClass, outputSDClass, hcatProperties);
   }
 
 }

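setInput() above ends by serializing the JobInfo into the configuration under HCatConstants.HCAT_KEY_JOB_INFO, which is how backend tasks learn about the matched partitions. The read side is the mirror image; a sketch, assuming HCatUtil.deserialize() mirrors the serialize() call shown and that JobInfo exposes a getPartitions() getter for its partitions field:

    String serialized = job.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO);
    JobInfo jobInfo = (JobInfo) HCatUtil.deserialize(serialized);
    for (PartInfo partition : jobInfo.getPartitions()) {
      // typically one source of splits per matching partition
    }
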
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/JobInfo.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/JobInfo.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/JobInfo.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/JobInfo.java Tue May 17 22:50:49 2011
@@ -39,15 +39,15 @@ public class JobInfo implements Serializ
     private final List<PartInfo> partitions;
 
     /**
-     * Instantiates a new howl job info.
+     * Instantiates a new hcat job info.
      * @param tableName the table name
      * @param tableSchema the table schema
      * @param partitions the partitions
      */
-    public JobInfo(HCatTableInfo howlTableInfo, HCatSchema tableSchema,
+    public JobInfo(HCatTableInfo hcatTableInfo, HCatSchema tableSchema,
             List<PartInfo> partitions) {
-        this.tableName = howlTableInfo.getTableName();
-        this.dbName = howlTableInfo.getDatabaseName();
+        this.tableName = hcatTableInfo.getTableName();
+        this.dbName = hcatTableInfo.getDatabaseName();
         this.tableSchema = tableSchema;
         this.partitions = partitions;
     }

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/PartInfo.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/PartInfo.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/PartInfo.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/PartInfo.java Tue May 17 22:50:49 2011
@@ -35,8 +35,8 @@ public class PartInfo implements Seriali
   /** The information about which input storage driver to use */
   private final String inputStorageDriverClass;
 
-  /** Howl-specific properties set at the partition */
-  private final Properties howlProperties;
+  /** HCat-specific properties set at the partition */
+  private final Properties hcatProperties;
 
   /** The data location. */
   private final String location;
@@ -45,17 +45,17 @@ public class PartInfo implements Seriali
   private Map<String,String> partitionValues;
 
   /**
-   * Instantiates a new howl partition info.
+   * Instantiates a new hcat partition info.
    * @param partitionSchema the partition schema
    * @param inputStorageDriverClass the input storage driver class name
    * @param location the location
-   * @param howlProperties howl-specific properties at the partition
+   * @param hcatProperties hcat-specific properties at the partition
    */
-  public PartInfo(HCatSchema partitionSchema, String inputStorageDriverClass, String location, Properties howlProperties){
+  public PartInfo(HCatSchema partitionSchema, String inputStorageDriverClass, String location, Properties hcatProperties){
     this.partitionSchema = partitionSchema;
     this.inputStorageDriverClass = inputStorageDriverClass;
     this.location = location;
-    this.howlProperties = howlProperties;
+    this.hcatProperties = hcatProperties;
   }
 
   /**
@@ -77,11 +77,11 @@ public class PartInfo implements Seriali
 
 
   /**
-   * Gets the value of howlProperties.
-   * @return the howlProperties
+   * Gets the value of hcatProperties.
+   * @return the hcatProperties
    */
   public Properties getInputStorageDriverProperties() {
-    return howlProperties;
+    return hcatProperties;
   }
 
   /**

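The hcatProperties carried by each PartInfo are harvested in InitializeInput above by prefix-matching the partition's metastore parameters; condensed to its core, that filtering is just:

    Properties hcatProperties = new Properties();
    for (Map.Entry<String, String> entry : parameters.entrySet()) {
      if (entry.getKey().startsWith(HCAT_KEY_PREFIX)) {  // keep hcat.* keys only
        hcatProperties.put(entry.getKey(), entry.getValue());
      }
    }
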
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatBaseLoader.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatBaseLoader.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatBaseLoader.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatBaseLoader.java Tue May 17 22:50:49 2011
@@ -95,7 +95,7 @@ public abstract class HCatBaseLoader ext
     // can retrieve it later.
     storeInUDFContext(signature, PRUNE_PROJECTION_INFO, requiredFieldsInfo);
 
-    // Howl will always prune columns based on what we ask of it - so the
+    // HCat will always prune columns based on what we ask of it - so the
     // response is true
     return new RequiredFieldResponse(true);
   }

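The pruning contract above rides on Pig's UDFContext: the frontend stores the RequiredFieldList under the loader's signature, and the backend reads it back during setLocation(). Condensed, and assuming storeInUDFContext() is a thin wrapper over getUDFProperties():

    Properties props = UDFContext.getUDFContext()
        .getUDFProperties(this.getClass(), new String[]{signature});
    props.put(PRUNE_PROJECTION_INFO, requiredFieldsInfo);       // frontend: store
    RequiredFieldList saved =
        (RequiredFieldList) props.get(PRUNE_PROJECTION_INFO);   // backend: retrieve
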
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatBaseStorer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatBaseStorer.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatBaseStorer.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatBaseStorer.java Tue May 17 22:50:49 2011
@@ -62,12 +62,12 @@ public abstract class HCatBaseStorer ext
   /**
    *
    */
-  protected static final String COMPUTED_OUTPUT_SCHEMA = "howl.output.schema";
+  protected static final String COMPUTED_OUTPUT_SCHEMA = "hcat.output.schema";
   protected final Map<String,String> partitions;
   protected Schema pigSchema;
   private RecordWriter<WritableComparable<?>, HCatRecord> writer;
   protected HCatSchema computedSchema;
-  protected static final String PIG_SCHEMA = "howl.pig.store.schema";
+  protected static final String PIG_SCHEMA = "hcat.pig.store.schema";
   protected String sign;
 
   public HCatBaseStorer(String partSpecs, String schema) throws ParseException, FrontendException {
@@ -101,7 +101,7 @@ public abstract class HCatBaseStorer ext
     if(pigSchema != null){
       if(! Schema.equals(runtimeSchema, pigSchema, false, true) ){
         throw new FrontendException("Schema provided in store statement doesn't match with the Schema" +
-            "returned by Pig run-time. Schema provided in HowlStorer: "+pigSchema.toString()+ " Schema received from Pig runtime: "+runtimeSchema.toString(), PigHCatUtil.PIG_EXCEPTION_CODE);
+            "returned by Pig run-time. Schema provided in HCatStorer: "+pigSchema.toString()+ " Schema received from Pig runtime: "+runtimeSchema.toString(), PigHCatUtil.PIG_EXCEPTION_CODE);
       }
     } else {
       pigSchema = runtimeSchema;
@@ -117,20 +117,20 @@ public abstract class HCatBaseStorer ext
     List<HCatFieldSchema> fieldSchemas = new ArrayList<HCatFieldSchema>(pigSchema.size());
     for(FieldSchema fSchema : pigSchema.getFields()){
       byte type = fSchema.type;
-      HCatFieldSchema howlFSchema;
+      HCatFieldSchema hcatFSchema;
 
       try {
 
         // Find out if we need to throw away the tuple or not.
         if(type == DataType.BAG && removeTupleFromBag(tableSchema, fSchema)){
           List<HCatFieldSchema> arrFields = new ArrayList<HCatFieldSchema>(1);
-          arrFields.add(getHowlFSFromPigFS(fSchema.schema.getField(0).schema.getField(0), tableSchema));
-          howlFSchema = new HCatFieldSchema(fSchema.alias, Type.ARRAY, new HCatSchema(arrFields), null);
+          arrFields.add(getHCatFSFromPigFS(fSchema.schema.getField(0).schema.getField(0), tableSchema));
+          hcatFSchema = new HCatFieldSchema(fSchema.alias, Type.ARRAY, new HCatSchema(arrFields), null);
       }
       else{
-          howlFSchema = getHowlFSFromPigFS(fSchema, tableSchema);
+          hcatFSchema = getHCatFSFromPigFS(fSchema, tableSchema);
       }
-      fieldSchemas.add(howlFSchema);
+      fieldSchemas.add(hcatFSchema);
       } catch (HCatException he){
           throw new FrontendException(he.getMessage(),PigHCatUtil.PIG_EXCEPTION_CODE,he);
       }
@@ -163,7 +163,7 @@ public abstract class HCatBaseStorer ext
   }
 
 
-  private HCatFieldSchema getHowlFSFromPigFS(FieldSchema fSchema, HCatSchema hcatTblSchema) throws FrontendException, HCatException{
+  private HCatFieldSchema getHCatFSFromPigFS(FieldSchema fSchema, HCatSchema hcatTblSchema) throws FrontendException, HCatException{
 
     byte type = fSchema.type;
     switch(type){
@@ -187,17 +187,17 @@ public abstract class HCatBaseStorer ext
     case DataType.BAG:
       Schema bagSchema = fSchema.schema;
       List<HCatFieldSchema> arrFields = new ArrayList<HCatFieldSchema>(1);
-      arrFields.add(getHowlFSFromPigFS(bagSchema.getField(0), hcatTblSchema));
+      arrFields.add(getHCatFSFromPigFS(bagSchema.getField(0), hcatTblSchema));
       return new HCatFieldSchema(fSchema.alias, Type.ARRAY, new HCatSchema(arrFields), "");
 
     case DataType.TUPLE:
       List<String> fieldNames = new ArrayList<String>();
-      List<HCatFieldSchema> howlFSs = new ArrayList<HCatFieldSchema>();
+      List<HCatFieldSchema> hcatFSs = new ArrayList<HCatFieldSchema>();
       for( FieldSchema fieldSchema : fSchema.schema.getFields()){
         fieldNames.add( fieldSchema.alias);
-        howlFSs.add(getHowlFSFromPigFS(fieldSchema, hcatTblSchema));
+        hcatFSs.add(getHCatFSFromPigFS(fieldSchema, hcatTblSchema));
       }
-      return new HCatFieldSchema(fSchema.alias, Type.STRUCT, new HCatSchema(howlFSs), "");
+      return new HCatFieldSchema(fSchema.alias, Type.STRUCT, new HCatSchema(hcatFSs), "");
 
     case DataType.MAP:{
       // Pig's schema contain no type information about map's keys and
@@ -258,11 +258,11 @@ public abstract class HCatBaseStorer ext
     }
   }
 
-  private Object getJavaObj(Object pigObj, HCatFieldSchema howlFS) throws ExecException, HCatException{
+  private Object getJavaObj(Object pigObj, HCatFieldSchema hcatFS) throws ExecException, HCatException{
 
     // The real work-horse. Spend time and energy in this method if there is
-    // need to keep HowlStorer lean and go fast.
-    Type type = howlFS.getType();
+    // need to keep HCatStorer lean and go fast.
+    Type type = hcatFS.getType();
 
     switch(type){
 
@@ -273,14 +273,14 @@ public abstract class HCatBaseStorer ext
       //
       //      List<Object> innerList = new ArrayList<Object>(innerTup.size());
       //      int i = 0;
-      //      for(HowlTypeInfo structFieldTypeInfo : typeInfo.getAllStructFieldTypeInfos()){
+      //      for(HCatTypeInfo structFieldTypeInfo : typeInfo.getAllStructFieldTypeInfos()){
       //        innerList.add(getJavaObj(innerTup.get(i++), structFieldTypeInfo));
       //      }
       //      return innerList;
     case ARRAY:
       // Unwrap the bag.
       DataBag pigBag = (DataBag)pigObj;
-      HCatFieldSchema tupFS = howlFS.getArrayElementSchema().get(0);
+      HCatFieldSchema tupFS = hcatFS.getArrayElementSchema().get(0);
       boolean needTuple = tupFS.getType() == Type.STRUCT;
       List<Object> bagContents = new ArrayList<Object>((int)pigBag.size());
       Iterator<Tuple> bagItr = pigBag.iterator();
@@ -327,18 +327,18 @@ public abstract class HCatBaseStorer ext
       byte type = pigField.type;
       String alias = pigField.alias;
       validateAlias(alias);
-      HCatFieldSchema howlField = getTableCol(alias, tblSchema);
+      HCatFieldSchema hcatField = getTableCol(alias, tblSchema);
 
       if(DataType.isComplex(type)){
         switch(type){
 
         case DataType.MAP:
-          if(howlField != null){
-            if(howlField.getMapKeyType() != Type.STRING){
-              throw new FrontendException("Key Type of map must be String "+howlField,  PigHCatUtil.PIG_EXCEPTION_CODE);
+          if(hcatField != null){
+            if(hcatField.getMapKeyType() != Type.STRING){
+              throw new FrontendException("Key Type of map must be String "+hcatField,  PigHCatUtil.PIG_EXCEPTION_CODE);
             }
-            if(howlField.getMapValueSchema().get(0).isComplex()){
-              throw new FrontendException("Value type of map cannot be complex" + howlField, PigHCatUtil.PIG_EXCEPTION_CODE);
+            if(hcatField.getMapValueSchema().get(0).isComplex()){
+              throw new FrontendException("Value type of map cannot be complex" + hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
             }
           }
           break;
@@ -351,37 +351,37 @@ public abstract class HCatBaseStorer ext
             }
             validateAlias(innerField.alias);
           }
-          if(howlField != null){
+          if(hcatField != null){
             // Do the same validation for HCatSchema.
-            HCatFieldSchema arrayFieldScehma = howlField.getArrayElementSchema().get(0);
+            HCatFieldSchema arrayFieldScehma = hcatField.getArrayElementSchema().get(0);
             Type hType = arrayFieldScehma.getType();
             if(hType == Type.STRUCT){
               for(HCatFieldSchema structFieldInBag : arrayFieldScehma.getStructSubSchema().getFields()){
                 if(structFieldInBag.getType() == Type.STRUCT || structFieldInBag.getType() == Type.ARRAY){
-                  throw new FrontendException("Nested Complex types not allowed "+ howlField, PigHCatUtil.PIG_EXCEPTION_CODE);
+                  throw new FrontendException("Nested Complex types not allowed "+ hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
                 }
               }
             }
             if(hType == Type.MAP){
               if(arrayFieldScehma.getMapKeyType() != Type.STRING){
-                throw new FrontendException("Key Type of map must be String "+howlField, PigHCatUtil.PIG_EXCEPTION_CODE);
+                throw new FrontendException("Key Type of map must be String "+hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
               }
               if(arrayFieldScehma.getMapValueSchema().get(0).isComplex()){
-                throw new FrontendException("Value type of map cannot be complex "+howlField, PigHCatUtil.PIG_EXCEPTION_CODE);
+                throw new FrontendException("Value type of map cannot be complex "+hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
               }
             }
             if(hType == Type.ARRAY) {
-              throw new FrontendException("Arrays cannot contain array within it. "+howlField, PigHCatUtil.PIG_EXCEPTION_CODE);
+              throw new FrontendException("Arrays cannot contain array within it. "+hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
             }
           }
           break;
 
         case DataType.TUPLE:
           validateUnNested(pigField.schema);
-          if(howlField != null){
-            for(HCatFieldSchema structFieldSchema : howlField.getStructSubSchema().getFields()){
+          if(hcatField != null){
+            for(HCatFieldSchema structFieldSchema : hcatField.getStructSubSchema().getFields()){
               if(structFieldSchema.isComplex()){
-                throw new FrontendException("Nested Complex types are not allowed."+howlField, PigHCatUtil.PIG_EXCEPTION_CODE);
+                throw new FrontendException("Nested Complex types are not allowed."+hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
               }
             }
           }
@@ -393,15 +393,15 @@ public abstract class HCatBaseStorer ext
       }
     }
 
-    for(HCatFieldSchema howlField : tblSchema.getFields()){
+    for(HCatFieldSchema hcatField : tblSchema.getFields()){
 
      // We don't do type promotion/demotion.
-      Type hType = howlField.getType();
+      Type hType = hcatField.getType();
       switch(hType){
       case SMALLINT:
       case TINYINT:
       case BOOLEAN:
-        throw new FrontendException("Incompatible type found in howl table schema: "+howlField, PigHCatUtil.PIG_EXCEPTION_CODE);
+        throw new FrontendException("Incompatible type found in hcat table schema: "+hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
       }
     }
   }
@@ -418,9 +418,9 @@ public abstract class HCatBaseStorer ext
   // Finds column by name in HCatSchema, if not found returns null.
   private HCatFieldSchema getTableCol(String alias, HCatSchema tblSchema){
 
-    for(HCatFieldSchema howlField : tblSchema.getFields()){
-      if(howlField.getName().equalsIgnoreCase(alias)){
-        return howlField;
+    for(HCatFieldSchema hcatField : tblSchema.getFields()){
+      if(hcatField.getName().equalsIgnoreCase(alias)){
+        return hcatField;
       }
     }
    // It's a new column.

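For orientation, the Pig-to-HCat type correspondence that getHCatFSFromPigFS() and the validation above enforce is sketched below (container cases simplified, error handling elided; the primitive Type constant names are assumed from the HCat schema Type enum):

    static Type toHCatType(byte pigType) throws FrontendException {
      switch (pigType) {
        case DataType.CHARARRAY: return Type.STRING;
        case DataType.INTEGER:   return Type.INT;
        case DataType.LONG:      return Type.BIGINT;
        case DataType.FLOAT:     return Type.FLOAT;
        case DataType.DOUBLE:    return Type.DOUBLE;
        case DataType.BAG:       return Type.ARRAY;   // bag of tuples -> array
        case DataType.TUPLE:     return Type.STRUCT;  // tuple -> struct
        case DataType.MAP:       return Type.MAP;     // map keys must be strings
        default:
          throw new FrontendException("Unsupported Pig type: "
              + DataType.findTypeName(pigType), PigHCatUtil.PIG_EXCEPTION_CODE);
      }
    }
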
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java?rev=1104614&r1=1104613&r2=1104614&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java Tue May 17 22:50:49 2011
@@ -40,26 +40,26 @@ import org.apache.pig.ResourceSchema;
 import org.apache.pig.impl.util.UDFContext;
 
 /**
- * Pig {@link LoadFunc} to read data from Howl
+ * Pig {@link LoadFunc} to read data from HCat
  */
 
 public class HCatLoader extends HCatBaseLoader {
 
   private static final String PARTITION_FILTER = "partition.filter"; // for future use
 
-  private HCatInputFormat howlInputFormat = null;
+  private HCatInputFormat hcatInputFormat = null;
   private String dbName;
   private String tableName;
-  private String howlServerUri;
+  private String hcatServerUri;
   private String partitionFilterString;
   private final PigHCatUtil phutil = new PigHCatUtil();
 
   @Override
   public InputFormat<?,?> getInputFormat() throws IOException {
-    if(howlInputFormat == null) {
-      howlInputFormat = new HCatInputFormat();
+    if(hcatInputFormat == null) {
+      hcatInputFormat = new HCatInputFormat();
     }
-    return howlInputFormat;
+    return hcatInputFormat;
   }
 
   @Override
@@ -76,30 +76,30 @@ public class HCatLoader extends HCatBase
 
     // get partitionFilterString stored in the UDFContext - it would have
     // been stored there by an earlier call to setPartitionFilter
-    // call setInput on OwlInputFormat only in the frontend because internally
-    // it makes calls to the owl server - we don't want these to happen in
+    // call setInput on HCatInputFormat only in the frontend because internally
+    // it makes calls to the hcat server - we don't want these to happen in
     // the backend
     // in the hadoop front end mapred.task.id property will not be set in
     // the Configuration
     if (!HCatUtil.checkJobContextIfRunningFromBackend(job)){
 
       HCatInputFormat.setInput(job, HCatTableInfo.getInputTableInfo(
-              howlServerUri!=null ? howlServerUri :
-                  (howlServerUri = PigHCatUtil.getHowlServerUri(job)),
-              PigHCatUtil.getHowlServerPrincipal(job),
+              hcatServerUri!=null ? hcatServerUri :
+                  (hcatServerUri = PigHCatUtil.getHCatServerUri(job)),
+              PigHCatUtil.getHCatServerPrincipal(job),
               dbName,
               tableName,
               getPartitionFilterString()));
     }
 
     // Need to also push projections by calling setOutputSchema on
-    // OwlInputFormat - we have to get the RequiredFields information
+    // HCatInputFormat - we have to get the RequiredFields information
     // from the UdfContext, translate it to an Schema and then pass it
     // The reason we do this here is because setLocation() is called by
     // Pig runtime at InputFormat.getSplits() and
     // InputFormat.createRecordReader() time - we are not sure when
-    // OwlInputFormat needs to know about pruned projections - so doing it
-    // here will ensure we communicate to OwlInputFormat about pruned
+    // HCatInputFormat needs to know about pruned projections - so doing it
+    // here will ensure we communicate to HCatInputFormat about pruned
     // projections at getSplits() and createRecordReader() time
 
     UDFContext udfContext = UDFContext.getUDFContext();
@@ -108,7 +108,7 @@ public class HCatLoader extends HCatBase
     RequiredFieldList requiredFieldsInfo =
       (RequiredFieldList)props.get(PRUNE_PROJECTION_INFO);
     if(requiredFieldsInfo != null) {
-      // convert to owlschema and pass to OwlInputFormat
+      // convert to an HCatSchema and pass to HCatInputFormat
       try {
         outputSchema = phutil.getHCatSchema(requiredFieldsInfo.getFields(),signature,this.getClass());
         HCatInputFormat.setOutputSchema(job, outputSchema);
@@ -118,11 +118,11 @@ public class HCatLoader extends HCatBase
     } else{
       // else - this means pig's optimizer never invoked the pushProjection
       // method - so we need all fields and hence we should not call the
-      // setOutputSchema on OwlInputFormat
+      // setOutputSchema on HCatInputFormat
       if (HCatUtil.checkJobContextIfRunningFromBackend(job)){
         try {
-          HCatSchema howlTableSchema = (HCatSchema) props.get(HCatConstants.HCAT_TABLE_SCHEMA);
-          outputSchema = howlTableSchema;
+          HCatSchema hcatTableSchema = (HCatSchema) props.get(HCatConstants.HCAT_TABLE_SCHEMA);
+          outputSchema = hcatTableSchema;
         } catch (Exception e) {
           throw new IOException(e);
         }
@@ -134,8 +134,8 @@ public class HCatLoader extends HCatBase
   public String[] getPartitionKeys(String location, Job job)
   throws IOException {
     Table table = phutil.getTable(location,
-        howlServerUri!=null?howlServerUri:PigHCatUtil.getHowlServerUri(job),
-            PigHCatUtil.getHowlServerPrincipal(job));
+        hcatServerUri!=null?hcatServerUri:PigHCatUtil.getHCatServerUri(job),
+            PigHCatUtil.getHCatServerPrincipal(job));
     List<FieldSchema> tablePartitionKeys = table.getPartitionKeys();
     String[] partitionKeys = new String[tablePartitionKeys.size()];
     for(int i = 0; i < tablePartitionKeys.size(); i++) {
@@ -147,28 +147,28 @@ public class HCatLoader extends HCatBase
   @Override
   public ResourceSchema getSchema(String location, Job job) throws IOException {
     Table table = phutil.getTable(location,
-        howlServerUri!=null?howlServerUri:PigHCatUtil.getHowlServerUri(job),
-            PigHCatUtil.getHowlServerPrincipal(job));
-    HCatSchema howlTableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
+        hcatServerUri!=null?hcatServerUri:PigHCatUtil.getHCatServerUri(job),
+            PigHCatUtil.getHCatServerPrincipal(job));
+    HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
     try {
-      PigHCatUtil.validateHowlTableSchemaFollowsPigRules(howlTableSchema);
+      PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema);
     } catch (IOException e){
       throw new PigException(
-          "Table schema incompatible for reading through HowlLoader :" + e.getMessage()
-          + ";[Table schema was "+ howlTableSchema.toString() +"]"
+          "Table schema incompatible for reading through HCatLoader :" + e.getMessage()
+          + ";[Table schema was "+ hcatTableSchema.toString() +"]"
           ,PigHCatUtil.PIG_EXCEPTION_CODE, e);
     }
-    storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, howlTableSchema);
-    outputSchema = howlTableSchema;
-    return PigHCatUtil.getResourceSchema(howlTableSchema);
+    storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema);
+    outputSchema = hcatTableSchema;
+    return PigHCatUtil.getResourceSchema(hcatTableSchema);
   }
 
   @Override
   public void setPartitionFilter(Expression partitionFilter) throws IOException {
     // convert the partition filter expression into a string expected by
-    // howl and pass it in setLocation()
+    // hcat and pass it in setLocation()
 
-    partitionFilterString = getHowlComparisonString(partitionFilter);
+    partitionFilterString = getHCatComparisonString(partitionFilter);
 
     // store this in the udf context so we can get it later
     storeInUDFContext(signature,
@@ -184,9 +184,9 @@ public class HCatLoader extends HCatBase
     return partitionFilterString;
   }
 
-  private String getHowlComparisonString(Expression expr) {
+  private String getHCatComparisonString(Expression expr) {
     if(expr instanceof BinaryExpression){
-      // call getOwlComparisonString on lhs and rhs, and and join the
+      // call getHCatComparisonString on lhs and rhs, and join the
       // results with OpType string
 
       // we can just use OpType.toString() on all Expression types except
@@ -201,9 +201,9 @@ public class HCatLoader extends HCatBase
           opStr = expr.getOpType().toString();
       }
       BinaryExpression be = (BinaryExpression)expr;
-      return "(" + getHowlComparisonString(be.getLhs()) +
+      return "(" + getHCatComparisonString(be.getLhs()) +
                   opStr +
-                  getHowlComparisonString(be.getRhs()) + ")";
+                  getHCatComparisonString(be.getRhs()) + ")";
     } else {
       // should be a constant or column
       return expr.toString();

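The effect of getHCatComparisonString() above is to flatten a pushed-down predicate tree into a parenthesized filter string for the metastore: a filter such as ds >= '20110501' and region == 'us' would come out as ((ds >= '20110501') and (region == 'us')), modulo the exact operator spelling taken from OpType. Condensed, with the special-case operator handling elided:

    private String toFilterString(Expression expr) {
      if (expr instanceof BinaryExpression) {
        BinaryExpression be = (BinaryExpression) expr;
        return "(" + toFilterString(be.getLhs())
            + be.getOpType().toString()       // special-cased ops elided here
            + toFilterString(be.getRhs()) + ")";
      }
      return expr.toString();  // constant or column reference
    }
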

