hive-commits mailing list archives

From hashut...@apache.org
Subject svn commit: r1462670 [14/14] - in /hive/branches/ptf-windowing: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/ data/conf/ data/fil...
Date Fri, 29 Mar 2013 22:59:41 GMT
Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/reduce_deduplicate_exclude_join.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/reduce_deduplicate_exclude_join.q.out?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/reduce_deduplicate_exclude_join.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/reduce_deduplicate_exclude_join.q.out Fri Mar 29 22:58:43 2013
@@ -7,12 +7,8 @@ ABSTRACT SYNTAX TREE:
 
 STAGE DEPENDENCIES:
   Stage-2 is a root stage
-  Stage-5 depends on stages: Stage-2 , consists of Stage-6, Stage-7, Stage-1
-  Stage-6 has a backup stage: Stage-1
-  Stage-3 depends on stages: Stage-6
-  Stage-7 has a backup stage: Stage-1
-  Stage-4 depends on stages: Stage-7
-  Stage-1
+  Stage-4 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-4
   Stage-0 is a root stage
 
 STAGE PLANS:
@@ -52,10 +48,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 
-  Stage: Stage-5
-    Conditional Operator
-
-  Stage: Stage-6
+  Stage: Stage-4
     Map Reduce Local Work
       Alias -> Map Local Tables:
         b 
@@ -112,126 +105,6 @@ STAGE PLANS:
       Local Work:
         Map Reduce Local Work
 
-  Stage: Stage-7
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $INTNAME 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $INTNAME 
-            HashTable Sink Operator
-              condition expressions:
-                0 {_col0} {_col1}
-                1 {key} {value}
-              handleSkewJoin: false
-              keys:
-                0 [Column[_col0]]
-                1 [Column[key]]
-              Position of Big Table: 1
-
-  Stage: Stage-4
-    Map Reduce
-      Alias -> Map Operator Tree:
-        b 
-          TableScan
-            alias: b
-            Map Join Operator
-              condition map:
-                   Inner Join 0 to 1
-              condition expressions:
-                0 {_col0} {_col1}
-                1 {key} {value}
-              handleSkewJoin: false
-              keys:
-                0 [Column[_col0]]
-                1 [Column[key]]
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Position of Big Table: 1
-              Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: _col1
-                      type: string
-                      expr: _col2
-                      type: string
-                      expr: _col3
-                      type: string
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Limit
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-      Local Work:
-        Map Reduce Local Work
-
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        $INTNAME 
-            Reduce Output Operator
-              key expressions:
-                    expr: _col0
-                    type: string
-              sort order: +
-              Map-reduce partition columns:
-                    expr: _col0
-                    type: string
-              tag: 0
-              value expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: string
-        b 
-          TableScan
-            alias: b
-            Reduce Output Operator
-              key expressions:
-                    expr: key
-                    type: string
-              sort order: +
-              Map-reduce partition columns:
-                    expr: key
-                    type: string
-              tag: 1
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
-          handleSkewJoin: false
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Select Operator
-            expressions:
-                  expr: _col0
-                  type: string
-                  expr: _col1
-                  type: string
-                  expr: _col2
-                  type: string
-                  expr: _col3
-                  type: string
-            outputColumnNames: _col0, _col1, _col2, _col3
-            Limit
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
   Stage: Stage-0
     Fetch Operator
       limit: 1

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats0.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats0.q.out?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats0.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats0.q.out Fri Mar 29 22:58:43 2013
@@ -757,10 +757,12 @@ POSTHOOK: Lineage: stats_partitioned PAR
 ds=1
 PREHOOK: query: select * from stats_partitioned where ds is not null
 PREHOOK: type: QUERY
+PREHOOK: Input: default@stats_partitioned
 PREHOOK: Input: default@stats_partitioned@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from stats_partitioned where ds is not null
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stats_partitioned
 POSTHOOK: Input: default@stats_partitioned@ds=1
 #### A masked pattern was here ####
 POSTHOOK: Lineage: stats_non_partitioned.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -2324,10 +2326,12 @@ POSTHOOK: Lineage: stats_partitioned PAR
 ds=1
 PREHOOK: query: select * from stats_partitioned where ds is not null
 PREHOOK: type: QUERY
+PREHOOK: Input: default@stats_partitioned
 PREHOOK: Input: default@stats_partitioned@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from stats_partitioned where ds is not null
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stats_partitioned
 POSTHOOK: Input: default@stats_partitioned@ds=1
 #### A masked pattern was here ####
 POSTHOOK: Lineage: stats_non_partitioned.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats3.q.out?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats3.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats3.q.out Fri Mar 29 22:58:43 2013
@@ -111,10 +111,12 @@ POSTHOOK: Output: default@hive_test_dst@
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst
 PREHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
 #### A masked pattern was here ####
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst
 POSTHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
 #### A masked pattern was here ####
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
@@ -148,9 +150,11 @@ POSTHOOK: Lineage: hive_test_dst PARTITI
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
@@ -169,10 +173,12 @@ POSTHOOK: Lineage: hive_test_dst PARTITI
 6
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst
 PREHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
 #### A masked pattern was here ####
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst
 POSTHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
 #### A masked pattern was here ####
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
@@ -185,17 +191,21 @@ POSTHOOK: Lineage: hive_test_dst PARTITI
 6	test_part	test_Part
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from hive_test_dst where pcol1='test_Part'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_Part'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats4.q.out?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats4.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/stats4.q.out Fri Mar 29 22:58:43 2013
@@ -283,11 +283,13 @@ ds=2008-12-31/hr=11
 ds=2008-12-31/hr=12
 PREHOOK: query: select * from nzhang_part1 where ds is not null and hr is not null
 PREHOOK: type: QUERY
+PREHOOK: Input: default@nzhang_part1
 PREHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=11
 PREHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
 POSTHOOK: query: select * from nzhang_part1 where ds is not null and hr is not null
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@nzhang_part1
 POSTHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=11
 POSTHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
@@ -1301,11 +1303,13 @@ POSTHOOK: Lineage: nzhang_part2 PARTITIO
 97	val_97	2008-04-08	12
 PREHOOK: query: select * from nzhang_part2 where ds is not null and hr is not null
 PREHOOK: type: QUERY
+PREHOOK: Input: default@nzhang_part2
 PREHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=11
 PREHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=12
 #### A masked pattern was here ####
 POSTHOOK: query: select * from nzhang_part2 where ds is not null and hr is not null
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@nzhang_part2
 POSTHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=11
 POSTHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=12
 #### A masked pattern was here ####

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/truncate_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/truncate_table.q.out?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/truncate_table.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/truncate_table.q.out Fri Mar 29 22:58:43 2013
@@ -129,10 +129,12 @@ POSTHOOK: type: TRUNCATETABLE
 POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
 PREHOOK: query: select * from srcpart_truncate where ds='2008-04-08' and hr='11'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
 POSTHOOK: query: select * from srcpart_truncate where ds='2008-04-08' and hr='11'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
 PREHOOK: query: -- truncate partitions with partial spec
@@ -167,11 +169,13 @@ POSTHOOK: Output: default@srcpart_trunca
 POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
 PREHOOK: query: select * from srcpart_truncate where hr='12'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
 POSTHOOK: query: select * from srcpart_truncate where hr='12'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
@@ -208,6 +212,7 @@ POSTHOOK: Output: default@srcpart_trunca
 POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
 PREHOOK: query: select * from srcpart_truncate
 PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11
@@ -215,6 +220,7 @@ PREHOOK: Input: default@srcpart_truncate
 #### A masked pattern was here ####
 POSTHOOK: query: select * from srcpart_truncate
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java Fri Mar 29 22:58:43 2013
@@ -31,8 +31,10 @@ import org.apache.hadoop.io.Writable;
  * HiveDeserializer also provides the ObjectInspector which can be used to
  * inspect the internal structure of the object (that is returned by deserialize
  * function).
- *
+ * All deserializers should extend the abstract class AbstractDeserializer, and eventually
+ * Deserializer interface should be removed
  */
+@Deprecated
 public interface Deserializer {
 
   /**
@@ -50,7 +52,7 @@ public interface Deserializer {
    * Deserialize an object out of a Writable blob. In most cases, the return
    * value of this function will be constant since the function will reuse the
    * returned object. If the client wants to keep a copy of the object, the
-   * client needs to clone the returned value by calling
+   * client needs to clone the returnDeserializered value by calling
    * ObjectInspectorUtils.getStandardObject().
    *
    * @param blob
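
For reference, the Deserializer interface is being deprecated here in favor of the AbstractDeserializer base class. A minimal sketch of a read-only deserializer written against the new base class might look like the following; the class name, package, and single "line" column are illustrative only, not part of this commit.

import java.util.Arrays;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.AbstractDeserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Writable;

// Hypothetical example, not part of this commit: a read-only deserializer
// built on the new AbstractDeserializer base class.
public class SingleColumnTextDeserializer extends AbstractDeserializer {

  private ObjectInspector rowOI;

  @Override
  public void initialize(Configuration job, Properties tbl) throws SerDeException {
    // one string column named "line"; a real deserializer would read
    // column names and types from the table properties
    rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("line"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));
  }

  @Override
  public Object deserialize(Writable blob) throws SerDeException {
    // expose each row as a one-field struct
    return Arrays.asList(blob.toString());
  }

  @Override
  public ObjectInspector getObjectInspector() throws SerDeException {
    return rowOI;
  }

  @Override
  public SerDeStats getSerDeStats() {
    return null; // no statistics support
  }
}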

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java Fri Mar 29 22:58:43 2013
@@ -30,10 +30,10 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -42,7 +42,7 @@ import org.apache.hadoop.io.Writable;
  * MetadataTypedColumnsetSerDe.
  *
  */
-public class MetadataTypedColumnsetSerDe implements SerDe {
+public class MetadataTypedColumnsetSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory
       .getLog(MetadataTypedColumnsetSerDe.class.getName());
@@ -95,6 +95,7 @@ public class MetadataTypedColumnsetSerDe
     return defaultVal;
   }
 
+  @Override
   public void initialize(Configuration job, Properties tbl) throws SerDeException {
     String altSep = tbl.getProperty(serdeConstants.SERIALIZATION_FORMAT);
     separator = getByteValue(altSep, DefaultSeparator);
@@ -167,6 +168,7 @@ public class MetadataTypedColumnsetSerDe
 
   ColumnSet deserializeCache = new ColumnSet();
 
+  @Override
   public Object deserialize(Writable field) throws SerDeException {
     String row = null;
     if (field instanceof BytesWritable) {
@@ -193,16 +195,19 @@ public class MetadataTypedColumnsetSerDe
     }
   }
 
+  @Override
   public ObjectInspector getObjectInspector() throws SerDeException {
     return cachedObjectInspector;
   }
 
+  @Override
   public Class<? extends Writable> getSerializedClass() {
     return Text.class;
   }
 
   Text serializeCache = new Text();
 
+  @Override
   public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
 
     if (objInspector.getCategory() != Category.STRUCT) {
@@ -232,6 +237,7 @@ public class MetadataTypedColumnsetSerDe
     return serializeCache;
   }
 
+  @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java Fri Mar 29 22:58:43 2013
@@ -32,7 +32,7 @@ import org.apache.hadoop.io.Writable;
  * Placeholder SerDe for cases where neither serialization nor deserialization is needed
  *
  */
-public class NullStructSerDe implements SerDe {
+public class NullStructSerDe extends AbstractSerDe {
 
   class NullStructField implements StructField {
     @Override

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java Fri Mar 29 22:58:43 2013
@@ -57,7 +57,7 @@ import org.apache.hadoop.io.Writable;
  * writableStringObjectInspector. We should switch to that when we have a UTF-8
  * based Regex library.
  */
-public class RegexSerDe implements SerDe {
+public class RegexSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(RegexSerDe.class.getName());
 
@@ -249,6 +249,7 @@ public class RegexSerDe implements SerDe
           "Regex SerDe doesn't support the serialize() method");
   }
 
+  @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java Fri Mar 29 22:58:43 2013
@@ -20,14 +20,16 @@ package org.apache.hadoop.hive.serde2;
 
 /**
  * A union of HiveDeserializer and HiveSerializer interface.
- * 
+ *
  * If a developer wants his hive table to be read-only, then he just want to
  * return
- * 
+ *
  * both readable and writable, then
- * 
- * 
+ *
+ * All serdes should extend the abstract class AbstractSerDe, and eventually SerDe interface
+ * should be removed
  */
+@Deprecated
 public interface SerDe extends Deserializer, Serializer {
 
 }
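
This is the central pattern of the SerDe-side changes in this commit: concrete SerDes switch from "implements SerDe" to "extends AbstractSerDe", with the old interface kept only for compatibility. A minimal, self-contained sketch of a pass-through SerDe written directly against AbstractSerDe might look like this; the class and column names are illustrative, not part of this commit.

import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// Toy pass-through SerDe: one string column, stored as plain text.
// Hypothetical example, not part of this commit.
public class PassThroughTextSerDe extends AbstractSerDe {

  private StructObjectInspector rowOI;
  private final Text serializeCache = new Text();

  @Override
  public void initialize(Configuration conf, Properties tbl) throws SerDeException {
    rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("line"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));
  }

  @Override
  public Class<? extends Writable> getSerializedClass() {
    return Text.class;
  }

  @Override
  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<Object> fields = soi.getStructFieldsDataAsList(obj);
    serializeCache.set(fields.get(0) == null ? "" : fields.get(0).toString());
    return serializeCache;
  }

  @Override
  public Object deserialize(Writable blob) throws SerDeException {
    return Arrays.asList(blob.toString());
  }

  @Override
  public ObjectInspector getObjectInspector() throws SerDeException {
    return rowOI;
  }

  @Override
  public SerDeStats getSerDeStats() {
    return null; // statistics not supported
  }
}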

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java Fri Mar 29 22:58:43 2013
@@ -28,8 +28,10 @@ import org.apache.hadoop.io.Writable;
  * HiveSerializer is used to serialize data to a Hadoop Writable object. The
  * serialize In addition to the interface below, all implementations are assume
  * to have a ctor that takes a single 'Table' object as argument.
- *
+ * All serializers should extend the abstract class AbstractSerializer, and eventually
+ * Serializer interface should be removed
  */
+@Deprecated
 public interface Serializer {
 
   /**

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java Fri Mar 29 22:58:43 2013
@@ -33,7 +33,7 @@ import org.apache.hadoop.util.Reflection
  * TypedSerDe.
  *
  */
-public abstract class TypedSerDe implements SerDe {
+public abstract class TypedSerDe extends AbstractSerDe {
 
   protected Type objectType;
   protected Class<?> objectClass;
@@ -52,6 +52,7 @@ public abstract class TypedSerDe impleme
 
   protected Object deserializeCache;
 
+  @Override
   public Object deserialize(Writable blob) throws SerDeException {
     if (deserializeCache == null) {
       return ReflectionUtils.newInstance(objectClass, null);
@@ -61,6 +62,7 @@ public abstract class TypedSerDe impleme
     }
   }
 
+  @Override
   public ObjectInspector getObjectInspector() throws SerDeException {
     return ObjectInspectorFactory.getReflectionObjectInspector(objectType,
         getObjectInspectorOptions());
@@ -70,18 +72,22 @@ public abstract class TypedSerDe impleme
     return ObjectInspectorFactory.ObjectInspectorOptions.JAVA;
   }
 
+  @Override
   public void initialize(Configuration job, Properties tbl) throws SerDeException {
     // do nothing
   }
 
+  @Override
   public Class<? extends Writable> getSerializedClass() {
     return BytesWritable.class;
   }
 
+  @Override
   public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
     throw new RuntimeException("not supported");
   }
 
+  @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java Fri Mar 29 22:58:43 2013
@@ -17,24 +17,24 @@
  */
 package org.apache.hadoop.hive.serde2.avro;
 
+import java.util.List;
+import java.util.Properties;
+
 import org.apache.avro.Schema;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.io.Writable;
 
-import java.util.List;
-import java.util.Properties;
-
 /**
  * Read or write Avro data from Hive.
  */
-public class AvroSerDe implements SerDe {
+public class AvroSerDe extends AbstractSerDe {
   private static final Log LOG = LogFactory.getLog(AvroSerDe.class);
   private ObjectInspector oi;
   private List<String> columnNames;
@@ -48,8 +48,9 @@ public class AvroSerDe implements SerDe 
   @Override
   public void initialize(Configuration configuration, Properties properties) throws SerDeException {
     // Reset member variables so we don't get in a half-constructed state
-    if(schema != null)
+    if(schema != null) {
       LOG.info("Resetting already initialized AvroSerDe");
+    }
 
     schema = null;
     oi = null;
@@ -80,13 +81,17 @@ public class AvroSerDe implements SerDe 
 
   @Override
   public Writable serialize(Object o, ObjectInspector objectInspector) throws SerDeException {
-    if(badSchema) throw new BadSchemaException();
+    if(badSchema) {
+      throw new BadSchemaException();
+    }
     return getSerializer().serialize(o, objectInspector, columnNames, columnTypes, schema);
   }
 
   @Override
   public Object deserialize(Writable writable) throws SerDeException {
-    if(badSchema) throw new BadSchemaException();
+    if(badSchema) {
+      throw new BadSchemaException();
+    }
     return getDeserializer().deserialize(columnNames, columnTypes, writable, schema);
   }
 
@@ -102,13 +107,17 @@ public class AvroSerDe implements SerDe 
   }
 
   private AvroDeserializer getDeserializer() {
-    if(avroDeserializer == null) avroDeserializer = new AvroDeserializer();
+    if(avroDeserializer == null) {
+      avroDeserializer = new AvroDeserializer();
+    }
 
     return avroDeserializer;
   }
 
   private AvroSerializer getSerializer() {
-    if(avroSerializer == null) avroSerializer = new AvroSerializer();
+    if(avroSerializer == null) {
+      avroSerializer = new AvroSerializer();
+    }
 
     return avroSerializer;
   }

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Fri Mar 29 22:58:43 2013
@@ -33,7 +33,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
@@ -99,7 +99,7 @@ import org.apache.hadoop.io.Writable;
  * fields in the same top-level field will have the same sort order.
  *
  */
-public class BinarySortableSerDe implements SerDe {
+public class BinarySortableSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(BinarySortableSerDe.class
       .getName());
@@ -111,7 +111,7 @@ public class BinarySortableSerDe impleme
   StructObjectInspector rowObjectInspector;
 
   boolean[] columnSortOrderIsDesc;
-  
+
   private static byte[] decimalBuffer = null;
   private static Charset decimalCharSet = Charset.forName("US-ASCII");
 
@@ -186,7 +186,7 @@ public class BinarySortableSerDe impleme
 
   static Object deserialize(InputByteBuffer buffer, TypeInfo type,
       boolean invert, Object reuse) throws IOException {
-      
+
     // Is this field a null?
     byte isNull = buffer.read(invert);
     if (isNull == 0) {
@@ -378,33 +378,33 @@ public class BinarySortableSerDe impleme
         }
         t.setBinarySortable(bytes, 0);
         return t;
-        
+
       case DECIMAL: {
         // See serialization of decimal for explanation (below)
 
         BigDecimalWritable bdw = (reuse == null ? new BigDecimalWritable() :
           (BigDecimalWritable) reuse);
-        
+
         int b = buffer.read(invert) - 1;
         assert (b == 1 || b == -1 || b == 0);
         boolean positive = b != -1;
-        
+
         int factor = buffer.read(invert) ^ 0x80;
         for (int i = 0; i < 3; i++) {
           factor = (factor << 8) + (buffer.read(invert) & 0xff);
         }
-        
+
         if (!positive) {
           factor = -factor;
         }
-        
+
         int start = buffer.tell();
         int length = 0;
-        
+
         do {
           b = buffer.read(positive ? invert : !invert);
           assert(b != 1);
-          
+
           if (b == 0) {
             // end of digits
             break;
@@ -412,7 +412,7 @@ public class BinarySortableSerDe impleme
 
           length++;
         } while (true);
-        
+
         if(decimalBuffer == null || decimalBuffer.length < length) {
           decimalBuffer = new byte[length];
         }
@@ -428,11 +428,11 @@ public class BinarySortableSerDe impleme
         String digits = new String(decimalBuffer, 0, length, decimalCharSet);
         BigInteger bi = new BigInteger(digits);
         BigDecimal bd = new BigDecimal(bi).scaleByPowerOfTen(factor-length);
-        
+
         if (!positive) {
           bd = bd.negate();
         }
-        
+
         bdw.set(bd);
         return bdw;
       }
@@ -443,7 +443,7 @@ public class BinarySortableSerDe impleme
       }
       }
     }
-    
+
     case LIST: {
       ListTypeInfo ltype = (ListTypeInfo) type;
       TypeInfo etype = ltype.getListElementTypeInfo();
@@ -690,32 +690,32 @@ public class BinarySortableSerDe impleme
 
         BigDecimalObjectInspector boi = (BigDecimalObjectInspector) poi;
         BigDecimal dec = boi.getPrimitiveJavaObject(o).stripTrailingZeros();
-        
+
         // get the sign of the big decimal
         int sign = dec.compareTo(BigDecimal.ZERO);
-        
+
         // we'll encode the absolute value (sign is separate)
         dec = dec.abs();
-        
+
         // get the scale factor to turn big decimal into a decimal < 1
         int factor = dec.precision() - dec.scale();
         factor = sign == 1 ? factor : -factor;
-        
+
         // convert the absolute big decimal to string
         dec.scaleByPowerOfTen(Math.abs(dec.scale()));
         String digits = dec.unscaledValue().toString();
-        
+
         // finally write out the pieces (sign, scale, digits)
         buffer.write((byte) ( sign + 1), invert);
         buffer.write((byte) ((factor >> 24) ^ 0x80), invert);
         buffer.write((byte) ( factor >> 16), invert);
         buffer.write((byte) ( factor >> 8), invert);
         buffer.write((byte)   factor, invert);
-        serializeBytes(buffer, digits.getBytes(decimalCharSet), 
+        serializeBytes(buffer, digits.getBytes(decimalCharSet),
             digits.length(), sign == -1 ? !invert : invert);
         return;
       }
-        
+
       default: {
         throw new RuntimeException("Unrecognized type: "
             + poi.getPrimitiveCategory());

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDeBase.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDeBase.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDeBase.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDeBase.java Fri Mar 29 22:58:43 2013
@@ -18,14 +18,14 @@
 
 package org.apache.hadoop.hive.serde2.columnar;
 
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
 
-public abstract class ColumnarSerDeBase implements SerDe {
+public abstract class ColumnarSerDeBase extends AbstractSerDe {
 
   // The object for storing row data
   ColumnarStructBase cachedLazyStruct;

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java Fri Mar 29 22:58:43 2013
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.serde2.dynamic_type;
 
 import java.io.ByteArrayInputStream;
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
@@ -28,8 +27,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -50,7 +49,7 @@ import org.apache.thrift.transport.TIOSt
  * DynamicSerDe.
  *
  */
-public class DynamicSerDe implements SerDe, Serializable {
+public class DynamicSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(DynamicSerDe.class.getName());
 

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyArray.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyArray.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyArray.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyArray.java Fri Mar 29 22:58:43 2013
@@ -187,7 +187,7 @@ public class LazyArray extends LazyNonPr
         && 0 == LazyUtils
         .compare(bytes.getData(), startPosition[index], elementLength,
         nullSequence.getBytes(), 0, nullSequence.getLength())) {
-      return null;
+      return arrayElements[index] = null;
     }
     arrayElements[index] = LazyFactory
         .createLazyObject(oi.getListElementObjectInspector());
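
For context on the one-line change above (HIVE-4149): writing the null back into arrayElements[index] keeps repeated lookups of the same element consistent, whereas the old code returned null without touching the cache, so a later lookup could surface a stale element left over from a previous row. The TestLazyArrayMapStruct change later in this commit exercises exactly that case; a stripped-down, hypothetical illustration of the two patterns (not Hive code) is:

// Hypothetical illustration of the caching patterns; not Hive code.
public class NullCacheDemo {
  public static void main(String[] args) {
    Object[] cache = {"stale element from a previous row"};

    // old pattern: report null but leave the cache untouched
    Object first = null;                           // "return null;"
    Object second = cache[0];                      // later lookup hits the stale entry
    System.out.println(first + " vs " + second);   // null vs stale element

    // new pattern: store the null while returning it
    first = cache[0] = null;                       // "return arrayElements[index] = null;"
    second = cache[0];                             // later lookup now also sees null
    System.out.println(first + " vs " + second);   // null vs null
  }
}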

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java Fri Mar 29 22:58:43 2013
@@ -28,6 +28,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -35,11 +36,11 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -59,7 +60,7 @@ import org.apache.hadoop.io.Writable;
  * Also LazySimpleSerDe outputs typed columns instead of treating all columns as
  * String like MetadataTypedColumnsetSerDe.
  */
-public class LazySimpleSerDe implements SerDe {
+public class LazySimpleSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(LazySimpleSerDe.class
       .getName());

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java Fri Mar 29 22:58:43 2013
@@ -28,9 +28,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
-import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
@@ -67,7 +67,7 @@ import org.apache.hadoop.io.Writable;
  * deserialized until required. Binary means a field is serialized in binary
  * compact format.
  */
-public class LazyBinarySerDe implements SerDe {
+public class LazyBinarySerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(LazyBinarySerDe.class
       .getName());

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBigDecimalObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBigDecimalObjectInspector.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBigDecimalObjectInspector.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBigDecimalObjectInspector.java Fri Mar 29 22:58:43 2013
@@ -32,7 +32,14 @@ public class JavaBigDecimalObjectInspect
 
   @Override
   public BigDecimalWritable getPrimitiveWritableObject(Object o) {
-    return o == null ? null : new BigDecimalWritable((BigDecimal) o);
+    if (o == null) {
+      return null;
+    }
+
+    if (o instanceof String) {
+      o = new BigDecimal((String)o);
+    }
+    return new BigDecimalWritable((BigDecimal) o);
   }
 
   @Override
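
The change above lets the BigDecimal inspector accept a value that is still a String (for example an unparsed decimal literal) instead of failing on the cast. A hypothetical call-site sketch follows; the PrimitiveObjectInspectorFactory field name used here is assumed, not something this diff shows.

import java.math.BigDecimal;

import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaBigDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Hypothetical usage sketch; the factory field name (javaBigDecimalObjectInspector)
// is an assumption and not part of this diff.
public class BigDecimalOIDemo {
  public static void main(String[] args) {
    JavaBigDecimalObjectInspector oi =
        PrimitiveObjectInspectorFactory.javaBigDecimalObjectInspector;

    // both forms yield a BigDecimalWritable after this change
    BigDecimalWritable fromDecimal = oi.getPrimitiveWritableObject(new BigDecimal("3.14"));
    BigDecimalWritable fromString  = oi.getPrimitiveWritableObject("3.14");

    System.out.println(fromDecimal + " " + fromString);
  }
}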

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftDeserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftDeserializer.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftDeserializer.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftDeserializer.java Fri Mar 29 22:58:43 2013
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.serde2.th
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.AbstractDeserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -32,13 +32,14 @@ import org.apache.thrift.protocol.TProto
  * ThriftDeserializer.
  *
  */
-public class ThriftDeserializer implements Deserializer {
+public class ThriftDeserializer extends AbstractDeserializer {
 
   private ThriftByteStreamTypedSerDe tsd;
 
   public ThriftDeserializer() {
   }
 
+  @Override
   public void initialize(Configuration job, Properties tbl)
       throws SerDeException {
     try {
@@ -68,14 +69,17 @@ public class ThriftDeserializer implemen
     }
   }
 
+  @Override
   public Object deserialize(Writable field) throws SerDeException {
     return tsd.deserialize(field);
   }
 
+  @Override
   public ObjectInspector getObjectInspector() throws SerDeException {
     return tsd.getObjectInspector();
   }
 
+  @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;

Modified: hive/branches/ptf-windowing/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java (original)
+++ hive/branches/ptf-windowing/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java Fri Mar 29 22:58:43 2013
@@ -95,6 +95,20 @@ public class TestLazyArrayMapStruct exte
       assertNull((b.getListElementObject(5)));
       assertEquals(5, b.getList().size());
 
+      // -- HIVE-4149
+      b = (LazyArray) LazyFactory.createLazyObject(oi);
+
+      data = new byte[] {'a', '\t', '\\', 'N'};
+      TestLazyPrimitive.initLazyObject(b, data, 0, data.length);
+      assertEquals(new Text("a"), ((LazyString) b.getListElementObject(0)).getWritableObject());
+      assertNull(b.getListElementObject(1));
+
+      data = new byte[] {'\\', 'N', '\t', 'a'};
+      TestLazyPrimitive.initLazyObject(b, data, 0, data.length);
+      assertNull(b.getListElementObject(0));
+      assertNull(b.getListElementObject(0));  // twice (returns not cleaned cache)
+      assertEquals(new Text("a"), ((LazyString) b.getListElementObject(1)).getWritableObject());
+
     } catch (Throwable e) {
       e.printStackTrace();
       throw e;

Modified: hive/branches/ptf-windowing/shims/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/ivy.xml?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/ivy.xml (original)
+++ hive/branches/ptf-windowing/shims/ivy.xml Fri Mar 29 22:58:43 2013
@@ -91,6 +91,36 @@
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
 
+    <!-- jobclient tests dependency -->
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient" rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default" transitive="false">
+      <artifact name="hadoop-mapreduce-client-jobclient" ext="jar" />
+      <artifact name="hadoop-mapreduce-client-jobclient" type="tests" ext="jar" m:classifier="tests"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default">
+      <artifact name="hadoop-yarn-server-tests" type="tests" ext="jar" m:classifier="tests"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+
     <!-- Hadoop 0.20 shim dependencies. Used for building 0.20 shims. -->
     <dependency org="com.google.guava" name="guava" rev="${guava-hadoop20.version}"
                 conf="hadoop0.20.shim->default" transitive="false"/>

Modified: hive/branches/ptf-windowing/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/branches/ptf-windowing/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Fri Mar 29 22:58:43 2013
@@ -52,6 +52,7 @@ import org.apache.hadoop.mapred.InputSpl
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.OutputCommitter;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
@@ -100,6 +101,43 @@ public class Hadoop20Shims implements Ha
     // gone in 20+
   }
 
+
+  /**
+   * Returns a shim to wrap MiniMrCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
+                                     String nameNode, int numDir) throws IOException {
+    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
+  }
+
+  /**
+   * Shim for MiniMrCluster
+   */
+  public class MiniMrShim implements HadoopShims.MiniMrShim {
+
+    private final MiniMRCluster mr;
+
+    public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
+        String nameNode, int numDir) throws IOException {
+      this.mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir);
+    }
+
+    @Override
+    public int getJobTrackerPort() throws UnsupportedOperationException {
+      return mr.getJobTrackerPort();
+    }
+
+    @Override
+    public void shutdown() throws IOException {
+      mr.shutdown();
+    }
+
+    @Override
+    public void setupConfiguration(Configuration conf) {
+      setJobLauncherRpcAddress(conf, "localhost:" + mr.getJobTrackerPort());
+    }
+  }
+
   public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
       int numDataNodes,
       boolean format,
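
The MiniMrShim introduced here wraps the Hadoop-version-specific MiniMRCluster behind the common HadoopShims interface, so tests can start a mini MR cluster without compiling against a particular Hadoop version. A hypothetical test-setup sketch, using only the shim methods shown in this diff (the "file:///" name-node URI and the ShimLoader lookup are assumptions):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;

// Hypothetical test-setup sketch built on the shim entry points in this diff.
public class MiniMrShimDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    HadoopShims shims = ShimLoader.getHadoopShims();

    // 4 task trackers, local filesystem as the "name node", one directory
    HadoopShims.MiniMrShim mr =
        shims.getMiniMrCluster(conf, 4, "file:///", 1);
    try {
      // point the job client at the mini cluster
      mr.setupConfiguration(conf);
      System.out.println("job tracker port: " + mr.getJobTrackerPort());
      // ... run MapReduce-backed tests against conf here ...
    } finally {
      mr.shutdown();
    }
  }
}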

Modified: hive/branches/ptf-windowing/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/branches/ptf-windowing/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Fri Mar 29 22:58:43 2013
@@ -26,6 +26,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskLogServlet;
@@ -119,4 +121,71 @@ public class Hadoop20SShims extends Hado
   public short getDefaultReplication(FileSystem fs, Path path) {
     return fs.getDefaultReplication();
   }
+
+  /**
+   * Returns a shim to wrap MiniMrCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers, 
+                                     String nameNode, int numDir) throws IOException {
+    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
+  }
+
+  /**
+   * Shim for MiniMrCluster
+   */
+  public class MiniMrShim implements HadoopShims.MiniMrShim {
+
+    private final MiniMRCluster mr;
+
+    public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
+        String nameNode, int numDir) throws IOException {
+      this.mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir);
+    }
+
+    @Override
+    public int getJobTrackerPort() throws UnsupportedOperationException {
+      return mr.getJobTrackerPort();
+    }
+
+    @Override
+    public void shutdown() throws IOException {
+      mr.shutdown();
+    }
+
+    @Override
+    public void setupConfiguration(Configuration conf) {
+      setJobLauncherRpcAddress(conf, "localhost:" + mr.getJobTrackerPort());
+    }
+  }
+
+  // Don't move this code to the parent class. There's a binary
+  // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
+  // need to have two different shim classes even though they are
+  // exactly the same.
+  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
+      int numDataNodes,
+      boolean format,
+      String[] racks) throws IOException {
+    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
+  }
+
+  /**
+   * MiniDFSShim.
+   *
+   */
+  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
+    private final MiniDFSCluster cluster;
+
+    public MiniDFSShim(MiniDFSCluster cluster) {
+      this.cluster = cluster;
+    }
+
+    public FileSystem getFileSystem() throws IOException {
+      return cluster.getFileSystem();
+    }
+
+    public void shutdown() {
+      cluster.shutdown();
+    }
+  }
 }

Modified: hive/branches/ptf-windowing/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/ptf-windowing/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Fri Mar 29 22:58:43 2013
@@ -21,13 +21,17 @@ import java.io.IOException;
 import java.lang.Integer;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.Map;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.Job;
@@ -132,4 +136,91 @@ public class Hadoop23Shims extends Hadoo
           throws IOException {
     return Trash.moveToAppropriateTrash(fs, path, conf);
   }
+
+  /**
+   * Returns a shim to wrap MiniMRCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers, 
+                                     String nameNode, int numDir) throws IOException {
+    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
+  }
+
+  /**
+   * Shim for MiniMRCluster
+   */
+  public class MiniMrShim implements HadoopShims.MiniMrShim {
+
+    private final MiniMRCluster mr;
+    private final Configuration conf;
+
+    public MiniMrShim(Configuration conf, int numberOfTaskTrackers, 
+                      String nameNode, int numDir) throws IOException {
+      this.conf = conf;
+
+      JobConf jConf = new JobConf(conf);
+      jConf.set("yarn.scheduler.capacity.root.queues", "default");
+      jConf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+
+      mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir, null, null, jConf);
+    }
+
+    @Override
+    public int getJobTrackerPort() throws UnsupportedOperationException {
+      String address = conf.get("yarn.resourcemanager.address");
+      address = StringUtils.substringAfterLast(address, ":");
+
+      if (StringUtils.isBlank(address)) {
+        throw new IllegalArgumentException("Invalid YARN resource manager port.");
+      }
+      
+      return Integer.parseInt(address);
+    }
+
+    @Override
+    public void shutdown() throws IOException {
+      mr.shutdown();
+    }
+    
+    @Override
+    public void setupConfiguration(Configuration conf) {
+      JobConf jConf = mr.createJobConf();
+      // Copy every setting from the mini cluster's generated JobConf into the
+      // caller's Configuration, overwriting any existing values, so that jobs
+      // are submitted to the mini cluster.
+      for (Map.Entry<String, String> pair : jConf) {
+        conf.set(pair.getKey(), pair.getValue());
+      }
+    }
+  }
+  
+  // Don't move this code to the parent class. There's a binary
+  // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
+  // need to have two different shim classes even though they are
+  // exactly the same.
+  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
+      int numDataNodes,
+      boolean format,
+      String[] racks) throws IOException {
+    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
+  }
+
+  /**
+   * MiniDFSShim.
+   *
+   */
+  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
+    private final MiniDFSCluster cluster;
+
+    public MiniDFSShim(MiniDFSCluster cluster) {
+      this.cluster = cluster;
+    }
+
+    public FileSystem getFileSystem() throws IOException {
+      return cluster.getFileSystem();
+    }
+
+    public void shutdown() {
+      cluster.shutdown();
+    }
+  }
 }
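
The Hadoop 2 variant above has no real JobTracker, so getJobTrackerPort() parses the port out of yarn.resourcemanager.address. A standalone sketch of that parsing follows, using the same commons-lang calls as the shim; the address value and the class name RmPortSketch are arbitrary examples, not something this commit sets.

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;

public class RmPortSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Arbitrary example value; MiniMRCluster normally populates this itself.
    conf.set("yarn.resourcemanager.address", "localhost:8032");

    // Same extraction as Hadoop23Shims.MiniMrShim.getJobTrackerPort().
    String address = conf.get("yarn.resourcemanager.address");
    address = StringUtils.substringAfterLast(address, ":");
    if (StringUtils.isBlank(address)) {
      throw new IllegalArgumentException("Invalid YARN resource manager port.");
    }
    System.out.println(Integer.parseInt(address));  // prints 8032
  }
}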

Modified: hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Fri Mar 29 22:58:43 2013
@@ -37,7 +37,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
 import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
@@ -106,33 +105,6 @@ public abstract class HadoopShimsSecure 
     // gone in 20+
   }
 
-  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
-      int numDataNodes,
-      boolean format,
-      String[] racks) throws IOException {
-    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
-  }
-
-  /**
-   * MiniDFSShim.
-   *
-   */
-  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
-    private final MiniDFSCluster cluster;
-
-    public MiniDFSShim(MiniDFSCluster cluster) {
-      this.cluster = cluster;
-    }
-
-    public FileSystem getFileSystem() throws IOException {
-      return cluster.getFileSystem();
-    }
-
-    public void shutdown() {
-      cluster.shutdown();
-    }
-  }
-
   /**
    * We define this function here to make the code compatible between
    * hadoop 0.17 and hadoop 0.20.

Modified: hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1462670&r1=1462669&r2=1462670&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Fri Mar 29 22:58:43 2013
@@ -113,6 +113,21 @@ public interface HadoopShims {
   long getAccessTime(FileStatus file);
 
   /**
+   * Returns a shim to wrap MiniMRCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
+                                     String nameNode, int numDir) throws IOException;
+
+  /**
+   * Shim for MiniMRCluster
+   */
+  public interface MiniMrShim {
+    public int getJobTrackerPort() throws UnsupportedOperationException;
+    public void shutdown() throws IOException;
+    public void setupConfiguration(Configuration conf);
+  }
+
+  /**
    * Returns a shim to wrap MiniDFSCluster. This is necessary since this class
    * was moved from org.apache.hadoop.dfs to org.apache.hadoop.hdfs
    */
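
Any implementation of the new MiniMrShim interface must satisfy the three-method contract declared above. As a purely hypothetical illustration (not part of this commit), a trivial local-mode implementation could look like the sketch below; it assumes the Hadoop 1-era mapred.job.tracker=local convention, whereas the real shims wrap MiniMRCluster as shown earlier.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.shims.HadoopShims;

// Hypothetical illustration only; real shims wrap MiniMRCluster as above.
public class LocalOnlyMrShim implements HadoopShims.MiniMrShim {

  @Override
  public int getJobTrackerPort() throws UnsupportedOperationException {
    // Local mode has no job tracker to report.
    throw new UnsupportedOperationException("no job tracker in local mode");
  }

  @Override
  public void shutdown() throws IOException {
    // Nothing to tear down in-process.
  }

  @Override
  public void setupConfiguration(Configuration conf) {
    // Run MapReduce jobs in the local runner (Hadoop 1-era key).
    conf.set("mapred.job.tracker", "local");
  }
}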


