Author: zshao
Date: Tue Mar 24 23:44:50 2009
New Revision: 758089
URL: http://svn.apache.org/viewvc?rev=758089&view=rev
Log:
HIVE-337. LazySimpleSerDe to support multi-level nested array, map, struct types. (zshao)
Added:
hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_lazyserde.q
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_lazyserde.q.out
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/ByteArrayRef.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyArray.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (contents, props changed)
- copied, changed from r757479, hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyNonPrimitive.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/LazyListObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/LazyMapObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/ (props changed)
- copied from r755033, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java
Removed:
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map_skew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_noskew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map_skew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_noskew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map_skew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map_skew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_noskew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_map_skew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_noskew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8_map_skew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8_noskew.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input7.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input8.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_dynamicserde.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testsequencefile.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath3.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join14.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join17.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join3.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join4.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join5.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join6.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join7.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join8.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce6.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce7.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce8.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/quote1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample4.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample5.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample6.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample7.q.out
hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input3.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input7.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input8.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_part1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join3.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join5.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join7.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample3.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample5.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample7.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/subq.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf1.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyByte.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDouble.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyInteger.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyLong.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyObject.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyShort.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyString.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/LazySimpleStructObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/MapObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/ListTypeInfo.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/MapTypeInfo.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/PrimitiveTypeInfo.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/StructTypeInfo.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Tue Mar 24 23:44:50 2009
@@ -6,6 +6,9 @@
NEW FEATURES
+ HIVE-337. LazySimpleSerDe to support multi-level nested array, map, struct
+ types. (zshao)
+
HIVE-313. Add UDF date_add, date_sub, datediff. (zshao)
HIVE-79. Print number of rows inserted to table(s).
Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Tue Mar 24 23:44:50 2009
@@ -52,6 +52,8 @@
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.util.StringUtils;
public class MetaStoreUtils {
@@ -486,6 +488,32 @@
return schema;
}
+ /** Convert FieldSchemas to columnNames.
+ */
+ public static String getColumnNamesFromFieldSchema(List<FieldSchema> fieldSchemas) {
+ StringBuilder sb = new StringBuilder();
+ for (int i=0; i<fieldSchemas.size(); i++) {
+ if (i>0) {
+ sb.append(",");
+ }
+ sb.append(fieldSchemas.get(i).getName());
+ }
+ return sb.toString();
+ }
+
+ /** Convert FieldSchemas to columnTypes.
+ */
+ public static String getColumnTypesFromFieldSchema(List<FieldSchema> fieldSchemas) {
+ StringBuilder sb = new StringBuilder();
+ for (int i=0; i<fieldSchemas.size(); i++) {
+ if (i>0) {
+ sb.append(",");
+ }
+ sb.append(fieldSchemas.get(i).getType());
+ }
+ return sb.toString();
+ }
+
public static void makeDir(Path path, HiveConf hiveConf) throws MetaException {
FileSystem fs;
try {
@@ -564,4 +592,13 @@
return str_fields;
}
+ /**
+ * Convert TypeInfo to FieldSchema.
+ */
+ public static FieldSchema getFieldSchemaFromTypeInfo(String fieldName, TypeInfo typeInfo) {
+ return new FieldSchema(
+ fieldName, TypeInfoUtils.getTypeStringFromTypeInfo(typeInfo), "generated by TypeInfoUtils.getFieldSchemaFromTypeInfo"
+ );
+ }
+
}
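
For reference, a minimal sketch (not part of this patch) of how the two helpers added above might be exercised. The behavior follows directly from the hunk; the FieldSchema import path and its three-argument constructor (name, type, comment) are taken from the metastore API as used elsewhere in this diff:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;   // assumed import path

public class ColumnSchemaSketch {
  public static void main(String[] args) {
    // Two columns, one of them a nested type that LazySimpleSerDe can now handle.
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("a", "array<int>", null));
    cols.add(new FieldSchema("c", "map<string,string>", null));
    // Comma-separated names and types, as written into the "columns" and
    // "columns.types" table properties elsewhere in this patch.
    System.out.println(MetaStoreUtils.getColumnNamesFromFieldSchema(cols)); // a,c
    System.out.println(MetaStoreUtils.getColumnTypesFromFieldSchema(cols)); // array<int>,map<string,string>
  }
}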
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java Tue Mar 24 23:44:50 2009
@@ -21,8 +21,8 @@
import java.lang.Class;
import java.io.*;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
/**
* Implementation for ColumnInfo which contains the internal name for the
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Tue Mar 24 23:44:50 2009
@@ -514,7 +514,7 @@
tbl.getTTable().getSd().setCols(oldCols);
}
} else if (alterTbl.getOp() == alterTableDesc.alterTableTypes.REPLACECOLS) {
- // change SerDe to MetadataTypedColumnsetSerDe if it is columnsetSerDe
+ // change SerDe to LazySimpleSerDe if it is columnsetSerDe
if (tbl.getSerializationLib().equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
console
.printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe");
@@ -669,22 +669,8 @@
* we will have to use DynamicSerDe instead.
*/
if (crtTbl.getSerName() == null) {
- boolean useDynamicSerDe = false;
- if (crtTbl.getCols() != null) {
- for (FieldSchema field: crtTbl.getCols()) {
- if (field.getType().indexOf('<') >= 0 || field.getType().indexOf('>') >= 0) {
- useDynamicSerDe = true;
- }
- }
- }
- if (useDynamicSerDe) {
- LOG.info("Default to DynamicSerDe for table " + crtTbl.getTableName() );
- tbl.setSerializationLib(org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe.class.getName());
- tbl.setSerdeParam(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName());
- } else {
- LOG.info("Default to LazySimpleSerDe for table " + crtTbl.getTableName() );
- tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
- }
+ LOG.info("Default to LazySimpleSerDe for table " + crtTbl.getTableName() );
+ tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
}
if (crtTbl.getComment() != null)
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Tue Mar 24 23:44:50 2009
@@ -337,7 +337,9 @@
job.setMapperClass(ExecMapper.class);
job.setMapOutputKeyClass(HiveKey.class);
- job.setMapOutputValueClass(BytesWritable.class);
+ // LazySimpleSerDe writes to Text
+ // Revert to DynamicSerDe: job.setMapOutputValueClass(BytesWritable.class);
+ job.setMapOutputValueClass(Text.class);
job.setNumReduceTasks(work.getNumReduceTasks().intValue());
job.setReducerClass(ExecReducer.class);
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java Tue Mar 24 23:44:50 2009
@@ -26,8 +26,11 @@
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.util.ReflectionUtils;
public class ExprNodeFuncEvaluator extends ExprNodeEvaluator {
@@ -70,7 +73,21 @@
// Evaluate all children first
for(int i=0; i<paramEvaluators.length; i++) {
paramEvaluators[i].evaluate(row, rowInspector, paramInspectableObjects[i]);
- paramValues[i] = paramInspectableObjects[i].o;
+ Category c = paramInspectableObjects[i].oi.getCategory();
+ // TODO: Both getList and getMap are not very efficient.
+ // We should convert them to UDFTemplate - UDFs that accepts Object with
+ // ObjectInspectors when needed.
+ if (c.equals(Category.LIST)) {
+ // Need to pass a Java List for List type
+ paramValues[i] = ((ListObjectInspector)paramInspectableObjects[i].oi)
+ .getList(paramInspectableObjects[i].o);
+ } else if (c.equals(Category.MAP)) {
+ // Need to pass a Java Map for Map type
+ paramValues[i] = ((MapObjectInspector)paramInspectableObjects[i].oi)
+ .getMap(paramInspectableObjects[i].o);
+ } else {
+ paramValues[i] = paramInspectableObjects[i].o;
+ }
}
result.o = FunctionRegistry.invoke(udfMethod, udf, paramValues);
result.oi = outputObjectInspector;
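
The hunk above changes UDF argument passing so that LIST and MAP values are materialized into java.util.List / java.util.Map before the UDF is invoked. A minimal sketch of that dispatch, restated outside the evaluator (the helper name and wrapper class are hypothetical, not part of the patch):

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;

public class UdfArgSketch {
  // Convert a lazily-deserialized value into the plain Java object a UDF expects.
  static Object toJavaObject(Object o, ObjectInspector oi) {
    Category c = oi.getCategory();
    if (c.equals(Category.LIST)) {
      // ARRAY types are handed to the UDF as a java.util.List
      return ((ListObjectInspector) oi).getList(o);
    } else if (c.equals(Category.MAP)) {
      // MAP types are handed to the UDF as a java.util.Map
      return ((MapObjectInspector) oi).getMap(o);
    }
    // primitives (and structs) pass through unchanged
    return o;
  }
}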
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java Tue Mar 24 23:44:50 2009
@@ -37,8 +37,8 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.ql.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.mapred.Reporter;
import org.apache.commons.logging.Log;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java Tue Mar 24 23:44:50 2009
@@ -23,8 +23,8 @@
import java.lang.Object;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
/**
* The input signature of a function or operator. The signature basically consists
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java Tue Mar 24 23:44:50 2009
@@ -30,11 +30,11 @@
import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeNullDesc;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.ql.udf.UDFOPAnd;
import org.apache.hadoop.hive.ql.udf.UDFOPNot;
import org.apache.hadoop.hive.ql.udf.UDFOPOr;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue Mar 24 23:44:50 2009
@@ -23,6 +23,9 @@
import java.io.Serializable;
import java.lang.reflect.Method;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
@@ -52,9 +55,6 @@
import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink1;
import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink2;
import org.apache.hadoop.hive.ql.plan.*;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
import org.apache.hadoop.hive.ql.exec.*;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -2208,16 +2208,20 @@
boolean converted = false;
int columnNumber = tableFields.size();
ArrayList<exprNodeDesc> expressions = new ArrayList<exprNodeDesc>(columnNumber);
- // MetadataTypedColumnsetSerDe/LazySimpleSerDe does not need type conversions because it does
+ // MetadataTypedColumnsetSerDe does not need type conversions because it does
// the conversion to String by itself.
- if (! table_desc.getDeserializerClass().equals(MetadataTypedColumnsetSerDe.class)
- && ! table_desc.getDeserializerClass().equals(LazySimpleSerDe.class)) {
+ boolean isMetaDataSerDe = table_desc.getDeserializerClass().equals(MetadataTypedColumnsetSerDe.class);
+ boolean isLazySimpleSerDe = table_desc.getDeserializerClass().equals(LazySimpleSerDe.class);
+ if (!isMetaDataSerDe) {
for (int i=0; i<columnNumber; i++) {
ObjectInspector tableFieldOI = tableFields.get(i).getFieldObjectInspector();
TypeInfo tableFieldTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(tableFieldOI);
TypeInfo rowFieldTypeInfo = rowFields.get(i).getType();
exprNodeDesc column = new exprNodeColumnDesc(rowFieldTypeInfo, Integer.valueOf(i).toString());
- if (! tableFieldTypeInfo.equals(rowFieldTypeInfo)) {
+ // LazySimpleSerDe can convert any types to String type using JSON-format.
+ if (!tableFieldTypeInfo.equals(rowFieldTypeInfo)
+ && !(isLazySimpleSerDe && tableFieldTypeInfo.getCategory().equals(Category.PRIMITIVE)
+ && tableFieldTypeInfo.getPrimitiveClass().equals(String.class))) {
// need to do some conversions here
converted = true;
if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Tue Mar 24 23:44:50 2009
@@ -38,8 +38,8 @@
import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeNullDesc;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Tue Mar 24 23:44:50 2009
@@ -29,12 +29,10 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
@@ -138,7 +136,21 @@
MetaStoreUtils.getDDLFromFieldSchema(structName, fieldSchemas)
));
}
-
+
+ /**
+ * Generate the table descriptor of LazySimpleSerDe.
+ */
+ public static tableDesc getLazySimpleSerDeTableDesc(List<FieldSchema> fieldSchemas) {
+ return new tableDesc(
+ LazySimpleSerDe.class,
+ SequenceFileInputFormat.class,
+ SequenceFileOutputFormat.class,
+ Utilities.makeProperties(
+ "columns", MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
+ "columns.types", MetaStoreUtils.getColumnTypesFromFieldSchema(fieldSchemas)
+ ));
+ }
+
/**
* Convert the ColumnList to FieldSchema list.
@@ -147,7 +159,7 @@
String fieldPrefix) {
List<FieldSchema> schemas = new ArrayList<FieldSchema>(cols.size());
for (int i=0; i<cols.size(); i++) {
- schemas.add(TypeInfoUtils.getFieldSchemaFromTypeInfo(fieldPrefix + i, cols.get(i).getTypeInfo()));
+ schemas.add(MetaStoreUtils.getFieldSchemaFromTypeInfo(fieldPrefix + i, cols.get(i).getTypeInfo()));
}
return schemas;
}
@@ -170,7 +182,7 @@
if (name.equals(Integer.valueOf(i).toString())) {
name = fieldPrefix + name;
}
- schemas.add(TypeInfoUtils.getFieldSchemaFromTypeInfo(name, cols.get(i).getType()));
+ schemas.add(MetaStoreUtils.getFieldSchemaFromTypeInfo(name, cols.get(i).getType()));
}
return schemas;
}
@@ -194,7 +206,8 @@
return new reduceSinkDesc(keyCols, valueCols, tag, partitionCols, numReducers,
getBinarySortableTableDesc(getFieldSchemasFromColumnList(keyCols, "reducesinkkey"), order),
- getBinaryTableDesc(getFieldSchemasFromColumnList(valueCols, "reducesinkvalue")));
+ // Revert to DynamicSerDe: getBinaryTableDesc(getFieldSchemasFromColumnList(valueCols, "reducesinkvalue")));
+ getLazySimpleSerDeTableDesc(getFieldSchemasFromColumnList(valueCols, "reducesinkvalue")));
}
/**
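
A sketch of the descriptor that the new getLazySimpleSerDeTableDesc helper above produces for a reduce-sink value schema; the column names follow the "reducesinkvalue" prefix used by getFieldSchemasFromColumnList. The FieldSchema import path and the tableDesc property accessor are assumptions here, not part of the patch:

import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.hive.metastore.api.FieldSchema;   // assumed import path
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.tableDesc;

public class ReduceValueDescSketch {
  public static void main(String[] args) {
    // Value columns of a reduce sink, as produced by getFieldSchemasFromColumnList.
    List<FieldSchema> valueCols = Arrays.asList(
        new FieldSchema("reducesinkvalue0", "string", null),
        new FieldSchema("reducesinkvalue1", "double", null));
    tableDesc td = PlanUtils.getLazySimpleSerDeTableDesc(valueCols);
    // Expected table properties, per the helper above:
    //   columns       = reducesinkvalue0,reducesinkvalue1
    //   columns.types = string,double
    Properties p = td.getProperties();   // accessor name assumed
    System.out.println(p);
  }
}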
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java Tue Mar 24 23:44:50 2009
@@ -22,8 +22,8 @@
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
public class exprNodeColumnDesc extends exprNodeDesc implements Serializable {
private static final long serialVersionUID = 1L;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java Tue Mar 24 23:44:50 2009
@@ -20,8 +20,8 @@
import java.io.Serializable;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
public class exprNodeConstantDesc extends exprNodeDesc implements Serializable {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java Tue Mar 24 23:44:50 2009
@@ -21,7 +21,7 @@
import java.io.Serializable;
import java.util.List;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
public class exprNodeDesc implements Serializable {
private static final long serialVersionUID = 1L;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java Tue Mar 24 23:44:50 2009
@@ -22,7 +22,7 @@
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.parse.RowResolver;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java Tue Mar 24 23:44:50 2009
@@ -23,7 +23,7 @@
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDF;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java Tue Mar 24 23:44:50 2009
@@ -22,7 +22,7 @@
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.parse.RowResolver;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java Tue Mar 24 23:44:50 2009
@@ -20,7 +20,7 @@
import java.io.Serializable;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
public class exprNodeNullDesc extends exprNodeDesc implements Serializable {
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Tue Mar 24 23:44:50 2009
@@ -38,8 +38,8 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
import org.apache.hadoop.hive.ql.plan.*;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java Tue Mar 24 23:44:50 2009
@@ -31,9 +31,9 @@
import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
public class TestExpressionEvaluator extends TestCase {
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Tue Mar 24 23:44:50 2009
@@ -31,7 +31,7 @@
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
import org.apache.hadoop.hive.ql.plan.*;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Tue Mar 24 23:44:50 2009
@@ -28,7 +28,7 @@
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
import org.apache.hadoop.hive.ql.plan.*;
-import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_lazyserde.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_lazyserde.q?rev=758089&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_lazyserde.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_lazyserde.q Tue Mar 24 23:44:50 2009
@@ -0,0 +1,18 @@
+CREATE TABLE dest1(a array<int>, b array<string>, c map<string,string>, d int, e string)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '1'
+COLLECTION ITEMS TERMINATED BY '2'
+MAP KEYS TERMINATED BY '3'
+LINES TERMINATED BY '10'
+STORED AS TEXTFILE;
+
+EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1;
+
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1;
+
+SELECT dest1.* FROM dest1 DISTRIBUTE BY 1;
+
+SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 DISTRIBUTE BY 1;
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out Tue Mar 24 23:44:50 2009
@@ -15,7 +15,7 @@
expr: lint
type: array<int>
expr: lintstring
- type: array<struct{myint:int,mystring:string}>
+ type: array<struct<myint:int,mystring:string>>
Filter Operator
predicate:
expr: (0[0] > 0)
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out Tue Mar 24 23:44:50 2009
@@ -42,7 +42,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/suresh/hive_external/build/ql/tmp/173094765/312984077.10001
+ /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/383864989/212664797.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -69,14 +69,20 @@
type: string
expr: 1
type: double
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest_g1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest_g1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out Tue Mar 24 23:44:50 2009
@@ -56,7 +56,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive1/hive/build/ql/tmp/746802126/1233171241.10001
+ /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/1330446638/19184728.10001
Reduce Output Operator
sort order:
tag: -1
@@ -68,14 +68,20 @@
Reduce Operator Tree:
Extract
Limit
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out Tue Mar 24 23:44:50 2009
@@ -43,14 +43,20 @@
type: string
expr: 1
type: double
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out Tue Mar 24 23:44:50 2009
@@ -43,14 +43,20 @@
type: string
expr: 1
type: double
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out Tue Mar 24 23:44:50 2009
@@ -49,7 +49,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive1/hive/build/ql/tmp/98848080/907077792.10001
+ /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/133273148/247233082.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -76,14 +76,20 @@
type: string
expr: 1
type: double
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out Tue Mar 24 23:44:50 2009
@@ -36,14 +36,20 @@
type: string
expr: 1
type: double
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest_g1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest_g1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out Tue Mar 24 23:44:50 2009
@@ -44,7 +44,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive/build/ql/tmp/458651873/137748425.10001
+ /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/118170734/653990469.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -76,14 +76,22 @@
type: bigint
expr: concat(0, UDFToString(2))
type: string
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest_g2
+ Select Operator
+ expressions:
+ expr: 0
+ type: string
+ expr: UDFToInteger(1)
+ type: int
+ expr: 2
+ type: string
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest_g2
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out Tue Mar 24 23:44:50 2009
@@ -53,14 +53,22 @@
type: bigint
expr: concat(0, UDFToString(2))
type: string
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: 0
+ type: string
+ expr: UDFToInteger(1)
+ type: int
+ expr: 2
+ type: string
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map_skew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map_skew.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map_skew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map_skew.q.out Tue Mar 24 23:44:50 2009
@@ -59,7 +59,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive/build/ql/tmp/76959905/5017405.10001
+ /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/1067456520/103403345.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -91,14 +91,22 @@
type: bigint
expr: concat(0, UDFToString(2))
type: string
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: 0
+ type: string
+ expr: UDFToInteger(1)
+ type: int
+ expr: 2
+ type: string
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_noskew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_noskew.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_noskew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_noskew.q.out Tue Mar 24 23:44:50 2009
@@ -38,14 +38,22 @@
type: bigint
expr: concat(0, UDFToString(2))
type: string
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest_g2
+ Select Operator
+ expressions:
+ expr: 0
+ type: string
+ expr: UDFToInteger(1)
+ type: int
+ expr: 2
+ type: string
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest_g2
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out Tue Mar 24 23:44:50 2009
@@ -44,7 +44,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive/build/ql/tmp/513475036/98479737.10001
+ /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/412878724/247817393.10001
Reduce Output Operator
sort order:
tag: -1
@@ -80,14 +80,26 @@
type: string
expr: 4
type: string
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: 0
+ type: double
+ expr: 1
+ type: double
+ expr: 2
+ type: double
+ expr: UDFToDouble(3)
+ type: double
+ expr: UDFToDouble(4)
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out Tue Mar 24 23:44:50 2009
@@ -63,14 +63,26 @@
type: string
expr: 4
type: string
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: 0
+ type: double
+ expr: 1
+ type: double
+ expr: 2
+ type: double
+ expr: UDFToDouble(3)
+ type: double
+ expr: UDFToDouble(4)
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map_skew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map_skew.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map_skew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map_skew.q.out Tue Mar 24 23:44:50 2009
@@ -66,7 +66,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive2/hive/build/ql/tmp/189942724/9179609.10001
+ /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/62935826/32361129.10001
Reduce Output Operator
sort order:
tag: -1
@@ -102,14 +102,26 @@
type: string
expr: 4
type: string
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: 0
+ type: double
+ expr: 1
+ type: double
+ expr: 2
+ type: double
+ expr: UDFToDouble(3)
+ type: double
+ expr: UDFToDouble(4)
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_noskew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_noskew.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_noskew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_noskew.q.out Tue Mar 24 23:44:50 2009
@@ -41,14 +41,26 @@
type: string
expr: 4
type: string
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: 0
+ type: double
+ expr: 1
+ type: double
+ expr: 2
+ type: double
+ expr: UDFToDouble(3)
+ type: double
+ expr: UDFToDouble(4)
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out Tue Mar 24 23:44:50 2009
@@ -30,14 +30,18 @@
expressions:
expr: 0
type: bigint
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map_skew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map_skew.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map_skew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map_skew.q.out Tue Mar 24 23:44:50 2009
@@ -30,14 +30,18 @@
expressions:
expr: 0
type: bigint
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out Tue Mar 24 23:44:50 2009
@@ -42,7 +42,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/suresh/hive_external/build/ql/tmp/1860802028/54191199.10001
+ /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/808701189/631585736.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -69,14 +69,20 @@
type: string
expr: 1
type: double
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out Tue Mar 24 23:44:50 2009
@@ -33,14 +33,18 @@
expressions:
expr: 0
type: double
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -53,4 +57,4 @@
name: dest1
-NULL
+130091
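The last two lines of this hunk show the expected query result changing from NULL to 130091. The likely reading (an inference from the plan change above, not stated in the commit itself): dest1 declares an int column, the reducer's aggregate is a double, and without the new UDFToInteger conversion the double's text form did not read back as an int through LazySimpleSerDe; with the conversion, a plain integer is written. A small sketch of the difference, assuming 130091.0 as the aggregate value:

    // Sketch of the assumed cause of the NULL -> 130091 change, not Hive code.
    public class Groupby5ResultSketch {
        public static void main(String[] args) {
            double aggregate = 130091.0;                       // reducer output (double)
            String oldCell = String.valueOf(aggregate);        // "130091.0": not a valid int token
            String newCell = String.valueOf((int) aggregate);  // "130091": parses as int on read
            System.out.println(oldCell + " -> previously read back as NULL");
            System.out.println(newCell + " -> now read back as 130091");
        }
    }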
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map_skew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map_skew.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map_skew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map_skew.q.out Tue Mar 24 23:44:50 2009
@@ -33,14 +33,18 @@
expressions:
expr: 0
type: double
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -53,4 +57,4 @@
name: dest1
-NULL
+130091
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_noskew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_noskew.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_noskew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_noskew.q.out Tue Mar 24 23:44:50 2009
@@ -36,14 +36,20 @@
type: string
expr: 1
type: double
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_map.q.out?rev=758089&r1=758088&r2=758089&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_map.q.out Tue Mar 24 23:44:50 2009
@@ -59,14 +59,20 @@
type: string
expr: 1
type: double
- File Output Operator
- compressed: true
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: true
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -87,7 +93,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/njain/hive1/hive/build/ql/tmp/272279570/1527190226.10002
+ /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/804020905/86268938.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -114,14 +120,20 @@
type: string
expr: 1
type: double
- File Output Operator
- compressed: true
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: dest2
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: true
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest2
0 0.0