hadoop-hive-commits mailing list archives

From: na...@apache.org
Subject: svn commit: r991415 - in /hadoop/hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/
Date: Wed, 01 Sep 2010 01:28:54 GMT
Author: namit
Date: Wed Sep  1 01:28:54 2010
New Revision: 991415

URL: http://svn.apache.org/viewvc?rev=991415&view=rev
Log:
HIVE-1598. Create an option to specify the format of
intermediate results (Ning Zhang via namit)

M    CHANGES.txt
M    common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
A    ql/src/test/results/clientpositive/query_result_fileformat.q.out
A    ql/src/test/queries/clientpositive/query_result_fileformat.q
M    ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
M    ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
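
The new knob is hive.query.result.fileformat (see the HiveConf.java hunk below); it controls the file format of the transient files that a query's fetch task reads results from, which until now was hardcoded to ctrl-A separated text. A minimal Java sketch of toggling and reading it, using only what the diffs below introduce (the no-arg HiveConf constructor is assumed here for brevity):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class ResultFormatSketch {
        public static void main(String[] args) {
            HiveConf conf = new HiveConf(); // assumed no-arg constructor
            // Property name as added in HiveConf.java below; default is "TextFile".
            conf.set("hive.query.result.fileformat", "SequenceFile");
            // Reads back "SequenceFile"; left unset, it would be "TextFile".
            System.out.println(HiveConf.getVar(
                conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT));
        }
    }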

Added:
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/query_result_fileformat.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/query_result_fileformat.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=991415&r1=991414&r2=991415&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Sep  1 01:28:54 2010
@@ -68,6 +68,9 @@ Trunk -  Unreleased
     HIVE-471. Add a UDF for simple reflection
     (Edward Capriolo via namit)
 
+    HIVE-1598. Create an option to specify the format of
+    intermediate results (Ning Zhang via namit)
+
   IMPROVEMENTS
 
     HIVE-1394. Do not update transient_lastDdlTime if the partition is modified by a housekeeping
@@ -228,7 +231,7 @@ Trunk -  Unreleased
     HIVE-1605. Regression and improvements in handling NULLs in joins
     (Ning Zhang via namit)
 
-    HIVE-1607. Reinstate and deprecate IMetaStoreClient methods removed 
+    HIVE-1607. Reinstate and deprecate IMetaStoreClient methods removed
     in HIVE-675
     (Carl Steinbach via Ning Zhang)
 

Modified: hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=991415&r1=991414&r2=991415&view=diff
==============================================================================
--- hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed Sep  1 01:28:54 2010
@@ -205,6 +205,7 @@ public class HiveConf extends Configurat
     // Default file format for CREATE TABLE statement
     // Options: TextFile, SequenceFile
     HIVEDEFAULTFILEFORMAT("hive.default.fileformat", "TextFile"),
+    HIVEQUERYRESULTFILEFORMAT("hive.query.result.fileformat", "TextFile"),
     HIVECHECKFILEFORMAT("hive.fileformat.check", true),
 
     //Location of Hive run time structured log file
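
Note the contrast with the neighboring HIVEDEFAULTFILEFORMAT: hive.default.fileformat chooses the storage format of newly created tables, while the new hive.query.result.fileformat chooses the format of the temporary result files behind a fetch. A sketch reading both (the wrapper method is hypothetical; the getVar calls mirror the ones this commit adds in SemanticAnalyzer.java):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;

    class FormatConfSketch {
        // Hypothetical helper: report both format settings for a given conf.
        static String describeFormats(Configuration conf) {
            String tables  = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT);
            String results = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT);
            // Both entries default to "TextFile".
            return "new tables: " + tables + ", query results: " + results;
        }
    }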

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=991415&r1=991414&r2=991415&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Sep  1 01:28:54 2010
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import static org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS;
-import static org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES;
-import static org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT;
 import static org.apache.hadoop.util.StringUtils.stringifyException;
 
 import java.io.IOException;
@@ -32,9 +29,9 @@ import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeSet;
-import java.util.Map.Entry;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 
@@ -74,7 +71,6 @@ import org.apache.hadoop.hive.ql.hooks.R
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -93,6 +89,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.optimizer.GenMRFileSink1;
 import org.apache.hadoop.hive.ql.optimizer.GenMROperator;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext;
+import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink1;
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink2;
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink3;
@@ -102,7 +99,6 @@ import org.apache.hadoop.hive.ql.optimiz
 import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
 import org.apache.hadoop.hive.ql.optimizer.MapJoinFactory;
 import org.apache.hadoop.hive.ql.optimizer.Optimizer;
-import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalContext;
 import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalOptimizer;
 import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
@@ -122,6 +118,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
 import org.apache.hadoop.hive.ql.plan.ForwardDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
@@ -143,12 +140,11 @@ import org.apache.hadoop.hive.ql.plan.Ta
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.ql.plan.UDTFDesc;
 import org.apache.hadoop.hive.ql.plan.UnionDesc;
-import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
@@ -156,14 +152,13 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.mapred.InputFormat;
-import org.apache.hadoop.mapred.TextInputFormat;
 
 /**
  * Implementation of the semantic analyzer.
@@ -3478,8 +3473,13 @@ public class SemanticAnalyzer extends Ba
           colTypes));
 
       if (tblDesc == null) {
-        table_desc = PlanUtils.getDefaultTableDesc(Integer
-            .toString(Utilities.ctrlaCode), cols, colTypes, false);
+        if (qb.getIsQuery()) {
+          String fileFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT);
+          table_desc = PlanUtils.getDefaultQueryOutputTableDesc(cols, colTypes, fileFormat);
+        } else {
+          table_desc = PlanUtils.getDefaultTableDesc(Integer
+              .toString(Utilities.ctrlaCode), cols, colTypes, false);
+        }
       } else {
         table_desc = PlanUtils.getTableDesc(tblDesc, cols, colTypes);
       }
@@ -5891,13 +5891,11 @@ public class SemanticAnalyzer extends Ba
       String cols = loadFileWork.get(0).getColumns();
       String colTypes = loadFileWork.get(0).getColumnTypes();
 
+      String resFileFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT);
+      TableDesc resultTab = PlanUtils.getDefaultQueryOutputTableDesc(cols, colTypes, resFileFormat);
+
       fetch = new FetchWork(new Path(loadFileWork.get(0).getSourceDir()).toString(),
-          new TableDesc(LazySimpleSerDe.class,
-          TextInputFormat.class, IgnoreKeyTextOutputFormat.class, Utilities
-          .makeProperties(SERIALIZATION_FORMAT, "" + Utilities.ctrlaCode,
-          LIST_COLUMNS, cols,
-          LIST_COLUMN_TYPES, colTypes)),
-          qb.getParseInfo().getOuterQueryLimit());
+          resultTab, qb.getParseInfo().getOuterQueryLimit());
 
       fetchTask = (FetchTask) TaskFactory.get(fetch, conf);
       setFetchTask(fetchTask);
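
Both SemanticAnalyzer hunks funnel into the new PlanUtils helper, and with the default TextFile value the descriptor it builds matches the inline LazySimpleSerDe/TextInputFormat/IgnoreKeyTextOutputFormat construction that was deleted, so default behavior should be unchanged. Condensed into one hypothetical helper (a restatement of the hunks above, not the literal code):

    // QB, PlanUtils, TableDesc and Utilities as imported by SemanticAnalyzer.
    static TableDesc resultTableDesc(HiveConf conf, QB qb, String cols, String colTypes) {
        if (qb.getIsQuery()) {
            // Top-level SELECT: result files honor hive.query.result.fileformat.
            String fmt = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT);
            return PlanUtils.getDefaultQueryOutputTableDesc(cols, colTypes, fmt);
        }
        // Other destinations keep the historical ctrl-A separated text format.
        return PlanUtils.getDefaultTableDesc(
            Integer.toString(Utilities.ctrlaCode), cols, colTypes, false);
    }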

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=991415&r1=991414&r2=991415&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Wed Sep  1 01:28:54 2010
@@ -29,6 +29,7 @@ import java.util.Properties;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
@@ -37,23 +38,25 @@ import org.apache.hadoop.hive.ql.exec.Ro
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
+import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
+import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.hive.conf.HiveConf;
 
 /**
  * PlanUtils.
@@ -150,6 +153,15 @@ public final class PlanUtils {
       String columns, String columnTypes, boolean lastColumnTakesRestOfTheLine,
       boolean useJSONForLazy) {
 
+    return getTableDesc(serdeClass, separatorCode, columns, columnTypes,
+        lastColumnTakesRestOfTheLine, useJSONForLazy, "TextFile");
+  }
+
+  public static TableDesc getTableDesc(
+      Class<? extends Deserializer> serdeClass, String separatorCode,
+      String columns, String columnTypes, boolean lastColumnTakesRestOfTheLine,
+      boolean useJSONForLazy, String fileFormat) {
+
     Properties properties = Utilities.makeProperties(
         Constants.SERIALIZATION_FORMAT, separatorCode, Constants.LIST_COLUMNS,
         columns);
@@ -176,11 +188,29 @@ public final class PlanUtils {
       properties.setProperty(Constants.SERIALIZATION_USE_JSON_OBJECTS, "true");
     }
 
-    return new TableDesc(serdeClass, TextInputFormat.class,
-        IgnoreKeyTextOutputFormat.class, properties);
+    Class inputFormat, outputFormat;
+    // get the input & output file formats
+    if ("SequenceFile".equalsIgnoreCase(fileFormat)) {
+      inputFormat = SequenceFileInputFormat.class;
+      outputFormat = SequenceFileOutputFormat.class;
+    } else if ("RCFile".equalsIgnoreCase(fileFormat)) {
+      inputFormat = RCFileInputFormat.class;
+      outputFormat = RCFileOutputFormat.class;
+      assert serdeClass == ColumnarSerDe.class;
+    } else { // use TextFile by default
+      inputFormat = TextInputFormat.class;
+      outputFormat = IgnoreKeyTextOutputFormat.class;
+    }
+    return new TableDesc(serdeClass, inputFormat, outputFormat, properties);
+  }
+
+  public static TableDesc getDefaultQueryOutputTableDesc(String cols, String colTypes,
+      String fileFormat) {
+    return getTableDesc(LazySimpleSerDe.class, "" + Utilities.ctrlaCode, cols, colTypes,
+        false, false, fileFormat);
   }
 
-  /**
+ /**
    * Generate a table descriptor from a createTableDesc.
    */
   public static TableDesc getTableDesc(CreateTableDesc crtTblDesc, String cols,
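
The fileFormat string is matched case-insensitively, and anything unrecognized falls back to TextFile; getDefaultQueryOutputTableDesc fixes the serde to LazySimpleSerDe with ctrl-A separators and forwards the format. Since the RCFile branch asserts a ColumnarSerDe, the query-result path realistically supports TextFile and SequenceFile. A usage sketch of the new helper (the column strings are made-up placeholders):

    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    class PlanUtilsSketch {
        static void demo() {
            String cols = "key,value";          // placeholder column names
            String colTypes = "string:string";  // placeholder column types
            // "SequenceFile" -> SequenceFileInputFormat / SequenceFileOutputFormat
            // "RCFile"       -> RCFileInputFormat / RCFileOutputFormat (asserts ColumnarSerDe)
            // anything else  -> TextInputFormat / IgnoreKeyTextOutputFormat (TextFile default)
            TableDesc textTab = PlanUtils.getDefaultQueryOutputTableDesc(cols, colTypes, "TextFile");
            TableDesc seqTab  = PlanUtils.getDefaultQueryOutputTableDesc(cols, colTypes, "SequenceFile");
        }
    }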

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/query_result_fileformat.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/query_result_fileformat.q?rev=991415&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/query_result_fileformat.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/query_result_fileformat.q Wed Sep  1 01:28:54 2010
@@ -0,0 +1,23 @@
+create table nzhang_test1 stored as sequencefile as select 'key1' as key, 'value
+1
+
+http://asdf' value from src limit 1;
+
+select * from nzhang_test1;
+select count(*) from nzhang_test1;
+
+explain
+select * from nzhang_test1 where key='key1';
+
+select * from nzhang_test1 where key='key1';
+
+set hive.query.result.fileformat=SequenceFile;
+
+select * from nzhang_test1;
+
+select count(*) from nzhang_test1;
+
+explain
+select * from nzhang_test1 where key='key1';
+
+select * from nzhang_test1 where key='key1';

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/query_result_fileformat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/query_result_fileformat.q.out?rev=991415&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/query_result_fileformat.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/query_result_fileformat.q.out Wed Sep  1 01:28:54 2010
@@ -0,0 +1,173 @@
+PREHOOK: query: create table nzhang_test1 stored as sequencefile as select 'key1' as key, 'value
+1
+
+http://asdf' value from src limit 1
+PREHOOK: type: CREATETABLE
+PREHOOK: Input: default@src
+POSTHOOK: query: create table nzhang_test1 stored as sequencefile as select 'key1' as key, 'value
+1
+
+http://asdf' value from src limit 1
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@nzhang_test1
+PREHOOK: query: select * from nzhang_test1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@nzhang_test1
+PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-37-49_137_345185714437305649/-mr-10000
+POSTHOOK: query: select * from nzhang_test1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@nzhang_test1
+POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-37-49_137_345185714437305649/-mr-10000
+key1	value
+1
+
+http://asdf
+PREHOOK: query: select count(*) from nzhang_test1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@nzhang_test1
+PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-37-49_893_2470605464847588988/-mr-10000
+POSTHOOK: query: select count(*) from nzhang_test1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@nzhang_test1
+POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-37-49_893_2470605464847588988/-mr-10000
+1
+PREHOOK: query: explain
+select * from nzhang_test1 where key='key1'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from nzhang_test1 where key='key1'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF nzhang_test1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 'key1'))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        nzhang_test1 
+          TableScan
+            alias: nzhang_test1
+            Filter Operator
+              predicate:
+                  expr: (key = 'key1')
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (key = 'key1')
+                    type: boolean
+                Select Operator
+                  expressions:
+                        expr: key
+                        type: string
+                        expr: value
+                        type: string
+                  outputColumnNames: _col0, _col1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select * from nzhang_test1 where key='key1'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@nzhang_test1
+PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-37-56_447_1539901914223140072/-mr-10000
+POSTHOOK: query: select * from nzhang_test1 where key='key1'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@nzhang_test1
+POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-37-56_447_1539901914223140072/-mr-10000
+key1	value
+1	NULL
+	NULL
+http://asdf	NULL
+PREHOOK: query: select * from nzhang_test1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@nzhang_test1
+PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-38-00_606_2534525216891512327/-mr-10000
+POSTHOOK: query: select * from nzhang_test1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@nzhang_test1
+POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-38-00_606_2534525216891512327/-mr-10000
+key1	value
+1
+
+http://asdf
+PREHOOK: query: select count(*) from nzhang_test1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@nzhang_test1
+PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-38-02_968_2091791272244763520/-mr-10000
+POSTHOOK: query: select count(*) from nzhang_test1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@nzhang_test1
+POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-38-02_968_2091791272244763520/-mr-10000
+1
+PREHOOK: query: explain
+select * from nzhang_test1 where key='key1'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from nzhang_test1 where key='key1'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF nzhang_test1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 'key1'))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        nzhang_test1 
+          TableScan
+            alias: nzhang_test1
+            Filter Operator
+              predicate:
+                  expr: (key = 'key1')
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (key = 'key1')
+                    type: boolean
+                Select Operator
+                  expressions:
+                        expr: key
+                        type: string
+                        expr: value
+                        type: string
+                  outputColumnNames: _col0, _col1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select * from nzhang_test1 where key='key1'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@nzhang_test1
+PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-38-09_247_8932362895617955403/-mr-10000
+POSTHOOK: query: select * from nzhang_test1 where key='key1'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@nzhang_test1
+POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-30_23-38-09_247_8932362895617955403/-mr-10000
+key1	value
+1
+
+http://asdf


