hadoop-hive-commits mailing list archives

From: rmur...@apache.org
Subject: svn commit: r790652 [5/5] - in /hadoop/hive/trunk: ./ jdbc/src/java/org/apache/hadoop/hive/jdbc/ jdbc/src/test/org/apache/hadoop/hive/jdbc/ metastore/if/ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen-php/ metastore/...
Date: Thu, 02 Jul 2009 16:26:45 GMT
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf_when.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf_when.q.xml?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf_when.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf_when.q.xml Thu Jul  2 16:26:44 2009
@@ -64,7 +64,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/src</string>

+            <string>file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src</string>

            </void> 
           </object> 
          </void> 
@@ -96,7 +96,7 @@
                     <void property="conf"> 
                      <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                       <void property="dirName"> 
-                       <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/792684261/10001</string>

+                       <string>file:/data/users/njain/hive1/hive1/build/ql/tmp/1177328759/10001</string>

                       </void> 
                       <void property="tableInfo"> 
                        <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -119,6 +119,10 @@
                            <string>serialization.format</string> 
                            <string>1</string> 
                           </void> 
+                          <void method="put"> 
+                           <string>columns.types</string> 
+                           <string>int:int</string> 
+                          </void> 
                          </object> 
                         </void> 
                        </object> 
@@ -144,7 +148,7 @@
                           <void property="type"> 
                            <object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">

                             <void property="typeName"> 
-                             <string>string</string> 
+                             <string>int</string> 
                             </void> 
                            </object> 
                           </void> 
@@ -192,11 +196,7 @@
                        <string>_col0</string> 
                       </void> 
                       <void property="type"> 
-                       <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">

-                        <void property="typeName"> 
-                         <string>int</string> 
-                        </void> 
-                       </object> 
+                       <object idref="PrimitiveTypeInfo0"/> 
                       </void> 
                      </object> 
                     </void> 
@@ -206,7 +206,7 @@
                        <string>_col1</string> 
                       </void> 
                       <void property="type"> 
-                       <object idref="PrimitiveTypeInfo1"/> 
+                       <object idref="PrimitiveTypeInfo0"/> 
                       </void> 
                      </object> 
                     </void> 
@@ -248,7 +248,7 @@
                       <void method="add"> 
                        <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                         <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo1"/> 
+                         <object idref="PrimitiveTypeInfo0"/> 
                         </void> 
                         <void property="value"> 
                          <int>12</int> 
@@ -258,7 +258,7 @@
                       <void method="add"> 
                        <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                         <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo1"/> 
+                         <object idref="PrimitiveTypeInfo0"/> 
                         </void> 
                         <void property="value"> 
                          <int>11</int> 
@@ -268,7 +268,7 @@
                      </object> 
                     </void> 
                     <void property="typeInfo"> 
-                     <object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">

+                     <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">

                       <void property="typeName"> 
                        <string>boolean</string> 
                       </void> 
@@ -279,7 +279,7 @@
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                     <void property="typeInfo"> 
-                     <object idref="PrimitiveTypeInfo1"/> 
+                     <object idref="PrimitiveTypeInfo0"/> 
                     </void> 
                     <void property="value"> 
                      <int>13</int> 
@@ -309,7 +309,7 @@
                       <void method="add"> 
                        <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                         <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo1"/> 
+                         <object idref="PrimitiveTypeInfo0"/> 
                         </void> 
                         <void property="value"> 
                          <int>14</int> 
@@ -319,7 +319,7 @@
                       <void method="add"> 
                        <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                         <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo1"/> 
+                         <object idref="PrimitiveTypeInfo0"/> 
                         </void> 
                         <void property="value"> 
                          <int>10</int> 
@@ -329,14 +329,14 @@
                      </object> 
                     </void> 
                     <void property="typeInfo"> 
-                     <object idref="PrimitiveTypeInfo2"/> 
+                     <object idref="PrimitiveTypeInfo1"/> 
                     </void> 
                    </object> 
                   </void> 
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                     <void property="typeInfo"> 
-                     <object idref="PrimitiveTypeInfo1"/> 
+                     <object idref="PrimitiveTypeInfo0"/> 
                     </void> 
                     <void property="value"> 
                      <int>15</int> 
@@ -349,7 +349,7 @@
                  <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen</class>

                 </void> 
                 <void property="typeInfo"> 
-                 <object idref="PrimitiveTypeInfo1"/> 
+                 <object idref="PrimitiveTypeInfo0"/> 
                 </void> 
                </object> 
               </void> 
@@ -381,7 +381,7 @@
                       <void method="add"> 
                        <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                         <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo1"/> 
+                         <object idref="PrimitiveTypeInfo0"/> 
                         </void> 
                         <void property="value"> 
                          <int>1</int> 
@@ -391,7 +391,7 @@
                       <void method="add"> 
                        <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                         <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo1"/> 
+                         <object idref="PrimitiveTypeInfo0"/> 
                         </void> 
                         <void property="value"> 
                          <int>1</int> 
@@ -401,14 +401,14 @@
                      </object> 
                     </void> 
                     <void property="typeInfo"> 
-                     <object idref="PrimitiveTypeInfo2"/> 
+                     <object idref="PrimitiveTypeInfo1"/> 
                     </void> 
                    </object> 
                   </void> 
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                     <void property="typeInfo"> 
-                     <object idref="PrimitiveTypeInfo1"/> 
+                     <object idref="PrimitiveTypeInfo0"/> 
                     </void> 
                     <void property="value"> 
                      <int>2</int> 
@@ -438,7 +438,7 @@
                       <void method="add"> 
                        <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                         <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo1"/> 
+                         <object idref="PrimitiveTypeInfo0"/> 
                         </void> 
                         <void property="value"> 
                          <int>3</int> 
@@ -448,7 +448,7 @@
                       <void method="add"> 
                        <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                         <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo1"/> 
+                         <object idref="PrimitiveTypeInfo0"/> 
                         </void> 
                         <void property="value"> 
                          <int>5</int> 
@@ -458,14 +458,14 @@
                      </object> 
                     </void> 
                     <void property="typeInfo"> 
-                     <object idref="PrimitiveTypeInfo2"/> 
+                     <object idref="PrimitiveTypeInfo1"/> 
                     </void> 
                    </object> 
                   </void> 
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                     <void property="typeInfo"> 
-                     <object idref="PrimitiveTypeInfo1"/> 
+                     <object idref="PrimitiveTypeInfo0"/> 
                     </void> 
                     <void property="value"> 
                      <int>4</int> 
@@ -475,7 +475,7 @@
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc">

                     <void property="typeInfo"> 
-                     <object idref="PrimitiveTypeInfo1"/> 
+                     <object idref="PrimitiveTypeInfo0"/> 
                     </void> 
                     <void property="value"> 
                      <int>5</int> 
@@ -488,7 +488,7 @@
                  <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen</class>

                 </void> 
                 <void property="typeInfo"> 
-                 <object idref="PrimitiveTypeInfo1"/> 
+                 <object idref="PrimitiveTypeInfo0"/> 
                 </void> 
                </object> 
               </void> 
@@ -546,7 +546,11 @@
                <string>key</string> 
               </void> 
               <void property="type"> 
-               <object idref="PrimitiveTypeInfo0"/> 
+               <object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">

+                <void property="typeName"> 
+                 <string>string</string> 
+                </void> 
+               </object> 
               </void> 
              </object> 
             </void> 
@@ -556,7 +560,7 @@
                <string>value</string> 
               </void> 
               <void property="type"> 
-               <object idref="PrimitiveTypeInfo0"/> 
+               <object idref="PrimitiveTypeInfo2"/> 
               </void> 
              </object> 
             </void> 
@@ -571,7 +575,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/src</string>

+       <string>file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src</string>

        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>src</string> 
@@ -583,7 +587,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/src</string>

+       <string>file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src</string>

        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml Thu Jul  2 16:26:44 2009
@@ -23,6 +23,9 @@
            <object class="org.apache.hadoop.hive.ql.plan.moveWork"> 
             <void property="loadFileWork"> 
              <object class="org.apache.hadoop.hive.ql.plan.loadFileDesc"> 
+              <void property="columnTypes"> 
+               <string>string:string</string> 
+              </void> 
               <void property="columns"> 
                <string>unioninput.key,unioninput.value</string> 
               </void> 
@@ -30,7 +33,7 @@
                <boolean>true</boolean> 
               </void> 
               <void property="sourceDir"> 
-               <string>file:/data/users/pchakka/workspace/oshive/build/ql/tmp/907054306/10000</string>

+               <string>file:/data/users/njain/hive1/hive1/build/ql/tmp/1796533964/10000</string>

               </void> 
               <void property="targetDir"> 
                <string>../build/ql/test/data/warehouse/union.out</string> 
@@ -61,10 +64,10 @@
                <boolean>true</boolean> 
               </void> 
               <void property="sourceDir"> 
-               <string>file:/data/users/pchakka/workspace/oshive/build/ql/tmp/602498110/10001</string>

+               <string>file:/data/users/njain/hive1/hive1/build/ql/tmp/665057992/10001</string>

               </void> 
               <void property="targetDir"> 
-               <string>file:/data/users/pchakka/workspace/oshive/build/ql/tmp/907054306/10000</string>

+               <string>file:/data/users/njain/hive1/hive1/build/ql/tmp/1796533964/10000</string>

               </void> 
              </object> 
             </void> 
@@ -82,7 +85,7 @@
             <void property="aliasToWork"> 
              <object class="java.util.LinkedHashMap"> 
               <void method="put"> 
-               <string>file:/data/users/pchakka/workspace/oshive/build/ql/tmp/602498110/10001</string>

+               <string>file:/data/users/njain/hive1/hive1/build/ql/tmp/665057992/10001</string>

                <object id="TableScanOperator0" class="org.apache.hadoop.hive.ql.exec.TableScanOperator">

                 <void property="childOperators"> 
                  <object class="java.util.ArrayList"> 
@@ -282,10 +285,10 @@
             <void property="pathToAliases"> 
              <object class="java.util.LinkedHashMap"> 
               <void method="put"> 
-               <string>file:/data/users/pchakka/workspace/oshive/build/ql/tmp/602498110/10001</string>

+               <string>file:/data/users/njain/hive1/hive1/build/ql/tmp/665057992/10001</string>

                <object class="java.util.ArrayList"> 
                 <void method="add"> 
-                 <string>file:/data/users/pchakka/workspace/oshive/build/ql/tmp/602498110/10001</string>

+                 <string>file:/data/users/njain/hive1/hive1/build/ql/tmp/665057992/10001</string>

                 </void> 
                </object> 
               </void> 
@@ -294,7 +297,7 @@
             <void property="pathToPartitionInfo"> 
              <object class="java.util.LinkedHashMap"> 
               <void method="put"> 
-               <string>file:/data/users/pchakka/workspace/oshive/build/ql/tmp/602498110/10001</string>

+               <string>file:/data/users/njain/hive1/hive1/build/ql/tmp/665057992/10001</string>

                <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
                 <void property="tableDesc"> 
                  <object id="tableDesc2" class="org.apache.hadoop.hive.ql.plan.tableDesc">

@@ -317,6 +320,10 @@
                      <string>serialization.format</string> 
                      <string>1</string> 
                     </void> 
+                    <void method="put"> 
+                     <string>columns.types</string> 
+                     <string>string:string</string> 
+                    </void> 
                    </object> 
                   </void> 
                  </object> 
@@ -334,7 +341,7 @@
                   <void property="conf"> 
                    <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                     <void property="dirName"> 
-                     <string>file:/data/users/pchakka/workspace/oshive/build/ql/tmp/907054306/10000</string>

+                     <string>file:/data/users/njain/hive1/hive1/build/ql/tmp/1796533964/10000</string>

                     </void> 
                     <void property="tableInfo"> 
                      <object idref="tableDesc2"/> 
@@ -503,7 +510,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/src</string>

+            <string>file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src</string>

            </void> 
           </object> 
          </void> 
@@ -571,7 +578,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/src</string>

+            <string>file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src</string>

            </void> 
           </object> 
          </void> 
@@ -618,7 +625,7 @@
                                    <int>1</int> 
                                   </void> 
                                   <void property="dirName"> 
-                                   <string>file:/data/users/pchakka/workspace/oshive/build/ql/tmp/602498110/10001</string>

+                                   <string>file:/data/users/njain/hive1/hive1/build/ql/tmp/665057992/10001</string>

                                   </void> 
                                   <void property="tableInfo"> 
                                    <object idref="tableDesc2"/> 
@@ -1458,7 +1465,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/src</string>

+       <string>file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src</string>

        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>null-subquery1:unioninput-subquery1:src</string> 
@@ -1473,7 +1480,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/src</string>

+       <string>file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src</string>

        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 

Modified: hadoop/hive/trunk/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java (original)
+++ hadoop/hive/trunk/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java Thu Jul  2 16:26:44 2009
@@ -70,6 +70,10 @@
   public static final String MAP_TYPE_NAME = "map";
 
   public static final String STRUCT_TYPE_NAME = "struct";
+  
+  public static final String LIST_COLUMNS = "columns";
+  
+  public static final String LIST_COLUMN_TYPES = "columns.types";
 
   public static final Set<String> PrimitiveTypes = new HashSet<String>();
   static {
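
The two new constants give callers a single, shared name for the table properties that carry a table's column list ("columns") and column types ("columns.types"), instead of hard-coding those strings at each call site. A minimal sketch of populating them when initializing a SerDe follows; the property values and the choice of LazySimpleSerDe are illustrative only, not part of this commit:

    // Sketch only: configure a SerDe through the new constants rather than raw strings.
    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.Constants;
    import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;

    public class ColumnPropsSketch {
      public static void main(String[] args) throws Exception {
        Properties tbl = new Properties();
        tbl.setProperty(Constants.LIST_COLUMNS, "key,value");        // same key as "columns"
        tbl.setProperty(Constants.LIST_COLUMN_TYPES, "int:string");  // same key as "columns.types"
        LazySimpleSerDe serde = new LazySimpleSerDe();
        serde.initialize(new Configuration(), tbl);                  // reads both properties (see the LazySimpleSerDe diff below)
      }
    }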

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java Thu Jul  2 16:26:44 2009
@@ -67,6 +67,7 @@
   final public static byte[] DefaultSeparators = {(byte)1, (byte)2, (byte)3};
 
   private ObjectInspector cachedObjectInspector;
+  private boolean         useJSONSerialize; // use json to serialize
 
   public String toString() {
     return getClass().toString()
@@ -212,9 +213,9 @@
         .equalsIgnoreCase("true"));
 
     // Read the configuration parameters
-    String columnNameProperty = tbl.getProperty("columns");
+    String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
     // NOTE: if "columns.types" is missing, all columns will be of String type
-    String columnTypeProperty = tbl.getProperty("columns.types");
+    String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
 
     // Parse the configuration parameters
 
@@ -374,7 +375,7 @@
         if (!foi.getCategory().equals(Category.PRIMITIVE)
             && (declaredFields == null || 
                 declaredFields.get(i).getFieldObjectInspector().getCategory()
-                .equals(Category.PRIMITIVE))) {
+                .equals(Category.PRIMITIVE) || useJSONSerialize)) {
           serialize(serializeStream, SerDeUtils.getJSONString(f, foi),
               PrimitiveObjectInspectorFactory.javaStringObjectInspector,
               serdeParams.separators, 1, serdeParams.nullSequence,
@@ -493,4 +494,18 @@
     throw new RuntimeException("Unknown category type: "
         + objInspector.getCategory());
   }
+
+  /**
+   * @return the useJSONSerialize
+   */
+  public boolean isUseJSONSerialize() {
+    return useJSONSerialize;
+  }
+
+  /**
+   * @param useJSONSerialize the useJSONSerialize to set
+   */
+  public void setUseJSONSerialize(boolean useJSONSerialize) {
+    this.useJSONSerialize = useJSONSerialize;
+  }
 }
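
The new useJSONSerialize flag widens the condition under which LazySimpleSerDe routes a field through SerDeUtils.getJSONString(): when set, complex (non-primitive) fields are serialized as JSON strings even if the corresponding declared field type is itself complex. A minimal sketch of toggling it, reusing the tbl Properties from the previous sketch (illustrative only, not part of this commit):

    // Illustrative fragment; tbl carries LIST_COLUMNS / LIST_COLUMN_TYPES as above,
    // and SerDeException handling is omitted for brevity.
    LazySimpleSerDe serde = new LazySimpleSerDe();
    serde.initialize(new Configuration(), tbl);
    serde.setUseJSONSerialize(true);   // complex fields now go through the JSON path
    assert serde.isUseJSONSerialize();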

Modified: hadoop/hive/trunk/service/if/hive_service.thrift
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/if/hive_service.thrift?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/service/if/hive_service.thrift (original)
+++ hadoop/hive/trunk/service/if/hive_service.thrift Thu Jul  2 16:26:44 2009
@@ -50,6 +50,6 @@
   list<string> fetchAll() throws(1:HiveServerException ex)
 
   # Get the Thrift DDL string of the query result
-  string getSchema() throws(1:HiveServerException ex)
+  hive_metastore.Schema getSchema() throws(1:HiveServerException ex)
 
 }
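
With this IDL change getSchema() returns the hive_metastore.Schema struct instead of a flat Thrift-DDL string, and all of the generated bindings below are regenerated to match. A minimal sketch of consuming the new return type through the generated Java interface; the helper method and the already-connected client are assumptions for illustration, not part of this commit:

    // Hypothetical helper: print the result schema of the last executed query.
    // Assumes a connected client implementing ThriftHive.Iface; TException is
    // com.facebook.thrift.TException, per the imports used elsewhere in this commit.
    static void printResultSchema(ThriftHive.Iface client) throws HiveServerException, TException {
      org.apache.hadoop.hive.metastore.api.Schema schema = client.getSchema();
      for (org.apache.hadoop.hive.metastore.api.FieldSchema fs : schema.getFieldSchemas()) {
        System.out.println(fs.getName() + "\t" + fs.getType());
      }
    }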

Modified: hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java (original)
+++ hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java Thu Jul  2 16:26:44 2009
@@ -28,7 +28,7 @@
 
     public List<String> fetchAll() throws HiveServerException, TException;
 
-    public String getSchema() throws HiveServerException, TException;
+    public org.apache.hadoop.hive.metastore.api.Schema getSchema() throws HiveServerException, TException;
 
   }
 
@@ -182,7 +182,7 @@
       throw new TApplicationException(TApplicationException.MISSING_RESULT, "fetchAll failed: unknown result");
     }
 
-    public String getSchema() throws HiveServerException, TException
+    public org.apache.hadoop.hive.metastore.api.Schema getSchema() throws HiveServerException, TException
     {
       send_getSchema();
       return recv_getSchema();
@@ -197,7 +197,7 @@
       oprot_.getTransport().flush();
     }
 
-    public String recv_getSchema() throws HiveServerException, TException
+    public org.apache.hadoop.hive.metastore.api.Schema recv_getSchema() throws HiveServerException, TException
     {
       TMessage msg = iprot_.readMessageBegin();
       if (msg.type == TMessageType.EXCEPTION) {
@@ -1442,7 +1442,7 @@
   }
 
   public static class getSchema_result implements TBase, java.io.Serializable   {
-    private String success;
+    private org.apache.hadoop.hive.metastore.api.Schema success;
     private HiveServerException ex;
 
     public final Isset __isset = new Isset();
@@ -1455,7 +1455,7 @@
     }
 
     public getSchema_result(
-      String success,
+      org.apache.hadoop.hive.metastore.api.Schema success,
       HiveServerException ex)
     {
       this();
@@ -1465,16 +1465,17 @@
       this.__isset.ex = true;
     }
 
-    public String getSuccess() {
+    public org.apache.hadoop.hive.metastore.api.Schema getSuccess() {
       return this.success;
     }
 
-    public void setSuccess(String success) {
+    public void setSuccess(org.apache.hadoop.hive.metastore.api.Schema success) {
       this.success = success;
       this.__isset.success = true;
     }
 
     public void unsetSuccess() {
+      this.success = null;
       this.__isset.success = false;
     }
 
@@ -1541,8 +1542,9 @@
         switch (field.id)
         {
           case 0:
-            if (field.type == TType.STRING) {
-              this.success = iprot.readString();
+            if (field.type == TType.STRUCT) {
+              this.success = new org.apache.hadoop.hive.metastore.api.Schema();
+              this.success.read(iprot);
               this.__isset.success = true;
             } else { 
               TProtocolUtil.skip(iprot, field.type);
@@ -1574,10 +1576,10 @@
       if (this.__isset.success) {
         if (this.success != null) {
           field.name = "success";
-          field.type = TType.STRING;
+          field.type = TType.STRUCT;
           field.id = 0;
           oprot.writeFieldBegin(field);
-          oprot.writeString(this.success);
+          this.success.write(oprot);
           oprot.writeFieldEnd();
         }
       } else if (this.__isset.ex) {

Modified: hadoop/hive/trunk/service/src/gen-php/ThriftHive.php
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/gen-php/ThriftHive.php?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/gen-php/ThriftHive.php (original)
+++ hadoop/hive/trunk/service/src/gen-php/ThriftHive.php Thu Jul  2 16:26:44 2009
@@ -1003,7 +1003,8 @@
       self::$_TSPEC = array(
         0 => array(
           'var' => 'success',
-          'type' => TType::STRING,
+          'type' => TType::STRUCT,
+          'class' => 'metastore_Schema',
           ),
         1 => array(
           'var' => 'ex',
@@ -1042,8 +1043,9 @@
       switch ($fid)
       {
         case 0:
-          if ($ftype == TType::STRING) {
-            $xfer += $input->readString($this->success);
+          if ($ftype == TType::STRUCT) {
+            $this->success = new metastore_Schema();
+            $xfer += $this->success->read($input);
           } else {
             $xfer += $input->skip($ftype);
           }
@@ -1070,8 +1072,11 @@
     $xfer = 0;
     $xfer += $output->writeStructBegin('ThriftHive_getSchema_result');
     if ($this->success !== null) {
-      $xfer += $output->writeFieldBegin('success', TType::STRING, 0);
-      $xfer += $output->writeString($this->success);
+      if (!is_object($this->success)) {
+        throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
+      }
+      $xfer += $output->writeFieldBegin('success', TType::STRUCT, 0);
+      $xfer += $this->success->write($output);
       $xfer += $output->writeFieldEnd();
     }
     if ($this->ex !== null) {

Modified: hadoop/hive/trunk/service/src/gen-py/hive_service/ThriftHive-remote
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/gen-py/hive_service/ThriftHive-remote?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/gen-py/hive_service/ThriftHive-remote (original)
+++ hadoop/hive/trunk/service/src/gen-py/hive_service/ThriftHive-remote Thu Jul  2 16:26:44 2009
@@ -25,7 +25,7 @@
   print '  string fetchOne()'
   print '   fetchN(i32 numRows)'
   print '   fetchAll()'
-  print '  string getSchema()'
+  print '  Schema getSchema()'
   print ''
   sys.exit(0)
 

Modified: hadoop/hive/trunk/service/src/gen-py/hive_service/ThriftHive.py
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/gen-py/hive_service/ThriftHive.py?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/gen-py/hive_service/ThriftHive.py (original)
+++ hadoop/hive/trunk/service/src/gen-py/hive_service/ThriftHive.py Thu Jul  2 16:26:44 2009
@@ -791,7 +791,7 @@
 class getSchema_result:
 
   thrift_spec = (
-    (0, TType.STRING, 'success', None, None, ), # 0
+    (0, TType.STRUCT, 'success', (hive_metastore.ttypes.Schema, hive_metastore.ttypes.Schema.thrift_spec), None, ), # 0
     (1, TType.STRUCT, 'ex', (HiveServerException, HiveServerException.thrift_spec), None, ), # 1
   )
 
@@ -814,8 +814,9 @@
       if ftype == TType.STOP:
         break
       if fid == 0:
-        if ftype == TType.STRING:
-          self.success = iprot.readString();
+        if ftype == TType.STRUCT:
+          self.success = hive_metastore.ttypes.Schema()
+          self.success.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == 1:
@@ -835,8 +836,8 @@
       return
     oprot.writeStructBegin('getSchema_result')
     if self.success != None:
-      oprot.writeFieldBegin('success', TType.STRING, 0)
-      oprot.writeString(self.success)
+      oprot.writeFieldBegin('success', TType.STRUCT, 0)
+      self.success.write(oprot)
       oprot.writeFieldEnd()
     if self.ex != None:
       oprot.writeFieldBegin('ex', TType.STRUCT, 1)

Modified: hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java (original)
+++ hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java Thu Jul  2 16:26:44 2009
@@ -18,30 +18,16 @@
 
 package org.apache.hadoop.hive.service;
 
-import java.io.PrintStream;
 import java.util.List;
-import java.util.Map;
 import java.util.Vector;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.plan.tableDesc;
-import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.service.ThriftHive;
-import org.apache.hadoop.hive.service.ThriftHive.*;
 import org.apache.hadoop.hive.service.HiveServerException;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.util.StringUtils;
-
-import com.facebook.fb303.FacebookBase;
-import com.facebook.fb303.FacebookService;
-import com.facebook.fb303.fb_status;
+
 import com.facebook.thrift.TException;
 import com.facebook.thrift.TProcessor;
 import com.facebook.thrift.TProcessorFactory;
@@ -133,11 +119,11 @@
     /**
      * Return the schema of the query result
      */
-    public String getSchema() throws HiveServerException, TException {
+    public Schema getSchema() throws HiveServerException, TException {
       try {
-        String schema = driver.getSchema();
+        Schema schema = driver.getSchema();
         if (schema == null) {
-          schema = "";
+          schema = new Schema();
         }
         LOG.info("Returning schema: " + schema);
         return schema;

Modified: hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java?rev=790652&r1=790651&r2=790652&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java (original)
+++ hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java Thu Jul  2 16:26:44 2009
@@ -4,6 +4,9 @@
 
 import org.apache.hadoop.fs.Path;
 import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.service.HiveInterface;
 import org.apache.hadoop.hive.service.HiveClient;
 import org.apache.hadoop.hive.service.HiveServer;
@@ -77,7 +80,11 @@
       client.execute("select count(1) as cnt from " + tableName);
       String row = client.fetchOne();
       assertEquals(row, "500");
-      assertEquals("struct result { string cnt}#cnt#string", client.getSchema());
+      Schema schema = client.getSchema();
+      List<FieldSchema> listFields = schema.getFieldSchemas();
+      assertEquals(listFields.size(), 1);
+      assertEquals(listFields.get(0).getName(), "cnt");
+      assertEquals(listFields.get(0).getType(), "i64");
       client.execute("drop table " + tableName);
     }
     catch (Throwable t) {
@@ -171,7 +178,18 @@
     Properties dsp = new Properties();
     dsp.setProperty(Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName());
     dsp.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "result");
-    dsp.setProperty(Constants.SERIALIZATION_DDL, client.getSchema().split("#")[0]);
+    String serDDL = new String("struct result { ");
+    List<FieldSchema> schema = client.getSchema().getFieldSchemas();
+    for (int pos = 0; pos < schema.size(); pos++) {
+      if (pos != 0) 
+          serDDL = serDDL.concat(",");
+      serDDL = serDDL.concat(schema.get(pos).getType());
+      serDDL = serDDL.concat(" ");
+      serDDL = serDDL.concat(schema.get(pos).getName());
+    }
+    serDDL = serDDL.concat("}");
+
+    dsp.setProperty(Constants.SERIALIZATION_DDL, serDDL);
     dsp.setProperty(Constants.SERIALIZATION_LIB, ds.getClass().toString());
     dsp.setProperty(Constants.FIELD_DELIM, "9");
     ds.initialize(new Configuration(), dsp);
@@ -181,13 +199,25 @@
 
     assertEquals(o.getClass().toString(), "class java.util.ArrayList");
     List<?> lst = (List<?>)o;
-    assertEquals(lst.get(0), "238");
+    assertEquals(lst.get(0), 238);
 
     // TODO: serde doesn't like underscore  -- struct result { string _c0}
     sql = "select count(1) as c from " + tableName;
     client.execute(sql);
     row = client.fetchOne();
-    dsp.setProperty(Constants.SERIALIZATION_DDL, client.getSchema().split("#")[0]);
+
+    serDDL = new String("struct result { ");
+    schema = client.getSchema().getFieldSchemas();
+    for (int pos = 0; pos < schema.size(); pos++) {
+      if (pos != 0) 
+          serDDL = serDDL.concat(",");
+      serDDL = serDDL.concat(schema.get(pos).getType());
+      serDDL = serDDL.concat(" ");
+      serDDL = serDDL.concat(schema.get(pos).getName());
+    }
+    serDDL = serDDL.concat("}");
+
+    dsp.setProperty(Constants.SERIALIZATION_DDL, serDDL);
     // Need a new DynamicSerDe instance - re-initialization is not supported.
     ds = new DynamicSerDe();
     ds.initialize(new Configuration(), dsp);
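
The loop that rebuilds the SERIALIZATION_DDL string from the returned Schema appears twice in this test. A hypothetical helper of the same shape (not part of this commit) would keep that reconstruction in one place:

    // Hypothetical helper: rebuild the Thrift DDL string
    // ("struct result { <type> <name>, ... }") from the Schema returned by getSchema().
    static String toSerializationDDL(Schema schema) {
      StringBuilder ddl = new StringBuilder("struct result { ");
      List<FieldSchema> fields = schema.getFieldSchemas();
      for (int pos = 0; pos < fields.size(); pos++) {
        if (pos != 0) {
          ddl.append(",");
        }
        ddl.append(fields.get(pos).getType()).append(" ").append(fields.get(pos).getName());
      }
      return ddl.append("}").toString();
    }

    // Usage in the test would then reduce to:
    //   dsp.setProperty(Constants.SERIALIZATION_DDL, toSerializationDDL(client.getSchema()));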


