hadoop-hive-commits mailing list archives

From zs...@apache.org
Subject svn commit: r788209 - in /hadoop/hive/trunk: CHANGES.txt service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java
Date Wed, 24 Jun 2009 23:27:55 GMT
Author: zshao
Date: Wed Jun 24 23:27:55 2009
New Revision: 788209

URL: http://svn.apache.org/viewvc?rev=788209&view=rev
Log:
HIVE-573. Fix TestHiveServer. (Raghotham Murthy via zshao)

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=788209&r1=788208&r2=788209&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Jun 24 23:27:55 2009
@@ -263,6 +263,8 @@
     HIVE-529. Some cleanup for join operator
     (Zheng Shao via namit)
 
+    HIVE-573. Fix TestHiveServer. (Raghotham Murthy via zshao)
+
 Release 0.3.1 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java?rev=788209&r1=788208&r2=788209&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java (original)
+++ hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java Wed Jun 24 23:27:55 2009
@@ -1,6 +1,7 @@
 package org.apache.hadoop.hive.service;
 
 import java.util.*;
+
 import org.apache.hadoop.fs.Path;
 import junit.framework.TestCase;
 import org.apache.hadoop.hive.service.HiveInterface;
@@ -12,7 +13,6 @@
 import com.facebook.thrift.transport.TTransport;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BytesWritable;
@@ -77,8 +77,7 @@
       client.execute("select count(1) as cnt from " + tableName);
       String row = client.fetchOne();
       assertEquals(row, "500");
-      String schema = client.getSchema();
-      assertEquals("struct result { string cnt}", schema);
+      assertEquals("struct result { string cnt}#cnt#string", client.getSchema());
       client.execute("drop table " + tableName);
     }
     catch (Throwable t) {
@@ -172,7 +171,7 @@
     Properties dsp = new Properties();
     dsp.setProperty(Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName());
     dsp.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "result");
-    dsp.setProperty(Constants.SERIALIZATION_DDL, client.getSchema());
+    dsp.setProperty(Constants.SERIALIZATION_DDL, client.getSchema().split("#")[0]);
     dsp.setProperty(Constants.SERIALIZATION_LIB, ds.getClass().toString());
     dsp.setProperty(Constants.FIELD_DELIM, "9");
     ds.initialize(new Configuration(), dsp);
@@ -188,7 +187,7 @@
     sql = "select count(1) as c from " + tableName;
     client.execute(sql);
     row = client.fetchOne();
-    dsp.setProperty(Constants.SERIALIZATION_DDL, client.getSchema());
+    dsp.setProperty(Constants.SERIALIZATION_DDL, client.getSchema().split("#")[0]);
     // Need a new DynamicSerDe instance - re-initialization is not supported.
     ds = new DynamicSerDe();
     ds.initialize(new Configuration(), dsp);
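
As the test changes above illustrate, client.getSchema() now returns the result DDL followed by the column names and the column types, joined with '#' (for example "struct result { string cnt}#cnt#string"); callers that only need the DDL, like the DynamicSerDe setup in this test, can take the part before the first '#'. A minimal sketch of that parsing, outside the test itself (the class name below is hypothetical, not part of this commit):

    // Sketch only: splits a schema string of the assumed form
    // "<ddl>#<column names>#<column types>" as returned by getSchema().
    public final class SchemaStringExample {
      public static void main(String[] args) {
        String schema = "struct result { string cnt}#cnt#string";
        String[] parts = schema.split("#");
        String ddl = parts[0];                                  // what the test passes as SERIALIZATION_DDL
        String columnNames = parts.length > 1 ? parts[1] : "";  // e.g. "cnt"
        String columnTypes = parts.length > 2 ? parts[2] : "";  // e.g. "string"
        System.out.println("ddl   = " + ddl);
        System.out.println("names = " + columnNames);
        System.out.println("types = " + columnTypes);
      }
    }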


