hadoop-hive-commits mailing list archives

From: na...@apache.org
Subject: svn commit: r793758 [8/11] - in /hadoop/hive/trunk: ./ jdbc/src/java/org/apache/hadoop/hive/jdbc/ lib/ metastore/ metastore/if/ metastore/include/ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen-php/ metastore/src/gen...
Date: Tue, 14 Jul 2009 00:01:08 GMT
Modified: hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py (original)
+++ hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py Tue Jul 14 00:01:05 2009
@@ -17,6 +17,11 @@
 
 
 class Version:
+  """
+  Attributes:
+   - version
+   - comments
+  """
 
   thrift_spec = (
     None, # 0
@@ -24,14 +29,9 @@
     (2, TType.STRING, 'comments', None, None, ), # 2
   )
 
-  def __init__(self, d=None):
-    self.version = None
-    self.comments = None
-    if isinstance(d, dict):
-      if 'version' in d:
-        self.version = d['version']
-      if 'comments' in d:
-        self.comments = d['comments']
+  def __init__(self, version=None, comments=None,):
+    self.version = version
+    self.comments = comments
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -73,11 +73,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -86,6 +85,12 @@
     return not (self == other)
 
 class FieldSchema:
+  """
+  Attributes:
+   - name
+   - type
+   - comment
+  """
 
   thrift_spec = (
     None, # 0
@@ -94,17 +99,10 @@
     (3, TType.STRING, 'comment', None, None, ), # 3
   )
 
-  def __init__(self, d=None):
-    self.name = None
-    self.type = None
-    self.comment = None
-    if isinstance(d, dict):
-      if 'name' in d:
-        self.name = d['name']
-      if 'type' in d:
-        self.type = d['type']
-      if 'comment' in d:
-        self.comment = d['comment']
+  def __init__(self, name=None, type=None, comment=None,):
+    self.name = name
+    self.type = type
+    self.comment = comment
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -155,11 +153,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -168,6 +165,13 @@
     return not (self == other)
 
 class Type:
+  """
+  Attributes:
+   - name
+   - type1
+   - type2
+   - fields
+  """
 
   thrift_spec = (
     None, # 0
@@ -177,20 +181,11 @@
     (4, TType.LIST, 'fields', (TType.STRUCT,(FieldSchema, FieldSchema.thrift_spec)), None, ), # 4
   )
 
-  def __init__(self, d=None):
-    self.name = None
-    self.type1 = None
-    self.type2 = None
-    self.fields = None
-    if isinstance(d, dict):
-      if 'name' in d:
-        self.name = d['name']
-      if 'type1' in d:
-        self.type1 = d['type1']
-      if 'type2' in d:
-        self.type2 = d['type2']
-      if 'fields' in d:
-        self.fields = d['fields']
+  def __init__(self, name=None, type1=None, type2=None, fields=None,):
+    self.name = name
+    self.type1 = type1
+    self.type2 = type2
+    self.fields = fields
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -259,11 +254,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -272,6 +266,11 @@
     return not (self == other)
 
 class Database:
+  """
+  Attributes:
+   - name
+   - description
+  """
 
   thrift_spec = (
     None, # 0
@@ -279,14 +278,9 @@
     (2, TType.STRING, 'description', None, None, ), # 2
   )
 
-  def __init__(self, d=None):
-    self.name = None
-    self.description = None
-    if isinstance(d, dict):
-      if 'name' in d:
-        self.name = d['name']
-      if 'description' in d:
-        self.description = d['description']
+  def __init__(self, name=None, description=None,):
+    self.name = name
+    self.description = description
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -328,11 +322,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -341,6 +334,12 @@
     return not (self == other)
 
 class SerDeInfo:
+  """
+  Attributes:
+   - name
+   - serializationLib
+   - parameters
+  """
 
   thrift_spec = (
     None, # 0
@@ -349,17 +348,10 @@
     (3, TType.MAP, 'parameters', (TType.STRING,None,TType.STRING,None), None, ), # 3
   )
 
-  def __init__(self, d=None):
-    self.name = None
-    self.serializationLib = None
-    self.parameters = None
-    if isinstance(d, dict):
-      if 'name' in d:
-        self.name = d['name']
-      if 'serializationLib' in d:
-        self.serializationLib = d['serializationLib']
-      if 'parameters' in d:
-        self.parameters = d['parameters']
+  def __init__(self, name=None, serializationLib=None, parameters=None,):
+    self.name = name
+    self.serializationLib = serializationLib
+    self.parameters = parameters
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -420,11 +412,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -433,6 +424,11 @@
     return not (self == other)
 
 class Order:
+  """
+  Attributes:
+   - col
+   - order
+  """
 
   thrift_spec = (
     None, # 0
@@ -440,14 +436,9 @@
     (2, TType.I32, 'order', None, None, ), # 2
   )
 
-  def __init__(self, d=None):
-    self.col = None
-    self.order = None
-    if isinstance(d, dict):
-      if 'col' in d:
-        self.col = d['col']
-      if 'order' in d:
-        self.order = d['order']
+  def __init__(self, col=None, order=None,):
+    self.col = col
+    self.order = order
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -489,11 +480,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -502,6 +492,19 @@
     return not (self == other)
 
 class StorageDescriptor:
+  """
+  Attributes:
+   - cols
+   - location
+   - inputFormat
+   - outputFormat
+   - compressed
+   - numBuckets
+   - serdeInfo
+   - bucketCols
+   - sortCols
+   - parameters
+  """
 
   thrift_spec = (
     None, # 0
@@ -517,38 +520,17 @@
     (10, TType.MAP, 'parameters', (TType.STRING,None,TType.STRING,None), None, ), # 10
   )
 
-  def __init__(self, d=None):
-    self.cols = None
-    self.location = None
-    self.inputFormat = None
-    self.outputFormat = None
-    self.compressed = None
-    self.numBuckets = None
-    self.serdeInfo = None
-    self.bucketCols = None
-    self.sortCols = None
-    self.parameters = None
-    if isinstance(d, dict):
-      if 'cols' in d:
-        self.cols = d['cols']
-      if 'location' in d:
-        self.location = d['location']
-      if 'inputFormat' in d:
-        self.inputFormat = d['inputFormat']
-      if 'outputFormat' in d:
-        self.outputFormat = d['outputFormat']
-      if 'compressed' in d:
-        self.compressed = d['compressed']
-      if 'numBuckets' in d:
-        self.numBuckets = d['numBuckets']
-      if 'serdeInfo' in d:
-        self.serdeInfo = d['serdeInfo']
-      if 'bucketCols' in d:
-        self.bucketCols = d['bucketCols']
-      if 'sortCols' in d:
-        self.sortCols = d['sortCols']
-      if 'parameters' in d:
-        self.parameters = d['parameters']
+  def __init__(self, cols=None, location=None, inputFormat=None, outputFormat=None, compressed=None, numBuckets=None, serdeInfo=None, bucketCols=None, sortCols=None, parameters=None,):
+    self.cols = cols
+    self.location = location
+    self.inputFormat = inputFormat
+    self.outputFormat = outputFormat
+    self.compressed = compressed
+    self.numBuckets = numBuckets
+    self.serdeInfo = serdeInfo
+    self.bucketCols = bucketCols
+    self.sortCols = sortCols
+    self.parameters = parameters
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -699,11 +681,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -712,6 +693,18 @@
     return not (self == other)
 
 class Table:
+  """
+  Attributes:
+   - tableName
+   - dbName
+   - owner
+   - createTime
+   - lastAccessTime
+   - retention
+   - sd
+   - partitionKeys
+   - parameters
+  """
 
   thrift_spec = (
     None, # 0
@@ -726,35 +719,16 @@
     (9, TType.MAP, 'parameters', (TType.STRING,None,TType.STRING,None), None, ), # 9
   )
 
-  def __init__(self, d=None):
-    self.tableName = None
-    self.dbName = None
-    self.owner = None
-    self.createTime = None
-    self.lastAccessTime = None
-    self.retention = None
-    self.sd = None
-    self.partitionKeys = None
-    self.parameters = None
-    if isinstance(d, dict):
-      if 'tableName' in d:
-        self.tableName = d['tableName']
-      if 'dbName' in d:
-        self.dbName = d['dbName']
-      if 'owner' in d:
-        self.owner = d['owner']
-      if 'createTime' in d:
-        self.createTime = d['createTime']
-      if 'lastAccessTime' in d:
-        self.lastAccessTime = d['lastAccessTime']
-      if 'retention' in d:
-        self.retention = d['retention']
-      if 'sd' in d:
-        self.sd = d['sd']
-      if 'partitionKeys' in d:
-        self.partitionKeys = d['partitionKeys']
-      if 'parameters' in d:
-        self.parameters = d['parameters']
+  def __init__(self, tableName=None, dbName=None, owner=None, createTime=None, lastAccessTime=None, retention=None, sd=None, partitionKeys=None, parameters=None,):
+    self.tableName = tableName
+    self.dbName = dbName
+    self.owner = owner
+    self.createTime = createTime
+    self.lastAccessTime = lastAccessTime
+    self.retention = retention
+    self.sd = sd
+    self.partitionKeys = partitionKeys
+    self.parameters = parameters
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -879,11 +853,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -892,6 +865,16 @@
     return not (self == other)
 
 class Partition:
+  """
+  Attributes:
+   - values
+   - dbName
+   - tableName
+   - createTime
+   - lastAccessTime
+   - sd
+   - parameters
+  """
 
   thrift_spec = (
     None, # 0
@@ -904,29 +887,14 @@
     (7, TType.MAP, 'parameters', (TType.STRING,None,TType.STRING,None), None, ), # 7
   )
 
-  def __init__(self, d=None):
-    self.values = None
-    self.dbName = None
-    self.tableName = None
-    self.createTime = None
-    self.lastAccessTime = None
-    self.sd = None
-    self.parameters = None
-    if isinstance(d, dict):
-      if 'values' in d:
-        self.values = d['values']
-      if 'dbName' in d:
-        self.dbName = d['dbName']
-      if 'tableName' in d:
-        self.tableName = d['tableName']
-      if 'createTime' in d:
-        self.createTime = d['createTime']
-      if 'lastAccessTime' in d:
-        self.lastAccessTime = d['lastAccessTime']
-      if 'sd' in d:
-        self.sd = d['sd']
-      if 'parameters' in d:
-        self.parameters = d['parameters']
+  def __init__(self, values=None, dbName=None, tableName=None, createTime=None, lastAccessTime=None, sd=None, parameters=None,):
+    self.values = values
+    self.dbName = dbName
+    self.tableName = tableName
+    self.createTime = createTime
+    self.lastAccessTime = lastAccessTime
+    self.sd = sd
+    self.parameters = parameters
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1032,11 +1000,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -1045,6 +1012,14 @@
     return not (self == other)
 
 class Index:
+  """
+  Attributes:
+   - indexName
+   - indexType
+   - tableName
+   - dbName
+   - colNames
+  """
 
   thrift_spec = (
     None, # 0
@@ -1055,23 +1030,12 @@
     (5, TType.LIST, 'colNames', (TType.STRING,None), None, ), # 5
   )
 
-  def __init__(self, d=None):
-    self.indexName = None
-    self.indexType = None
-    self.tableName = None
-    self.dbName = None
-    self.colNames = None
-    if isinstance(d, dict):
-      if 'indexName' in d:
-        self.indexName = d['indexName']
-      if 'indexType' in d:
-        self.indexType = d['indexType']
-      if 'tableName' in d:
-        self.tableName = d['tableName']
-      if 'dbName' in d:
-        self.dbName = d['dbName']
-      if 'colNames' in d:
-        self.colNames = d['colNames']
+  def __init__(self, indexName=None, indexType=None, tableName=None, dbName=None, colNames=None,):
+    self.indexName = indexName
+    self.indexType = indexType
+    self.tableName = tableName
+    self.dbName = dbName
+    self.colNames = colNames
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1148,11 +1112,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -1161,6 +1124,11 @@
     return not (self == other)
 
 class Schema:
+  """
+  Attributes:
+   - fieldSchemas
+   - properties
+  """
 
   thrift_spec = (
     None, # 0
@@ -1168,14 +1136,9 @@
     (2, TType.MAP, 'properties', (TType.STRING,None,TType.STRING,None), None, ), # 2
   )
 
-  def __init__(self, d=None):
-    self.fieldSchemas = None
-    self.properties = None
-    if isinstance(d, dict):
-      if 'fieldSchemas' in d:
-        self.fieldSchemas = d['fieldSchemas']
-      if 'properties' in d:
-        self.properties = d['properties']
+  def __init__(self, fieldSchemas=None, properties=None,):
+    self.fieldSchemas = fieldSchemas
+    self.properties = properties
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1236,11 +1199,10 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
-
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -1249,13 +1211,18 @@
     return not (self == other)
 
 class MetaException(Exception):
+  """
+  Attributes:
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
 
-  thrift_spec = None
-  def __init__(self, d=None):
-    self.message = None
-    if isinstance(d, dict):
-      if 'message' in d:
-        self.message = d['message']
+  def __init__(self, message=None,):
+    self.message = message
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1266,7 +1233,7 @@
       (fname, ftype, fid) = iprot.readFieldBegin()
       if ftype == TType.STOP:
         break
-      if fid == -1:
+      if fid == 1:
         if ftype == TType.STRING:
           self.message = iprot.readString();
         else:
@@ -1282,17 +1249,19 @@
       return
     oprot.writeStructBegin('MetaException')
     if self.message != None:
-      oprot.writeFieldBegin('message', TType.STRING, -1)
+      oprot.writeFieldBegin('message', TType.STRING, 1)
       oprot.writeString(self.message)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
+  def __str__(self):
+    return repr(self)
 
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -1301,13 +1270,18 @@
     return not (self == other)
 
 class UnknownTableException(Exception):
+  """
+  Attributes:
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
 
-  thrift_spec = None
-  def __init__(self, d=None):
-    self.message = None
-    if isinstance(d, dict):
-      if 'message' in d:
-        self.message = d['message']
+  def __init__(self, message=None,):
+    self.message = message
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1318,7 +1292,7 @@
       (fname, ftype, fid) = iprot.readFieldBegin()
       if ftype == TType.STOP:
         break
-      if fid == -1:
+      if fid == 1:
         if ftype == TType.STRING:
           self.message = iprot.readString();
         else:
@@ -1334,17 +1308,19 @@
       return
     oprot.writeStructBegin('UnknownTableException')
     if self.message != None:
-      oprot.writeFieldBegin('message', TType.STRING, -1)
+      oprot.writeFieldBegin('message', TType.STRING, 1)
       oprot.writeString(self.message)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
+  def __str__(self):
+    return repr(self)
 
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -1353,13 +1329,18 @@
     return not (self == other)
 
 class UnknownDBException(Exception):
+  """
+  Attributes:
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
 
-  thrift_spec = None
-  def __init__(self, d=None):
-    self.message = None
-    if isinstance(d, dict):
-      if 'message' in d:
-        self.message = d['message']
+  def __init__(self, message=None,):
+    self.message = message
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1370,7 +1351,7 @@
       (fname, ftype, fid) = iprot.readFieldBegin()
       if ftype == TType.STOP:
         break
-      if fid == -1:
+      if fid == 1:
         if ftype == TType.STRING:
           self.message = iprot.readString();
         else:
@@ -1386,17 +1367,19 @@
       return
     oprot.writeStructBegin('UnknownDBException')
     if self.message != None:
-      oprot.writeFieldBegin('message', TType.STRING, -1)
+      oprot.writeFieldBegin('message', TType.STRING, 1)
       oprot.writeString(self.message)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
+  def __str__(self):
+    return repr(self)
 
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -1405,13 +1388,18 @@
     return not (self == other)
 
 class AlreadyExistsException(Exception):
+  """
+  Attributes:
+   - message
+  """
 
-  thrift_spec = None
-  def __init__(self, d=None):
-    self.message = None
-    if isinstance(d, dict):
-      if 'message' in d:
-        self.message = d['message']
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
+
+  def __init__(self, message=None,):
+    self.message = message
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1422,7 +1410,7 @@
       (fname, ftype, fid) = iprot.readFieldBegin()
       if ftype == TType.STOP:
         break
-      if fid == -1:
+      if fid == 1:
         if ftype == TType.STRING:
           self.message = iprot.readString();
         else:
@@ -1438,17 +1426,19 @@
       return
     oprot.writeStructBegin('AlreadyExistsException')
     if self.message != None:
-      oprot.writeFieldBegin('message', TType.STRING, -1)
+      oprot.writeFieldBegin('message', TType.STRING, 1)
       oprot.writeString(self.message)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
+  def __str__(self):
+    return repr(self)
 
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -1457,13 +1447,18 @@
     return not (self == other)
 
 class InvalidObjectException(Exception):
+  """
+  Attributes:
+   - message
+  """
 
-  thrift_spec = None
-  def __init__(self, d=None):
-    self.message = None
-    if isinstance(d, dict):
-      if 'message' in d:
-        self.message = d['message']
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
+
+  def __init__(self, message=None,):
+    self.message = message
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1474,7 +1469,7 @@
       (fname, ftype, fid) = iprot.readFieldBegin()
       if ftype == TType.STOP:
         break
-      if fid == -1:
+      if fid == 1:
         if ftype == TType.STRING:
           self.message = iprot.readString();
         else:
@@ -1490,17 +1485,19 @@
       return
     oprot.writeStructBegin('InvalidObjectException')
     if self.message != None:
-      oprot.writeFieldBegin('message', TType.STRING, -1)
+      oprot.writeFieldBegin('message', TType.STRING, 1)
       oprot.writeString(self.message)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
+  def __str__(self):
+    return repr(self)
 
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -1509,13 +1506,18 @@
     return not (self == other)
 
 class NoSuchObjectException(Exception):
+  """
+  Attributes:
+   - message
+  """
 
-  thrift_spec = None
-  def __init__(self, d=None):
-    self.message = None
-    if isinstance(d, dict):
-      if 'message' in d:
-        self.message = d['message']
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
+
+  def __init__(self, message=None,):
+    self.message = message
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1526,7 +1528,7 @@
       (fname, ftype, fid) = iprot.readFieldBegin()
       if ftype == TType.STOP:
         break
-      if fid == -1:
+      if fid == 1:
         if ftype == TType.STRING:
           self.message = iprot.readString();
         else:
@@ -1542,17 +1544,19 @@
       return
     oprot.writeStructBegin('NoSuchObjectException')
     if self.message != None:
-      oprot.writeFieldBegin('message', TType.STRING, -1)
+      oprot.writeFieldBegin('message', TType.STRING, 1)
       oprot.writeString(self.message)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
+  def __str__(self):
+    return repr(self)
 
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -1561,13 +1565,18 @@
     return not (self == other)
 
 class IndexAlreadyExistsException(Exception):
+  """
+  Attributes:
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
 
-  thrift_spec = None
-  def __init__(self, d=None):
-    self.message = None
-    if isinstance(d, dict):
-      if 'message' in d:
-        self.message = d['message']
+  def __init__(self, message=None,):
+    self.message = message
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1578,7 +1587,7 @@
       (fname, ftype, fid) = iprot.readFieldBegin()
       if ftype == TType.STOP:
         break
-      if fid == -1:
+      if fid == 1:
         if ftype == TType.STRING:
           self.message = iprot.readString();
         else:
@@ -1594,17 +1603,19 @@
       return
     oprot.writeStructBegin('IndexAlreadyExistsException')
     if self.message != None:
-      oprot.writeFieldBegin('message', TType.STRING, -1)
+      oprot.writeFieldBegin('message', TType.STRING, 1)
       oprot.writeString(self.message)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
+  def __str__(self):
+    return repr(self)
 
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
@@ -1613,13 +1624,18 @@
     return not (self == other)
 
 class InvalidOperationException(Exception):
+  """
+  Attributes:
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
 
-  thrift_spec = None
-  def __init__(self, d=None):
-    self.message = None
-    if isinstance(d, dict):
-      if 'message' in d:
-        self.message = d['message']
+  def __init__(self, message=None,):
+    self.message = message
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1630,7 +1646,7 @@
       (fname, ftype, fid) = iprot.readFieldBegin()
       if ftype == TType.STOP:
         break
-      if fid == -1:
+      if fid == 1:
         if ftype == TType.STRING:
           self.message = iprot.readString();
         else:
@@ -1646,17 +1662,19 @@
       return
     oprot.writeStructBegin('InvalidOperationException')
     if self.message != None:
-      oprot.writeFieldBegin('message', TType.STRING, -1)
+      oprot.writeFieldBegin('message', TType.STRING, 1)
       oprot.writeString(self.message)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self): 
-    return str(self.__dict__)
+  def __str__(self):
+    return repr(self)
 
-  def __repr__(self): 
-    return repr(self.__dict__)
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 
   def __eq__(self, other):
     return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Tue Jul 14 00:01:05 2009
@@ -50,20 +50,20 @@
 import com.facebook.fb303.FacebookBase;
 import com.facebook.fb303.FacebookService;
 import com.facebook.fb303.fb_status;
-import com.facebook.thrift.TException;
-import com.facebook.thrift.protocol.TBinaryProtocol;
-import com.facebook.thrift.server.TServer;
-import com.facebook.thrift.server.TThreadPoolServer;
-import com.facebook.thrift.transport.TServerSocket;
-import com.facebook.thrift.transport.TServerTransport;
-import com.facebook.thrift.transport.TTransportFactory;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.server.TServer;
+import org.apache.thrift.server.TThreadPoolServer;
+import org.apache.thrift.transport.TServerSocket;
+import org.apache.thrift.transport.TServerTransport;
+import org.apache.thrift.transport.TTransportFactory;
 
 /**
  * TODO:pc remove application logic to a separate interface. 
  */
 public class HiveMetaStore extends ThriftHiveMetastore {
   
-    public static class HMSHandler extends FacebookBase implements ThriftHiveMetastore.Iface{
+    public static class HMSHandler extends FacebookBase implements ThriftHiveMetastore.Iface {
       public static final Log LOG = LogFactory.getLog(HiveMetaStore.class.getName());
       private static boolean createDefaultDB = false;
       private String rawStoreClassName;
@@ -653,6 +653,10 @@
           throw new MetaException(e.getMessage());
         }
       }
+
+      public String getCpuProfile(int profileDurationInSec) throws TException {
+        return "";
+      }
   }
     
   /**

Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Tue Jul 14 00:01:05 2009
@@ -38,12 +38,12 @@
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 
-import com.facebook.thrift.TException;
-import com.facebook.thrift.protocol.TBinaryProtocol;
-import com.facebook.thrift.protocol.TProtocol;
-import com.facebook.thrift.transport.TSocket;
-import com.facebook.thrift.transport.TTransport;
-import com.facebook.thrift.transport.TTransportException;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
 
 /**
  * Hive Metastore Client.

Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Tue Jul 14 00:01:05 2009
@@ -30,7 +30,7 @@
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
 
-import com.facebook.thrift.TException;
+import org.apache.thrift.TException;
 
 /**
  * TODO Unnecessary when the server sides for both dbstore and filestore are merged

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java Tue Jul 14 00:01:05 2009
@@ -407,7 +407,7 @@
 
       fixedRowSize += javaObjectOverHead;
       Class<? extends UDAFEvaluator> agg = aggregationClasses[i];
-      Field[] fArr = agg.getDeclaredFields();
+      Field[] fArr = ObjectInspectorUtils.getDeclaredNonStaticFields(agg);
       for (Field f : fArr) {
         fixedRowSize += getSize(i, f.getType(), f);
       }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Tue Jul 14 00:01:05 2009
@@ -54,7 +54,7 @@
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.util.StringUtils;
 
-import com.facebook.thrift.TException;
+import org.apache.thrift.TException;
 
 /**
  * The Hive class contains information about this instance of Hive.

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java Tue Jul 14 00:01:05 2009
@@ -19,7 +19,7 @@
 import org.apache.hadoop.hive.ql.metadata.CheckResult.PartitionResult;
 import org.apache.hadoop.hive.conf.HiveConf;
 
-import com.facebook.thrift.TException;
+import org.apache.thrift.TException;
 
 /**
  * Verify that the information in the metastore matches what

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java Tue Jul 14 00:01:05 2009
@@ -37,9 +37,9 @@
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 
-import com.facebook.thrift.TException;
-import com.facebook.thrift.protocol.TBinaryProtocol;
-import com.facebook.thrift.transport.TMemoryBuffer;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.transport.TMemoryBuffer;
 
 /**
  * A Hive Table Partition: is a fundamental storage unit within a Table

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Tue Jul 14 00:01:05 2009
@@ -38,7 +38,7 @@
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 
-import com.facebook.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TBinaryProtocol;
 
 public class PlanUtils {
 

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Tue Jul 14 00:01:05 2009
@@ -65,7 +65,7 @@
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.MiniMRCluster;
 
-import com.facebook.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TBinaryProtocol;
 
 public class QTestUtil {
 

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java Tue Jul 14 00:01:05 2009
@@ -32,9 +32,9 @@
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.facebook.thrift.*;
-import com.facebook.thrift.transport.*;
-import com.facebook.thrift.protocol.*;
+import org.apache.thrift.*;
+import org.apache.thrift.transport.*;
+import org.apache.thrift.protocol.*;
 
 //import org.apache.hadoop.contrib.serialization.thrift.*;
 

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Tue Jul 14 00:01:05 2009
@@ -40,7 +40,7 @@
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.util.StringUtils;
 
-import com.facebook.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TBinaryProtocol;
 
 public class TestHive extends TestCase {
   private Hive hm;

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java Tue Jul 14 00:01:05 2009
@@ -18,7 +18,7 @@
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 
-import com.facebook.thrift.TException;
+import org.apache.thrift.TException;
 
 public class TestHiveMetaStoreChecker extends TestCase {
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientnegative/invalid_create_tbl1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/invalid_create_tbl1.q?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/invalid_create_tbl1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/invalid_create_tbl1.q Tue Jul 14 00:01:05 2009
@@ -2,6 +2,6 @@
 CREATE TABLE inv_valid_tbl1 COMMENT 'This is a thrift based table' 
     PARTITIONED BY(aint DATETIME, country STRING) 
     CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
-    ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.ThriftDeserializer' WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex', 'serialization.format' = 'com.facebook.thrift.protocol.TBinaryProtocol')
+    ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.ThriftDeserializer' WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex', 'serialization.format' = 'org.apache.thrift.protocol.TBinaryProtocol')
     STORED AS SEQUENCEFILE;
 DESCRIBE EXTENDED inv_valid_tbl1;

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out Tue Jul 14 00:01:05 2009
@@ -2,7 +2,7 @@
 query: CREATE TABLE inv_valid_tbl1 COMMENT 'This is a thrift based table' 
     PARTITIONED BY(aint DATETIME, country STRING) 
     CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
-    ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.ThriftDeserializer' WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex', 'serialization.format' = 'com.facebook.thrift.protocol.TBinaryProtocol')
+    ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.ThriftDeserializer' WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex', 'serialization.format' = 'org.apache.thrift.protocol.TBinaryProtocol')
     STORED AS SEQUENCEFILE
 FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Partition column name aint conflicts with table columns.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out Tue Jul 14 00:01:05 2009
@@ -60,9 +60,9 @@
                             type: bigint
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/src 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src 
       Path -> Partition:
-        file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/src 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -77,7 +77,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/src
+                location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
       Reduce Operator Tree:
@@ -97,20 +97,20 @@
             File Output Operator
               compressed: false
               GlobalTableId: 0
-              directory: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10002
+              directory: file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10002
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                   properties:
                     name binary_table
                     serialization.ddl struct binary_table { string _col0, i64 _col1}
-                    serialization.format com.facebook.thrift.protocol.TBinaryProtocol
+                    serialization.format org.apache.thrift.protocol.TBinaryProtocol
                   name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10002 
+        file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10002 
           Union
             Common Join Operator
               condition map:
@@ -149,7 +149,7 @@
                     File Output Operator
                       compressed: false
                       GlobalTableId: 1
-                      directory: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10003
+                      directory: file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10003
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -163,10 +163,10 @@
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             file.inputformat org.apache.hadoop.mapred.TextInputFormat
                             file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/dest_j1
+                            location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/dest_j1
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dest_j1
-        file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10004 
+        file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10004 
           Union
             Common Join Operator
               condition map:
@@ -205,7 +205,7 @@
                     File Output Operator
                       compressed: false
                       GlobalTableId: 1
-                      directory: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10003
+                      directory: file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10003
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -219,7 +219,7 @@
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             file.inputformat org.apache.hadoop.mapred.TextInputFormat
                             file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/dest_j1
+                            location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/dest_j1
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dest_j1
       Local Work:
@@ -267,7 +267,7 @@
                         File Output Operator
                           compressed: false
                           GlobalTableId: 1
-                          directory: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10003
+                          directory: file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10003
                           table:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -281,15 +281,15 @@
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                                location file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/dest_j1
+                                location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/dest_j1
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: dest_j1
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10002 
-        file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10004 
+        file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10002 
+        file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10004 
       Path -> Partition:
-        file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10002 
+        file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10002 
           Partition
           
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -297,9 +297,9 @@
               properties:
                 name binary_table
                 serialization.ddl struct binary_table { string _col0, i64 _col1}
-                serialization.format com.facebook.thrift.protocol.TBinaryProtocol
+                serialization.format org.apache.thrift.protocol.TBinaryProtocol
               name: binary_table
-        file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10004 
+        file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10004 
           Partition
           
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -307,7 +307,7 @@
               properties:
                 name binary_table
                 serialization.ddl struct binary_table { string _col0, i64 _col1}
-                serialization.format com.facebook.thrift.protocol.TBinaryProtocol
+                serialization.format org.apache.thrift.protocol.TBinaryProtocol
               name: binary_table
 
   Stage: Stage-5
@@ -316,11 +316,11 @@
           Move Operator
             files:
                 hdfs directory: true
-                source: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10003
-                destination: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/1286743524/10000
+                source: file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10003
+                destination: file:/data/users/rmurthy/hive/build/ql/tmp/751585738/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10003 
+              file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10003 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -336,9 +336,9 @@
                           type: int
             Needs Tagging: false
             Path -> Alias:
-              file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10003 
+              file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10003 
             Path -> Partition:
-              file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10003 
+              file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10003 
                 Partition
                 
                     input format: org.apache.hadoop.mapred.TextInputFormat
@@ -353,7 +353,7 @@
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       file.inputformat org.apache.hadoop.mapred.TextInputFormat
                       file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      location file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/dest_j1
+                      location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/dest_j1
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest_j1
             Reduce Operator Tree:
@@ -361,7 +361,7 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/1286743524/10000
+                  directory: file:/data/users/rmurthy/hive/build/ql/tmp/751585738/10000
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -375,7 +375,7 @@
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         file.inputformat org.apache.hadoop.mapred.TextInputFormat
                         file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        location file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/dest_j1
+                        location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/dest_j1
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest_j1
 
@@ -383,7 +383,7 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/1286743524/10000
+          source: file:/data/users/rmurthy/hive/build/ql/tmp/751585738/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -397,10 +397,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/dest_j1
+                location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/dest_j1
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest_j1
-          tmp directory: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/1286743524/10001
+          tmp directory: file:/data/users/rmurthy/hive/build/ql/tmp/751585738/10001
 
   Stage: Stage-6
     Map Reduce
@@ -439,9 +439,9 @@
                             type: bigint
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/src 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src 
       Path -> Partition:
-        file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/src 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -456,7 +456,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/test/data/warehouse/src
+                location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
       Reduce Operator Tree:
@@ -476,14 +476,14 @@
             File Output Operator
               compressed: false
               GlobalTableId: 0
-              directory: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/2012040398/10004
+              directory: file:/data/users/rmurthy/hive/build/ql/tmp/1831027677/10004
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                   properties:
                     name binary_table
                     serialization.ddl struct binary_table { string _col0, i64 _col1}
-                    serialization.format com.facebook.thrift.protocol.TBinaryProtocol
+                    serialization.format org.apache.thrift.protocol.TBinaryProtocol
                   name: binary_table
 
 
@@ -500,7 +500,7 @@
 Output: default/dest_j1
 query: select * from dest_j1 x order by x.key
 Input: default/dest_j1
-Output: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/929906989/10000
+Output: file:/data/users/rmurthy/hive/build/ql/tmp/388644930/10000
 128		3
 146	val_146	2
 150	val_150	1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out Tue Jul 14 00:01:05 2009
@@ -58,12 +58,12 @@
                         type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 
       Path -> Partition:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -82,10 +82,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -104,10 +104,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 
           Partition
             partition values:
               ds 2008-04-09
@@ -126,10 +126,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 
+        file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 
           Partition
             partition values:
               ds 2008-04-09
@@ -148,7 +148,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
       Reduce Operator Tree:
@@ -175,20 +175,20 @@
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
-                directory: file:/data/users/njain/hive1/hive1/build/ql/tmp/306194856/10002
+                directory: file:/data/users/rmurthy/hive/build/ql/tmp/993281011/10002
                 table:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                     properties:
                       name binary_table
                       serialization.ddl struct binary_table { string _col0, string _col1, string _col2, string _col3}
-                      serialization.format com.facebook.thrift.protocol.TBinaryProtocol
+                      serialization.format org.apache.thrift.protocol.TBinaryProtocol
                     name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/njain/hive1/hive1/build/ql/tmp/306194856/10002 
+        file:/data/users/rmurthy/hive/build/ql/tmp/993281011/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -213,9 +213,9 @@
                     type: string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/njain/hive1/hive1/build/ql/tmp/306194856/10002 
+        file:/data/users/rmurthy/hive/build/ql/tmp/993281011/10002 
       Path -> Partition:
-        file:/data/users/njain/hive1/hive1/build/ql/tmp/306194856/10002 
+        file:/data/users/rmurthy/hive/build/ql/tmp/993281011/10002 
           Partition
           
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -223,14 +223,14 @@
               properties:
                 name binary_table
                 serialization.ddl struct binary_table { string _col0, string _col1, string _col2, string _col3}
-                serialization.format com.facebook.thrift.protocol.TBinaryProtocol
+                serialization.format org.apache.thrift.protocol.TBinaryProtocol
               name: binary_table
       Reduce Operator Tree:
         Extract
           File Output Operator
             compressed: false
             GlobalTableId: 0
-            directory: file:/data/users/njain/hive1/hive1/build/ql/tmp/306194856/10001
+            directory: file:/data/users/rmurthy/hive/build/ql/tmp/993281011/10001
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -254,7 +254,7 @@
 Input: default/srcpart/ds=2008-04-08/hr=12
 Input: default/srcpart/ds=2008-04-09/hr=11
 Input: default/srcpart/ds=2008-04-09/hr=12
-Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1665685376/10000
+Output: file:/data/users/rmurthy/hive/build/ql/tmp/673725995/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml Tue Jul 14 00:01:05 2009
@@ -30,7 +30,7 @@
                <boolean>true</boolean> 
               </void> 
               <void property="sourceDir"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1323052942/10000</string> 
+               <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/31102222/10000</string> 
               </void> 
               <void property="table"> 
                <object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -83,7 +83,7 @@
                   </void> 
                   <void method="put"> 
                    <string>location</string> 
-                   <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/dest1</string> 
+                   <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/dest1</string> 
                   </void> 
                  </object> 
                 </void> 
@@ -93,7 +93,7 @@
                </object> 
               </void> 
               <void property="tmpDir"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1323052942/10001</string> 
+               <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/31102222/10001</string> 
               </void> 
              </object> 
             </void> 
@@ -121,10 +121,10 @@
                <boolean>true</boolean> 
               </void> 
               <void property="sourceDir"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1243751271/10002</string> 
+               <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/389289410/10002</string> 
               </void> 
               <void property="targetDir"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1323052942/10000</string> 
+               <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/31102222/10000</string> 
               </void> 
              </object> 
             </void> 
@@ -142,7 +142,7 @@
             <void property="aliasToWork"> 
              <object class="java.util.LinkedHashMap"> 
               <void method="put"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1243751271/10002</string> 
+               <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/389289410/10002</string> 
                <object id="TableScanOperator0" class="org.apache.hadoop.hive.ql.exec.TableScanOperator"> 
                 <void property="childOperators"> 
                  <object class="java.util.ArrayList"> 
@@ -338,10 +338,10 @@
             <void property="pathToAliases"> 
              <object class="java.util.LinkedHashMap"> 
               <void method="put"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1243751271/10002</string> 
+               <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/389289410/10002</string> 
                <object class="java.util.ArrayList"> 
                 <void method="add"> 
-                 <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1243751271/10002</string> 
+                 <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/389289410/10002</string> 
                 </void> 
                </object> 
               </void> 
@@ -350,7 +350,7 @@
             <void property="pathToPartitionInfo"> 
              <object class="java.util.LinkedHashMap"> 
               <void method="put"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1243751271/10002</string> 
+               <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/389289410/10002</string> 
                <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
                 <void property="tableDesc"> 
                  <object idref="tableDesc0"/> 
@@ -368,7 +368,7 @@
                   <void property="conf"> 
                    <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                     <void property="dirName"> 
-                     <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1323052942/10000</string> 
+                     <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/31102222/10000</string> 
                     </void> 
                     <void property="tableInfo"> 
                      <object idref="tableDesc0"/> 
@@ -467,7 +467,7 @@
           <void property="conf"> 
            <object id="HiveConf0" class="org.apache.hadoop.hive.conf.HiveConf"> 
             <void property="auxJars"> 
-             <string>file:///data/users/njain/hive3/hive3/build/ql/test/test-udfs.jar</string> 
+             <string>file:///data/users/njain/hive_commit1/hive_commit1/build/ql/test/test-udfs.jar</string> 
             </void> 
            </object> 
           </void> 
@@ -501,7 +501,7 @@
                                <int>1</int> 
                               </void> 
                               <void property="dirName"> 
-                               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1243751271/10002</string> 
+                               <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/389289410/10002</string> 
                               </void> 
                               <void property="tableInfo"> 
                                <object idref="tableDesc0"/> 
@@ -1136,7 +1136,7 @@
          </object> 
         </void> 
         <void property="dir"> 
-         <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1243751271/10002</string> 
+         <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/389289410/10002</string> 
         </void> 
         <void property="listTasks"> 
          <object idref="ArrayList0"/> 
@@ -1204,7 +1204,7 @@
            </void> 
            <void method="put"> 
             <string>serialization.format</string> 
-            <string>com.facebook.thrift.protocol.TBinaryProtocol</string> 
+            <string>org.apache.thrift.protocol.TBinaryProtocol</string> 
            </void> 
            <void method="put"> 
             <string>serialization.class</string> 
@@ -1228,7 +1228,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/src_thrift</string> 
+            <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/src_thrift</string> 
            </void> 
           </object> 
          </void> 
@@ -1251,7 +1251,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/src_thrift</string> 
+       <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/src_thrift</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>src_thrift</string> 
@@ -1263,7 +1263,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/src_thrift</string> 
+       <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/src_thrift</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml Tue Jul 14 00:01:05 2009
@@ -142,7 +142,7 @@
            </void> 
            <void method="put"> 
             <string>serialization.format</string> 
-            <string>com.facebook.thrift.protocol.TBinaryProtocol</string> 
+            <string>org.apache.thrift.protocol.TBinaryProtocol</string> 
            </void> 
            <void method="put"> 
             <string>serialization.class</string> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml Tue Jul 14 00:01:05 2009
@@ -44,7 +44,7 @@
            </void> 
            <void method="put"> 
             <string>serialization.format</string> 
-            <string>com.facebook.thrift.protocol.TBinaryProtocol</string> 
+            <string>org.apache.thrift.protocol.TBinaryProtocol</string> 
            </void> 
            <void method="put"> 
             <string>serialization.class</string> 
@@ -68,7 +68,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
+            <string>file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src_thrift</string> 
            </void> 
           </object> 
          </void> 
@@ -96,7 +96,7 @@
                 <void property="conf"> 
                  <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                   <void property="dirName"> 
-                   <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1162019634/10001</string> 
+                   <string>file:/data/users/rmurthy/hive/build/ql/tmp/1776654577/10001</string> 
                   </void> 
                   <void property="tableInfo"> 
                    <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -494,7 +494,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
+       <string>file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src_thrift</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>src_thrift</string> 
@@ -506,7 +506,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
+       <string>file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src_thrift</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml Tue Jul 14 00:01:05 2009
@@ -44,7 +44,7 @@
            </void> 
            <void method="put"> 
             <string>serialization.format</string> 
-            <string>com.facebook.thrift.protocol.TBinaryProtocol</string> 
+            <string>org.apache.thrift.protocol.TBinaryProtocol</string> 
            </void> 
            <void method="put"> 
             <string>serialization.class</string> 
@@ -68,7 +68,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
+            <string>file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src_thrift</string> 
            </void> 
           </object> 
          </void> 
@@ -104,7 +104,7 @@
                         <void property="conf"> 
                          <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                           <void property="dirName"> 
-                           <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1852875945/10001</string> 
+                           <string>file:/data/users/rmurthy/hive/build/ql/tmp/1050479798/10001</string> 
                           </void> 
                           <void property="tableInfo"> 
                            <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -688,7 +688,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
+       <string>file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src_thrift</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>src_thrift</string> 
@@ -700,7 +700,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
+       <string>file:/data/users/rmurthy/hive/build/ql/test/data/warehouse/src_thrift</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml Tue Jul 14 00:01:05 2009
@@ -736,7 +736,7 @@
                 </void> 
                 <void method="put"> 
                  <string>serialization.format</string> 
-                 <string>com.facebook.thrift.protocol.TBinaryProtocol</string> 
+                 <string>org.apache.thrift.protocol.TBinaryProtocol</string> 
                 </void> 
                </object> 
               </void> 

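[Note: the join2.q.xml hunk above, like many hunks in this commit, swaps serialization.format from com.facebook.thrift.protocol.TBinaryProtocol to org.apache.thrift.protocol.TBinaryProtocol, reflecting Thrift's package rename after the project moved to Apache. A minimal Java sketch of what code against the renamed class looks like; TBinaryProtocol and TIOStreamTransport are real libthrift classes, but the in-memory transport here is just an illustrative choice, not anything this commit contains:

    import java.io.ByteArrayOutputStream;

    import org.apache.thrift.protocol.TBinaryProtocol; // was: com.facebook.thrift.protocol.TBinaryProtocol
    import org.apache.thrift.transport.TIOStreamTransport;

    public class ThriftRenameSketch {
      public static void main(String[] args) {
        // Only the package prefix changed; the TBinaryProtocol API is unchanged.
        TIOStreamTransport transport =
            new TIOStreamTransport(new ByteArrayOutputStream());
        TBinaryProtocol protocol = new TBinaryProtocol(transport);
        System.out.println(protocol.getClass().getName());
        // prints: org.apache.thrift.protocol.TBinaryProtocol
      }
    }
]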
Modified: hadoop/hive/trunk/serde/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/build.xml?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/build.xml (original)
+++ hadoop/hive/trunk/serde/build.xml Tue Jul 14 00:01:05 2009
@@ -59,12 +59,16 @@
   <target name="thriftif">
     <echo>Executing thrift (which needs to be in your path) to build java serde Constants... </echo>
     <exec executable="thrift"  failonerror="true" dir=".">
-      <arg line="--gen py -php --gen java -o ${src.dir} if/serde.thrift " />
+      <arg line="--gen py --gen php --gen java -o ${src.dir} if/serde.thrift " />
     </exec>
-    <echo>Executing thrift (which needs to be in your path) to build thrift test classes... </echo>
+    <echo>Executing thrift (which needs to be in your path) to build complex.thrift test classes... </echo>
     <exec executable="thrift"  failonerror="true" dir=".">
       <arg line="--gen java -o ${src.dir} if/test/complex.thrift " />
     </exec>
+    <echo>Executing thrift (which needs to be in your path) to build testthrift.thrift classes... </echo>
+    <exec executable="thrift"  failonerror="true" dir=".">
+      <arg line="--gen java -o ${src.dir} if/test/testthrift.thrift " />
+    </exec>
   </target>
 
   <target name="gen-testdata" depends="compile-test,test-jar">

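[Note: the thriftif target fix above replaces the old-style "-php" flag with the "--gen php" form, presumably because the Apache Thrift compiler expects the --gen syntax for every language, and adds a third invocation for the new testthrift.thrift test interface. The corrected serde invocation, exactly as it now appears in the target, is:

    thrift --gen py --gen php --gen java -o ${src.dir} if/serde.thrift
]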
Modified: hadoop/hive/trunk/serde/if/serde.thrift
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/if/serde.thrift?rev=793758&r1=793757&r2=793758&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/if/serde.thrift (original)
+++ hadoop/hive/trunk/serde/if/serde.thrift Tue Jul 14 00:01:05 2009
@@ -39,6 +39,9 @@
 const string MAP_TYPE_NAME  = "map";
 const string STRUCT_TYPE_NAME  = "struct";
 
+const string LIST_COLUMNS = "columns";
+const string LIST_COLUMN_TYPES = "columns.types";
+
 const set<string> PrimitiveTypes  = [ VOID_TYPE_NAME BOOLEAN_TYPE_NAME TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME  DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME ],
 const set<string> CollectionTypes = [ LIST_TYPE_NAME MAP_TYPE_NAME ],
 

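[Note: the two new serde.thrift constants name the table properties under which Hive records a table's column names and column types. A hypothetical Java sketch of how a SerDe might consume them; the standalone class and the comma-splitting convention are assumptions for illustration, not the actual Hive code path:

    import java.util.Properties;

    public class ColumnListSketch {
      // Values mirror the new Thrift constants LIST_COLUMNS and LIST_COLUMN_TYPES.
      static final String LIST_COLUMNS = "columns";
      static final String LIST_COLUMN_TYPES = "columns.types";

      public static void main(String[] args) {
        Properties tbl = new Properties();
        tbl.setProperty(LIST_COLUMNS, "key,value");
        tbl.setProperty(LIST_COLUMN_TYPES, "string,string");
        // Recover the schema by splitting the comma-separated property values.
        String[] names = tbl.getProperty(LIST_COLUMNS).split(",");
        String[] types = tbl.getProperty(LIST_COLUMN_TYPES).split(",");
        for (int i = 0; i < names.length; i++) {
          System.out.println(names[i] + " : " + types[i]);
        }
      }
    }
]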

