incubator-hcatalog-commits mailing list archives

From tra...@apache.org
Subject svn commit: r1383152 [21/27] - in /incubator/hcatalog/trunk: ./ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/ hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/ ...
Date Mon, 10 Sep 2012 23:29:03 GMT
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevisionList.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevisionList.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevisionList.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevisionList.java Mon Sep 10 23:28:55 2012
@@ -36,337 +36,334 @@ import java.util.List;
 import java.util.Map;
 
 public class StoreFamilyRevisionList implements org.apache.thrift.TBase<StoreFamilyRevisionList, StoreFamilyRevisionList._Fields>, java.io.Serializable, Cloneable {
-  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StoreFamilyRevisionList");
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StoreFamilyRevisionList");
 
-  private static final org.apache.thrift.protocol.TField REVISION_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("revisionList", org.apache.thrift.protocol.TType.LIST, (short)1);
+    private static final org.apache.thrift.protocol.TField REVISION_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("revisionList", org.apache.thrift.protocol.TType.LIST, (short) 1);
 
-  public List<StoreFamilyRevision> revisionList; // required
+    public List<StoreFamilyRevision> revisionList; // required
 
-  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-    REVISION_LIST((short)1, "revisionList");
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+        REVISION_LIST((short) 1, "revisionList");
 
-    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+        private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+        static {
+            for (_Fields field : EnumSet.allOf(_Fields.class)) {
+                byName.put(field.getFieldName(), field);
+            }
+        }
+
+        /**
+         * Find the _Fields constant that matches fieldId, or null if its not found.
+         */
+        public static _Fields findByThriftId(int fieldId) {
+            switch (fieldId) {
+            case 1: // REVISION_LIST
+                return REVISION_LIST;
+            default:
+                return null;
+            }
+        }
+
+        /**
+         * Find the _Fields constant that matches fieldId, throwing an exception
+         * if it is not found.
+         */
+        public static _Fields findByThriftIdOrThrow(int fieldId) {
+            _Fields fields = findByThriftId(fieldId);
+            if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+            return fields;
+        }
+
+        /**
+         * Find the _Fields constant that matches name, or null if its not found.
+         */
+        public static _Fields findByName(String name) {
+            return byName.get(name);
+        }
+
+        private final short _thriftId;
+        private final String _fieldName;
+
+        _Fields(short thriftId, String fieldName) {
+            _thriftId = thriftId;
+            _fieldName = fieldName;
+        }
+
+        public short getThriftFieldId() {
+            return _thriftId;
+        }
+
+        public String getFieldName() {
+            return _fieldName;
+        }
+    }
+
+    // isset id assignments
+
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
 
     static {
-      for (_Fields field : EnumSet.allOf(_Fields.class)) {
-        byName.put(field.getFieldName(), field);
-      }
+        Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+        tmpMap.put(_Fields.REVISION_LIST, new org.apache.thrift.meta_data.FieldMetaData("revisionList", org.apache.thrift.TFieldRequirementType.DEFAULT,
+            new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+                new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, StoreFamilyRevision.class))));
+        metaDataMap = Collections.unmodifiableMap(tmpMap);
+        org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(StoreFamilyRevisionList.class, metaDataMap);
     }
 
-    /**
-     * Find the _Fields constant that matches fieldId, or null if its not found.
-     */
-    public static _Fields findByThriftId(int fieldId) {
-      switch(fieldId) {
-        case 1: // REVISION_LIST
-          return REVISION_LIST;
-        default:
-          return null;
-      }
+    public StoreFamilyRevisionList() {
     }
 
-    /**
-     * Find the _Fields constant that matches fieldId, throwing an exception
-     * if it is not found.
-     */
-    public static _Fields findByThriftIdOrThrow(int fieldId) {
-      _Fields fields = findByThriftId(fieldId);
-      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-      return fields;
+    public StoreFamilyRevisionList(
+        List<StoreFamilyRevision> revisionList) {
+        this();
+        this.revisionList = revisionList;
     }
 
     /**
-     * Find the _Fields constant that matches name, or null if its not found.
+     * Performs a deep copy on <i>other</i>.
      */
-    public static _Fields findByName(String name) {
-      return byName.get(name);
+    public StoreFamilyRevisionList(StoreFamilyRevisionList other) {
+        if (other.isSetRevisionList()) {
+            List<StoreFamilyRevision> __this__revisionList = new ArrayList<StoreFamilyRevision>();
+            for (StoreFamilyRevision other_element : other.revisionList) {
+                __this__revisionList.add(new StoreFamilyRevision(other_element));
+            }
+            this.revisionList = __this__revisionList;
+        }
+    }
+
+    public StoreFamilyRevisionList deepCopy() {
+        return new StoreFamilyRevisionList(this);
     }
 
-    private final short _thriftId;
-    private final String _fieldName;
+    @Override
+    public void clear() {
+        this.revisionList = null;
+    }
 
-    _Fields(short thriftId, String fieldName) {
-      _thriftId = thriftId;
-      _fieldName = fieldName;
-    }
-
-    public short getThriftFieldId() {
-      return _thriftId;
-    }
-
-    public String getFieldName() {
-      return _fieldName;
-    }
-  }
-
-  // isset id assignments
-
-  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-  static {
-    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.REVISION_LIST, new org.apache.thrift.meta_data.FieldMetaData("revisionList", org.apache.thrift.TFieldRequirementType.DEFAULT,
-        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
-            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, StoreFamilyRevision.class))));
-    metaDataMap = Collections.unmodifiableMap(tmpMap);
-    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(StoreFamilyRevisionList.class, metaDataMap);
-  }
-
-  public StoreFamilyRevisionList() {
-  }
-
-  public StoreFamilyRevisionList(
-    List<StoreFamilyRevision> revisionList)
-  {
-    this();
-    this.revisionList = revisionList;
-  }
-
-  /**
-   * Performs a deep copy on <i>other</i>.
-   */
-  public StoreFamilyRevisionList(StoreFamilyRevisionList other) {
-    if (other.isSetRevisionList()) {
-      List<StoreFamilyRevision> __this__revisionList = new ArrayList<StoreFamilyRevision>();
-      for (StoreFamilyRevision other_element : other.revisionList) {
-        __this__revisionList.add(new StoreFamilyRevision(other_element));
-      }
-      this.revisionList = __this__revisionList;
-    }
-  }
-
-  public StoreFamilyRevisionList deepCopy() {
-    return new StoreFamilyRevisionList(this);
-  }
-
-  @Override
-  public void clear() {
-    this.revisionList = null;
-  }
-
-  public int getRevisionListSize() {
-    return (this.revisionList == null) ? 0 : this.revisionList.size();
-  }
-
-  public java.util.Iterator<StoreFamilyRevision> getRevisionListIterator() {
-    return (this.revisionList == null) ? null : this.revisionList.iterator();
-  }
-
-  public void addToRevisionList(StoreFamilyRevision elem) {
-    if (this.revisionList == null) {
-      this.revisionList = new ArrayList<StoreFamilyRevision>();
-    }
-    this.revisionList.add(elem);
-  }
-
-  public List<StoreFamilyRevision> getRevisionList() {
-    return this.revisionList;
-  }
-
-  public StoreFamilyRevisionList setRevisionList(List<StoreFamilyRevision> revisionList) {
-    this.revisionList = revisionList;
-    return this;
-  }
-
-  public void unsetRevisionList() {
-    this.revisionList = null;
-  }
-
-  /** Returns true if field revisionList is set (has been assigned a value) and false otherwise */
-  public boolean isSetRevisionList() {
-    return this.revisionList != null;
-  }
-
-  public void setRevisionListIsSet(boolean value) {
-    if (!value) {
-      this.revisionList = null;
-    }
-  }
-
-  public void setFieldValue(_Fields field, Object value) {
-    switch (field) {
-    case REVISION_LIST:
-      if (value == null) {
-        unsetRevisionList();
-      } else {
-        setRevisionList((List<StoreFamilyRevision>)value);
-      }
-      break;
-
-    }
-  }
-
-  public Object getFieldValue(_Fields field) {
-    switch (field) {
-    case REVISION_LIST:
-      return getRevisionList();
-
-    }
-    throw new IllegalStateException();
-  }
-
-  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-  public boolean isSet(_Fields field) {
-    if (field == null) {
-      throw new IllegalArgumentException();
-    }
-
-    switch (field) {
-    case REVISION_LIST:
-      return isSetRevisionList();
-    }
-    throw new IllegalStateException();
-  }
-
-  @Override
-  public boolean equals(Object that) {
-    if (that == null)
-      return false;
-    if (that instanceof StoreFamilyRevisionList)
-      return this.equals((StoreFamilyRevisionList)that);
-    return false;
-  }
-
-  public boolean equals(StoreFamilyRevisionList that) {
-    if (that == null)
-      return false;
-
-    boolean this_present_revisionList = true && this.isSetRevisionList();
-    boolean that_present_revisionList = true && that.isSetRevisionList();
-    if (this_present_revisionList || that_present_revisionList) {
-      if (!(this_present_revisionList && that_present_revisionList))
-        return false;
-      if (!this.revisionList.equals(that.revisionList))
+    public int getRevisionListSize() {
+        return (this.revisionList == null) ? 0 : this.revisionList.size();
+    }
+
+    public java.util.Iterator<StoreFamilyRevision> getRevisionListIterator() {
+        return (this.revisionList == null) ? null : this.revisionList.iterator();
+    }
+
+    public void addToRevisionList(StoreFamilyRevision elem) {
+        if (this.revisionList == null) {
+            this.revisionList = new ArrayList<StoreFamilyRevision>();
+        }
+        this.revisionList.add(elem);
+    }
+
+    public List<StoreFamilyRevision> getRevisionList() {
+        return this.revisionList;
+    }
+
+    public StoreFamilyRevisionList setRevisionList(List<StoreFamilyRevision> revisionList) {
+        this.revisionList = revisionList;
+        return this;
+    }
+
+    public void unsetRevisionList() {
+        this.revisionList = null;
+    }
+
+    /** Returns true if field revisionList is set (has been assigned a value) and false otherwise */
+    public boolean isSetRevisionList() {
+        return this.revisionList != null;
+    }
+
+    public void setRevisionListIsSet(boolean value) {
+        if (!value) {
+            this.revisionList = null;
+        }
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+        switch (field) {
+        case REVISION_LIST:
+            if (value == null) {
+                unsetRevisionList();
+            } else {
+                setRevisionList((List<StoreFamilyRevision>) value);
+            }
+            break;
+
+        }
+    }
+
+    public Object getFieldValue(_Fields field) {
+        switch (field) {
+        case REVISION_LIST:
+            return getRevisionList();
+
+        }
+        throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+        if (field == null) {
+            throw new IllegalArgumentException();
+        }
+
+        switch (field) {
+        case REVISION_LIST:
+            return isSetRevisionList();
+        }
+        throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+        if (that == null)
+            return false;
+        if (that instanceof StoreFamilyRevisionList)
+            return this.equals((StoreFamilyRevisionList) that);
         return false;
     }
 
-    return true;
-  }
+    public boolean equals(StoreFamilyRevisionList that) {
+        if (that == null)
+            return false;
+
+        boolean this_present_revisionList = true && this.isSetRevisionList();
+        boolean that_present_revisionList = true && that.isSetRevisionList();
+        if (this_present_revisionList || that_present_revisionList) {
+            if (!(this_present_revisionList && that_present_revisionList))
+                return false;
+            if (!this.revisionList.equals(that.revisionList))
+                return false;
+        }
+
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        return 0;
+    }
+
+    public int compareTo(StoreFamilyRevisionList other) {
+        if (!getClass().equals(other.getClass())) {
+            return getClass().getName().compareTo(other.getClass().getName());
+        }
+
+        int lastComparison = 0;
+        StoreFamilyRevisionList typedOther = (StoreFamilyRevisionList) other;
+
+        lastComparison = Boolean.valueOf(isSetRevisionList()).compareTo(typedOther.isSetRevisionList());
+        if (lastComparison != 0) {
+            return lastComparison;
+        }
+        if (isSetRevisionList()) {
+            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.revisionList, typedOther.revisionList);
+            if (lastComparison != 0) {
+                return lastComparison;
+            }
+        }
+        return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+        return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField field;
+        iprot.readStructBegin();
+        while (true) {
+            field = iprot.readFieldBegin();
+            if (field.type == org.apache.thrift.protocol.TType.STOP) {
+                break;
+            }
+            switch (field.id) {
+            case 1: // REVISION_LIST
+                if (field.type == org.apache.thrift.protocol.TType.LIST) {
+                    {
+                        org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
+                        this.revisionList = new ArrayList<StoreFamilyRevision>(_list0.size);
+                        for (int _i1 = 0; _i1 < _list0.size; ++_i1) {
+                            StoreFamilyRevision _elem2; // required
+                            _elem2 = new StoreFamilyRevision();
+                            _elem2.read(iprot);
+                            this.revisionList.add(_elem2);
+                        }
+                        iprot.readListEnd();
+                    }
+                } else {
+                    org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+                }
+                break;
+            default:
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+            }
+            iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+
+        // check for required fields of primitive type, which can't be checked in the validate method
+        validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+        validate();
 
-  @Override
-  public int hashCode() {
-    return 0;
-  }
-
-  public int compareTo(StoreFamilyRevisionList other) {
-    if (!getClass().equals(other.getClass())) {
-      return getClass().getName().compareTo(other.getClass().getName());
-    }
-
-    int lastComparison = 0;
-    StoreFamilyRevisionList typedOther = (StoreFamilyRevisionList)other;
-
-    lastComparison = Boolean.valueOf(isSetRevisionList()).compareTo(typedOther.isSetRevisionList());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetRevisionList()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.revisionList, typedOther.revisionList);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    return 0;
-  }
-
-  public _Fields fieldForId(int fieldId) {
-    return _Fields.findByThriftId(fieldId);
-  }
-
-  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-    org.apache.thrift.protocol.TField field;
-    iprot.readStructBegin();
-    while (true)
-    {
-      field = iprot.readFieldBegin();
-      if (field.type == org.apache.thrift.protocol.TType.STOP) {
-        break;
-      }
-      switch (field.id) {
-        case 1: // REVISION_LIST
-          if (field.type == org.apache.thrift.protocol.TType.LIST) {
+        oprot.writeStructBegin(STRUCT_DESC);
+        if (this.revisionList != null) {
+            oprot.writeFieldBegin(REVISION_LIST_FIELD_DESC);
             {
-              org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
-              this.revisionList = new ArrayList<StoreFamilyRevision>(_list0.size);
-              for (int _i1 = 0; _i1 < _list0.size; ++_i1)
-              {
-                StoreFamilyRevision _elem2; // required
-                _elem2 = new StoreFamilyRevision();
-                _elem2.read(iprot);
-                this.revisionList.add(_elem2);
-              }
-              iprot.readListEnd();
+                oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, this.revisionList.size()));
+                for (StoreFamilyRevision _iter3 : this.revisionList) {
+                    _iter3.write(oprot);
+                }
+                oprot.writeListEnd();
             }
-          } else {
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-          }
-          break;
-        default:
-          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-      }
-      iprot.readFieldEnd();
-    }
-    iprot.readStructEnd();
-
-    // check for required fields of primitive type, which can't be checked in the validate method
-    validate();
-  }
-
-  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-    validate();
-
-    oprot.writeStructBegin(STRUCT_DESC);
-    if (this.revisionList != null) {
-      oprot.writeFieldBegin(REVISION_LIST_FIELD_DESC);
-      {
-        oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, this.revisionList.size()));
-        for (StoreFamilyRevision _iter3 : this.revisionList)
-        {
-          _iter3.write(oprot);
-        }
-        oprot.writeListEnd();
-      }
-      oprot.writeFieldEnd();
-    }
-    oprot.writeFieldStop();
-    oprot.writeStructEnd();
-  }
-
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder("StoreFamilyRevisionList(");
-    boolean first = true;
-
-    sb.append("revisionList:");
-    if (this.revisionList == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.revisionList);
-    }
-    first = false;
-    sb.append(")");
-    return sb.toString();
-  }
-
-  public void validate() throws org.apache.thrift.TException {
-    // check for required fields
-  }
-
-  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-    try {
-      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
-    try {
-      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
+            oprot.writeFieldEnd();
+        }
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder sb = new StringBuilder("StoreFamilyRevisionList(");
+        boolean first = true;
+
+        sb.append("revisionList:");
+        if (this.revisionList == null) {
+            sb.append("null");
+        } else {
+            sb.append(this.revisionList);
+        }
+        first = false;
+        sb.append(")");
+        return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+        // check for required fields
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+        try {
+            write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+        } catch (org.apache.thrift.TException te) {
+            throw new java.io.IOException(te);
+        }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+        try {
+            read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+        } catch (org.apache.thrift.TException te) {
+            throw new java.io.IOException(te);
+        }
     }
-  }
 
 }
 

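A minimal sketch, not part of this commit, of round-tripping the reformatted struct above with libthrift's TSerializer/TDeserializer, using the same TCompactProtocol that its writeObject/readObject methods use. The two-argument StoreFamilyRevision(revision, timestamp) constructor is an assumption; it is not shown in this hunk.

    // Sketch only; assumes it lives in (or imports from) the same package as
    // the generated org.apache.hcatalog.hbase.snapshot.transaction.thrift classes.
    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.TException;
    import org.apache.thrift.TSerializer;
    import org.apache.thrift.protocol.TCompactProtocol;

    public class RevisionListRoundTrip {
        public static void main(String[] args) throws TException {
            StoreFamilyRevisionList list = new StoreFamilyRevisionList();
            // Assumed constructor: (revision, timestamp) -- not shown in this hunk.
            list.addToRevisionList(new StoreFamilyRevision(1L, System.currentTimeMillis()));

            // Serialize with the compact protocol, as writeObject() does above.
            byte[] bytes = new TSerializer(new TCompactProtocol.Factory()).serialize(list);

            // Deserialize into a fresh instance and check the payload survived.
            StoreFamilyRevisionList copy = new StoreFamilyRevisionList();
            new TDeserializer(new TCompactProtocol.Factory()).deserialize(copy, bytes);
            System.out.println(copy.getRevisionListSize()); // prints 1
        }
    }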
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java Mon Sep 10 23:28:55 2012
@@ -35,109 +35,109 @@ import org.apache.hadoop.hive.ql.securit
  * authorization functionality for HBase tables.
  */
 class HBaseAuthorizationProvider implements HiveAuthorizationProvider {
-    
+
     @Override
     public Configuration getConf() {
         return null;
     }
-    
+
     @Override
     public void setConf(Configuration conf) {
     }
-    
+
     /*
-     * (non-Javadoc)
-     * 
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #init(org.apache.hadoop.conf.Configuration)
-     */
+    * (non-Javadoc)
+    *
+    * @see
+    * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+    * #init(org.apache.hadoop.conf.Configuration)
+    */
     @Override
     public void init(Configuration conf) throws HiveException {
     }
-    
+
     @Override
     public HiveAuthenticationProvider getAuthenticator() {
         return null;
     }
-    
+
     @Override
     public void setAuthenticator(HiveAuthenticationProvider authenticator) {
     }
-    
+
     /*
-     * (non-Javadoc)
-     * 
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #authorize(org.apache.hadoop.hive.ql.security.authorization.Privilege[],
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
-     */
+    * (non-Javadoc)
+    *
+    * @see
+    * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+    * #authorize(org.apache.hadoop.hive.ql.security.authorization.Privilege[],
+    * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+    */
     @Override
     public void authorize(Privilege[] readRequiredPriv,
-            Privilege[] writeRequiredPriv) throws HiveException,
-            AuthorizationException {
+                          Privilege[] writeRequiredPriv) throws HiveException,
+        AuthorizationException {
     }
-    
+
     /*
-     * (non-Javadoc)
-     * 
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #authorize(org.apache.hadoop.hive.metastore.api.Database,
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
-     */
+    * (non-Javadoc)
+    *
+    * @see
+    * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+    * #authorize(org.apache.hadoop.hive.metastore.api.Database,
+    * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
+    * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+    */
     @Override
     public void authorize(Database db, Privilege[] readRequiredPriv,
-            Privilege[] writeRequiredPriv) throws HiveException,
-            AuthorizationException {
+                          Privilege[] writeRequiredPriv) throws HiveException,
+        AuthorizationException {
     }
-    
+
     /*
-     * (non-Javadoc)
-     * 
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #authorize(org.apache.hadoop.hive.ql.metadata.Table,
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
-     */
+    * (non-Javadoc)
+    *
+    * @see
+    * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+    * #authorize(org.apache.hadoop.hive.ql.metadata.Table,
+    * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
+    * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+    */
     @Override
     public void authorize(Table table, Privilege[] readRequiredPriv,
-            Privilege[] writeRequiredPriv) throws HiveException,
-            AuthorizationException {
+                          Privilege[] writeRequiredPriv) throws HiveException,
+        AuthorizationException {
     }
-    
+
     /*
-     * (non-Javadoc)
-     * 
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #authorize(org.apache.hadoop.hive.ql.metadata.Partition,
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
-     */
+    * (non-Javadoc)
+    *
+    * @see
+    * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+    * #authorize(org.apache.hadoop.hive.ql.metadata.Partition,
+    * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
+    * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+    */
     @Override
     public void authorize(Partition part, Privilege[] readRequiredPriv,
-            Privilege[] writeRequiredPriv) throws HiveException,
-            AuthorizationException {
+                          Privilege[] writeRequiredPriv) throws HiveException,
+        AuthorizationException {
     }
-    
+
     /*
-     * (non-Javadoc)
-     * 
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #authorize(org.apache.hadoop.hive.ql.metadata.Table,
-     * org.apache.hadoop.hive.ql.metadata.Partition, java.util.List,
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
-     */
+    * (non-Javadoc)
+    *
+    * @see
+    * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
+    * #authorize(org.apache.hadoop.hive.ql.metadata.Table,
+    * org.apache.hadoop.hive.ql.metadata.Partition, java.util.List,
+    * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
+    * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+    */
     @Override
     public void authorize(Table table, Partition part, List<String> columns,
-            Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
-            throws HiveException, AuthorizationException {
+                          Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+        throws HiveException, AuthorizationException {
     }
-    
+
 }

Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBaseOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBaseOutputFormat.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBaseOutputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBaseOutputFormat.java Mon Sep 10 23:28:55 2012
@@ -36,14 +36,14 @@ import org.apache.hcatalog.common.HCatUt
 import org.apache.hcatalog.mapreduce.OutputJobInfo;
 
 public class HBaseBaseOutputFormat implements OutputFormat<WritableComparable<?>, Put>,
-        HiveOutputFormat<WritableComparable<?>, Put> {
+    HiveOutputFormat<WritableComparable<?>, Put> {
 
     @Override
     public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getHiveRecordWriter(
-            JobConf jc, Path finalOutPath,
-            Class<? extends Writable> valueClass, boolean isCompressed,
-            Properties tableProperties, Progressable progress)
-            throws IOException {
+        JobConf jc, Path finalOutPath,
+        Class<? extends Writable> valueClass, boolean isCompressed,
+        Properties tableProperties, Progressable progress)
+        throws IOException {
         throw new UnsupportedOperationException("Not implemented");
     }
 
@@ -55,13 +55,13 @@ public class HBaseBaseOutputFormat imple
 
     @Override
     public RecordWriter<WritableComparable<?>, Put> getRecordWriter(FileSystem ignored,
-            JobConf job, String name, Progressable progress) throws IOException {
+                                                                    JobConf job, String name, Progressable progress) throws IOException {
         OutputFormat<WritableComparable<?>, Put> outputFormat = getOutputFormat(job);
         return outputFormat.getRecordWriter(ignored, job, name, progress);
     }
 
     private OutputFormat<WritableComparable<?>, Put> getOutputFormat(JobConf job)
-            throws IOException {
+        throws IOException {
         String outputInfo = job.get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
         OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(outputInfo);
         OutputFormat<WritableComparable<?>, Put> outputFormat = null;

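The hunk above ends just after outputFormat is initialized to null. A plausible continuation, offered as an assumption rather than the committed code, is that the deserialized job info's bulk flag selects between the two concrete formats that appear later in this commit:

    // Hypothetical tail of getOutputFormat(JobConf); the actual committed
    // body is not shown in this hunk.
    if (HBaseHCatStorageHandler.isBulkMode(outputJobInfo)) {
        outputFormat = new HBaseBulkOutputFormat();
    } else {
        outputFormat = new HBaseDirectOutputFormat();
    }
    return outputFormat;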
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java Mon Sep 10 23:28:55 2012
@@ -51,7 +51,7 @@ import org.apache.hcatalog.hbase.snapsho
 class HBaseBulkOutputFormat extends HBaseBaseOutputFormat {
 
     private final static ImmutableBytesWritable EMPTY_LIST = new ImmutableBytesWritable(
-            new byte[0]);
+        new byte[0]);
     private SequenceFileOutputFormat<WritableComparable<?>, Put> baseOutputFormat;
 
     public HBaseBulkOutputFormat() {
@@ -60,7 +60,7 @@ class HBaseBulkOutputFormat extends HBas
 
     @Override
     public void checkOutputSpecs(FileSystem ignored, JobConf job)
-            throws IOException {
+        throws IOException {
         baseOutputFormat.checkOutputSpecs(ignored, job);
         HBaseUtil.addHBaseDelegationToken(job);
         addJTDelegationToken(job);
@@ -68,13 +68,13 @@ class HBaseBulkOutputFormat extends HBas
 
     @Override
     public RecordWriter<WritableComparable<?>, Put> getRecordWriter(
-            FileSystem ignored, JobConf job, String name, Progressable progress)
-            throws IOException {
+        FileSystem ignored, JobConf job, String name, Progressable progress)
+        throws IOException {
         job.setOutputKeyClass(ImmutableBytesWritable.class);
         job.setOutputValueClass(Put.class);
         long version = HBaseRevisionManagerUtil.getOutputRevision(job);
         return new HBaseBulkRecordWriter(baseOutputFormat.getRecordWriter(
-                ignored, job, name, progress), version);
+            ignored, job, name, progress), version);
     }
 
     private void addJTDelegationToken(JobConf job) throws IOException {
@@ -84,7 +84,7 @@ class HBaseBulkOutputFormat extends HBas
             JobClient jobClient = new JobClient(new JobConf(job));
             try {
                 job.getCredentials().addToken(new Text("my mr token"),
-                        jobClient.getDelegationToken(null));
+                    jobClient.getDelegationToken(null));
             } catch (InterruptedException e) {
                 throw new IOException("Error while getting JT delegation token", e);
             }
@@ -92,21 +92,21 @@ class HBaseBulkOutputFormat extends HBas
     }
 
     private static class HBaseBulkRecordWriter implements
-            RecordWriter<WritableComparable<?>, Put> {
+        RecordWriter<WritableComparable<?>, Put> {
 
         private RecordWriter<WritableComparable<?>, Put> baseWriter;
         private final Long outputVersion;
 
         public HBaseBulkRecordWriter(
-                RecordWriter<WritableComparable<?>, Put> baseWriter,
-                Long outputVersion) {
+            RecordWriter<WritableComparable<?>, Put> baseWriter,
+            Long outputVersion) {
             this.baseWriter = baseWriter;
             this.outputVersion = outputVersion;
         }
 
         @Override
         public void write(WritableComparable<?> key, Put value)
-                throws IOException {
+            throws IOException {
             Put put = value;
             if (outputVersion != null) {
                 put = new Put(value.getRow(), outputVersion.longValue());
@@ -136,19 +136,19 @@ class HBaseBulkOutputFormat extends HBas
 
         @Override
         public void abortTask(TaskAttemptContext taskContext)
-                throws IOException {
+            throws IOException {
             baseOutputCommitter.abortTask(taskContext);
         }
 
         @Override
         public void commitTask(TaskAttemptContext taskContext)
-                throws IOException {
+            throws IOException {
             baseOutputCommitter.commitTask(taskContext);
         }
 
         @Override
         public boolean needsTaskCommit(TaskAttemptContext taskContext)
-                throws IOException {
+            throws IOException {
             return baseOutputCommitter.needsTaskCommit(taskContext);
         }
 
@@ -159,20 +159,20 @@ class HBaseBulkOutputFormat extends HBas
 
         @Override
         public void setupTask(TaskAttemptContext taskContext)
-                throws IOException {
+            throws IOException {
             baseOutputCommitter.setupTask(taskContext);
         }
 
         @Override
         public void abortJob(JobContext jobContext, int status)
-                throws IOException {
+            throws IOException {
             baseOutputCommitter.abortJob(jobContext, status);
             RevisionManager rm = null;
             try {
                 rm = HBaseRevisionManagerUtil
-                        .getOpenedRevisionManager(jobContext.getConfiguration());
+                    .getOpenedRevisionManager(jobContext.getConfiguration());
                 rm.abortWriteTransaction(HBaseRevisionManagerUtil
-                        .getWriteTransaction(jobContext.getConfiguration()));
+                    .getWriteTransaction(jobContext.getConfiguration()));
             } finally {
                 cleanIntermediate(jobContext);
                 if (rm != null)
@@ -189,18 +189,18 @@ class HBaseBulkOutputFormat extends HBas
                 Path srcPath = FileOutputFormat.getOutputPath(jobContext.getJobConf());
                 if (!FileSystem.get(conf).exists(srcPath)) {
                     throw new IOException("Failed to bulk import hfiles. " +
-                    		"Intermediate data directory is cleaned up or missing. " +
-                    		"Please look at the bulk import job if it exists for failure reason");
+                        "Intermediate data directory is cleaned up or missing. " +
+                        "Please look at the bulk import job if it exists for failure reason");
                 }
                 Path destPath = new Path(srcPath.getParent(), srcPath.getName() + "_hfiles");
                 boolean success = ImportSequenceFile.runJob(jobContext,
-                                conf.get(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY),
-                                srcPath,
-                                destPath);
-                if(!success) {
+                    conf.get(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY),
+                    srcPath,
+                    destPath);
+                if (!success) {
                     cleanIntermediate(jobContext);
                     throw new IOException("Failed to bulk import hfiles." +
-                    		" Please look at the bulk import job for failure reason");
+                        " Please look at the bulk import job for failure reason");
                 }
                 rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(conf);
                 rm.commitWriteTransaction(HBaseRevisionManagerUtil.getWriteTransaction(conf));
@@ -212,7 +212,7 @@ class HBaseBulkOutputFormat extends HBas
         }
 
         private void cleanIntermediate(JobContext jobContext)
-                throws IOException {
+            throws IOException {
             FileSystem fs = FileSystem.get(jobContext.getConfiguration());
             fs.delete(FileOutputFormat.getOutputPath(jobContext.getJobConf()), true);
         }

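HBaseBulkRecordWriter.write() above stamps each Put with the transaction's revision by rebuilding it via new Put(value.getRow(), outputVersion.longValue()). The hunk cuts off before the cell-copying loop, so the following is a reconstruction under the HBase 0.90-era API, not the committed code:

    // Reconstruction (assumed, not shown in the hunk): copy every KeyValue
    // from the incoming Put into the stamped one so all cells carry the
    // revision number as their timestamp.
    Put put = new Put(value.getRow(), outputVersion.longValue());
    for (List<KeyValue> kvs : value.getFamilyMap().values()) {
        for (KeyValue kv : kvs) {
            put.add(kv.getFamily(), kv.getQualifier(),
                outputVersion.longValue(), kv.getValue());
        }
    }
    baseWriter.write(key, put);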
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseConstants.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseConstants.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseConstants.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseConstants.java Mon Sep 10 23:28:55 2012
@@ -26,13 +26,13 @@ import org.apache.hcatalog.common.HCatCo
 class HBaseConstants {
 
     /** key used to store write transaction object */
-    public static final String PROPERTY_WRITE_TXN_KEY = HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX+".hbase.mapreduce.writeTxn";
+    public static final String PROPERTY_WRITE_TXN_KEY = HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".hbase.mapreduce.writeTxn";
 
     /** key used to define the name of the table to write to */
-    public static final String PROPERTY_OUTPUT_TABLE_NAME_KEY = HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX+".hbase.mapreduce.outputTableName";
+    public static final String PROPERTY_OUTPUT_TABLE_NAME_KEY = HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".hbase.mapreduce.outputTableName";
 
     /** key used to define whether bulk storage output format will be used or not  */
-    public static final String PROPERTY_BULK_OUTPUT_MODE_KEY = HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX+".hbase.output.bulkMode";
+    public static final String PROPERTY_BULK_OUTPUT_MODE_KEY = HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".hbase.output.bulkMode";
 
     /** key used to define the hbase table snapshot. */
     public static final String PROPERTY_TABLE_SNAPSHOT_KEY = HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + "hbase.table.snapshot";

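All three keys touched by this hunk are namespaced by concatenating HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX with a dot-prefixed suffix. Assuming that prefix resolves to "hcat" (its value is not shown in this hunk), the keys become:

    // Resolved key names, assuming HCAT_DEFAULT_TOPIC_PREFIX == "hcat":
    //   PROPERTY_WRITE_TXN_KEY          -> "hcat.hbase.mapreduce.writeTxn"
    //   PROPERTY_OUTPUT_TABLE_NAME_KEY  -> "hcat.hbase.mapreduce.outputTableName"
    //   PROPERTY_BULK_OUTPUT_MODE_KEY   -> "hcat.hbase.output.bulkMode"
    // PROPERTY_TABLE_SNAPSHOT_KEY, left untouched by this hunk, appends
    // "hbase.table.snapshot" without a leading dot, so it would resolve to
    // "hcathbase.table.snapshot".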
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java Mon Sep 10 23:28:55 2012
@@ -53,36 +53,36 @@ class HBaseDirectOutputFormat extends HB
 
     @Override
     public RecordWriter<WritableComparable<?>, Put> getRecordWriter(FileSystem ignored,
-            JobConf job, String name, Progressable progress)
-            throws IOException {
+                                                                    JobConf job, String name, Progressable progress)
+        throws IOException {
         long version = HBaseRevisionManagerUtil.getOutputRevision(job);
         return new HBaseDirectRecordWriter(outputFormat.getRecordWriter(ignored, job, name,
-                progress), version);
+            progress), version);
     }
 
     @Override
     public void checkOutputSpecs(FileSystem ignored, JobConf job)
-            throws IOException {
+        throws IOException {
         outputFormat.checkOutputSpecs(ignored, job);
         HBaseUtil.addHBaseDelegationToken(job);
     }
 
     private static class HBaseDirectRecordWriter implements
-            RecordWriter<WritableComparable<?>, Put> {
+        RecordWriter<WritableComparable<?>, Put> {
 
         private RecordWriter<WritableComparable<?>, Put> baseWriter;
         private final Long outputVersion;
 
         public HBaseDirectRecordWriter(
-                RecordWriter<WritableComparable<?>, Put> baseWriter,
-                Long outputVersion) {
+            RecordWriter<WritableComparable<?>, Put> baseWriter,
+            Long outputVersion) {
             this.baseWriter = baseWriter;
             this.outputVersion = outputVersion;
         }
 
         @Override
         public void write(WritableComparable<?> key, Put value)
-                throws IOException {
+            throws IOException {
             Put put = value;
             if (outputVersion != null) {
                 put = new Put(value.getRow(), outputVersion.longValue());
@@ -109,17 +109,17 @@ class HBaseDirectOutputFormat extends HB
 
         @Override
         public void abortTask(TaskAttemptContext taskContext)
-                throws IOException {
+            throws IOException {
         }
 
         @Override
         public void commitTask(TaskAttemptContext taskContext)
-                throws IOException {
+            throws IOException {
         }
 
         @Override
         public boolean needsTaskCommit(TaskAttemptContext taskContext)
-                throws IOException {
+            throws IOException {
             return false;
         }
 
@@ -129,19 +129,19 @@ class HBaseDirectOutputFormat extends HB
 
         @Override
         public void setupTask(TaskAttemptContext taskContext)
-                throws IOException {
+            throws IOException {
         }
 
         @Override
         public void abortJob(JobContext jobContext, int status)
-                throws IOException {
+            throws IOException {
             super.abortJob(jobContext, status);
             RevisionManager rm = null;
             try {
                 rm = HBaseRevisionManagerUtil
-                        .getOpenedRevisionManager(jobContext.getConfiguration());
+                    .getOpenedRevisionManager(jobContext.getConfiguration());
                 Transaction writeTransaction = HBaseRevisionManagerUtil
-                        .getWriteTransaction(jobContext.getConfiguration());
+                    .getWriteTransaction(jobContext.getConfiguration());
                 rm.abortWriteTransaction(writeTransaction);
             } finally {
                 if (rm != null)
@@ -154,9 +154,9 @@ class HBaseDirectOutputFormat extends HB
             RevisionManager rm = null;
             try {
                 rm = HBaseRevisionManagerUtil
-                        .getOpenedRevisionManager(jobContext.getConfiguration());
+                    .getOpenedRevisionManager(jobContext.getConfiguration());
                 rm.commitWriteTransaction(HBaseRevisionManagerUtil.getWriteTransaction(jobContext
-                        .getConfiguration()));
+                    .getConfiguration()));
             } finally {
                 if (rm != null)
                     rm.close();

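Both committers in this file follow the same revision-manager lifecycle, restated here as a compact sketch drawn directly from the abortJob/commitJob bodies above: obtain the opened manager from the job configuration, commit (or abort) the write transaction, and always close the manager in a finally block.

    RevisionManager rm = null;
    try {
        rm = HBaseRevisionManagerUtil
            .getOpenedRevisionManager(jobContext.getConfiguration());
        rm.commitWriteTransaction(
            HBaseRevisionManagerUtil.getWriteTransaction(jobContext.getConfiguration()));
    } finally {
        if (rm != null)
            rm.close();
    }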
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java Mon Sep 10 23:28:55 2012
@@ -106,21 +106,21 @@ public class HBaseHCatStorageHandler ext
             //do it here
             if (jobConf instanceof JobConf) { //Should be the case
                 HBaseUtil.addHBaseDelegationToken(copyOfConf);
-                ((JobConf)jobConf).getCredentials().addAll(copyOfConf.getCredentials());
+                ((JobConf) jobConf).getCredentials().addAll(copyOfConf.getCredentials());
             }
 
             String outputSchema = jobConf.get(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA);
             jobProperties.put(TableInputFormat.SCAN_COLUMNS, getScanColumns(tableInfo, outputSchema));
 
             String serSnapshot = (String) inputJobInfo.getProperties().get(
-                    HBaseConstants.PROPERTY_TABLE_SNAPSHOT_KEY);
+                HBaseConstants.PROPERTY_TABLE_SNAPSHOT_KEY);
             if (serSnapshot == null) {
                 HCatTableSnapshot snapshot =
-                        HBaseRevisionManagerUtil.createSnapshot(
-                            RevisionManagerConfiguration.create(copyOfConf),
-                            qualifiedTableName, tableInfo);
+                    HBaseRevisionManagerUtil.createSnapshot(
+                        RevisionManagerConfiguration.create(copyOfConf),
+                        qualifiedTableName, tableInfo);
                 jobProperties.put(HBaseConstants.PROPERTY_TABLE_SNAPSHOT_KEY,
-                        HCatUtil.serialize(snapshot));
+                    HCatUtil.serialize(snapshot));
             }
 
             //This adds it directly to the jobConf. Setting in jobProperties does not get propagated
@@ -155,21 +155,21 @@ public class HBaseHCatStorageHandler ext
             HBaseConfiguration.addHbaseResources(copyOfConf);
 
             String txnString = outputJobInfo.getProperties().getProperty(
-                    HBaseConstants.PROPERTY_WRITE_TXN_KEY);
+                HBaseConstants.PROPERTY_WRITE_TXN_KEY);
             Transaction txn = null;
             if (txnString == null) {
                 txn = HBaseRevisionManagerUtil.beginWriteTransaction(qualifiedTableName, tableInfo,
-                        RevisionManagerConfiguration.create(copyOfConf));
+                    RevisionManagerConfiguration.create(copyOfConf));
                 String serializedTxn = HCatUtil.serialize(txn);
                 outputJobInfo.getProperties().setProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY,
-                        serializedTxn);
+                    serializedTxn);
             } else {
                 txn = (Transaction) HCatUtil.deserialize(txnString);
             }
             if (isBulkMode(outputJobInfo)) {
                 String tableLocation = tableInfo.getTableLocation();
                 String location = new Path(tableLocation, "REVISION_" + txn.getRevisionNumber())
-                        .toString();
+                    .toString();
                 outputJobInfo.getProperties().setProperty(PROPERTY_INT_OUTPUT_LOCATION, location);
                 // We are writing out an intermediate sequenceFile hence
                 // location is not passed in OutputJobInfo.getLocation()
@@ -199,7 +199,7 @@ public class HBaseHCatStorageHandler ext
     */
     @Override
     public HiveAuthorizationProvider getAuthorizationProvider()
-            throws HiveException {
+        throws HiveException {
 
         HBaseAuthorizationProvider hbaseAuth = new HBaseAuthorizationProvider();
         hbaseAuth.init(getConf());
@@ -230,7 +230,7 @@ public class HBaseHCatStorageHandler ext
      */
     @Override
     public void commitDropTable(Table tbl, boolean deleteData)
-            throws MetaException {
+        throws MetaException {
         checkDeleteTable(tbl);
 
     }
@@ -256,20 +256,20 @@ public class HBaseHCatStorageHandler ext
         try {
             String tableName = getFullyQualifiedHBaseTableName(tbl);
             String hbaseColumnsMapping = tbl.getParameters().get(
-                    HBaseSerDe.HBASE_COLUMNS_MAPPING);
+                HBaseSerDe.HBASE_COLUMNS_MAPPING);
 
             if (hbaseColumnsMapping == null) {
                 throw new MetaException(
-                        "No hbase.columns.mapping defined in table"
-                                + " properties.");
+                    "No hbase.columns.mapping defined in table"
+                        + " properties.");
             }
 
             List<String> hbaseColumnFamilies = new ArrayList<String>();
             List<String> hbaseColumnQualifiers = new ArrayList<String>();
             List<byte[]> hbaseColumnFamiliesBytes = new ArrayList<byte[]>();
             int iKey = HBaseUtil.parseColumnMapping(hbaseColumnsMapping,
-                    hbaseColumnFamilies, hbaseColumnFamiliesBytes,
-                    hbaseColumnQualifiers, null);
+                hbaseColumnFamilies, hbaseColumnFamiliesBytes,
+                hbaseColumnQualifiers, null);
 
             HTableDescriptor tableDesc;
             Set<String> uniqueColumnFamilies = new HashSet<String>();
@@ -283,7 +283,7 @@ public class HBaseHCatStorageHandler ext
 
                     for (String columnFamily : uniqueColumnFamilies) {
                         HColumnDescriptor familyDesc = new HColumnDescriptor(Bytes
-                                .toBytes(columnFamily));
+                            .toBytes(columnFamily));
                         familyDesc.setMaxVersions(Integer.MAX_VALUE);
                         tableDesc.addFamily(familyDesc);
                     }
@@ -292,20 +292,20 @@ public class HBaseHCatStorageHandler ext
                 } else {
                     // an external table
                     throw new MetaException("HBase table " + tableName
-                            + " doesn't exist while the table is "
-                            + "declared as an external table.");
+                        + " doesn't exist while the table is "
+                        + "declared as an external table.");
                 }
 
             } else {
                 if (!isExternal) {
                     throw new MetaException("Table " + tableName
-                            + " already exists within HBase."
-                            + " Use CREATE EXTERNAL TABLE instead to"
-                            + " register it in HCatalog.");
+                        + " already exists within HBase."
+                        + " Use CREATE EXTERNAL TABLE instead to"
+                        + " register it in HCatalog.");
                 }
                 // make sure the schema mapping is right
                 tableDesc = getHBaseAdmin().getTableDescriptor(
-                        Bytes.toBytes(tableName));
+                    Bytes.toBytes(tableName));
 
                 for (int i = 0; i < hbaseColumnFamilies.size(); i++) {
                     if (i == iKey) {
@@ -314,8 +314,8 @@ public class HBaseHCatStorageHandler ext
 
                     if (!tableDesc.hasFamily(hbaseColumnFamiliesBytes.get(i))) {
                         throw new MetaException("Column Family "
-                                + hbaseColumnFamilies.get(i)
-                                + " is not defined in hbase table " + tableName);
+                            + hbaseColumnFamilies.get(i)
+                            + " is not defined in hbase table " + tableName);
                     }
                 }
             }
@@ -401,7 +401,7 @@ public class HBaseHCatStorageHandler ext
         String tableName = tbl.getParameters().get(HBaseSerDe.HBASE_TABLE_NAME);
         if (tableName == null) {
             tableName = tbl.getSd().getSerdeInfo().getParameters()
-                    .get(HBaseSerDe.HBASE_TABLE_NAME);
+                .get(HBaseSerDe.HBASE_TABLE_NAME);
         }
         if (tableName == null) {
             if (tbl.getDbName().equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
@@ -414,14 +414,14 @@ public class HBaseHCatStorageHandler ext
         return tableName;
     }
 
-    static String getFullyQualifiedHBaseTableName(HCatTableInfo tableInfo){
+    static String getFullyQualifiedHBaseTableName(HCatTableInfo tableInfo) {
         String qualifiedName = tableInfo.getStorerInfo().getProperties()
-                .getProperty(HBaseSerDe.HBASE_TABLE_NAME);
+            .getProperty(HBaseSerDe.HBASE_TABLE_NAME);
         if (qualifiedName == null) {
             String databaseName = tableInfo.getDatabaseName();
             String tableName = tableInfo.getTableName();
             if ((databaseName == null)
-                    || (databaseName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME))) {
+                || (databaseName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME))) {
                 qualifiedName = tableName;
             } else {
                 qualifiedName = databaseName + "." + tableName;
@@ -451,7 +451,7 @@ public class HBaseHCatStorageHandler ext
     */
     @Override
     public Class<? extends SerDe> getSerDeClass()
-            throws UnsupportedOperationException {
+        throws UnsupportedOperationException {
         return HBaseSerDe.class;
     }
 
@@ -514,28 +514,28 @@ public class HBaseHCatStorageHandler ext
      */
     private void addOutputDependencyJars(Configuration conf) throws IOException {
         TableMapReduceUtil.addDependencyJars(conf,
-                //ZK
-                ZooKeeper.class,
-                //HBase
-                HTable.class,
-                //Hive
-                HiveException.class,
-                //HCatalog jar
-                HCatOutputFormat.class,
-                //hcat hbase storage handler jar
-                HBaseHCatStorageHandler.class,
-                //hive hbase storage handler jar
-                HBaseSerDe.class,
-                //hive jar
-                Table.class,
-                //libthrift jar
-                TBase.class,
-                //hbase jar
-                Bytes.class,
-                //thrift-fb303 .jar
-                FacebookBase.class,
-                //guava jar
-                ThreadFactoryBuilder.class);
+            //ZK
+            ZooKeeper.class,
+            //HBase
+            HTable.class,
+            //Hive
+            HiveException.class,
+            //HCatalog jar
+            HCatOutputFormat.class,
+            //hcat hbase storage handler jar
+            HBaseHCatStorageHandler.class,
+            //hive hbase storage handler jar
+            HBaseSerDe.class,
+            //hive jar
+            Table.class,
+            //libthrift jar
+            TBase.class,
+            //hbase jar
+            Bytes.class,
+            //thrift-fb303 .jar
+            FacebookBase.class,
+            //guava jar
+            ThreadFactoryBuilder.class);
     }
 
     /**
@@ -558,15 +558,15 @@ public class HBaseHCatStorageHandler ext
     public static boolean isBulkMode(OutputJobInfo outputJobInfo) {
         //Default is false
         String bulkMode = outputJobInfo.getTableInfo().getStorerInfo().getProperties()
-                .getProperty(HBaseConstants.PROPERTY_BULK_OUTPUT_MODE_KEY,
-                        "false");
+            .getProperty(HBaseConstants.PROPERTY_BULK_OUTPUT_MODE_KEY,
+                "false");
         return "true".equals(bulkMode);
     }
 
     private String getScanColumns(HCatTableInfo tableInfo, String outputColSchema) throws IOException {
         StringBuilder builder = new StringBuilder();
         String hbaseColumnMapping = tableInfo.getStorerInfo().getProperties()
-                .getProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING);
+            .getProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING);
         if (outputColSchema == null) {
             String[] splits = hbaseColumnMapping.split("[,]");
             for (int i = 0; i < splits.length; i++) {
@@ -578,14 +578,14 @@ public class HBaseHCatStorageHandler ext
             HCatSchema tableSchema = tableInfo.getDataColumns();
             List<String> outputFieldNames = outputSchema.getFieldNames();
             List<Integer> outputColumnMapping = new ArrayList<Integer>();
-            for(String fieldName: outputFieldNames){
+            for (String fieldName : outputFieldNames) {
                 int position = tableSchema.getPosition(fieldName);
                 outputColumnMapping.add(position);
             }
             List<String> columnFamilies = new ArrayList<String>();
             List<String> columnQualifiers = new ArrayList<String>();
             HBaseUtil.parseColumnMapping(hbaseColumnMapping, columnFamilies, null,
-                    columnQualifiers, null);
+                columnQualifiers, null);
             for (int i = 0; i < outputColumnMapping.size(); i++) {
                 int cfIndex = outputColumnMapping.get(i);
                 String cf = columnFamilies.get(cfIndex);

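A note on the addOutputDependencyJars(...) hunk above: TableMapReduceUtil.addDependencyJars(Configuration, Class<?>...) locates the jar each listed class was loaded from and adds it to the job's distributed cache, which is why one representative class per dependency is enough. Below is a minimal, self-contained sketch of the same call; the configuration and the single guava class are illustrative and not part of this commit.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class DependencyJarsSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // One class per dependency jar; addDependencyJars resolves the
        // containing jar of each class and ships it with the job.
        TableMapReduceUtil.addDependencyJars(conf, ThreadFactoryBuilder.class);
    }
}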
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java Mon Sep 10 23:28:55 2012
@@ -66,8 +66,8 @@ class HBaseInputFormat implements InputF
      */
     @Override
     public RecordReader<ImmutableBytesWritable, Result> getRecordReader(
-            InputSplit split, JobConf job, Reporter reporter)
-            throws IOException {
+        InputSplit split, JobConf job, Reporter reporter)
+        throws IOException {
         String jobString = job.get(HCatConstants.HCAT_KEY_JOB_INFO);
         InputJobInfo inputJobInfo = (InputJobInfo) HCatUtil.deserialize(jobString);
 
@@ -103,20 +103,20 @@ class HBaseInputFormat implements InputF
      */
     @Override
     public org.apache.hadoop.mapred.InputSplit[] getSplits(JobConf job, int numSplits)
-            throws IOException {
+        throws IOException {
         inputFormat.setConf(job);
         return convertSplits(inputFormat.getSplits(HCatMapRedUtil.createJobContext(job, null,
-                Reporter.NULL)));
+            Reporter.NULL)));
     }
 
     private InputSplit[] convertSplits(List<org.apache.hadoop.mapreduce.InputSplit> splits) {
         InputSplit[] converted = new InputSplit[splits.size()];
         for (int i = 0; i < splits.size(); i++) {
             org.apache.hadoop.hbase.mapreduce.TableSplit tableSplit =
-                    (org.apache.hadoop.hbase.mapreduce.TableSplit) splits.get(i);
+                (org.apache.hadoop.hbase.mapreduce.TableSplit) splits.get(i);
             TableSplit newTableSplit = new TableSplit(tableSplit.getTableName(),
-                    tableSplit.getStartRow(),
-                    tableSplit.getEndRow(), tableSplit.getRegionLocation());
+                tableSplit.getStartRow(),
+                tableSplit.getEndRow(), tableSplit.getRegionLocation());
             converted[i] = newTableSplit;
         }
         return converted;

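The convertSplits(...) hunk above bridges the new org.apache.hadoop.mapreduce split type back to the old org.apache.hadoop.mapred one by copying its fields. A minimal sketch of constructing the old-API split directly, assuming the org.apache.hadoop.hbase.mapred.TableSplit constructor of the HBase 0.9x line; all values are illustrative.

import org.apache.hadoop.hbase.mapred.TableSplit;
import org.apache.hadoop.hbase.util.Bytes;

public class TableSplitSketch {
    public static void main(String[] args) {
        // Field-by-field construction, mirroring what convertSplits(...)
        // does with the values pulled from a new-API TableSplit.
        TableSplit split = new TableSplit(
            Bytes.toBytes("my_table"),        // HBase table name
            Bytes.toBytes("row-0000"),        // start row (inclusive)
            Bytes.toBytes("row-9999"),        // end row (exclusive)
            "regionserver1.example.com");     // region location hint
        System.out.println(split);
    }
}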
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java Mon Sep 10 23:28:55 2012
@@ -65,7 +65,7 @@ class HBaseRevisionManagerUtil {
      * @throws IOException Signals that an I/O exception has occurred.
      */
     static HCatTableSnapshot createSnapshot(Configuration jobConf,
-            String hbaseTableName, HCatTableInfo tableInfo ) throws IOException {
+                                            String hbaseTableName, HCatTableInfo tableInfo) throws IOException {
 
         RevisionManager rm = null;
         TableSnapshot snpt;
@@ -90,8 +90,8 @@ class HBaseRevisionManagerUtil {
      * @throws IOException Signals that an I/O exception has occurred.
      */
     static HCatTableSnapshot createSnapshot(Configuration jobConf,
-            String tableName, long revision)
-            throws IOException {
+                                            String tableName, long revision)
+        throws IOException {
 
         TableSnapshot snpt;
         RevisionManager rm = null;
@@ -103,14 +103,14 @@ class HBaseRevisionManagerUtil {
         }
 
         String inputJobString = jobConf.get(HCatConstants.HCAT_KEY_JOB_INFO);
-        if(inputJobString == null){
+        if (inputJobString == null) {
             throw new IOException(
-                    "InputJobInfo information not found in JobContext. "
-                            + "HCatInputFormat.setInput() not called?");
+                "InputJobInfo information not found in JobContext. "
+                    + "HCatInputFormat.setInput() not called?");
         }
         InputJobInfo inputInfo = (InputJobInfo) HCatUtil.deserialize(inputJobString);
         HCatTableSnapshot hcatSnapshot = HBaseRevisionManagerUtil
-                .convertSnapshot(snpt, inputInfo.getTableInfo());
+            .convertSnapshot(snpt, inputInfo.getTableInfo());
 
         return hcatSnapshot;
     }
@@ -123,7 +123,7 @@ class HBaseRevisionManagerUtil {
      * @throws IOException
      */
     static RevisionManager getOpenedRevisionManager(Configuration jobConf) throws IOException {
-      return RevisionManagerFactory.getOpenedRevisionManager(jobConf);
+        return RevisionManagerFactory.getOpenedRevisionManager(jobConf);
     }
 
     static void closeRevisionManagerQuietly(RevisionManager rm) {
@@ -138,14 +138,14 @@ class HBaseRevisionManagerUtil {
 
 
     static HCatTableSnapshot convertSnapshot(TableSnapshot hbaseSnapshot,
-            HCatTableInfo hcatTableInfo) throws IOException {
+                                             HCatTableInfo hcatTableInfo) throws IOException {
 
         HCatSchema hcatTableSchema = hcatTableInfo.getDataColumns();
         Map<String, String> hcatHbaseColMap = getHCatHBaseColumnMapping(hcatTableInfo);
         HashMap<String, Long> revisionMap = new HashMap<String, Long>();
 
         for (HCatFieldSchema fSchema : hcatTableSchema.getFields()) {
-            if(hcatHbaseColMap.containsKey(fSchema.getName())){
+            if (hcatHbaseColMap.containsKey(fSchema.getName())) {
                 String colFamily = hcatHbaseColMap.get(fSchema.getName());
                 long revisionID = hbaseSnapshot.getRevision(colFamily);
                 revisionMap.put(fSchema.getName(), revisionID);
@@ -153,12 +153,12 @@ class HBaseRevisionManagerUtil {
         }
 
         HCatTableSnapshot hcatSnapshot = new HCatTableSnapshot(
-                 hcatTableInfo.getDatabaseName(), hcatTableInfo.getTableName(),revisionMap,hbaseSnapshot.getLatestRevision());
+            hcatTableInfo.getDatabaseName(), hcatTableInfo.getTableName(), revisionMap, hbaseSnapshot.getLatestRevision());
         return hcatSnapshot;
     }
 
     static TableSnapshot convertSnapshot(HCatTableSnapshot hcatSnapshot,
-            HCatTableInfo hcatTableInfo) throws IOException {
+                                         HCatTableInfo hcatTableInfo) throws IOException {
 
         HCatSchema hcatTableSchema = hcatTableInfo.getDataColumns();
         Map<String, Long> revisionMap = new HashMap<String, Long>();
@@ -172,8 +172,8 @@ class HBaseRevisionManagerUtil {
         }
 
         String fullyQualifiedName = hcatSnapshot.getDatabaseName() + "."
-                + hcatSnapshot.getTableName();
-        return new TableSnapshot(fullyQualifiedName, revisionMap,hcatSnapshot.getLatestRevision());
+            + hcatSnapshot.getTableName();
+        return new TableSnapshot(fullyQualifiedName, revisionMap, hcatSnapshot.getLatestRevision());
 
     }
 
@@ -186,13 +186,13 @@ class HBaseRevisionManagerUtil {
      * @throws IOException
      */
     static Transaction beginWriteTransaction(String qualifiedTableName,
-            HCatTableInfo tableInfo, Configuration jobConf) throws IOException {
+                                             HCatTableInfo tableInfo, Configuration jobConf) throws IOException {
         Transaction txn;
         RevisionManager rm = null;
         try {
             rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(jobConf);
             String hBaseColumns = tableInfo.getStorerInfo().getProperties()
-                    .getProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING);
+                .getProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING);
             String[] splits = hBaseColumns.split("[,:]");
             Set<String> families = new HashSet<String>();
             for (int i = 0; i < splits.length; i += 2) {
@@ -207,13 +207,13 @@ class HBaseRevisionManagerUtil {
     }
 
     static Transaction getWriteTransaction(Configuration conf) throws IOException {
-        OutputJobInfo outputJobInfo = (OutputJobInfo)HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
+        OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
         return (Transaction) HCatUtil.deserialize(outputJobInfo.getProperties()
-                                                               .getProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY));
+            .getProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY));
     }
 
     static void setWriteTransaction(Configuration conf, Transaction txn) throws IOException {
-        OutputJobInfo outputJobInfo = (OutputJobInfo)HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
+        OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
         outputJobInfo.getProperties().setProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY, HCatUtil.serialize(txn));
         conf.set(HCatConstants.HCAT_KEY_OUTPUT_INFO, HCatUtil.serialize(outputJobInfo));
     }
@@ -228,19 +228,19 @@ class HBaseRevisionManagerUtil {
         return getWriteTransaction(conf).getRevisionNumber();
     }
 
-    private static Map<String, String> getHCatHBaseColumnMapping( HCatTableInfo hcatTableInfo)
-            throws IOException {
+    private static Map<String, String> getHCatHBaseColumnMapping(HCatTableInfo hcatTableInfo)
+        throws IOException {
 
         HCatSchema hcatTableSchema = hcatTableInfo.getDataColumns();
         StorerInfo storeInfo = hcatTableInfo.getStorerInfo();
         String hbaseColumnMapping = storeInfo.getProperties().getProperty(
-                HBaseSerDe.HBASE_COLUMNS_MAPPING);
+            HBaseSerDe.HBASE_COLUMNS_MAPPING);
 
         Map<String, String> hcatHbaseColMap = new HashMap<String, String>();
         List<String> columnFamilies = new ArrayList<String>();
         List<String> columnQualifiers = new ArrayList<String>();
         HBaseUtil.parseColumnMapping(hbaseColumnMapping, columnFamilies,
-                null, columnQualifiers, null);
+            null, columnQualifiers, null);
 
         for (HCatFieldSchema column : hcatTableSchema.getFields()) {
             int fieldPos = hcatTableSchema.getPosition(column.getName());

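For reference, the family set built in beginWriteTransaction(...) above relies on hbase.columns.mapping alternating family and qualifier tokens once split on ',' and ':', so the column family sits at every even index. A standalone sketch of just that parse, with an illustrative mapping string:

import java.util.HashSet;
import java.util.Set;

public class FamilyParseSketch {
    public static void main(String[] args) {
        String hBaseColumns = "cf1:q1,cf1:q2,cf2:q1";
        // Splitting on ',' and ':' yields family,qualifier pairs, so the
        // column family is found at every even index.
        String[] splits = hBaseColumns.split("[,:]");
        Set<String> families = new HashSet<String>();
        for (int i = 0; i < splits.length; i += 2) {
            families.add(splits[i]);
        }
        System.out.println(families); // e.g. [cf1, cf2] (set order not guaranteed)
    }
}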
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java Mon Sep 10 23:28:55 2012
@@ -28,7 +28,7 @@ import org.apache.hadoop.mapred.JobConf;
 
 class HBaseUtil {
 
-    private HBaseUtil(){
+    private HBaseUtil() {
     }
 
     /**
@@ -47,97 +47,97 @@ class HBaseUtil {
     static int parseColumnMapping(
         String columnMapping,
         List<String> colFamilies,
-        List<byte []> colFamiliesBytes,
+        List<byte[]> colFamiliesBytes,
         List<String> colQualifiers,
-        List<byte []> colQualifiersBytes) throws IOException {
+        List<byte[]> colQualifiersBytes) throws IOException {
 
-      int rowKeyIndex = -1;
+        int rowKeyIndex = -1;
 
-      if (colFamilies == null || colQualifiers == null) {
-        throw new IllegalArgumentException("Error: caller must pass in lists for the column families " +
-            "and qualifiers.");
-      }
-
-      colFamilies.clear();
-      colQualifiers.clear();
+        if (colFamilies == null || colQualifiers == null) {
+            throw new IllegalArgumentException("Error: caller must pass in lists for the column families " +
+                "and qualifiers.");
+        }
 
-      if (columnMapping == null) {
-        throw new IllegalArgumentException("Error: hbase.columns.mapping missing for this HBase table.");
-      }
+        colFamilies.clear();
+        colQualifiers.clear();
 
-      if (columnMapping.equals("") || columnMapping.equals(HBaseSerDe.HBASE_KEY_COL)) {
-        throw new IllegalArgumentException("Error: hbase.columns.mapping specifies only the HBase table"
-            + " row key. A valid Hive-HBase table must specify at least one additional column.");
-      }
+        if (columnMapping == null) {
+            throw new IllegalArgumentException("Error: hbase.columns.mapping missing for this HBase table.");
+        }
 
-      String [] mapping = columnMapping.split(",");
+        if (columnMapping.equals("") || columnMapping.equals(HBaseSerDe.HBASE_KEY_COL)) {
+            throw new IllegalArgumentException("Error: hbase.columns.mapping specifies only the HBase table"
+                + " row key. A valid Hive-HBase table must specify at least one additional column.");
+        }
 
-      for (int i = 0; i < mapping.length; i++) {
-        String elem = mapping[i];
-        int idxFirst = elem.indexOf(":");
-        int idxLast = elem.lastIndexOf(":");
+        String[] mapping = columnMapping.split(",");
 
-        if (idxFirst < 0 || !(idxFirst == idxLast)) {
-          throw new IllegalArgumentException("Error: the HBase columns mapping contains a badly formed " +
-              "column family, column qualifier specification.");
-        }
+        for (int i = 0; i < mapping.length; i++) {
+            String elem = mapping[i];
+            int idxFirst = elem.indexOf(":");
+            int idxLast = elem.lastIndexOf(":");
 
-        if (elem.equals(HBaseSerDe.HBASE_KEY_COL)) {
-          rowKeyIndex = i;
-          colFamilies.add(elem);
-          colQualifiers.add(null);
-        } else {
-          String [] parts = elem.split(":");
-          assert(parts.length > 0 && parts.length <= 2);
-          colFamilies.add(parts[0]);
+            if (idxFirst < 0 || !(idxFirst == idxLast)) {
+                throw new IllegalArgumentException("Error: the HBase columns mapping contains a badly formed " +
+                    "column family, column qualifier specification.");
+            }
 
-          if (parts.length == 2) {
-            colQualifiers.add(parts[1]);
-          } else {
-            colQualifiers.add(null);
-          }
+            if (elem.equals(HBaseSerDe.HBASE_KEY_COL)) {
+                rowKeyIndex = i;
+                colFamilies.add(elem);
+                colQualifiers.add(null);
+            } else {
+                String[] parts = elem.split(":");
+                assert (parts.length > 0 && parts.length <= 2);
+                colFamilies.add(parts[0]);
+
+                if (parts.length == 2) {
+                    colQualifiers.add(parts[1]);
+                } else {
+                    colQualifiers.add(null);
+                }
+            }
         }
-      }
 
-      if (rowKeyIndex == -1) {
-        colFamilies.add(0, HBaseSerDe.HBASE_KEY_COL);
-        colQualifiers.add(0, null);
-        rowKeyIndex = 0;
-      }
+        if (rowKeyIndex == -1) {
+            colFamilies.add(0, HBaseSerDe.HBASE_KEY_COL);
+            colQualifiers.add(0, null);
+            rowKeyIndex = 0;
+        }
 
-      if (colFamilies.size() != colQualifiers.size()) {
-        throw new IOException("Error in parsing the hbase columns mapping.");
-      }
+        if (colFamilies.size() != colQualifiers.size()) {
+            throw new IOException("Error in parsing the hbase columns mapping.");
+        }
 
-      // populate the corresponding byte [] if the client has passed in a non-null list
-      if (colFamiliesBytes != null) {
-        colFamiliesBytes.clear();
+        // populate the corresponding byte [] if the client has passed in a non-null list
+        if (colFamiliesBytes != null) {
+            colFamiliesBytes.clear();
 
-        for (String fam : colFamilies) {
-          colFamiliesBytes.add(Bytes.toBytes(fam));
+            for (String fam : colFamilies) {
+                colFamiliesBytes.add(Bytes.toBytes(fam));
+            }
         }
-      }
 
-      if (colQualifiersBytes != null) {
-        colQualifiersBytes.clear();
+        if (colQualifiersBytes != null) {
+            colQualifiersBytes.clear();
 
-        for (String qual : colQualifiers) {
-          if (qual == null) {
-            colQualifiersBytes.add(null);
-          } else {
-            colQualifiersBytes.add(Bytes.toBytes(qual));
-          }
+            for (String qual : colQualifiers) {
+                if (qual == null) {
+                    colQualifiersBytes.add(null);
+                } else {
+                    colQualifiersBytes.add(Bytes.toBytes(qual));
+                }
+            }
         }
-      }
 
-      if (colFamiliesBytes != null && colQualifiersBytes != null) {
-        if (colFamiliesBytes.size() != colQualifiersBytes.size()) {
-          throw new IOException("Error in caching the bytes for the hbase column families " +
-              "and qualifiers.");
+        if (colFamiliesBytes != null && colQualifiersBytes != null) {
+            if (colFamiliesBytes.size() != colQualifiersBytes.size()) {
+                throw new IOException("Error in caching the bytes for the hbase column families " +
+                    "and qualifiers.");
+            }
         }
-      }
 
-      return rowKeyIndex;
+        return rowKeyIndex;
     }
 
     /**

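A usage sketch for the reindented parseColumnMapping(...) above. The method is package-private, so the caller below is assumed to live in org.apache.hcatalog.hbase; the mapping string is illustrative. With an explicit :key column the returned row key index is 0, and the qualifier is null both for the row key and for a family mapped without a qualifier.

package org.apache.hcatalog.hbase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class ParseColumnMappingSketch {
    public static void main(String[] args) throws IOException {
        List<String> families = new ArrayList<String>();
        List<byte[]> familiesBytes = new ArrayList<byte[]>();
        List<String> qualifiers = new ArrayList<String>();
        List<byte[]> qualifiersBytes = new ArrayList<byte[]>();

        int rowKeyIndex = HBaseUtil.parseColumnMapping(":key,cf1:q1,cf2:",
            families, familiesBytes, qualifiers, qualifiersBytes);

        System.out.println(rowKeyIndex); // 0
        System.out.println(families);    // [:key, cf1, cf2]
        System.out.println(qualifiers);  // [null, q1, null]
    }
}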
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HCatTableSnapshot.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HCatTableSnapshot.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HCatTableSnapshot.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HCatTableSnapshot.java Mon Sep 10 23:28:55 2012
@@ -27,7 +27,7 @@ import java.util.Map;
  * record readers to obtain knowledge about the revisions of a
  * column to be filtered.
  */
-public class HCatTableSnapshot implements Serializable{
+public class HCatTableSnapshot implements Serializable {
 
     private static final long serialVersionUID = 1L;
     private String tableName;
@@ -52,15 +52,15 @@ public class HCatTableSnapshot implement
     /**
      * @return The name of the database to which the table snapshot belongs.
      */
-    public String getDatabaseName(){
+    public String getDatabaseName() {
         return this.databaseName;
     }
 
     /**
      * @return The revision number of a column in a snapshot.
      */
-    long getRevision(String column){
-        if(columnMap.containsKey(column))
+    long getRevision(String column) {
+        if (columnMap.containsKey(column))
             return this.columnMap.get(column);
         return latestRevision;
     }
@@ -71,7 +71,7 @@ public class HCatTableSnapshot implement
      * @param column The data column of the table
      * @return true, if successful
      */
-    boolean containsColumn(String column){
+    boolean containsColumn(String column) {
         return this.columnMap.containsKey(column);
     }
 
@@ -84,8 +84,8 @@ public class HCatTableSnapshot implement
 
     @Override
     public String toString() {
-        String snapshot = " Database Name: " + this.databaseName +" Table Name : " + tableName +
-                 "Latest Revision: "+latestRevision+" Column revision : " + columnMap.toString();
+        String snapshot = " Database Name: " + this.databaseName + " Table Name : " + tableName +
+            "Latest Revision: " + latestRevision + " Column revision : " + columnMap.toString();
         return snapshot;
     }
 }

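The getRevision(String) hunk above encodes the snapshot's fallback rule: a column with no explicit entry in columnMap reads at latestRevision. A minimal restatement of that lookup with a plain map; the column names and revision numbers are illustrative.

import java.util.HashMap;
import java.util.Map;

public class RevisionLookupSketch {
    public static void main(String[] args) {
        Map<String, Long> columnMap = new HashMap<String, Long>();
        columnMap.put("user_name", 7L);
        long latestRevision = 12L;

        // Mapped column: explicit revision. Unmapped column: latest revision.
        System.out.println(lookup(columnMap, latestRevision, "user_name")); // 7
        System.out.println(lookup(columnMap, latestRevision, "user_age"));  // 12
    }

    static long lookup(Map<String, Long> columnMap, long latestRevision, String column) {
        if (columnMap.containsKey(column))
            return columnMap.get(column);
        return latestRevision;
    }
}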
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HbaseSnapshotRecordReader.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HbaseSnapshotRecordReader.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HbaseSnapshotRecordReader.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HbaseSnapshotRecordReader.java Mon Sep 10 23:28:55 2012
@@ -56,8 +56,8 @@ class HbaseSnapshotRecordReader implemen
     private final Configuration conf;
     private final int maxRevisions = 1;
     private ResultScanner scanner;
-    private Scan  scan;
-    private HTable  htable;
+    private Scan scan;
+    private HTable htable;
     private TableSnapshot snapshot;
     private Iterator<Result> resultItr;
     private Set<Long> allAbortedTransactions;
@@ -69,9 +69,9 @@ class HbaseSnapshotRecordReader implemen
         this.conf = conf;
         String snapshotString = conf.get(HBaseConstants.PROPERTY_TABLE_SNAPSHOT_KEY);
         HCatTableSnapshot hcatSnapshot = (HCatTableSnapshot) HCatUtil
-                .deserialize(snapshotString);
+            .deserialize(snapshotString);
         this.snapshot = HBaseRevisionManagerUtil.convertSnapshot(hcatSnapshot,
-                inpJobInfo.getTableInfo());
+            inpJobInfo.getTableInfo());
     }
 
     public void init() throws IOException {
@@ -104,7 +104,7 @@ class HbaseSnapshotRecordReader implemen
             for (byte[] familyKey : families) {
                 String family = Bytes.toString(familyKey);
                 List<FamilyRevision> abortedWriteTransactions = rm.getAbortedWriteTransactions(
-                        tableName, family);
+                    tableName, family);
                 if (abortedWriteTransactions != null) {
                     for (FamilyRevision revision : abortedWriteTransactions) {
                         abortedTransactions.add(revision.getRevision());
@@ -172,7 +172,7 @@ class HbaseSnapshotRecordReader implemen
     public boolean next(ImmutableBytesWritable key, Result value) throws IOException {
         if (this.resultItr == null) {
             LOG.warn("The HBase result iterator is found null. It is possible"
-                    + " that the record reader has already been closed.");
+                + " that the record reader has already been closed.");
         } else {
             while (resultItr.hasNext()) {
                 Result temp = resultItr.next();
@@ -233,7 +233,7 @@ class HbaseSnapshotRecordReader implemen
             }
         }
 
-        if(finalKeyVals.size() == 0){
+        if (finalKeyVals.size() == 0) {
             return null;
         } else {
             KeyValue[] kvArray = new KeyValue[finalKeyVals.size()];

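HbaseSnapshotRecordReader, reindented above, screens each scanned cell against the set of aborted write transactions gathered in init(), dropping any cell written by an aborted transaction. A minimal sketch of that screening step, with illustrative revision numbers:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class AbortedRevisionFilterSketch {
    public static void main(String[] args) {
        // Revisions written by transactions that were later aborted.
        Set<Long> allAbortedTransactions = new HashSet<Long>(Arrays.asList(3L, 5L));
        long[] cellRevisions = {2L, 3L, 4L, 5L};

        for (long revision : cellRevisions) {
            if (!allAbortedTransactions.contains(revision)) {
                System.out.println("keep revision " + revision); // keeps 2 and 4
            }
        }
    }
}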