Subject: svn commit: r793472 - in /hadoop/hbase/trunk_on_hadoop-0.18.3: CHANGES.txt src/java/org/apache/hadoop/hbase/client/Scan.java src/test/org/apache/hadoop/hbase/TestSerialization.java
Date: Mon, 13 Jul 2009 07:05:47 -0000
To: hbase-commits@hadoop.apache.org
From: apurtell@apache.org
Reply-To: hbase-dev@hadoop.apache.org
Message-Id: <20090713070547.372E42388876@eris.apache.org>

Author: apurtell
Date: Mon Jul 13 07:05:46 2009
New Revision: 793472

URL: http://svn.apache.org/viewvc?rev=793472&view=rev
Log:
HBASE-1646  Scan-s can't set a Filter (Doğacan Güney via Stack)

Modified:
    hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Scan.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestSerialization.java

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt?rev=793472&r1=793471&r2=793472&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt Mon Jul 13 07:05:46 2009
@@ -257,6 +257,7 @@
    HBASE-1644  Result.row is cached in getRow; this breaks MapReduce
                (Doğacan Güney via Stack)
    HBASE-1639  clean checkout with empty hbase-site.xml, zk won't start
+   HBASE-1646  Scan-s can't set a Filter (Doğacan Güney via Stack)
 
  IMPROVEMENTS
    HBASE-1089  Add count of regions on filesystem to master UI; add percentage

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Scan.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Scan.java?rev=793472&r1=793471&r2=793472&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Scan.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Scan.java Mon Jul 13 07:05:46 2009
@@ -25,10 +25,10 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableFactories;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -476,21 +476,31 @@
     return sb.toString();
   }
 
+  @SuppressWarnings("unchecked")
+  private Writable createForName(String className) {
+    try {
+      Class<? extends Writable> clazz =
+        (Class<? extends Writable>) Class.forName(className);
+      return WritableFactories.newInstance(clazz, new Configuration());
+    } catch (ClassNotFoundException e) {
+      throw new RuntimeException("Can't find class " + className);
+    }
+  }
+
   //Writable
   public void readFields(final DataInput in)
   throws IOException {
     this.startRow = Bytes.readByteArray(in);
     this.stopRow = Bytes.readByteArray(in);
     this.maxVersions = in.readInt();
-    boolean hasFilter = in.readBoolean();
-    if(hasFilter) {
-      this.filter = (Filter)HbaseObjectWritable.readObject(in,
-        new Configuration());
-    }
-    boolean hasOldFilter = in.readBoolean();
-    if (hasOldFilter) {
-      this.oldFilter = (RowFilterInterface)HbaseObjectWritable.readObject(in,
-        new Configuration());
+    if(in.readBoolean()) {
+      this.filter = (Filter)createForName(Bytes.toString(Bytes.readByteArray(in)));
+      this.filter.readFields(in);
+    }
+    if (in.readBoolean()) {
+      this.oldFilter =
+        (RowFilterInterface)createForName(Bytes.toString(Bytes.readByteArray(in)));
+      this.oldFilter.readFields(in);
     }
     this.tr = new TimeRange();
     tr.readFields(in);
@@ -518,15 +528,15 @@
       out.writeBoolean(false);
     } else {
       out.writeBoolean(true);
-      HbaseObjectWritable.writeObject(out, this.filter,
-        Filter.class, null);
+      Bytes.writeByteArray(out, Bytes.toBytes(filter.getClass().getName()));
+      filter.write(out);
     }
     if (this.oldFilter == null) {
       out.writeBoolean(false);
     } else {
       out.writeBoolean(true);
-      HbaseObjectWritable.writeObject(out, this.oldFilter,
-        RowFilterInterface.class, null);
+      Bytes.writeByteArray(out, Bytes.toBytes(oldFilter.getClass().getName()));
+      oldFilter.write(out);
     }
     tr.write(out);
     out.writeInt(familyMap.size());
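
For readers skimming the diff: the old code serialized filters through
HbaseObjectWritable, which per the HBASE-1646 report did not round-trip a
Filter set on a Scan. The replacement writes a presence flag, then the
filter's class name as a byte array, then delegates to the filter's own
write(); reading reverses the steps, instantiating the class reflectively
before calling readFields(). The sketch below is not part of the commit: it
shows the same pattern in isolation, using writeUTF in place of
Bytes.writeByteArray for brevity and a demo class name of our choosing, but
only real java.io and org.apache.hadoop.io APIs.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;
    import java.io.DataOutput;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableFactories;

    public class ClassNamePlusFieldsDemo {

      // Write a nullable Writable: presence flag, class name, then the
      // instance's own fields via Writable.write().
      static void writeInstance(DataOutput out, Writable w) throws IOException {
        if (w == null) {
          out.writeBoolean(false);
          return;
        }
        out.writeBoolean(true);
        out.writeUTF(w.getClass().getName());
        w.write(out);
      }

      // Read it back: flag, class name, reflective instantiation (the class
      // needs a no-arg constructor, as HBase filters have), then readFields().
      static Writable readInstance(DataInput in) throws IOException {
        if (!in.readBoolean()) {
          return null;
        }
        String className = in.readUTF();
        try {
          Class<? extends Writable> clazz =
              Class.forName(className).asSubclass(Writable.class);
          Writable w = WritableFactories.newInstance(clazz, new Configuration());
          w.readFields(in);
          return w;
        } catch (ClassNotFoundException e) {
          throw new IOException("Can't find class " + className);
        }
      }

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        writeInstance(new DataOutputStream(bos), new Text("hello"));
        DataInput in =
            new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
        System.out.println(readInstance(in));  // prints: hello
      }
    }

The trade-off versus HbaseObjectWritable is a full class-name string per
filter on the wire instead of a class code, in exchange for working with any
Writable filter class without a registry entry.
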
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestSerialization.java?rev=793472&r1=793471&r2=793472&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestSerialization.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestSerialization.java Mon Jul 13 07:05:46 2009
@@ -22,12 +22,11 @@
 
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.NavigableSet;
+import java.util.Set;
 
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
@@ -35,6 +34,8 @@
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RowLock;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.io.BatchOperation;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
@@ -44,7 +45,6 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.Writable;
 
 /**
  * Test HBase Writables serializations
@@ -370,6 +370,15 @@
     for(byte[] column : set){
       assertTrue(desSet.contains(column));
     }
+
+    // Test filters are serialized properly.
+    scan = new Scan(startRow);
+    byte [] prefix = Bytes.toBytes(getName());
+    scan.setFilter(new PrefixFilter(prefix));
+    sb = Writables.getBytes(scan);
+    desScan = (Scan)Writables.getWritable(sb, new Scan());
+    Filter f = desScan.getFilter();
+    assertTrue(f instanceof PrefixFilter);
   }
 
   assertEquals(scan.getMaxVersions(), desScan.getMaxVersions());
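
The added test asserts exactly this round trip. Under the same classpath as
the test above (Scan, PrefixFilter, Bytes, and the Writables helper; the
wrapper class name here is ours), a standalone version of the check might
look like:

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.PrefixFilter;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.Writables;

    public class ScanFilterRoundTrip {
      public static void main(String[] args) throws Exception {
        byte[] prefix = Bytes.toBytes("testScan");
        Scan scan = new Scan(prefix);              // start row
        scan.setFilter(new PrefixFilter(prefix));  // keep rows starting with prefix

        byte[] bytes = Writables.getBytes(scan);   // invokes Scan.write()
        Scan back =
            (Scan) Writables.getWritable(bytes, new Scan());  // Scan.readFields()

        Filter f = back.getFilter();
        System.out.println(f instanceof PrefixFilter);  // expect: true
      }
    }

Per the HBASE-1646 title, a Filter set on a Scan previously failed to survive
this serialization boundary; after the change the deserialized Scan carries a
fresh PrefixFilter rebuilt from its class name and its own serialized fields.
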