directory-commits mailing list archives

From seelm...@apache.org
Subject svn commit: r912434 [2/2] - in /directory/sandbox/seelmann/hbase-partition/src: main/java/org/apache/directory/server/core/partition/hbase/ main/java/org/apache/directory/server/core/partition/hbase/cursor/ main/java/org/apache/directory/server/core/pa...
Date Sun, 21 Feb 2010 22:59:20 GMT
Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTableHelper.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTableHelper.java?rev=912434&r1=912433&r2=912434&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTableHelper.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTableHelper.java Sun Feb 21 22:59:19 2010
@@ -28,11 +28,11 @@
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -59,10 +59,10 @@
      * 
      * @throws Exception the exception
      */
-    public static byte[] decrement( HTablePool pool, String tableName, byte[] row, byte[] family, byte[] qualifier )
+    public static byte[] decrement( HBaseTablePool pool, String tableName, byte[] row, byte[] family, byte[] qualifier )
         throws Exception
     {
-        byte[] decrement = increment( pool, tableName, row, family, qualifier, -1L );
+        byte[] decrement = internIncrement( pool, tableName, row, family, qualifier, -1L );
         if ( LOG.isDebugEnabled() )
         {
             LOG.debug( "Decrement " + tableName + ":" + Utils.getPrintableString( row ) + ":"
@@ -87,10 +87,10 @@
      * 
      * @throws Exception the exception
      */
-    public static byte[] increment( HTablePool pool, String tableName, byte[] row, byte[] family, byte[] qualifier )
+    public static byte[] increment( HBaseTablePool pool, String tableName, byte[] row, byte[] family, byte[] qualifier )
         throws Exception
     {
-        byte[] increment = increment( pool, tableName, row, family, qualifier, 1L );
+        byte[] increment = internIncrement( pool, tableName, row, family, qualifier, 1L );
         if ( LOG.isDebugEnabled() )
         {
             LOG.debug( "Increment " + tableName + ":" + Utils.getPrintableString( row ) + ":"
@@ -102,10 +102,39 @@
     }
 
 
-    private static byte[] increment( HTablePool pool, String tableName, byte[] row, byte[] family, byte[] qualifier,
+    /**
+     * Increments a value.
+     * 
+     * @param pool the table pool
+     * @param tableName the table name
+     * @param row the row
+     * @param family the family
+     * @param qualifier the qualifier
+     * @param amount the amount
+     * 
+     * @return the incremented value as a byte[]
+     * 
+     * @throws Exception the exception
+     */
+    public static byte[] increment( HBaseTablePool pool, String tableName, byte[] row, byte[] family, byte[] qualifier,
         long amount ) throws Exception
     {
-        HTable table = pool.getTable( tableName );
+        byte[] increment = internIncrement( pool, tableName, row, family, qualifier, amount );
+        if ( LOG.isDebugEnabled() )
+        {
+            LOG.debug( "Increment " + tableName + ":" + Utils.getPrintableString( row ) + ":"
+                + Utils.getPrintableString( family ) + ":" + Utils.getPrintableString( qualifier ) + "="
+                + Utils.getPrintableString( increment ) );
+            logStack( 3 );
+        }
+        return increment;
+    }
+
+
+    public static byte[] internIncrement( HBaseTablePool pool, String tableName, byte[] row, byte[] family,
+        byte[] qualifier, long amount ) throws Exception
+    {
+        HTable table = pool.getTable();
         try
         {
             long id = table.incrementColumnValue( row, family, qualifier, amount, false );
@@ -133,10 +162,10 @@
      * 
      * @throws Exception the exception
      */
-    public static Long getLongValue( HTablePool pool, String tableName, byte[] row, byte[] family, byte[] qualifier,
-        Long defaultValue ) throws Exception
+    public static Long getLongValue( HBaseTablePool pool, String tableName, byte[] row, byte[] family,
+        byte[] qualifier, Long defaultValue ) throws Exception
     {
-        HTable table = pool.getTable( tableName );
+        HTable table = pool.getTable();
         try
         {
             Get get = new Get( row );
@@ -178,7 +207,7 @@
      * 
      * @throws Exception the exception
      */
-    public static void put( HTablePool pool, String tableName, Put put ) throws Exception
+    public static void put( HBaseTablePool pool, String tableName, Put put ) throws Exception
     {
         if ( LOG.isDebugEnabled() )
         {
@@ -188,11 +217,21 @@
             logStack( 2 );
         }
 
-        HTable table = pool.getTable( tableName );
+        HTable table = pool.getTable();
         try
         {
+            put.setWriteToWAL( pool.isStrongConsistency() );
+            int writeBufferCount = table.getWriteBuffer().size();
+
             table.put( put );
             RPC_COUNT++;
+
+            if ( LOG.isDebugEnabled() )
+            {
+                LOG.debug( "Put stats: table=" + tableName + " writeToWAL=" + put.getWriteToWAL() + ", autoFlush="
+                    + table.isAutoFlush() + ", writeBufferSize=" + table.getWriteBufferSize() + ", writeBuffer.size="
+                    + writeBufferCount + "->" + table.getWriteBuffer().size() );
+            }
         }
         finally
         {
@@ -210,7 +249,7 @@
      * 
      * @throws Exception the exception
      */
-    public static void delete( HTablePool pool, String tableName, Delete delete ) throws Exception
+    public static void delete( HBaseTablePool pool, String tableName, Delete delete ) throws Exception
     {
         if ( LOG.isDebugEnabled() )
         {
@@ -228,7 +267,7 @@
             }
         }
 
-        HTable table = pool.getTable( tableName );
+        HTable table = pool.getTable();
         try
         {
             table.delete( delete );
@@ -252,8 +291,10 @@
      * 
      * @throws Exception the exception
      */
-    public static Result get( HTablePool pool, String tableName, Get get ) throws Exception
+    public static Result get( HBaseTablePool pool, String tableName, Get get ) throws Exception
     {
+        HTable table = pool.getTable();
+
         if ( LOG.isDebugEnabled() )
         {
             String row = Utils.getPrintableString( get.getRow() );
@@ -262,10 +303,10 @@
             logStack( 2 );
         }
 
-        HTable table = pool.getTable( tableName );
         try
         {
             Result result = table.get( get );
+            LOG.debug( "Get finished" );
             RPC_COUNT++;
             return result;
         }
@@ -287,7 +328,7 @@
      * 
      * @throws Exception the exception
      */
-    public static boolean exists( HTablePool pool, String tableName, Get get ) throws Exception
+    public static boolean exists( HBaseTablePool pool, String tableName, Get get ) throws Exception
     {
         if ( LOG.isDebugEnabled() )
         {
@@ -296,7 +337,7 @@
             logStack( 2 );
         }
 
-        HTable table = pool.getTable( tableName );
+        HTable table = pool.getTable();
         try
         {
             boolean exists = table.exists( get );
@@ -321,7 +362,7 @@
      * 
      * @throws Exception the exception
      */
-    public static ResultScanner getScanner( HTablePool pool, String tableName, Scan scan ) throws Exception
+    public static ResultScanner getScanner( HBaseTablePool pool, String tableName, Scan scan ) throws Exception
     {
         if ( LOG.isDebugEnabled() )
         {
@@ -333,7 +374,7 @@
             logStack( 2 );
         }
 
-        HTable table = pool.getTable( tableName );
+        HTable table = pool.getTable();
         try
         {
             ResultScanner scanner = table.getScanner( scan );
@@ -369,7 +410,7 @@
                 HColumnDescriptor columnDescriptor = new HColumnDescriptor( family );
                 columnDescriptor.setMaxVersions( 1 );
                 //columnDescriptor.setInMemory( true );
-                //columnDescriptor.setCompressionType( Algorithm.LZO );
+                columnDescriptor.setCompressionType( Algorithm.LZO );
                 descriptor.addFamily( columnDescriptor );
 
             }
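
The refactoring above funnels both increment() and decrement() through the new internIncrement(), which borrows an HTable from the pool and applies HTable.incrementColumnValue(). A minimal standalone sketch of that counter pattern, assuming the same 0.20-era HBase client API this commit builds against (the table, family, and qualifier names are made up for illustration):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CounterSketch
    {
        public static void main( String[] args ) throws Exception
        {
            // hypothetical demo table; not part of this commit
            HTable table = new HTable( new HBaseConfiguration(), "demo_counters" );
            try
            {
                // atomic server-side add; passing -1L instead decrements,
                // which is exactly how the helper's decrement() delegates
                long next = table.incrementColumnValue( Bytes.toBytes( "row1" ),
                    Bytes.toBytes( "info" ), Bytes.toBytes( "count" ), 1L, false );
                System.out.println( "counter is now " + next );
            }
            finally
            {
                table.close();
            }
        }
    }

The trailing false mirrors the helper's call, which skips the write-ahead log for counter updates.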

Added: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTablePool.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTablePool.java?rev=912434&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTablePool.java (added)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTablePool.java Sun Feb 21 22:59:19 2010
@@ -0,0 +1,131 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase.table;
+
+
+import java.util.LinkedList;
+import java.util.Queue;
+
+import org.apache.directory.server.core.partition.hbase.HBaseStore;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
+
+
+/**
+ * A simple pool for {@link HTable}s.
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+public class HBaseTablePool
+{
+
+    protected String tableName;
+    protected HBaseConfiguration configuration;
+    protected boolean strongConsistency;
+    protected Queue<HTable> available;
+    protected Queue<HTable> used;
+
+
+    /**
+     * Creates a new instance of HBaseTablePool.
+     *
+     * @param tableName the table name
+     * @param configuration the HBase configuration
+     */
+    public HBaseTablePool( String tableName, HBaseConfiguration configuration )
+    {
+        this.tableName = tableName;
+        this.configuration = configuration;
+        this.strongConsistency = configuration.getBoolean( HBaseStore.STRONG_CONSISTENCY_PROPERTY, true );
+        this.available = new LinkedList<HTable>();
+        this.used = new LinkedList<HTable>();
+    }
+
+
+    /**
+     * Gets the table from the pool.
+     * 
+     * @return the table
+     * 
+     * @throws Exception the exception
+     */
+    public synchronized HTable getTable() throws Exception
+    {
+        HTable table;
+        if ( !available.isEmpty() )
+        {
+            table = available.remove();
+        }
+        else
+        {
+            table = new HTable( configuration, tableName );
+            table.setAutoFlush( strongConsistency );
+        }
+        used.add( table );
+        return table;
+    }
+
+
+    /**
+     * Puts the table back into the pool.
+     * 
+     * @param table the table
+     * 
+     * @throws Exception the exception
+     */
+    public synchronized void putTable( HTable table ) throws Exception
+    {
+        used.remove( table );
+        available.add( table );
+    }
+
+
+    /**
+     * Closes all pooled tables.
+     * 
+     * @throws Exception the exception
+     */
+    public synchronized void close() throws Exception
+    {
+        for ( HTable table : available )
+        {
+            table.close();
+        }
+        available.clear();
+        for ( HTable table : used )
+        {
+            table.close();
+        }
+        used.clear();
+    }
+
+
+    /**
+     * Checks whether strong consistency is enabled.
+     * 
+     * @return true if strong consistency is enabled
+     */
+    public boolean isStrongConsistency()
+    {
+        return strongConsistency;
+    }
+
+}
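
Callers are expected to pair every getTable() with a putTable() so the instance moves back from the used queue to the available one; HBaseTableHelper follows this shape in its try/finally blocks. A short usage sketch under that contract (the table, family, and qualifier names are hypothetical):

    import org.apache.directory.server.core.partition.hbase.table.HBaseTablePool;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PoolSketch
    {
        public static void main( String[] args ) throws Exception
        {
            HBaseTablePool pool = new HBaseTablePool( "demo_table", new HBaseConfiguration() );
            HTable table = pool.getTable();
            try
            {
                Put put = new Put( Bytes.toBytes( "row1" ) );
                put.add( Bytes.toBytes( "info" ), Bytes.toBytes( "q" ), Bytes.toBytes( "v" ) );
                table.put( put );
            }
            finally
            {
                // return the table instead of closing it
                pool.putTable( table );
            }
            pool.close(); // closes every pooled HTable, e.g. on partition shutdown
        }
    }

Note that the pool is unbounded: each miss on the available queue creates a fresh HTable, and a caller that skips putTable() parks its instance in the used queue until close().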

Modified: directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseClusterTestCaseAdapter.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseClusterTestCaseAdapter.java?rev=912434&r1=912433&r2=912434&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseClusterTestCaseAdapter.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseClusterTestCaseAdapter.java Sun Feb 21 22:59:19 2010
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hbase.HBaseClusterTestCase;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.junit.Ignore;
 
 
@@ -40,6 +41,8 @@
 public class HBaseClusterTestCaseAdapter extends HBaseClusterTestCase
 {
 
+    protected MiniMRCluster mrCluster;
+    
     /**
      * Instantiates a new HBaseClusterTestCase.
      * 
@@ -57,6 +60,8 @@
         conf.set( TEST_DIRECTORY_KEY, testDir.getAbsolutePath() );
         // ...for hdfs
         System.setProperty( "test.build.data", testDir.getAbsolutePath() );
+        // ...for map/reduce
+        System.setProperty( "hadoop.log.dir", testDir.getAbsolutePath() + "/log" );
 
         // setup local file system if no DFS is used
         if ( !startDfs )
@@ -78,6 +83,13 @@
         }
 
         super.setUp();
+        
+        if ( startDfs )
+        {
+            // These are needed for the new and improved Map/Reduce framework
+            conf.set( "mapred.output.dir", conf.get( "hadoop.tmp.dir" ) );
+            mrCluster = new MiniMRCluster( 2, fs.getUri().toString(), 1 );
+        }
 
         // opening the META table ensures that cluster is running
         //Thread.sleep( 10000 );
@@ -89,6 +101,11 @@
     public void tearDown() throws Exception
     {
         super.tearDown();
+        
+        if ( startDfs )
+        {
+            mrCluster.shutdown();
+        }
     }
 
 

Modified: directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedRunner.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedRunner.java?rev=912434&r1=912433&r2=912434&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedRunner.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedRunner.java Sun Feb 21 22:59:19 2010
@@ -30,6 +30,7 @@
 import org.apache.directory.shared.ldap.name.LdapDN;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 
@@ -61,7 +62,7 @@
     })
 @CreateLdapServer(transports =
     { @CreateTransport(protocol = "LDAP", port = 10389, nbThreads=48) })
-//@Ignore
+@Ignore
 public class HBaseDistributedRunner extends AbstractLdapTestUnit
 {
     protected CoreSession session;

Modified: directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseEmbeddedRunner.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseEmbeddedRunner.java?rev=912434&r1=912433&r2=912434&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseEmbeddedRunner.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseEmbeddedRunner.java Sun Feb 21 22:59:19 2010
@@ -33,6 +33,7 @@
 import org.apache.directory.shared.ldap.name.LdapDN;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 
@@ -75,7 +76,7 @@
     })
 @CreateLdapServer(transports =
     { @CreateTransport(protocol = "LDAP", port = 10389, nbThreads=16) })
-//@Ignore
+@Ignore
 public class HBaseEmbeddedRunner extends AbstractLdapTestUnit
 {
     protected CoreSession session;

Added: directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/GetPerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/GetPerformanceEvaluation.java?rev=912434&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/GetPerformanceEvaluation.java (added)
+++ directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/GetPerformanceEvaluation.java Sun Feb 21 22:59:19 2010
@@ -0,0 +1,86 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase.it.mapreduce;
+
+
+import org.apache.commons.lang.math.RandomUtils;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.util.Bytes;
+
+
+public class GetPerformanceEvaluation extends Thread
+{
+
+    static HBaseConfiguration CONF = new HBaseConfiguration();
+    static int THREADS = 10;
+    static int COUNT = 1000;
+    
+    public static void main( String[] args ) throws Exception
+    {
+        for ( int i = 0; i < THREADS; i++ )
+        {
+            new GetPerformanceEvaluation().start();
+        }
+    }
+
+
+    public void run()
+    {
+        try
+        {
+            HTable table = new HTable( CONF, "apacheds_example_master" );
+            // warm up
+            get( table );
+            get( table );
+            get( table );
+
+            long t1 = System.currentTimeMillis();
+            for ( int i = 0; i < COUNT; i++ )
+            {
+                get( table );
+            }
+            long t2 = System.currentTimeMillis();
+            long t = t2 - t1;
+            int rate = COUNT * 1000 / ( int ) t;
+            System.out.println( "--> " + t + " ms, " + rate + "/s" );
+        }
+        catch ( Exception e )
+        {
+            e.printStackTrace();
+        }
+    }
+
+
+    private void get( HTable table ) throws Exception
+    {
+        long l = ( long ) RandomUtils.nextInt( 1000000 );
+        //long l = 771664L + RandomUtils.nextInt( 1000 );
+        Get get = new Get( Bytes.toBytes( l ) ); // 25/s
+        //get.addColumn( Bytes.toBytes( "treeInfo" ), Bytes.toBytes( "upRdn" ) ); // 100/s
+        //byte[] oc0 = Bytes.add( Bytes.toBytes( "objectclass" ), Bytes.toBytes( 0 ) );
+        //get.addColumn( Bytes.toBytes( "upAttributes" ), oc0 ); // 50/s
+        Result result = table.get( get );
+        //System.out.println(result.getBytes().getSize());
+    }
+
+}
\ No newline at end of file

Added: directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/LdifImportAndIndexIT.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/LdifImportAndIndexIT.java?rev=912434&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/LdifImportAndIndexIT.java (added)
+++ directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/LdifImportAndIndexIT.java Sun Feb 21 22:59:19 2010
@@ -0,0 +1,160 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase.it.mapreduce;
+
+
+import org.apache.directory.server.annotations.CreateLdapServer;
+import org.apache.directory.server.annotations.CreateTransport;
+import org.apache.directory.server.core.annotations.CreateDS;
+import org.apache.directory.server.core.annotations.CreateIndex;
+import org.apache.directory.server.core.annotations.CreatePartition;
+import org.apache.directory.server.core.integ.FrameworkRunner;
+import org.apache.directory.server.core.partition.hbase.HBaseDistributedPartition;
+import org.apache.directory.server.core.partition.hbase.Utils;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserColumnIndex;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserRowIndex;
+import org.apache.directory.server.core.partition.hbase.it.AbstractHBasePartitionIT;
+import org.apache.directory.server.core.partition.hbase.mapreduce.IndexBuilder;
+import org.apache.directory.server.core.partition.hbase.mapreduce.LdifImporter;
+import org.apache.directory.server.core.partition.hbase.mapreduce.LdifInputFormat;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+
+/**
+ * 
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+@RunWith(FrameworkRunner.class)
+@CreateDS(name = "example", enableChangeLog = false, partitions =
+    { @CreatePartition(name = "example", suffix = "dc=example,dc=com", type = HBaseDistributedPartition.class, indexes =
+        { @CreateIndex(attribute = "cn", type = HBaseUserColumnIndex.class),
+            @CreateIndex(attribute = "uid", type = HBaseUserColumnIndex.class),
+            @CreateIndex(attribute = "dc", type = HBaseUserRowIndex.class),
+            @CreateIndex(attribute = "o", type = HBaseUserRowIndex.class),
+            @CreateIndex(attribute = "ou", type = HBaseUserRowIndex.class),
+            @CreateIndex(attribute = "objectClass", type = HBaseUserRowIndex.class) }) })
+@CreateLdapServer(transports =
+    { @CreateTransport(protocol = "LDAP", port = 10389, nbThreads = 16) })
+public class LdifImportAndIndexIT extends AbstractHBasePartitionIT
+{
+
+    @Test
+    public void testLdifImport() throws Exception
+    {
+        // import master data
+        for ( int i = 2; i < 5; i++ )
+        {
+            long t0 = System.currentTimeMillis();
+            Configuration conf = new Configuration();
+            conf.set( LdifImporter.NAME_COMPONENT_COUNT, "" + i );
+            conf.set( LdifImporter.SUFFIX, "dc=example,dc=com" );
+            conf.set( LdifImporter.TABLE_PREFIX, "apacheds_example_" );
+
+            Job job = new Job( conf, "LDIF Import" );
+            job.setJarByClass( LdifImporter.class );
+
+            FileInputFormat.addInputPath( job, new Path( "src/test/resources/testdata-5.ldif" ) );
+            //FileInputFormat.addInputPath( job, new Path( "src/test/resources/testdata-10000.ldif" ) );
+            job.setInputFormatClass( LdifInputFormat.class );
+            job.setMapperClass( LdifImporter.class );
+
+            TableMapReduceUtil.initTableReducerJob( ".META.", null, job );
+            job.setNumReduceTasks( 0 );
+
+            boolean status = job.waitForCompletion( true );
+            long t1 = System.currentTimeMillis();
+            long t = t1 - t0;
+            System.out.println( "Import " + status + " in " + t + "ms." );
+            //System.out.println( job.getCounters() );
+            System.out.println();
+        }
+
+        // build indices
+        long t0 = System.currentTimeMillis();
+        Configuration conf = new Configuration();
+        //conf.set( LdifImporter.NAME_COMPONENT_COUNT, ""+i );
+        conf.set( IndexBuilder.SUFFIX, "dc=example,dc=com" );
+        conf.set( IndexBuilder.TABLE_PREFIX, "apacheds_example_" );
+        conf.set( IndexBuilder.COLUMN_INDICES, "cn,uid" );
+        conf.set( IndexBuilder.ROW_INDICES, "dc,o,ou,objectClass" );
+
+        Job job = new Job( conf, "Indexing" );
+        job.setJarByClass( IndexBuilder.class );
+
+        TableMapReduceUtil.initTableMapperJob( "apacheds_example_master", new Scan( Bytes.toBytes( 1L ) ),
+            IndexBuilder.class, null, null, job );
+        job.setOutputFormatClass( NullOutputFormat.class );
+        job.setNumReduceTasks( 0 );
+
+        boolean status = job.waitForCompletion( true );
+        long t1 = System.currentTimeMillis();
+        long t = t1 - t0;
+        System.out.println( "Index " + status + " in " + t + "ms." );
+        //System.out.println( job.getCounters() );
+        System.out.println();
+
+        //
+        //        ClonedServerEntry entry = ldapServer.getDirectoryService().getAdminSession().lookup(
+        //            new LdapDN( "dc=example,dc=com" ) );
+        //        assertNotNull( entry );
+        //
+        //        System.out.println( "ApacheDS started on port " + ldapServer.getPort() + ", press any key to shutdown..." );
+        //        System.in.read();
+    }
+
+
+    private void dump() throws Exception
+    {
+        HTable masterHTable = new HTable( "apacheds_example_master" );
+        Get masterGet = new Get( Bytes.toBytes( 0L ) );
+        System.out.println( masterHTable.exists( masterGet ) );
+        masterGet = new Get( Bytes.toBytes( 1L ) );
+        System.out.println( masterHTable.exists( masterGet ) );
+        ResultScanner masterScanner = masterHTable.getScanner( new Scan() );
+        Result masterResult;
+        while ( ( masterResult = masterScanner.next() ) != null )
+        {
+            System.out.println( "master: " + Utils.getPrintableString( masterResult.getRow() ) );
+        }
+
+        HTable treeHTable = new HTable( "apacheds_example_tree" );
+        ResultScanner treeScanner = treeHTable.getScanner( new Scan() );
+        Result treeResult;
+        while ( ( treeResult = treeScanner.next() ) != null )
+        {
+            System.out.println( "tree: " + Utils.getPrintableString( treeResult.getRow() ) );
+        }
+    }
+
+}
\ No newline at end of file

Added: directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteLdifImport.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteLdifImport.java?rev=912434&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteLdifImport.java (added)
+++ directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteLdifImport.java Sun Feb 21 22:59:19 2010
@@ -0,0 +1,152 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase.it.mapreduce;
+
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.directory.server.core.partition.hbase.mapreduce.IndexBuilder;
+import org.apache.directory.server.core.partition.hbase.mapreduce.LdifImporter;
+import org.apache.directory.server.core.partition.hbase.mapreduce.LdifInputFormat;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
+
+
+/**
+ * 
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+public class RemoteLdifImport
+{
+
+    public static void main( String[] args ) throws Exception
+    {
+        importLdif( "/user/stefan/input/ldif/com-example-10000.ldif", 2 );
+        importLdif( "/user/stefan/input/ldif/com-example-10000.ldif", 3 );
+        importLdif( "/user/stefan/input/ldif/com-example-10000.ldif", 4 );
+        importLdif( "/user/stefan/input/ldif/com-example-100000.ldif", 3 );
+        importLdif( "/user/stefan/input/ldif/com-example-100000.ldif", 4 );
+        importLdif( "/user/stefan/input/ldif/com-example-1000000.ldif", 3 );
+        importLdif( "/user/stefan/input/ldif/com-example-1000000.ldif", 4 );
+
+        cleanIndexTables();
+
+        buildIndices();
+    }
+
+
+    private static void importLdif( String file, int nameComponentCount ) throws IOException, InterruptedException,
+        ClassNotFoundException
+    {
+        long t0 = System.currentTimeMillis();
+        Configuration conf = new Configuration();
+        conf.set( LdifImporter.NAME_COMPONENT_COUNT, "" + nameComponentCount );
+        conf.set( LdifImporter.SUFFIX, "dc=example,dc=com" );
+        conf.set( LdifImporter.TABLE_PREFIX, "apacheds_example_" );
+        
+        conf.set( "mapred.map.max.attempts", "1" );
+        conf.set( "mapred.map.tasks.speculative.execution", "false" );
+
+        Job job = new Job( conf, "LDIF Import" );
+        job.setJarByClass( LdifImporter.class );
+
+        FileInputFormat.addInputPath( job, new Path( file ) );
+        job.setInputFormatClass( LdifInputFormat.class );
+        job.setMapperClass( LdifImporter.class );
+
+        TableMapReduceUtil.initTableReducerJob( ".META.", null, job );
+        job.setNumReduceTasks( 0 );
+
+        boolean status = job.waitForCompletion( true );
+        long t1 = System.currentTimeMillis();
+        long t = t1 - t0;
+        System.out.println( "Import " + status + " in " + t + "ms." );
+        System.out.println( job.getCounters() );
+        System.out.println();
+    }
+
+
+    private static void cleanIndexTables() throws MasterNotRunningException, IOException
+    {
+        // delete tree and index tables
+        List<String> tableNames = new ArrayList<String>();
+        tableNames.add( "apacheds_example_tree" );
+        tableNames.add( "apacheds_example_index_cn" );
+        tableNames.add( "apacheds_example_index_uid" );
+        tableNames.add( "apacheds_example_index_dc" );
+        tableNames.add( "apacheds_example_index_o" );
+        tableNames.add( "apacheds_example_index_ou" );
+        tableNames.add( "apacheds_example_index_objectClass" );
+        HBaseAdmin admin = new HBaseAdmin( new HBaseConfiguration() );
+        for ( String tableName : tableNames )
+        {
+            System.out.println( "Deleting table " + tableName );
+            if ( admin.tableExists( tableName ) )
+            {
+                admin.disableTable( tableName );
+                admin.deleteTable( tableName );
+            }
+        }
+    }
+
+
+    private static void buildIndices() throws IOException, InterruptedException, ClassNotFoundException
+    {
+        // build indices
+        long t0 = System.currentTimeMillis();
+        Configuration conf = new Configuration();
+        //conf.set( LdifImporter.NAME_COMPONENT_COUNT, ""+i );
+        conf.set( IndexBuilder.SUFFIX, "dc=example,dc=com" );
+        conf.set( IndexBuilder.TABLE_PREFIX, "apacheds_example_" );
+        conf.set( IndexBuilder.COLUMN_INDICES, "cn,uid" );
+        conf.set( IndexBuilder.ROW_INDICES, "dc,o,ou,objectClass" );
+
+        conf.set( "mapred.map.max.attempts", "1" );
+        conf.set( "mapred.map.tasks.speculative.execution", "false" );
+
+        Job job = new Job( conf, "Indexing" );
+        job.setJarByClass( IndexBuilder.class );
+
+        TableMapReduceUtil.initTableMapperJob( "apacheds_example_master", new Scan( Bytes.toBytes( 1L ) ),
+            IndexBuilder.class, null, null, job );
+        job.setOutputFormatClass( NullOutputFormat.class );
+        job.setNumReduceTasks( 0 );
+
+        boolean status = job.waitForCompletion( true );
+        long t1 = System.currentTimeMillis();
+        long t = t1 - t0;
+        System.out.println( "Index " + status + " in " + t + "ms." );
+        System.out.println( job.getCounters() );
+        System.out.println();
+    }
+
+}
\ No newline at end of file

Added: directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteRunner.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteRunner.java?rev=912434&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteRunner.java (added)
+++ directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteRunner.java Sun Feb 21 22:59:19 2010
@@ -0,0 +1,101 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase.it.mapreduce;
+
+import org.apache.directory.server.annotations.CreateLdapServer;
+import org.apache.directory.server.annotations.CreateTransport;
+import org.apache.directory.server.core.DirectoryService;
+import org.apache.directory.server.core.annotations.CreateDS;
+import org.apache.directory.server.core.annotations.CreateIndex;
+import org.apache.directory.server.core.annotations.CreatePartition;
+import org.apache.directory.server.core.factory.DSAnnotationProcessor;
+import org.apache.directory.server.core.partition.hbase.HBaseDistributedPartition;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserColumnIndex;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserRowIndex;
+import org.apache.directory.server.factory.ServerAnnotationProcessor;
+import org.apache.directory.server.ldap.LdapServer;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+
+
+/**
+ * Starts up an ApacheDS LDAP server.
+ * 
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+@CreateDS(
+    name = "example", 
+    enableChangeLog = false,
+    partitions =
+    { 
+        @CreatePartition(
+            name = "example", 
+            suffix = "dc=example,dc=com",
+            type = HBaseDistributedPartition.class,
+            indexes = {
+                @CreateIndex( attribute="cn", type = HBaseUserColumnIndex.class ),
+                @CreateIndex( attribute="uid", type = HBaseUserColumnIndex.class ),
+                
+                @CreateIndex( attribute="dc", type = HBaseUserRowIndex.class ),
+                @CreateIndex( attribute="o", type = HBaseUserRowIndex.class ),
+                @CreateIndex( attribute="ou", type = HBaseUserRowIndex.class ),
+
+                @CreateIndex( attribute="objectClass", type = HBaseUserRowIndex.class )
+            }
+        )
+    })
+@CreateLdapServer(transports =
+    { @CreateTransport(protocol = "LDAP", port = 10389, nbThreads=48) })
+public class RemoteRunner
+{
+
+    public static void main( String[] args ) throws Exception
+    {
+        DirectoryService service = DSAnnotationProcessor.getDirectoryService();
+        LdapServer ldapServer = ServerAnnotationProcessor.getLdapServer( service, 10389 );
+        System.out.println( "ApacheDS started on port " + ldapServer.getPort() + ", press any key to shutdown..." );
+        System.in.read();
+        ldapServer.stop();
+        service.shutdown();
+    }
+
+    
+    private static void compact() throws Exception
+    {
+        HBaseAdmin admin = new HBaseAdmin( new HBaseConfiguration() );
+        String[] TABLES =
+            { "apacheds_example_master", "apacheds_example_tree", "apacheds_example_index_objectClass",
+                "apacheds_example_index_dc", "apacheds_example_index_o", "apacheds_example_index_ou", "apacheds_example_index_uid",
+                "apacheds_example_index_cn" };
+        for ( String table : TABLES )
+        {
+            if ( admin.tableExists( table ) )
+            {
+                // admin.flush( table );
+                // Thread.sleep( 10000 );
+                // admin.compact( table );
+                // Thread.sleep( 10000 );
+                admin.majorCompact( table );
+                Thread.sleep( 10000 );
+            }
+        }
+    }
+}
\ No newline at end of file

Modified: directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/AbstractHBaseTableTest.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/AbstractHBaseTableTest.java?rev=912434&r1=912433&r2=912434&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/AbstractHBaseTableTest.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/AbstractHBaseTableTest.java Sun Feb 21 22:59:19 2010
@@ -29,6 +29,7 @@
 
 import org.apache.directory.server.core.entry.DefaultServerEntry;
 import org.apache.directory.server.core.partition.hbase.HBaseClusterTestCaseAdapter;
+import org.apache.directory.server.core.partition.hbase.HBaseStore;
 import org.apache.directory.server.core.partition.hbase.it.AbstractHBasePartitionIT;
 import org.apache.directory.shared.ldap.csn.CsnFactory;
 import org.apache.directory.shared.ldap.name.LdapDN;
@@ -54,6 +55,7 @@
     protected static final CsnFactory CSN_FACTORY = new CsnFactory( 0 );
     protected static HBaseClusterTestCaseAdapter adapter;
     protected static SchemaManager schemaManager;
+    protected static HBaseStore store;
     protected static LdapDN suffixDn;
 
 
@@ -108,6 +110,12 @@
 
         suffixDn = new LdapDN( "o=Good Times Co." );
         suffixDn.normalize( schemaManager.getNormalizerMapping() );
+
+        store = new HBaseStore();
+        store.setSuffixDn( suffixDn.getName() );
+        store.setCacheSize( 100 );
+        store.setTablePrefix( TABLE_PREFIX );
+        store.init( schemaManager );
     }
 
 

Modified: directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTableTest.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTableTest.java?rev=912434&r1=912433&r2=912434&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTableTest.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTableTest.java Sun Feb 21 22:59:19 2010
@@ -28,6 +28,8 @@
 import java.util.NavigableMap;
 
 import org.apache.directory.server.core.entry.DefaultServerEntry;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserColumnIndex;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserRowIndex;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
@@ -88,12 +90,38 @@
             }
         }
 
-        objectClassIndexTable = new HBaseRowIndexTable( "2.5.4.0", schemaManager, TABLE_PREFIX, adapter.conf, 100 );
-        jpegPhotoIndexTable = new HBaseRowIndexTable( "0.9.2342.19200300.100.1.60", schemaManager, TABLE_PREFIX,
-            adapter.conf, 100 );
-        cnIndexTable = new HBaseColumnIndexTable( "2.5.4.3", schemaManager, TABLE_PREFIX, adapter.conf, 100 );
-        userCertificateIndexTable = new HBaseColumnIndexTable( "2.5.4.36", schemaManager, TABLE_PREFIX, adapter.conf,
-            100 );
+//        objectClassIndexTable = new HBaseRowIndexTable( "2.5.4.0", store, 100 );
+//        jpegPhotoIndexTable = new HBaseRowIndexTable( "0.9.2342.19200300.100.1.60", store, 100 );
+//        cnIndexTable = new HBaseColumnIndexTable( "2.5.4.3", schemaManager, TABLE_PREFIX, adapter.conf, 100 );
+//        userCertificateIndexTable = new HBaseColumnIndexTable( "2.5.4.36", schemaManager, TABLE_PREFIX, adapter.conf,
+//            100 );
+        
+        HBaseUserRowIndex index = new HBaseUserRowIndex();
+        index.setAttributeId( "2.5.4.0" );
+        index.setCacheSize( 100 );
+        index.setStore( store );
+        objectClassIndexTable = index.getIndexTable();
+        store.addIndex( index );
+        
+        index = new HBaseUserRowIndex();
+        index.setAttributeId( "0.9.2342.19200300.100.1.60" );
+        index.setStore( store );
+        jpegPhotoIndexTable = index.getIndexTable();
+        store.addIndex( index );
+        
+        HBaseUserColumnIndex index2 = new HBaseUserColumnIndex();
+        index2.setAttributeId( "2.5.4.3" );
+        index2.setStore( store );
+        cnIndexTable = index2.getIndexTable();
+        store.addIndex( index2 );
+        
+        index2 = new HBaseUserColumnIndex();
+        index2.setAttributeId( "2.5.4.36" );
+        index2.setStore( store );
+        userCertificateIndexTable = index2.getIndexTable();
+        store.addIndex( index2 );
+        
+        
     }
 
 
@@ -118,20 +146,12 @@
             .toBytes( 1L ) ) ) ) );
         assertTrue( objectClassIndexHTable.exists( equalGet ) );
         Result equalResult = objectClassIndexHTable.get( equalGet );
-        Get countGet = new Get( Bytes.toBytes( "#organization" ) );
-        assertTrue( objectClassIndexHTable.exists( countGet ) );
-        Result countResult = objectClassIndexHTable.get( countGet );
 
         NavigableMap<byte[], byte[]> equalInfoMap = equalResult.getFamilyMap( Bytes.toBytes( "info" ) );
         assertNotNull( equalInfoMap );
         assertEquals( 1, equalInfoMap.size() );
         assertEquals( 1L, Bytes.toLong( equalInfoMap.get( Bytes.toBytes( "id" ) ) ) );
 
-        NavigableMap<byte[], byte[]> countInfoMap = countResult.getFamilyMap( Bytes.toBytes( "info" ) );
-        assertNotNull( countInfoMap );
-        assertEquals( 1, countInfoMap.size() );
-        assertEquals( 1L, Bytes.toLong( countInfoMap.get( Bytes.toBytes( "count" ) ) ) );
-
         // 2nd entry
         entry = buildOuSalesEntry();
         objectClassIndexTable.add( entry.get( "objectClass" ).get( 0 ).getBytes(), 2L );
@@ -159,26 +179,12 @@
         assertTrue( objectClassIndexHTable.exists( new Get( Bytes.add( Bytes.toBytes( "=top" ), Bytes.toBytes( Base64
             .encodeBytes( Bytes.toBytes( 3L ) ) ) ) ) ) );
 
-        assertTrue( objectClassIndexHTable.exists( new Get( Bytes.toBytes( "#organization" ) ) ) );
-        assertTrue( objectClassIndexHTable.exists( new Get( Bytes.toBytes( "#organizationalunit" ) ) ) );
-        assertTrue( objectClassIndexHTable.exists( new Get( Bytes.toBytes( "#top" ) ) ) );
-        assertTrue( objectClassIndexHTable.exists( new Get( Bytes.toBytes( "#organizationalperson" ) ) ) );
-        assertTrue( objectClassIndexHTable.exists( new Get( Bytes.toBytes( "#person" ) ) ) );
-
-        assertTrue( cnIndexHTable.exists( new Get( Bytes.toBytes( "#johnny \u65E5\u672C walker" ) ) ) );
-
-        Result topCountResult = objectClassIndexHTable.get( new Get( Bytes.toBytes( "#top" ) ) );
-        NavigableMap<byte[], byte[]> topCountInfoMap = topCountResult.getFamilyMap( Bytes.toBytes( "info" ) );
-        assertNotNull( topCountInfoMap );
-        assertEquals( 1, topCountInfoMap.size() );
-        assertEquals( 2L, Bytes.toLong( topCountInfoMap.get( Bytes.toBytes( "count" ) ) ) );
+        assertTrue( cnIndexHTable.exists( new Get( Bytes.toBytes( "=johnny \u65E5\u672C walker" ) ) ) );
 
-        Result cnCountResult = cnIndexHTable.get( new Get( Bytes.toBytes( "#johnny \u65E5\u672C walker" ) ) );
+        Result cnCountResult = cnIndexHTable.get( new Get( Bytes.toBytes( "=johnny \u65E5\u672C walker" ) ) );
         NavigableMap<byte[], byte[]> cnCountInfoMap = cnCountResult.getFamilyMap( Bytes.toBytes( "info" ) );
         assertNotNull( cnCountInfoMap );
-        assertEquals( 2, cnCountInfoMap.size() );
-        assertEquals( 1L, Bytes.toLong( cnCountInfoMap.get( Bytes.toBytes( "count" ) ) ) );
-        assertEquals( 3L, Bytes.toLong( cnCountInfoMap.get( Bytes.toBytes( 3L ) ) ) );
+        assertEquals( 1, cnCountInfoMap.size() );
     }
 
 
@@ -223,25 +229,7 @@
         assertFalse( objectClassIndexHTable.exists( new Get( Bytes.add( Bytes.toBytes( "=top" ), Bytes.toBytes( Base64
             .encodeBytes( Bytes.toBytes( 3L ) ) ) ) ) ) );
 
-        assertTrue( objectClassIndexHTable.exists( new Get( Bytes.toBytes( "#organization" ) ) ) );
-        assertTrue( objectClassIndexHTable.exists( new Get( Bytes.toBytes( "#organizationalunit" ) ) ) );
-        assertTrue( objectClassIndexHTable.exists( new Get( Bytes.toBytes( "#top" ) ) ) );
-        assertTrue( objectClassIndexHTable.exists( new Get( Bytes.toBytes( "#organizationalperson" ) ) ) );
-        assertTrue( objectClassIndexHTable.exists( new Get( Bytes.toBytes( "#person" ) ) ) );
-
-        assertTrue( cnIndexHTable.exists( new Get( Bytes.toBytes( "#johnny \u65E5\u672C walker" ) ) ) );
-
-        Result topCountResult = objectClassIndexHTable.get( new Get( Bytes.toBytes( "#top" ) ) );
-        NavigableMap<byte[], byte[]> topCountInfoMap = topCountResult.getFamilyMap( Bytes.toBytes( "info" ) );
-        assertNotNull( topCountInfoMap );
-        assertEquals( 1, topCountInfoMap.size() );
-        assertEquals( 1L, Bytes.toLong( topCountInfoMap.get( Bytes.toBytes( "count" ) ) ) );
-
-        Result cnCountResult = cnIndexHTable.get( new Get( Bytes.toBytes( "#johnny \u65E5\u672C walker" ) ) );
-        NavigableMap<byte[], byte[]> cnCountInfoMap = cnCountResult.getFamilyMap( Bytes.toBytes( "info" ) );
-        assertNotNull( cnCountInfoMap );
-        assertEquals( 1, cnCountInfoMap.size() );
-        assertEquals( 0L, Bytes.toLong( cnCountInfoMap.get( Bytes.toBytes( "count" ) ) ) );
+        assertFalse( cnIndexHTable.exists( new Get( Bytes.toBytes( "=johnny \u65E5\u672C walker" ) ) ) );
 
         // delete 2nd and 1st entry
         objectClassIndexTable.drop( ouSalesEntry.get( "objectClass" ).get( 0 ).getBytes(), 2L );
@@ -427,24 +415,24 @@
             { '=', ( byte ) 0xFF, ( byte ) 0xFF }, scanKey ) );
 
         scanKey = cnIndexTable.getScanKey( "bar", Bytes.toBytes( 0L ) );
-        assertEquals( "#bar", Bytes.toString( scanKey ) );
+        assertEquals( "=bar", Bytes.toString( scanKey ) );
 
         scanKey = cnIndexTable.getScanKey( "foo", Bytes.toBytes( Long.MAX_VALUE ) );
-        assertEquals( "#foo", Bytes.toString( scanKey ) );
+        assertEquals( "=foo", Bytes.toString( scanKey ) );
 
         scanKey = cnIndexTable.getScanKey( "foobar", null );
-        assertEquals( "#foobar", Bytes.toString( scanKey ) );
+        assertEquals( "=foobar", Bytes.toString( scanKey ) );
 
         scanKey = cnIndexTable.getScanKey( "", Bytes.toBytes( 1L ) );
-        assertEquals( "#", Bytes.toString( scanKey ) );
+        assertEquals( "=", Bytes.toString( scanKey ) );
 
         scanKey = cnIndexTable.getScanKey( HBaseIndexTable.FULL_SCAN_START, HBaseIndexTable.VALUE_SCAN_START );
         assertTrue( Bytes.equals( new byte[]
-            { '#', 0x00, 0x00 }, scanKey ) );
+            { '=', 0x00, 0x00 }, scanKey ) );
 
         scanKey = cnIndexTable.getScanKey( HBaseIndexTable.FULL_SCAN_STOP, HBaseIndexTable.VALUE_SCAN_STOP );
         assertTrue( Bytes.equals( new byte[]
-            { '#', ( byte ) 0xFF, ( byte ) 0xFF }, scanKey ) );
+            { '=', ( byte ) 0xFF, ( byte ) 0xFF }, scanKey ) );
     }
 
 
@@ -459,19 +447,11 @@
         assertTrue( Bytes.equals( new byte[]
             { 0x00, 0x01 }, ( byte[] ) value ) );
 
-        value = objectClassIndexTable.getValueFromCountKey( Bytes.toBytes( "#foo" ) );
-        assertEquals( "foo", value );
-
-        value = jpegPhotoIndexTable.getValueFromCountKey( new byte[]
-            { '#', ( byte ) 0xEF, ( byte ) 0xFF } );
-        assertTrue( Bytes.equals( new byte[]
-            { ( byte ) 0xEF, ( byte ) 0xFF }, ( byte[] ) value ) );
-
-        value = cnIndexTable.getValueFromCountKey( Bytes.toBytes( "#foobar" ) );
+        value = cnIndexTable.extractValueFromEqualsKey( Bytes.toBytes( "=foobar" ) );
         assertEquals( "foobar", value );
 
-        value = userCertificateIndexTable.getValueFromCountKey( new byte[]
-            { '#', 0x00, 0x01 } );
+        value = userCertificateIndexTable.extractValueFromEqualsKey( new byte[]
+            { '=', 0x00, 0x01 } );
         assertTrue( Bytes.equals( new byte[]
             { 0x00, 0x01 }, ( byte[] ) value ) );
     }

Modified: directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTableTest.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTableTest.java?rev=912434&r1=912433&r2=912434&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTableTest.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTableTest.java Sun Feb 21 22:59:19 2010
@@ -82,10 +82,8 @@
             }
         }
 
-        objectClassPresenceIndexTable = new HBasePresenceIndexTable( "2.5.4.0", schemaManager, TABLE_PREFIX,
-            adapter.conf, 100 );
-        jpegPhotoPresenceIndexTable = new HBasePresenceIndexTable( "0.9.2342.19200300.100.1.60", schemaManager,
-            TABLE_PREFIX, adapter.conf, 100 );
+        objectClassPresenceIndexTable = new HBasePresenceIndexTable( "2.5.4.0", store, 100 );
+        jpegPhotoPresenceIndexTable = new HBasePresenceIndexTable( "0.9.2342.19200300.100.1.60", store, 100 );
     }
 
 
@@ -108,20 +106,12 @@
             .toBytes( 1L ) ) ) ) );
         assertTrue( objectClassPresenceIndexHTable.exists( presenceGet ) );
         Result presenceResult = objectClassPresenceIndexHTable.get( presenceGet );
-        Get countGet = new Get( Bytes.toBytes( "!" ) );
-        assertTrue( objectClassPresenceIndexHTable.exists( countGet ) );
-        Result countResult = objectClassPresenceIndexHTable.get( countGet );
 
         NavigableMap<byte[], byte[]> presenceInfoMap = presenceResult.getFamilyMap( Bytes.toBytes( "info" ) );
         assertNotNull( presenceInfoMap );
         assertEquals( 1, presenceInfoMap.size() );
         assertEquals( 1L, Bytes.toLong( presenceInfoMap.get( Bytes.toBytes( "id" ) ) ) );
 
-        NavigableMap<byte[], byte[]> countInfoMap = countResult.getFamilyMap( Bytes.toBytes( "info" ) );
-        assertNotNull( countInfoMap );
-        assertEquals( 1, countInfoMap.size() );
-        assertEquals( 1L, Bytes.toLong( countInfoMap.get( Bytes.toBytes( "count" ) ) ) );
-
         // 2nd entry
         objectClassPresenceIndexTable.add( 2L );
 
@@ -139,22 +129,6 @@
             .toBytes( Base64.encodeBytes( Bytes.toBytes( 3L ) ) ) ) ) ) );
         assertTrue( jpegPhotoPresenceIndexHTable.exists( new Get( Bytes.add( Bytes.toBytes( "*" ), Bytes
             .toBytes( Base64.encodeBytes( Bytes.toBytes( 3L ) ) ) ) ) ) );
-
-        countGet = new Get( Bytes.toBytes( "!" ) );
-        assertTrue( objectClassPresenceIndexHTable.exists( countGet ) );
-        countResult = objectClassPresenceIndexHTable.get( countGet );
-        countInfoMap = countResult.getFamilyMap( Bytes.toBytes( "info" ) );
-        assertNotNull( countInfoMap );
-        assertEquals( 1, countInfoMap.size() );
-        assertEquals( 3L, Bytes.toLong( countInfoMap.get( Bytes.toBytes( "count" ) ) ) );
-
-        countGet = new Get( Bytes.toBytes( "!" ) );
-        assertTrue( jpegPhotoPresenceIndexHTable.exists( countGet ) );
-        countResult = jpegPhotoPresenceIndexHTable.get( countGet );
-        countInfoMap = countResult.getFamilyMap( Bytes.toBytes( "info" ) );
-        assertNotNull( countInfoMap );
-        assertEquals( 1, countInfoMap.size() );
-        assertEquals( 1L, Bytes.toLong( countInfoMap.get( Bytes.toBytes( "count" ) ) ) );
     }
 
 
@@ -185,22 +159,6 @@
         assertFalse( jpegPhotoPresenceIndexHTable.exists( new Get( Bytes.add( Bytes.toBytes( "*" ), Bytes
             .toBytes( Base64.encodeBytes( Bytes.toBytes( 3L ) ) ) ) ) ) );
 
-        Get countGet = new Get( Bytes.toBytes( "!" ) );
-        assertTrue( objectClassPresenceIndexHTable.exists( countGet ) );
-        Result countResult = objectClassPresenceIndexHTable.get( countGet );
-        NavigableMap<byte[], byte[]> countInfoMap = countResult.getFamilyMap( Bytes.toBytes( "info" ) );
-        assertNotNull( countInfoMap );
-        assertEquals( 1, countInfoMap.size() );
-        assertEquals( 2L, Bytes.toLong( countInfoMap.get( Bytes.toBytes( "count" ) ) ) );
-
-        countGet = new Get( Bytes.toBytes( "!" ) );
-        assertTrue( jpegPhotoPresenceIndexHTable.exists( countGet ) );
-        countResult = jpegPhotoPresenceIndexHTable.get( countGet );
-        countInfoMap = countResult.getFamilyMap( Bytes.toBytes( "info" ) );
-        assertNotNull( countInfoMap );
-        assertEquals( 1, countInfoMap.size() );
-        assertEquals( 0L, Bytes.toLong( countInfoMap.get( Bytes.toBytes( "count" ) ) ) );
-
          // delete 2nd and 1st entry
         objectClassPresenceIndexTable.drop( 2L );
         objectClassPresenceIndexTable.drop( 1L );

Modified: directory/sandbox/seelmann/hbase-partition/src/test/resources/log4j.properties
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/resources/log4j.properties?rev=912434&r1=912433&r2=912434&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/resources/log4j.properties (original)
+++ directory/sandbox/seelmann/hbase-partition/src/test/resources/log4j.properties Sun Feb 21 22:59:19 2010
@@ -46,5 +46,6 @@
 #log4j.logger.org.apache.directory.server.core.partition.hbase.table=TRACE
 #log4j.logger.org.apache.directory.server.core.partition.hbase.table.HBaseMasterTable$MasterTableKey=DEBUG
 #log4j.logger.org.apache.hadoop.hbase.client=DEBUG
+log4j.logger.org.apache.hadoop=ERROR
 log4j.logger.org.apache.hadoop.metrics=FATAL
 log4j.logger.org.apache.hadoop.hbase.metrics=FATAL
\ No newline at end of file

Added: directory/sandbox/seelmann/hbase-partition/src/test/resources/testdata-5.ldif
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/test/resources/testdata-5.ldif?rev=912434&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/test/resources/testdata-5.ldif (added)
+++ directory/sandbox/seelmann/hbase-partition/src/test/resources/testdata-5.ldif Sun Feb 21 22:59:19 2010
@@ -0,0 +1,40 @@
+
+
+
+dn: uid=hnelson,ou=users,dc=example,dc=com
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+objectClass: person
+objectClass: top
+uid: hnelson
+sn: Nelson
+givenName: Horatio
+cn: Horatio Nelson
+
+
+
+dn: dc=example,dc=com
+objectClass: domain
+objectClass: top
+dc: example
+
+dn: ou=groups,dc=example,dc=com
+objectClass: organizationalUnit
+objectClass: top
+ou: groups
+
+dn: uid=hhornblo,ou=users,dc=example,dc=com
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+objectClass: person
+objectClass: top
+uid: hhornblo
+sn: Hornblower
+givenName: Horatio
+cn: Horatio Hornblower
+
+
+dn: ou=users,dc=example,dc=com
+objectClass: organizationalUnit
+objectClass: top
+ou: users


