cassandra-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jbel...@apache.org
Subject svn commit: r1026200 [5/11] - in /cassandra/trunk: ./ interface/ interface/thrift/gen-java/org/apache/cassandra/thrift/ src/java/org/apache/cassandra/auth/ src/java/org/apache/cassandra/avro/ src/java/org/apache/cassandra/cli/ src/java/org/apache/cassa...
Date Fri, 22 Oct 2010 03:23:31 GMT
Modified: cassandra/trunk/src/java/org/apache/cassandra/cli/CliClient.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/cli/CliClient.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/cli/CliClient.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/cli/CliClient.java Fri Oct 22 03:23:26 2010
@@ -19,19 +19,59 @@ package org.apache.cassandra.cli;
 
 import java.io.UnsupportedEncodingException;
 import java.math.BigInteger;
-import java.util.*;
-
-import org.apache.cassandra.config.ConfigurationException;
-import org.apache.cassandra.utils.UUIDGen;
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.commons.lang.StringUtils;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
 
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
 import org.apache.cassandra.auth.SimpleAuthenticator;
-import org.apache.cassandra.db.marshal.*;
-import org.apache.cassandra.thrift.*;
+import org.apache.cassandra.config.ConfigurationException;
+import org.apache.cassandra.db.marshal.AbstractType;
+import org.apache.cassandra.db.marshal.AsciiType;
+import org.apache.cassandra.db.marshal.BytesType;
+import org.apache.cassandra.db.marshal.IntegerType;
+import org.apache.cassandra.db.marshal.LexicalUUIDType;
+import org.apache.cassandra.db.marshal.LongType;
+import org.apache.cassandra.db.marshal.TimeUUIDType;
+import org.apache.cassandra.db.marshal.UTF8Type;
+import org.apache.cassandra.thrift.AuthenticationException;
+import org.apache.cassandra.thrift.AuthenticationRequest;
+import org.apache.cassandra.thrift.AuthorizationException;
+import org.apache.cassandra.thrift.Cassandra;
+import org.apache.cassandra.thrift.CfDef;
+import org.apache.cassandra.thrift.Column;
+import org.apache.cassandra.thrift.ColumnDef;
+import org.apache.cassandra.thrift.ColumnOrSuperColumn;
+import org.apache.cassandra.thrift.ColumnParent;
+import org.apache.cassandra.thrift.ColumnPath;
+import org.apache.cassandra.thrift.ConsistencyLevel;
+import org.apache.cassandra.thrift.IndexClause;
+import org.apache.cassandra.thrift.IndexExpression;
+import org.apache.cassandra.thrift.IndexOperator;
+import org.apache.cassandra.thrift.IndexType;
+import org.apache.cassandra.thrift.InvalidRequestException;
+import org.apache.cassandra.thrift.KeyRange;
+import org.apache.cassandra.thrift.KeySlice;
+import org.apache.cassandra.thrift.KsDef;
+import org.apache.cassandra.thrift.NotFoundException;
+import org.apache.cassandra.thrift.SlicePredicate;
+import org.apache.cassandra.thrift.SliceRange;
+import org.apache.cassandra.thrift.SuperColumn;
+import org.apache.cassandra.thrift.TimedOutException;
+import org.apache.cassandra.thrift.UnavailableException;
+import org.apache.cassandra.utils.ByteBufferUtil;
 import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.UUIDGen;
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.thrift.TException;
 
 // Cli Client Side Library
@@ -515,7 +555,7 @@ public class CliClient 
        
        if (columnSpecCnt == 0)
        {
-           colParent = new ColumnParent(columnFamily).setSuper_column(null);
+           colParent = new ColumnParent(columnFamily).setSuper_column((ByteBuffer)null);
        }
        else
        {
@@ -523,10 +563,10 @@ public class CliClient 
            colParent = new ColumnParent(columnFamily).setSuper_column(CliCompiler.getColumn(columnFamilySpec, 0).getBytes("UTF-8"));
        }
 
-       SliceRange range = new SliceRange(ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.EMPTY_BYTE_ARRAY, false, Integer.MAX_VALUE);
+       SliceRange range = new SliceRange(FBUtilities.EMPTY_BYTE_BUFFER, FBUtilities.EMPTY_BYTE_BUFFER, false, Integer.MAX_VALUE);
        SlicePredicate predicate = new SlicePredicate().setColumn_names(null).setSlice_range(range);
        
-       int count = thriftClient_.get_count(key.getBytes(), colParent, predicate, ConsistencyLevel.ONE);
+       int count = thriftClient_.get_count(ByteBuffer.wrap(key.getBytes("UTF-8")), colParent, predicate, ConsistencyLevel.ONE);
        css_.out.printf("%d columns\n", count);
     }
     
@@ -584,7 +624,14 @@ public class CliClient 
             columnName = CliCompiler.getColumn(columnFamilySpec, 1).getBytes("UTF-8");
         }
 
-        thriftClient_.remove(key.getBytes(), new ColumnPath(columnFamily).setSuper_column(superColumnName).setColumn(columnName),
+        ColumnPath path = new ColumnPath(columnFamily);
+        if(superColumnName != null)
+            path.setSuper_column(superColumnName);
+        
+        if(columnName != null)
+            path.setColumn(columnName);
+        
+        thriftClient_.remove(ByteBuffer.wrap(key.getBytes("UTF-8")), path,
                              FBUtilities.timestampMicros(), ConsistencyLevel.ONE);
         css_.out.println(String.format("%s removed.", (columnSpecCnt == 0) ? "row" : "column"));
     }
@@ -592,9 +639,13 @@ public class CliClient 
     private void doSlice(String keyspace, String key, String columnFamily, byte[] superColumnName)
             throws InvalidRequestException, UnavailableException, TimedOutException, TException, UnsupportedEncodingException, IllegalAccessException, NotFoundException, InstantiationException, NoSuchFieldException
     {
-        SliceRange range = new SliceRange(ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.EMPTY_BYTE_ARRAY, true, 1000000);
-        List<ColumnOrSuperColumn> columns = thriftClient_.get_slice(key.getBytes(),
-                                                                    new ColumnParent(columnFamily).setSuper_column(superColumnName),
+        
+        ColumnParent parent = new ColumnParent(columnFamily);
+        if(superColumnName != null)
+            parent.setSuper_column(superColumnName);
+                
+        SliceRange range = new SliceRange(FBUtilities.EMPTY_BYTE_BUFFER, FBUtilities.EMPTY_BYTE_BUFFER, true, 1000000);
+        List<ColumnOrSuperColumn> columns = thriftClient_.get_slice(ByteBuffer.wrap(key.getBytes("UTF-8")),parent,
                                                                     new SlicePredicate().setColumn_names(null).setSlice_range(range), ConsistencyLevel.ONE);
         int size = columns.size();
 
@@ -623,7 +674,7 @@ public class CliClient 
                 Column column = cosc.column;
                 validator = getValidatorForValue(cfDef, column.getName());
                 css_.out.printf("=> (column=%s, value=%s, timestamp=%d)\n", formatColumnName(keyspace, columnFamily, column),
-                                validator.getString(column.value), column.timestamp);
+                               validator.getString(column.value), column.timestamp);
             }
         }
         
@@ -720,15 +771,16 @@ public class CliClient 
             return;
         }
 
-        byte[] columnNameInBytes = columnNameAsByteArray(columnName, columnFamily);
-        AbstractType validator = getValidatorForValue(columnFamilyDef, columnNameInBytes);
+        ByteBuffer columnNameInBytes = columnNameAsByteArray(columnName, columnFamily);
+        AbstractType validator = getValidatorForValue(columnFamilyDef, columnNameInBytes.array());
         
         // Perform a get()
-        ColumnPath path = new ColumnPath(columnFamily).setSuper_column(superColumnName).setColumn(columnNameInBytes);
-        Column column = thriftClient_.get(key.getBytes(), path, ConsistencyLevel.ONE).column;
+        ColumnPath path = new ColumnPath(columnFamily);
+        if(superColumnName != null) path.setSuper_column(superColumnName);
+        if(columnNameInBytes != null) path.setColumn(columnNameInBytes);
+        Column column = thriftClient_.get(ByteBuffer.wrap(key.getBytes("UTF-8")), path, ConsistencyLevel.ONE).column;
 
-        byte[] columnValue = column.getValue();
-        
+        byte[] columnValue = column.getValue();       
         String valueAsString;
         
         // we have ^(CONVERT_TO_TYPE <type>) inside of GET statement
@@ -744,13 +796,13 @@ public class CliClient 
             AbstractType valueValidator = getFormatTypeForColumn(typeName);
 
             // setting value for output
-            valueAsString = valueValidator.getString(columnValue);
+            valueAsString = valueValidator.getString(ByteBuffer.wrap(columnValue));
             // updating column value validator class
             updateColumnMetaData(columnFamilyDef, columnNameInBytes, valueValidator.getClass().getName());
         }
         else
         {
-            valueAsString = (validator == null) ? new String(columnValue, "UTF-8") : validator.getString(columnValue);
+            valueAsString = (validator == null) ? new String(columnValue, "UTF-8") : validator.getString(ByteBuffer.wrap(columnValue));
         }
 
         // print results
@@ -795,8 +847,8 @@ public class CliClient 
 
             try
             {
-                byte[] value;
-                byte[] columnName = columnNameAsByteArray(columnNameString, columnFamily);
+                ByteBuffer value;
+                ByteBuffer columnName = columnNameAsByteArray(columnNameString, columnFamily);
 
                 if (valueTree.getType() == CliParser.FUNCTION_CALL)
                 {
@@ -894,8 +946,8 @@ public class CliClient 
         }
 
 
-        byte[] columnNameInBytes = columnNameAsByteArray(columnName, columnFamily);
-        byte[] columnValueInBytes;
+        ByteBuffer columnNameInBytes = columnNameAsByteArray(columnName, columnFamily);
+        ByteBuffer columnValueInBytes;
 
         switch (valueTree.getType())
         {
@@ -906,8 +958,12 @@ public class CliClient 
             columnValueInBytes = columnValueAsByteArray(columnNameInBytes, columnFamily, value);
         }
 
+        ColumnParent parent = new ColumnParent(columnFamily);
+        if(superColumnName != null)
+            parent.setSuper_column(superColumnName);
+        
         // do the insert
-        thriftClient_.insert(key.getBytes(), new ColumnParent(columnFamily).setSuper_column(superColumnName),
+        thriftClient_.insert(ByteBuffer.wrap(key.getBytes("UTF-8")), parent,
                              new Column(columnNameInBytes, columnValueInBytes, FBUtilities.timestampMicros()), ConsistencyLevel.ONE);
         
         css_.out.println("Value inserted.");
@@ -1275,6 +1331,7 @@ public class CliClient 
 
         if (limitCount < keySlices.size())
         {
+
             // limitCount could be Integer.MAX_VALUE
             toIndex = limitCount;
         }
@@ -1452,7 +1509,7 @@ public class CliClient 
                     css_.out.println(leftSpace + "Column Metadata:");
                     for (ColumnDef columnDef : cf_def.getColumn_metadata())
                     {
-                        String columnName = columnNameValidator.getString(columnDef.getName());
+                        String columnName = columnNameValidator.getString(columnDef.name);
 
                         css_.out.println(leftSpace + "  Column Name: " + columnName);
                         css_.out.println(columnLeftSpace + "Validation Class: " + columnDef.getValidation_class());
@@ -1656,7 +1713,7 @@ public class CliClient 
      * @return byte[] - object in the byte array representation
      * @throws UnsupportedEncodingException - raised but String.getBytes(encoding)
      */
-    private byte[] getBytesAccordingToType(String object, AbstractType comparator) throws UnsupportedEncodingException
+    private ByteBuffer getBytesAccordingToType(String object, AbstractType comparator) throws UnsupportedEncodingException
     {
         if (comparator instanceof LongType)
         {
@@ -1679,7 +1736,7 @@ public class CliClient 
             if (comparator instanceof TimeUUIDType && uuid.version() != 1)
                 throw new IllegalArgumentException("TimeUUID supports only version 1 UUIDs");    
 
-            return UUIDGen.decompose(uuid);    
+            return ByteBuffer.wrap(UUIDGen.decompose(uuid));    
         }
         else if (comparator instanceof IntegerType)
         {
@@ -1694,15 +1751,15 @@ public class CliClient 
                 throw new RuntimeException("'" + object + "' could not be translated into an IntegerType.");
             }
 
-            return integerType.toByteArray();
+            return ByteBuffer.wrap(integerType.toByteArray());
         }
         else if (comparator instanceof AsciiType)
         {
-            return object.getBytes("US-ASCII");
+            return ByteBuffer.wrap(object.getBytes("US-ASCII"));
         }
         else
         {
-            return object.getBytes("UTF-8");
+            return ByteBuffer.wrap(object.getBytes("UTF-8"));
         }
     }
     
@@ -1716,7 +1773,7 @@ public class CliClient 
      * @throws IllegalAccessException - raised from getFormatTypeForColumn call
      * @throws UnsupportedEncodingException - raised from getBytes() calls
      */
-    private byte[] columnNameAsByteArray(String column, String columnFamily) throws NoSuchFieldException, InstantiationException, IllegalAccessException, UnsupportedEncodingException
+    private ByteBuffer columnNameAsByteArray(String column, String columnFamily) throws NoSuchFieldException, InstantiationException, IllegalAccessException, UnsupportedEncodingException
     {
         CfDef columnFamilyDef   = getCfDef(columnFamily);
         String comparatorClass  = columnFamilyDef.comparator_type;
@@ -1731,7 +1788,7 @@ public class CliClient 
      * @param columnValue - actual column value
      * @return byte[] - value in byte array representation
      */
-    private byte[] columnValueAsByteArray(byte[] columnName, String columnFamilyName, String columnValue)
+    private ByteBuffer columnValueAsByteArray(ByteBuffer columnName, String columnFamilyName, String columnValue)
     {
         CfDef columnFamilyDef = getCfDef(columnFamilyName);
         
@@ -1739,7 +1796,7 @@ public class CliClient 
         {
             byte[] currentColumnName = columnDefinition.getName();
 
-            if (Arrays.equals(currentColumnName, columnName))
+            if (ByteBufferUtil.compare(currentColumnName,columnName)==0)
             {
                 try
                 {
@@ -1754,7 +1811,7 @@ public class CliClient 
         }
 
         // if no validation were set returning simple .getBytes()
-        return columnValue.getBytes();
+        return ByteBuffer.wrap(columnValue.getBytes());
     }
 
     /**
@@ -1771,7 +1828,7 @@ public class CliClient 
         {
             byte[] nameInBytes = columnDefinition.getName();
 
-            if (Arrays.equals(nameInBytes, columnNameInBytes))
+            if (nameInBytes.equals(columnNameInBytes))
             {
                 return getFormatTypeForColumn(columnDefinition.getValidation_class());
             }
@@ -1825,7 +1882,7 @@ public class CliClient 
      * @param columnName   - also updates column family metadata for given column
      * @return byte[] - string value as byte[] 
      */
-    private byte[] convertValueByFunction(Tree functionCall, CfDef columnFamily, byte[] columnName)
+    private ByteBuffer convertValueByFunction(Tree functionCall, CfDef columnFamily, ByteBuffer columnName)
     {
         return convertValueByFunction(functionCall, columnFamily, columnName, false);
     }
@@ -1838,7 +1895,7 @@ public class CliClient 
      * @param withUpdate   - also updates column family metadata for given column
      * @return byte[] - string value as byte[]
      */
-    private byte[] convertValueByFunction(Tree functionCall, CfDef columnFamily, byte[] columnName, boolean withUpdate)
+    private ByteBuffer convertValueByFunction(Tree functionCall, CfDef columnFamily, ByteBuffer columnName, boolean withUpdate)
     {
         String functionName = functionCall.getChild(0).getText();
         String functionArg  = CliUtils.unescapeSQLString(functionCall.getChild(1).getText());
@@ -1858,7 +1915,7 @@ public class CliClient 
         try
         {
             AbstractType validator = function.getValidator();
-            byte[] value = getBytesAccordingToType(functionArg, validator);
+            ByteBuffer value = getBytesAccordingToType(functionArg, validator);
 
             // performing ColumnDef local validator update
             if (withUpdate)
@@ -1880,7 +1937,7 @@ public class CliClient 
      * @param columnName      - column name represented as byte[]
      * @param validationClass - value validation class
      */
-    private void updateColumnMetaData(CfDef columnFamily, byte[] columnName, String validationClass)
+    private void updateColumnMetaData(CfDef columnFamily, ByteBuffer columnName, String validationClass)
     {
         List<ColumnDef> columnMetaData = columnFamily.getColumn_metadata();
         ColumnDef column = getColumnDefByName(columnFamily, columnName);
@@ -1906,13 +1963,13 @@ public class CliClient 
      * @param columnName   - column name represented as byte[]
      * @return ColumnDef   - found column definition
      */
-    private ColumnDef getColumnDefByName(CfDef columnFamily, byte[] columnName)
+    private ColumnDef getColumnDefByName(CfDef columnFamily, ByteBuffer columnName)
     {
         for (ColumnDef columnDef : columnFamily.getColumn_metadata())
         {
             byte[] currName = columnDef.getName();
 
-            if (Arrays.equals(currName, columnName))
+            if (ByteBufferUtil.compare(currName, columnName) == 0)
             {
                 return columnDef;
             }
@@ -1941,7 +1998,7 @@ public class CliClient 
         for (KeySlice ks : slices)
         {
             css_.out.printf("-------------------\n");
-            css_.out.printf("RowKey: %s\n", new String(ks.key, "UTF-8"));
+            css_.out.printf("RowKey: %s\n", new String(ks.key.array(),ks.key.position(),ks.key.remaining(), "UTF-8"));
 
             Iterator<ColumnOrSuperColumn> iterator = ks.getColumnsIterator();
 

Modified: cassandra/trunk/src/java/org/apache/cassandra/client/RingCache.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/client/RingCache.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/client/RingCache.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/client/RingCache.java Fri Oct 22 03:23:26 2010
@@ -17,30 +17,29 @@
  */
 package org.apache.cassandra.client;
 
-import java.util.*;
-
-import org.apache.cassandra.dht.IPartitioner;
-import org.apache.cassandra.dht.Range;
-import org.apache.cassandra.dht.Token;
-import org.apache.cassandra.locator.AbstractReplicationStrategy;
-import org.apache.cassandra.locator.TokenMetadata;
-
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.nio.ByteBuffer;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 
+import org.apache.cassandra.dht.IPartitioner;
+import org.apache.cassandra.dht.Range;
+import org.apache.cassandra.dht.Token;
 import org.apache.cassandra.thrift.Cassandra;
 import org.apache.cassandra.thrift.InvalidRequestException;
 import org.apache.cassandra.thrift.TokenRange;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.transport.TFramedTransport;
 import org.apache.thrift.transport.TSocket;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Multimap;
 import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Multimap;
 
 /**
  * A class for caching the ring map at the client. For usage example, see
@@ -120,12 +119,12 @@ public class RingCache
         return (List<InetAddress>) rangeMap.get(range);
     }
 
-    public List<InetAddress> getEndpoint(byte[] key)
+    public List<InetAddress> getEndpoint(ByteBuffer key)
     {
         return getEndpoint(getRange(key));
     }
 
-    public Range getRange(byte[] key)
+    public Range getRange(ByteBuffer key)
     {
         // TODO: naive linear search of the token map
         Token<?> t = partitioner_.getToken(key);

Modified: cassandra/trunk/src/java/org/apache/cassandra/config/CFMetaData.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/config/CFMetaData.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/config/CFMetaData.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/config/CFMetaData.java Fri Oct 22 03:23:26 2010
@@ -19,18 +19,20 @@
 package org.apache.cassandra.config;
 
 import java.nio.ByteBuffer;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import com.google.common.collect.BiMap;
-import com.google.common.collect.HashBiMap;
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.commons.lang.builder.ToStringBuilder;
-
 import org.apache.avro.util.Utf8;
 import org.apache.cassandra.avro.ColumnDef;
-import org.apache.cassandra.db.*;
+import org.apache.cassandra.db.ColumnFamilyType;
+import org.apache.cassandra.db.HintedHandOffManager;
+import org.apache.cassandra.db.SystemTable;
+import org.apache.cassandra.db.Table;
 import org.apache.cassandra.db.marshal.AbstractType;
 import org.apache.cassandra.db.marshal.BytesType;
 import org.apache.cassandra.db.marshal.TimeUUIDType;
@@ -39,6 +41,12 @@ import org.apache.cassandra.db.migration
 import org.apache.cassandra.io.SerDeUtils;
 import org.apache.cassandra.utils.FBUtilities;
 import org.apache.cassandra.utils.Pair;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang.builder.ToStringBuilder;
+
+import com.google.common.collect.BiMap;
+import com.google.common.collect.HashBiMap;
 
 
 public final class CFMetaData
@@ -90,7 +98,7 @@ public final class CFMetaData
                               DEFAULT_MEMTABLE_THROUGHPUT_IN_MB,
                               DEFAULT_MEMTABLE_OPERATIONS_IN_MILLIONS,
                               cfId,
-                              Collections.<byte[], ColumnDefinition>emptyMap());
+                              Collections.<ByteBuffer, ColumnDefinition>emptyMap());
     }
 
     /**
@@ -141,7 +149,7 @@ public final class CFMetaData
     public final double memtableOperationsInMillions; // default based on throughput
     // NOTE: if you find yourself adding members to this class, make sure you keep the convert methods in lockstep.
 
-    public final Map<byte[], ColumnDefinition> column_metadata;
+    public final Map<ByteBuffer, ColumnDefinition> column_metadata;
 
     private CFMetaData(String tableName,
                        String cfName,
@@ -163,7 +171,7 @@ public final class CFMetaData
                        Integer memtableThroughputInMb,
                        Double memtableOperationsInMillions,
                        Integer cfId,
-                       Map<byte[], ColumnDefinition> column_metadata)
+                       Map<ByteBuffer, ColumnDefinition> column_metadata)
 
     {
         assert column_metadata != null;
@@ -230,7 +238,7 @@ public final class CFMetaData
                       Integer memSize,
                       Double memOps,
                       //This constructor generates the id!
-                      Map<byte[], ColumnDefinition> column_metadata)
+                      Map<ByteBuffer, ColumnDefinition> column_metadata)
     {
         this(tableName,
              cfName,
@@ -276,7 +284,7 @@ public final class CFMetaData
                               DEFAULT_MEMTABLE_LIFETIME_IN_MINS,
                               DEFAULT_MEMTABLE_THROUGHPUT_IN_MB,
                               DEFAULT_MEMTABLE_OPERATIONS_IN_MILLIONS,
-                              Collections.<byte[], ColumnDefinition>emptyMap());
+                              Collections.<ByteBuffer, ColumnDefinition>emptyMap());
     }
 
     /** clones an existing CFMetaData using the same id. */
@@ -386,7 +394,7 @@ public final class CFMetaData
         {
             throw new RuntimeException("Could not inflate CFMetaData for " + cf, ex);
         }
-        Map<byte[], ColumnDefinition> column_metadata = new TreeMap<byte[], ColumnDefinition>(FBUtilities.byteArrayComparator);
+        Map<ByteBuffer, ColumnDefinition> column_metadata = new TreeMap<ByteBuffer, ColumnDefinition>(BytesType.instance);
         for (ColumnDef aColumn_metadata : cf.column_metadata)
         {
             ColumnDefinition cd = ColumnDefinition.inflate(aColumn_metadata);
@@ -491,7 +499,7 @@ public final class CFMetaData
         return idGen.getAndIncrement();
     }
 
-    public AbstractType getValueValidator(byte[] column)
+    public AbstractType getValueValidator(ByteBuffer column)
     {
         AbstractType validator = defaultValidator;
         ColumnDefinition columnDefinition = column_metadata.get(column);
@@ -574,7 +582,7 @@ public final class CFMetaData
         validateMinMaxCompactionThresholds(cf_def);
         validateMemtableSettings(cf_def);
 
-        Map<byte[], ColumnDefinition> metadata = new HashMap<byte[], ColumnDefinition>();
+        Map<ByteBuffer, ColumnDefinition> metadata = new HashMap<ByteBuffer, ColumnDefinition>();
         if (cf_def.column_metadata == null)
         {
             metadata = column_metadata;
@@ -685,7 +693,7 @@ public final class CFMetaData
             org.apache.cassandra.avro.ColumnDef tcd = new org.apache.cassandra.avro.ColumnDef();
             tcd.index_name = cd.index_name;
             tcd.index_type = org.apache.cassandra.avro.IndexType.valueOf(cd.index_type.name());
-            tcd.name = ByteBuffer.wrap(cd.name);
+            tcd.name = cd.name;
             tcd.validation_class = cd.validator.getClass().getName();
             column_meta.add(tcd);
         }

Modified: cassandra/trunk/src/java/org/apache/cassandra/config/ColumnDefinition.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/config/ColumnDefinition.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/config/ColumnDefinition.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/config/ColumnDefinition.java Fri Oct 22 03:23:26 2010
@@ -22,24 +22,24 @@ package org.apache.cassandra.config;
 
 
 import java.nio.ByteBuffer;
-import java.util.*;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
 
 import org.apache.avro.util.Utf8;
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
-
 import org.apache.cassandra.db.marshal.AbstractType;
 import org.apache.cassandra.thrift.ColumnDef;
 import org.apache.cassandra.thrift.IndexType;
 import org.apache.cassandra.utils.FBUtilities;
 
 public class ColumnDefinition {
-    public final byte[] name;
+    public final ByteBuffer name;
     public final AbstractType validator;
     public final IndexType index_type;
     public final String index_name;
 
-    public ColumnDefinition(byte[] name, String validation_class, IndexType index_type, String index_name) throws ConfigurationException
+    public ColumnDefinition(ByteBuffer name, String validation_class, IndexType index_type, String index_name) throws ConfigurationException
     {
         this.name = name;
         this.index_type = index_type;
@@ -60,7 +60,7 @@ public class ColumnDefinition {
             return false;
         if (index_type != null ? !index_type.equals(that.index_type) : that.index_type != null)
             return false;
-        if (!Arrays.equals(name, that.name))
+        if (!name.equals(that.name))
             return false;
         return !(validator != null ? !validator.equals(that.validator) : that.validator != null);
     }
@@ -68,7 +68,7 @@ public class ColumnDefinition {
     @Override
     public int hashCode()
     {
-        int result = name != null ? Arrays.hashCode(name) : 0;
+        int result = name != null ? name.hashCode() : 0;
         result = 31 * result + (validator != null ? validator.hashCode() : 0);
         result = 31 * result + (index_type != null ? index_type.hashCode() : 0);
         result = 31 * result + (index_name != null ? index_name.hashCode() : 0);
@@ -78,7 +78,7 @@ public class ColumnDefinition {
     public org.apache.cassandra.avro.ColumnDef deflate()
     {
         org.apache.cassandra.avro.ColumnDef cd = new org.apache.cassandra.avro.ColumnDef();
-        cd.name = ByteBuffer.wrap(name);
+        cd.name = name;
         cd.validation_class = new Utf8(validator.getClass().getName());
         cd.index_type = index_type == null ? null :
             Enum.valueOf(org.apache.cassandra.avro.IndexType.class, index_type.name());
@@ -88,14 +88,12 @@ public class ColumnDefinition {
 
     public static ColumnDefinition inflate(org.apache.cassandra.avro.ColumnDef cd)
     {
-        byte[] name = new byte[cd.name.remaining()];
-        cd.name.get(name, 0, name.length);
         IndexType index_type = cd.index_type == null ? null :
             Enum.valueOf(IndexType.class, cd.index_type.name());
         String index_name = cd.index_name == null ? null : cd.index_name.toString();
         try
         {
-            return new ColumnDefinition(name, cd.validation_class.toString(), index_type, index_name);
+            return new ColumnDefinition(cd.name, cd.validation_class.toString(), index_type, index_name);
         }
         catch (ConfigurationException e)
         {
@@ -110,18 +108,18 @@ public class ColumnDefinition {
     
     public static ColumnDefinition fromColumnDef(org.apache.cassandra.avro.ColumnDef cd) throws ConfigurationException
     {
-        return new ColumnDefinition(cd.name.array(),
+        return new ColumnDefinition(cd.name,
                 cd.validation_class.toString(),
                 IndexType.valueOf(cd.index_type == null ? org.apache.cassandra.avro.CassandraServer.D_COLDEF_INDEXTYPE : cd.index_type.name()),
                 cd.index_name == null ? org.apache.cassandra.avro.CassandraServer.D_COLDEF_INDEXNAME : cd.index_name.toString());
     }
 
-    public static Map<byte[], ColumnDefinition> fromColumnDef(List<ColumnDef> thriftDefs) throws ConfigurationException
+    public static Map<ByteBuffer, ColumnDefinition> fromColumnDef(List<ColumnDef> thriftDefs) throws ConfigurationException
     {
         if (thriftDefs == null)
             return Collections.emptyMap();
 
-        Map<byte[], ColumnDefinition> cds = new TreeMap<byte[], ColumnDefinition>(FBUtilities.byteArrayComparator);
+        Map<ByteBuffer, ColumnDefinition> cds = new TreeMap<ByteBuffer, ColumnDefinition>();
         for (ColumnDef thriftColumnDef : thriftDefs)
         {
             cds.put(thriftColumnDef.name, fromColumnDef(thriftColumnDef));
@@ -130,15 +128,15 @@ public class ColumnDefinition {
         return Collections.unmodifiableMap(cds);
     }
     
-    public static Map<byte[], ColumnDefinition> fromColumnDefs(Iterable<org.apache.cassandra.avro.ColumnDef> avroDefs) throws ConfigurationException
+    public static Map<ByteBuffer, ColumnDefinition> fromColumnDefs(Iterable<org.apache.cassandra.avro.ColumnDef> avroDefs) throws ConfigurationException
     {
         if (avroDefs == null)
             return Collections.emptyMap();
 
-        Map<byte[], ColumnDefinition> cds = new TreeMap<byte[], ColumnDefinition>(FBUtilities.byteArrayComparator);
+        Map<ByteBuffer, ColumnDefinition> cds = new TreeMap<ByteBuffer, ColumnDefinition>();
         for (org.apache.cassandra.avro.ColumnDef avroColumnDef : avroDefs)
         {
-            cds.put(avroColumnDef.name.array(), fromColumnDef(avroColumnDef));
+            cds.put(avroColumnDef.name, fromColumnDef(avroColumnDef));
         }
 
         return Collections.unmodifiableMap(cds);

Modified: cassandra/trunk/src/java/org/apache/cassandra/config/DatabaseDescriptor.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/config/DatabaseDescriptor.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/config/DatabaseDescriptor.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/config/DatabaseDescriptor.java Fri Oct 22 03:23:26 2010
@@ -18,16 +18,27 @@
 
 package org.apache.cassandra.config;
 
-import java.io.*;
-import java.lang.reflect.Field;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOError;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
 import java.net.InetAddress;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.net.UnknownHostException;
-import java.util.*;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.UUID;
 
 import org.apache.cassandra.auth.AllowAllAuthenticator;
 import org.apache.cassandra.auth.AllowAllAuthority;
@@ -43,12 +54,18 @@ import org.apache.cassandra.db.migration
 import org.apache.cassandra.dht.IPartitioner;
 import org.apache.cassandra.io.sstable.Descriptor;
 import org.apache.cassandra.io.util.FileUtils;
-import org.apache.cassandra.locator.*;
+import org.apache.cassandra.locator.AbstractReplicationStrategy;
+import org.apache.cassandra.locator.DynamicEndpointSnitch;
+import org.apache.cassandra.locator.EndpointSnitchInfo;
+import org.apache.cassandra.locator.IEndpointSnitch;
+import org.apache.cassandra.locator.LocalStrategy;
 import org.apache.cassandra.scheduler.IRequestScheduler;
 import org.apache.cassandra.scheduler.NoScheduler;
 import org.apache.cassandra.service.StorageService;
 import org.apache.cassandra.utils.FBUtilities;
 import org.apache.cassandra.utils.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.yaml.snakeyaml.Loader;
 import org.yaml.snakeyaml.TypeDescription;
 import org.yaml.snakeyaml.Yaml;
@@ -574,13 +591,13 @@ public class    DatabaseDescriptor
                     throw new ConfigurationException("memtable_operations_in_millions must be a positive double");
                 }
 
-                 Map<byte[], ColumnDefinition> metadata = new TreeMap<byte[], ColumnDefinition>(FBUtilities.byteArrayComparator);
+                 Map<ByteBuffer, ColumnDefinition> metadata = new TreeMap<ByteBuffer, ColumnDefinition>();
 
                 for (RawColumnDefinition rcd : cf.column_metadata)
                 {
                     try
                     {
-                        byte[] columnName = rcd.name.getBytes("UTF-8");
+                        ByteBuffer columnName = ByteBuffer.wrap(rcd.name.getBytes("UTF-8"));
                         metadata.put(columnName, new ColumnDefinition(columnName, rcd.validator_class, rcd.index_type, rcd.index_name));
                     }
                     catch (UnsupportedEncodingException e)
@@ -1069,7 +1086,7 @@ public class    DatabaseDescriptor
         return conf.hinted_handoff_enabled;
     }
 
-    public static AbstractType getValueValidator(String keyspace, String cf, byte[] column)
+    public static AbstractType getValueValidator(String keyspace, String cf, ByteBuffer column)
     {
         return getCFMetaData(keyspace, cf).getValueValidator(column);
     }

Modified: cassandra/trunk/src/java/org/apache/cassandra/db/BinaryMemtable.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/db/BinaryMemtable.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/db/BinaryMemtable.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/db/BinaryMemtable.java Fri Oct 22 03:23:26 2010
@@ -19,7 +19,11 @@
 package org.apache.cassandra.db;
 
 import java.io.IOException;
-import java.util.*;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -27,16 +31,15 @@ import java.util.concurrent.locks.Condit
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.cassandra.config.DatabaseDescriptor;
 import org.apache.cassandra.dht.IPartitioner;
-import org.apache.cassandra.io.sstable.SSTableWriter;
 import org.apache.cassandra.io.sstable.SSTableReader;
+import org.apache.cassandra.io.sstable.SSTableWriter;
 import org.apache.cassandra.service.StorageService;
 import org.apache.cassandra.utils.WrappedRunnable;
 import org.cliffc.high_scale_lib.NonBlockingHashMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class BinaryMemtable implements IFlushable
 {
@@ -46,7 +49,7 @@ public class BinaryMemtable implements I
 
     /* Table and ColumnFamily name are used to determine the ColumnFamilyStore */
     private boolean isFrozen = false;
-    private final Map<DecoratedKey, byte[]> columnFamilies = new NonBlockingHashMap<DecoratedKey, byte[]>();
+    private final Map<DecoratedKey, ByteBuffer> columnFamilies = new NonBlockingHashMap<DecoratedKey, ByteBuffer>();
     /* Lock and Condition for notifying new clients about Memtable switches */
     private final Lock lock = new ReentrantLock();
     Condition condition;
@@ -69,7 +72,7 @@ public class BinaryMemtable implements I
      * the memtable. This version will respect the threshold and flush
      * the memtable to disk when the size exceeds the threshold.
     */
-    void put(DecoratedKey key, byte[] buffer)
+    void put(DecoratedKey key, ByteBuffer buffer)
     {
         if (isThresholdViolated())
         {
@@ -103,10 +106,10 @@ public class BinaryMemtable implements I
         return columnFamilies.isEmpty();
     }
 
-    private void resolve(DecoratedKey key, byte[] buffer)
+    private void resolve(DecoratedKey key, ByteBuffer buffer)
     {
         columnFamilies.put(key, buffer);
-        currentSize.addAndGet(buffer.length + key.key.length);
+        currentSize.addAndGet(buffer.remaining() + key.key.remaining());
     }
 
     private List<DecoratedKey> getSortedKeys()
@@ -126,8 +129,8 @@ public class BinaryMemtable implements I
 
         for (DecoratedKey key : sortedKeys)
         {
-            byte[] bytes = columnFamilies.get(key);
-            assert bytes.length > 0;
+            ByteBuffer bytes = columnFamilies.get(key);
+            assert bytes.remaining() > 0;
             writer.append(key, bytes);
         }
         SSTableReader sstable = writer.closeAndOpenReader();

Modified: cassandra/trunk/src/java/org/apache/cassandra/db/Column.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/db/Column.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/db/Column.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/db/Column.java Fri Oct 22 03:23:26 2010
@@ -18,18 +18,17 @@
 
 package org.apache.cassandra.db;
 
-import java.util.Arrays;
-import java.util.Collection;
-import java.security.MessageDigest;
 import java.io.IOException;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.ArrayUtils;
+import java.nio.ByteBuffer;
+import java.security.MessageDigest;
+import java.util.Collection;
 
 import org.apache.cassandra.db.marshal.AbstractType;
 import org.apache.cassandra.io.util.DataOutputBuffer;
+import org.apache.cassandra.utils.ByteBufferUtil;
 import org.apache.cassandra.utils.FBUtilities;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
@@ -47,41 +46,41 @@ public class Column implements IColumn
         return new ColumnSerializer();
     }
 
-    protected final byte[] name;
-    protected final byte[] value;
+    protected final ByteBuffer name;
+    protected final ByteBuffer value;
     protected final long timestamp;
 
-    Column(byte[] name)
+    Column(ByteBuffer name)
     {
-        this(name, ArrayUtils.EMPTY_BYTE_ARRAY);
+        this(name, FBUtilities.EMPTY_BYTE_BUFFER);
     }
 
-    public Column(byte[] name, byte[] value)
+    public Column(ByteBuffer name, ByteBuffer value)
     {
         this(name, value, 0);
     }
 
-    public Column(byte[] name, byte[] value, long timestamp)
+    public Column(ByteBuffer name, ByteBuffer value, long timestamp)
     {
         assert name != null;
         assert value != null;
-        assert name.length <= IColumn.MAX_NAME_LENGTH;
+        assert name.remaining() <= IColumn.MAX_NAME_LENGTH;
         this.name = name;
         this.value = value;
         this.timestamp = timestamp;
     }
 
-    public byte[] name()
+    public ByteBuffer name()
     {
         return name;
     }
 
-    public Column getSubColumn(byte[] columnName)
+    public Column getSubColumn(ByteBuffer columnName)
     {
         throw new UnsupportedOperationException("This operation is unsupported on simple columns.");
     }
 
-    public byte[] value()
+    public ByteBuffer value()
     {
         return value;
     }
@@ -121,7 +120,7 @@ public class Column implements IColumn
          * + 4 bytes which basically indicates the size of the byte array
          * + entire byte array.
         */
-        return DBConstants.shortSize_ + name.length + DBConstants.boolSize_ + DBConstants.tsSize_ + DBConstants.intSize_ + value.length;
+        return DBConstants.shortSize_ + name.remaining() + DBConstants.boolSize_ + DBConstants.tsSize_ + DBConstants.intSize_ + value.remaining();
     }
 
     /*
@@ -149,8 +148,9 @@ public class Column implements IColumn
 
     public void updateDigest(MessageDigest digest)
     {
-        digest.update(name);
-        digest.update(value);
+        digest.update(name.array(),name.position()+name.arrayOffset(),name.remaining());
+        digest.update(value.array(),value.position()+value.arrayOffset(),value.remaining());
+        
         DataOutputBuffer buffer = new DataOutputBuffer();
         try
         {
@@ -178,7 +178,7 @@ public class Column implements IColumn
             return timestamp() > column.timestamp() ? this : column;
         // break ties by comparing values.
         if (timestamp() == column.timestamp())
-            return FBUtilities.compareByteArrays(value(), column.value()) < 0 ? column : this;
+            return value().compareTo(column.value()) < 0 ? column : this;
         // neither is tombstoned and timestamps are different
         return timestamp() < column.timestamp() ? column : this;
     }
@@ -195,17 +195,17 @@ public class Column implements IColumn
 
         if (timestamp != column.timestamp)
             return false;
-        if (!Arrays.equals(name, column.name))
+        if (!name.equals(column.name))
             return false;
 
-        return Arrays.equals(value, column.value);
+        return value.equals(column.value);
     }
 
     @Override
     public int hashCode()
     {
-        int result = name != null ? Arrays.hashCode(name) : 0;
-        result = 31 * result + (value != null ? Arrays.hashCode(value) : 0);
+        int result = name != null ? name.hashCode() : 0;
+        result = 31 * result + (value != null ? value.hashCode() : 0);
         result = 31 * result + (int)(timestamp ^ (timestamp >>> 32));
         return result;
     }
@@ -217,7 +217,7 @@ public class Column implements IColumn
         sb.append(":");
         sb.append(isMarkedForDelete());
         sb.append(":");
-        sb.append(value.length);
+        sb.append(value.remaining());
         sb.append("@");
         sb.append(timestamp());
         return sb.toString();

Modified: cassandra/trunk/src/java/org/apache/cassandra/db/ColumnFamily.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/db/ColumnFamily.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/db/ColumnFamily.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/db/ColumnFamily.java Fri Oct 22 03:23:26 2010
@@ -18,21 +18,23 @@
 
 package org.apache.cassandra.db;
 
-import java.util.*;
-import java.util.concurrent.ConcurrentSkipListMap;
+import java.nio.ByteBuffer;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.concurrent.ConcurrentSkipListMap;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.cassandra.config.CFMetaData;
-
 import org.apache.cassandra.config.DatabaseDescriptor;
-import org.apache.cassandra.dht.BytesToken;
-import org.apache.cassandra.dht.LocalPartitioner;
-import org.apache.cassandra.io.ICompactSerializer2;
 import org.apache.cassandra.db.filter.QueryPath;
 import org.apache.cassandra.db.marshal.AbstractType;
+import org.apache.cassandra.io.ICompactSerializer2;
 import org.apache.cassandra.io.util.IIterableColumns;
 import org.apache.cassandra.utils.FBUtilities;
 import org.slf4j.Logger;
@@ -72,13 +74,13 @@ public class ColumnFamily implements ICo
     private transient ICompactSerializer2<IColumn> columnSerializer;
     final AtomicLong markedForDeleteAt = new AtomicLong(Long.MIN_VALUE);
     final AtomicInteger localDeletionTime = new AtomicInteger(Integer.MIN_VALUE);
-    private ConcurrentSkipListMap<byte[], IColumn> columns;
-
+    private ConcurrentSkipListMap<ByteBuffer, IColumn> columns;
+    
     public ColumnFamily(ColumnFamilyType type, AbstractType comparator, AbstractType subcolumnComparator, Integer cfid)
     {
         this.type = type;
         columnSerializer = type == ColumnFamilyType.Standard ? Column.serializer() : SuperColumn.serializer(subcolumnComparator);
-        columns = new ConcurrentSkipListMap<byte[], IColumn>(comparator);
+        columns = new ConcurrentSkipListMap<ByteBuffer, IColumn>(comparator);
         this.cfid = cfid;
      }
     
@@ -151,13 +153,13 @@ public class ColumnFamily implements ICo
         return type == ColumnFamilyType.Super;
     }
 
-    public void addColumn(QueryPath path, byte[] value, long timestamp)
+    public void addColumn(QueryPath path, ByteBuffer value, long timestamp)
     {
         assert path.columnName != null : path;
         addColumn(path.superColumnName, new Column(path.columnName, value, timestamp));
     }
 
-    public void addColumn(QueryPath path, byte[] value, long timestamp, int timeToLive)
+    public void addColumn(QueryPath path, ByteBuffer value, long timestamp, int timeToLive)
     {
         assert path.columnName != null : path;
         Column column;
@@ -168,7 +170,7 @@ public class ColumnFamily implements ICo
         addColumn(path.superColumnName, column);
     }
 
-    public void addTombstone(QueryPath path, byte[] localDeletionTime, long timestamp)
+    public void addTombstone(QueryPath path, ByteBuffer localDeletionTime, long timestamp)
     {
         assert path.columnName != null : path;
         addColumn(path.superColumnName, new DeletedColumn(path.columnName, localDeletionTime, timestamp));
@@ -180,12 +182,12 @@ public class ColumnFamily implements ICo
         addColumn(path.superColumnName, new DeletedColumn(path.columnName, localDeletionTime, timestamp));
     }
 
-    public void addTombstone(byte[] name, int localDeletionTime, long timestamp)
+    public void addTombstone(ByteBuffer name, int localDeletionTime, long timestamp)
     {
         addColumn(null, new DeletedColumn(name, localDeletionTime, timestamp));
     }
 
-    public void addColumn(byte[] superColumnName, Column column)
+    public void addColumn(ByteBuffer superColumnName, Column column)
     {
         IColumn c;
         if (superColumnName == null)
@@ -212,7 +214,7 @@ public class ColumnFamily implements ICo
     */
     public void addColumn(IColumn column)
     {
-        byte[] name = column.name();
+        ByteBuffer name = column.name();
         IColumn oldColumn = columns.putIfAbsent(name, column);
         if (oldColumn != null)
         {
@@ -236,12 +238,12 @@ public class ColumnFamily implements ICo
         }
     }
 
-    public IColumn getColumn(byte[] name)
+    public IColumn getColumn(ByteBuffer name)
     {
         return columns.get(name);
     }
 
-    public SortedSet<byte[]> getColumnNames()
+    public SortedSet<ByteBuffer> getColumnNames()
     {
         return columns.keySet();
     }
@@ -256,12 +258,12 @@ public class ColumnFamily implements ICo
         return columns.descendingMap().values();
     }
 
-    public Map<byte[], IColumn> getColumnsMap()
+    public Map<ByteBuffer, IColumn> getColumnsMap()
     {
         return columns;
     }
 
-    public void remove(byte[] columnName)
+    public void remove(ByteBuffer columnName)
     {
         columns.remove(columnName);
     }
@@ -299,9 +301,9 @@ public class ColumnFamily implements ICo
         // (don't need to worry about cfNew containing IColumns that are shadowed by
         // the delete tombstone, since cfNew was generated by CF.resolve, which
         // takes care of those for us.)
-        Map<byte[], IColumn> columns = cfComposite.getColumnsMap();
-        Set<byte[]> cNames = columns.keySet();
-        for (byte[] cName : cNames)
+        Map<ByteBuffer, IColumn> columns = cfComposite.getColumnsMap();
+        Set<ByteBuffer> cNames = columns.keySet();
+        for (ByteBuffer cName : cNames)
         {
             IColumn columnInternal = this.columns.get(cName);
             IColumn columnExternal = columns.get(cName);
@@ -362,7 +364,7 @@ public class ColumnFamily implements ICo
         return sb.toString();
     }
 
-    public static byte[] digest(ColumnFamily cf)
+    public static ByteBuffer digest(ColumnFamily cf)
     {
         MessageDigest digest;
         try
@@ -376,7 +378,7 @@ public class ColumnFamily implements ICo
         if (cf != null)
             cf.updateDigest(digest);
 
-        return digest.digest();
+        return ByteBuffer.wrap(digest.digest());
     }
 
     public void updateDigest(MessageDigest digest)
@@ -395,7 +397,7 @@ public class ColumnFamily implements ICo
         return localDeletionTime.get();
     }
 
-    public static AbstractType getComparatorFor(String table, String columnFamilyName, byte[] superColumnName)
+    public static AbstractType getComparatorFor(String table, String columnFamilyName, ByteBuffer superColumnName)
     {
         return superColumnName == null
                ? DatabaseDescriptor.getComparator(table, columnFamilyName)

Modified: cassandra/trunk/src/java/org/apache/cassandra/db/ColumnFamilyStore.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/db/ColumnFamilyStore.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/db/ColumnFamilyStore.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/db/ColumnFamilyStore.java Fri Oct 22 03:23:26 2010
@@ -18,22 +18,46 @@
 
 package org.apache.cassandra.db;
 
-import java.io.*;
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FilenameFilter;
+import java.io.IOError;
+import java.io.IOException;
+import java.io.ObjectInputStream;
 import java.lang.management.ManagementFactory;
-import java.util.*;
-import java.util.concurrent.*;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TreeSet;
+import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.concurrent.ConcurrentSkipListSet;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Pattern;
+
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
 
-import com.google.common.collect.Iterables;
-import org.apache.commons.collections.IteratorUtils;
-import org.apache.commons.lang.ArrayUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.cassandra.concurrent.JMXEnabledThreadPoolExecutor;
 import org.apache.cassandra.concurrent.NamedThreadFactory;
 import org.apache.cassandra.concurrent.RetryingScheduledThreadPoolExecutor;
@@ -45,22 +69,46 @@ import org.apache.cassandra.db.columnite
 import org.apache.cassandra.db.columniterator.IdentityQueryFilter;
 import org.apache.cassandra.db.commitlog.CommitLog;
 import org.apache.cassandra.db.commitlog.CommitLogSegment;
-import org.apache.cassandra.db.filter.*;
+import org.apache.cassandra.db.filter.IFilter;
+import org.apache.cassandra.db.filter.NamesQueryFilter;
+import org.apache.cassandra.db.filter.QueryFilter;
+import org.apache.cassandra.db.filter.QueryPath;
+import org.apache.cassandra.db.filter.SliceQueryFilter;
 import org.apache.cassandra.db.marshal.AbstractType;
 import org.apache.cassandra.db.marshal.BytesType;
 import org.apache.cassandra.db.marshal.LocalByPartionerType;
-import org.apache.cassandra.dht.*;
-import org.apache.cassandra.io.sstable.*;
+import org.apache.cassandra.dht.AbstractBounds;
+import org.apache.cassandra.dht.Bounds;
+import org.apache.cassandra.dht.ByteOrderedPartitioner;
+import org.apache.cassandra.dht.IPartitioner;
+import org.apache.cassandra.dht.LocalPartitioner;
+import org.apache.cassandra.dht.LocalToken;
+import org.apache.cassandra.dht.OrderPreservingPartitioner;
+import org.apache.cassandra.dht.Range;
+import org.apache.cassandra.dht.Token;
+import org.apache.cassandra.io.sstable.Component;
+import org.apache.cassandra.io.sstable.Descriptor;
+import org.apache.cassandra.io.sstable.ReducingKeyIterator;
+import org.apache.cassandra.io.sstable.SSTable;
+import org.apache.cassandra.io.sstable.SSTableReader;
+import org.apache.cassandra.io.sstable.SSTableTracker;
 import org.apache.cassandra.io.util.FileUtils;
 import org.apache.cassandra.service.StorageService;
 import org.apache.cassandra.thrift.IndexClause;
 import org.apache.cassandra.thrift.IndexExpression;
 import org.apache.cassandra.thrift.IndexOperator;
+import org.apache.cassandra.utils.ByteBufferUtil;
 import org.apache.cassandra.utils.EstimatedHistogram;
 import org.apache.cassandra.utils.FBUtilities;
 import org.apache.cassandra.utils.LatencyTracker;
 import org.apache.cassandra.utils.Pair;
 import org.apache.cassandra.utils.WrappedRunnable;
+import org.apache.commons.collections.IteratorUtils;
+import org.apache.commons.lang.ArrayUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Iterables;
 
 public class ColumnFamilyStore implements ColumnFamilyStoreMBean
 {
@@ -117,7 +165,7 @@ public class ColumnFamilyStore implement
     /* active memtable associated with this ColumnFamilyStore. */
     private Memtable memtable;
 
-    private final SortedMap<byte[], ColumnFamilyStore> indexedColumns;
+    private final SortedMap<ByteBuffer, ColumnFamilyStore> indexedColumns;
 
     // TODO binarymemtable ops are not threadsafe (do they need to be?)
     private AtomicReference<BinaryMemtable> binaryMemtable;
@@ -197,7 +245,7 @@ public class ColumnFamilyStore implement
         ssTables.add(sstables);
 
         // create the private ColumnFamilyStores for the secondary column indexes
-        indexedColumns = new ConcurrentSkipListMap<byte[], ColumnFamilyStore>(getComparator());
+        indexedColumns = new ConcurrentSkipListMap<ByteBuffer, ColumnFamilyStore>(getComparator());
         for (ColumnDefinition info : metadata.column_metadata.values())
         {
             if (info.index_type != null)
@@ -236,7 +284,7 @@ public class ColumnFamilyStore implement
                     int size = in.readInt();
                     byte[] bytes = new byte[size];
                     in.readFully(bytes);
-                    keys.add(StorageService.getPartitioner().decorateKey(bytes));
+                    keys.add(StorageService.getPartitioner().decorateKey(ByteBuffer.wrap(bytes)));
                 }
                 in.close();
                 if (logger.isDebugEnabled())
@@ -287,7 +335,7 @@ public class ColumnFamilyStore implement
         }
     }
 
-    public void buildSecondaryIndexes(Collection<SSTableReader> sstables, SortedSet<byte[]> columns)
+    public void buildSecondaryIndexes(Collection<SSTableReader> sstables, SortedSet<ByteBuffer> columns)
     {
         logger.debug("Submitting index build to compactionmanager");
         Table.IndexBuilder builder = table.createIndexBuilder(this, columns, new ReducingKeyIterator(sstables));
@@ -295,7 +343,7 @@ public class ColumnFamilyStore implement
         try
         {
             future.get();
-            for (byte[] column : columns)
+            for (ByteBuffer column : columns)
                 getIndexedColumnFamilyStore(column).forceBlockingFlush();
         }
         catch (InterruptedException e)
@@ -621,7 +669,7 @@ public class ColumnFamilyStore implement
         }
     }
 
-    void switchBinaryMemtable(DecoratedKey key, byte[] buffer)
+    void switchBinaryMemtable(DecoratedKey key, ByteBuffer buffer)
     {
         binaryMemtable.set(new BinaryMemtable(this));
         binaryMemtable.get().put(key, buffer);
@@ -682,7 +730,7 @@ public class ColumnFamilyStore implement
      * needs to be used. param @ key - key for update/insert param @
      * columnFamily - columnFamily changes
      */
-    void applyBinary(DecoratedKey key, byte[] buffer)
+    void applyBinary(DecoratedKey key, ByteBuffer buffer)
     {
         long start = System.nanoTime();
         binaryMemtable.get().put(key, buffer);
@@ -725,9 +773,9 @@ public class ColumnFamilyStore implement
 
     private static void removeDeletedStandard(ColumnFamily cf, int gcBefore)
     {
-        for (Map.Entry<byte[], IColumn> entry : cf.getColumnsMap().entrySet())
+        for (Map.Entry<ByteBuffer, IColumn> entry : cf.getColumnsMap().entrySet())
         {
-            byte[] cname = entry.getKey();
+            ByteBuffer cname = entry.getKey();
             IColumn c = entry.getValue();
             // remove columns if
             // (a) the column itself is tombstoned or
@@ -746,7 +794,7 @@ public class ColumnFamilyStore implement
         // TODO assume deletion means "most are deleted?" and add to clone, instead of remove from original?
         // this could be improved by having compaction, or possibly even removeDeleted, r/m the tombstone
         // once gcBefore has passed, so if new stuff is added in it doesn't used the wrong algorithm forever
-        for (Map.Entry<byte[], IColumn> entry : cf.getColumnsMap().entrySet())
+        for (Map.Entry<ByteBuffer, IColumn> entry : cf.getColumnsMap().entrySet())
         {
             SuperColumn c = (SuperColumn) entry.getValue();
             long minTimestamp = Math.max(c.getMarkedForDeleteAt(), cf.getMarkedForDeleteAt());
@@ -988,7 +1036,7 @@ public class ColumnFamilyStore implement
         return writeStats.getRecentLatencyHistogramMicros();
     }
 
-    public ColumnFamily getColumnFamily(DecoratedKey key, QueryPath path, byte[] start, byte[] finish, boolean reversed, int limit)
+    public ColumnFamily getColumnFamily(DecoratedKey key, QueryPath path, ByteBuffer start, ByteBuffer finish, boolean reversed, int limit)
     {
         return getColumnFamily(QueryFilter.getSliceFilter(key, path, start, finish, reversed, limit));
     }
@@ -1031,6 +1079,7 @@ public class ColumnFamilyStore implement
             if (ssTables.getRowCache().getCapacity() == 0)
             {
                 ColumnFamily cf = getTopLevelColumns(filter, gcBefore);
+                         
                 // TODO this is necessary because when we collate supercolumns together, we don't check
                 // their subcolumns for relevance, so we need to do a second prune post facto here.
                 return cf.isSuper() ? removeDeleted(cf, gcBefore) : removeDeletedCF(cf, gcBefore);
@@ -1039,7 +1088,7 @@ public class ColumnFamilyStore implement
             ColumnFamily cached = cacheRow(filter.key);
             if (cached == null)
                 return null;
-
+ 
             return filterColumnFamily(cached, filter, gcBefore);
         }
         finally
@@ -1062,7 +1111,7 @@ public class ColumnFamilyStore implement
         if (filter.filter instanceof SliceQueryFilter)
         {
             SliceQueryFilter sliceFilter = (SliceQueryFilter) filter.filter;
-            if (sliceFilter.start.length == 0 && sliceFilter.finish.length == 0)
+            if (sliceFilter.start.remaining() == 0 && sliceFilter.finish.remaining() == 0)
             {
                 if (cached.isSuper() && filter.path.superColumnName != null)
                 {
@@ -1081,7 +1130,7 @@ public class ColumnFamilyStore implement
                     // top-level columns
                     if (sliceFilter.count >= cached.getColumnCount())
                     {
-                        removeDeletedColumnsOnly(cached, gcBefore);
+                        removeDeletedColumnsOnly(cached, gcBefore);                    
                         return removeDeletedCF(cached, gcBefore);
                     }
                 }
@@ -1118,6 +1167,7 @@ public class ColumnFamilyStore implement
             if (iter != null)
             {
                 returnCF.delete(iter.getColumnFamily());
+                    
                 iterators.add(iter);
             }
 
@@ -1149,7 +1199,11 @@ public class ColumnFamilyStore implement
 
             Comparator<IColumn> comparator = filter.filter.getColumnComparator(getComparator());
             Iterator collated = IteratorUtils.collatedIterator(comparator, iterators);
+          
+                     
             filter.collectCollatedColumns(returnCF, collated, gcBefore);
+          
+            
             // Caller is responsible for final removeDeletedCF.  This is important for cacheRow to work correctly:
             // we need to distinguish between "there is no data at all for this row" (BF will let us rebuild that efficiently)
             // and "there used to be data, but it's gone now" (we should cache the empty CF so we don't need to rebuild that slower)
@@ -1185,7 +1239,7 @@ public class ColumnFamilyStore implement
       * @param columnFilter description of the columns we're interested in for each row
       * @return true if we found all keys we were looking for, otherwise false
      */
-    public List<Row> getRangeSlice(byte[] superColumn, final AbstractBounds range, int maxResults, IFilter columnFilter)
+    public List<Row> getRangeSlice(ByteBuffer superColumn, final AbstractBounds range, int maxResults, IFilter columnFilter)
     throws ExecutionException, InterruptedException
     {
         assert range instanceof Bounds
@@ -1193,8 +1247,8 @@ public class ColumnFamilyStore implement
                : range;
 
         List<Row> rows = new ArrayList<Row>();
-        DecoratedKey startWith = new DecoratedKey(range.left, (byte[])null);
-        DecoratedKey stopAt = new DecoratedKey(range.right, (byte[])null);
+        DecoratedKey startWith = new DecoratedKey(range.left, null);
+        DecoratedKey stopAt = new DecoratedKey(range.right, null);
 
         QueryFilter filter = new QueryFilter(null, new QueryPath(columnFamily, superColumn, null), columnFilter);
         Collection<Memtable> memtables = new ArrayList<Memtable>();
@@ -1268,14 +1322,14 @@ public class ColumnFamilyStore implement
                 // otherwise, create an extraFilter to fetch by name the columns referenced by the additional expressions.
                 if (getMaxRowSize() < DatabaseDescriptor.getColumnIndexSize())
                 {
-                    firstFilter = new SliceQueryFilter(ArrayUtils.EMPTY_BYTE_ARRAY,
-                                                       ArrayUtils.EMPTY_BYTE_ARRAY,
+                    firstFilter = new SliceQueryFilter(FBUtilities.EMPTY_BYTE_BUFFER,
+                                                       FBUtilities.EMPTY_BYTE_BUFFER,
                                                        ((SliceQueryFilter) dataFilter).reversed,
                                                        Integer.MAX_VALUE);
                 }
                 else
                 {
-                    SortedSet<byte[]> columns = new TreeSet<byte[]>(getComparator());
+                    SortedSet<ByteBuffer> columns = new TreeSet<ByteBuffer>(getComparator());
                     for (IndexExpression expr : clause.expressions)
                     {
                         if (expr == primary)
@@ -1289,7 +1343,7 @@ public class ColumnFamilyStore implement
             {
                 // just add in columns that are not part of the resultset
                 assert dataFilter instanceof NamesQueryFilter;
-                SortedSet<byte[]> columns = new TreeSet<byte[]>(getComparator());
+                SortedSet<ByteBuffer> columns = new TreeSet<ByteBuffer>(getComparator());
                 for (IndexExpression expr : clause.expressions)
                 {
                     if (expr == primary || ((NamesQueryFilter) dataFilter).columns.contains(expr.column_name))
@@ -1305,7 +1359,7 @@ public class ColumnFamilyStore implement
         }
 
         List<Row> rows = new ArrayList<Row>();
-        byte[] startKey = clause.start_key;
+        ByteBuffer startKey = clause.start_key;
         QueryPath path = new QueryPath(columnFamily);
 
         // fetch row keys matching the primary expression, fetch the slice predicate for each
@@ -1320,14 +1374,14 @@ public class ColumnFamilyStore implement
             QueryFilter indexFilter = QueryFilter.getSliceFilter(indexKey,
                                                                  new QueryPath(indexCFS.getColumnFamilyName()),
                                                                  startKey,
-                                                                 ArrayUtils.EMPTY_BYTE_ARRAY,
+                                                                 FBUtilities.EMPTY_BYTE_BUFFER,
                                                                  false,
                                                                  clause.count);
             ColumnFamily indexRow = indexCFS.getColumnFamily(indexFilter);
             if (indexRow == null)
                 break;
 
-            byte[] dataKey = null;
+            ByteBuffer dataKey = null;
             int n = 0;
             for (IColumn column : indexRow.getSortedColumns())
             {
@@ -1375,7 +1429,7 @@ public class ColumnFamilyStore implement
                 if (rows.size() == clause.count)
                     break outer;
             }
-            if (n < clause.count || Arrays.equals(startKey, dataKey))
+            if (n < clause.count || ByteBufferUtil.equals(startKey, dataKey))
                 break;
             startKey = dataKey;
         }
@@ -1743,22 +1797,22 @@ public class ColumnFamilyStore implement
         return (double) falseCount / (trueCount + falseCount);
     }
 
-    public SortedSet<byte[]> getIndexedColumns()
+    public SortedSet<ByteBuffer> getIndexedColumns()
     {
-        return (SortedSet<byte[]>) indexedColumns.keySet();
+        return (SortedSet<ByteBuffer>) indexedColumns.keySet();
     }
 
-    public ColumnFamilyStore getIndexedColumnFamilyStore(byte[] column)
+    public ColumnFamilyStore getIndexedColumnFamilyStore(ByteBuffer column)
     {
         return indexedColumns.get(column);
     }
 
-    public ColumnFamily newIndexedColumnFamily(byte[] column)
+    public ColumnFamily newIndexedColumnFamily(ByteBuffer column)
     {
         return ColumnFamily.create(indexedColumns.get(column).metadata);
     }
 
-    public DecoratedKey<LocalToken> getIndexKeyFor(byte[] name, byte[] value)
+    public DecoratedKey<LocalToken> getIndexKeyFor(ByteBuffer name, ByteBuffer value)
     {
         return indexedColumns.get(name).partitioner.decorateKey(value);
     }

Modified: cassandra/trunk/src/java/org/apache/cassandra/db/ColumnSerializer.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/db/ColumnSerializer.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/db/ColumnSerializer.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/db/ColumnSerializer.java Fri Oct 22 03:23:26 2010
@@ -56,21 +56,22 @@ public class ColumnSerializer implements
 
     public Column deserialize(DataInput dis) throws IOException
     {
-        byte[] name = FBUtilities.readShortByteArray(dis);
+        ByteBuffer name = FBUtilities.readShortByteArray(dis);
         int b = dis.readUnsignedByte();
         if ((b & EXPIRATION_MASK) != 0)
         {
             int ttl = dis.readInt();
             int expiration = dis.readInt();
             long ts = dis.readLong();
-            byte[] value = FBUtilities.readByteArray(dis);
+            ByteBuffer value = FBUtilities.readByteArray(dis);
             if ((int) (System.currentTimeMillis() / 1000 ) > expiration)
             {
                 // the column is now expired, we can safely return a simple
                 // tombstone
                 ByteBuffer bytes = ByteBuffer.allocate(4);
                 bytes.putInt(expiration);
-                return new DeletedColumn(name, bytes.array(), ts);
+                bytes.rewind();
+                return new DeletedColumn(name, bytes, ts);
             }
             else
             {
@@ -81,7 +82,7 @@ public class ColumnSerializer implements
         {
             boolean delete = (b & DELETION_MASK) != 0;
             long ts = dis.readLong();
-            byte[] value = FBUtilities.readByteArray(dis);
+            ByteBuffer value = FBUtilities.readByteArray(dis);
             if ((b & DELETION_MASK) != 0) {
                 return new DeletedColumn(name, value, ts);
             } else {

Modified: cassandra/trunk/src/java/org/apache/cassandra/db/DecoratedKey.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/db/DecoratedKey.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/db/DecoratedKey.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/db/DecoratedKey.java Fri Oct 22 03:23:26 2010
@@ -18,6 +18,7 @@
 
 package org.apache.cassandra.db;
 
+import java.nio.ByteBuffer;
 import java.util.Comparator;
 
 import org.apache.cassandra.dht.IPartitioner;
@@ -47,9 +48,9 @@ public class DecoratedKey<T extends Toke
     };
 
     public final T token;
-    public final byte[] key;
+    public final ByteBuffer key;
 
-    public DecoratedKey(T token, byte[] key)
+    public DecoratedKey(T token, ByteBuffer key)
     {
         super();
         assert token != null;

Modified: cassandra/trunk/src/java/org/apache/cassandra/db/DefsTable.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/db/DefsTable.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/db/DefsTable.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/db/DefsTable.java Fri Oct 22 03:23:26 2010
@@ -18,42 +18,39 @@
 
 package org.apache.cassandra.db;
 
-import org.apache.avro.Schema;
+import static com.google.common.base.Charsets.UTF_8;
 
-import org.apache.cassandra.config.ConfigurationException;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.UUID;
+
+import org.apache.avro.Schema;
 import org.apache.cassandra.config.DatabaseDescriptor;
 import org.apache.cassandra.config.KSMetaData;
-import org.apache.cassandra.db.commitlog.CommitLog;
 import org.apache.cassandra.db.filter.QueryFilter;
 import org.apache.cassandra.db.filter.QueryPath;
-import org.apache.cassandra.db.filter.SliceQueryFilter;
 import org.apache.cassandra.db.migration.Migration;
 import org.apache.cassandra.io.SerDeUtils;
-import org.apache.cassandra.io.util.DataOutputBuffer;
-import org.apache.cassandra.io.util.FileUtils;
 import org.apache.cassandra.service.StorageService;
-import org.apache.cassandra.utils.FBUtilities;
 import org.apache.cassandra.utils.UUIDGen;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.io.File;
-import java.io.FileFilter;
-import java.io.IOException;
-import java.util.*;
-
-import static com.google.common.base.Charsets.UTF_8;
-
 public class DefsTable
 {
     // column name for the schema storing serialized keyspace definitions
     // NB: must be an invalid keyspace name
-    public static final byte[] DEFINITION_SCHEMA_COLUMN_NAME = "Avro/Schema".getBytes(UTF_8);
+    public static final ByteBuffer DEFINITION_SCHEMA_COLUMN_NAME = ByteBuffer.wrap("Avro/Schema".getBytes(UTF_8));
 
     /** dumps current keyspace definitions to storage */
     public static synchronized void dumpToStorage(UUID version) throws IOException
     {
-        final byte[] versionKey = Migration.toUTF8Bytes(version);
+        final ByteBuffer versionKey = Migration.toUTF8Bytes(version);
 
         // build a list of keyspaces
         Collection<String> ksnames = DatabaseDescriptor.getNonSystemTables();
@@ -64,20 +61,20 @@ public class DefsTable
         for (String ksname : ksnames)
         {
             KSMetaData ksm = DatabaseDescriptor.getTableDefinition(ksname);
-            rm.add(new QueryPath(Migration.SCHEMA_CF, null, ksm.name.getBytes(UTF_8)), SerDeUtils.serialize(ksm.deflate()), now);
+            rm.add(new QueryPath(Migration.SCHEMA_CF, null, ByteBuffer.wrap(ksm.name.getBytes(UTF_8))), SerDeUtils.serialize(ksm.deflate()), now);
         }
         // add the schema
         rm.add(new QueryPath(Migration.SCHEMA_CF,
                              null,
                              DEFINITION_SCHEMA_COLUMN_NAME),
-                             org.apache.cassandra.avro.KsDef.SCHEMA$.toString().getBytes(UTF_8),
+                             ByteBuffer.wrap(org.apache.cassandra.avro.KsDef.SCHEMA$.toString().getBytes(UTF_8)),
                              now);
         rm.apply();
 
         // apply new version
         rm = new RowMutation(Table.SYSTEM_TABLE, Migration.LAST_MIGRATION_KEY);
         rm.add(new QueryPath(Migration.SCHEMA_CF, null, Migration.LAST_MIGRATION_KEY),
-               UUIDGen.decompose(version),
+               ByteBuffer.wrap(UUIDGen.decompose(version)),
                now);
         rm.apply();
     }
@@ -94,13 +91,13 @@ public class DefsTable
         if (avroschema == null)
             // TODO: more polite way to handle this?
             throw new RuntimeException("Cannot read system table! Are you upgrading a pre-release version?");
-        Schema schema = Schema.parse(new String(avroschema.value()));
+        Schema schema = Schema.parse(new String(avroschema.value().array(),avroschema.value().position()+avroschema.value().arrayOffset(),avroschema.value().remaining()));
 
         // deserialize keyspaces using schema
         Collection<KSMetaData> keyspaces = new ArrayList<KSMetaData>();
         for (IColumn column : cf.getSortedColumns())
         {
-            if (Arrays.equals(column.name(), DEFINITION_SCHEMA_COLUMN_NAME))
+            if (column.name().equals(DEFINITION_SCHEMA_COLUMN_NAME))
                 continue;
             org.apache.cassandra.avro.KsDef ks = SerDeUtils.deserialize(schema, column.value(), new org.apache.cassandra.avro.KsDef());
             keyspaces.add(KSMetaData.inflate(ks));

Modified: cassandra/trunk/src/java/org/apache/cassandra/db/DeletedColumn.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/db/DeletedColumn.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/db/DeletedColumn.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/db/DeletedColumn.java Fri Oct 22 03:23:26 2010
@@ -20,6 +20,7 @@ package org.apache.cassandra.db;
 
 import java.nio.ByteBuffer;
 
+import org.apache.cassandra.utils.ByteBufferUtil;
 import org.apache.cassandra.utils.FBUtilities;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -28,12 +29,12 @@ public class DeletedColumn extends Colum
 {
     private static Logger logger = LoggerFactory.getLogger(DeletedColumn.class);
     
-    public DeletedColumn(byte[] name, int localDeletionTime, long timestamp)
+    public DeletedColumn(ByteBuffer name, int localDeletionTime, long timestamp)
     {
         this(name, FBUtilities.toByteArray(localDeletionTime), timestamp);
     }
 
-    public DeletedColumn(byte[] name, byte[] value, long timestamp)
+    public DeletedColumn(ByteBuffer name, ByteBuffer value, long timestamp)
     {
         super(name, value, timestamp);
     }
@@ -53,6 +54,6 @@ public class DeletedColumn extends Colum
     @Override
     public int getLocalDeletionTime()
     {
-        return ByteBuffer.wrap(value()).getInt();
+       return value.getInt(value.position()+value.arrayOffset()	);
     }
 }

Modified: cassandra/trunk/src/java/org/apache/cassandra/db/ExpiringColumn.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/db/ExpiringColumn.java?rev=1026200&r1=1026199&r2=1026200&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/db/ExpiringColumn.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/db/ExpiringColumn.java Fri Oct 22 03:23:26 2010
@@ -18,13 +18,13 @@
 
 package org.apache.cassandra.db;
 
-import java.security.MessageDigest;
 import java.io.IOException;
-
-import org.apache.log4j.Logger;
+import java.nio.ByteBuffer;
+import java.security.MessageDigest;
 
 import org.apache.cassandra.db.marshal.AbstractType;
 import org.apache.cassandra.io.util.DataOutputBuffer;
+import org.apache.log4j.Logger;
 
 /**
  * Alternative to Column that have an expiring time.
@@ -43,12 +43,12 @@ public class ExpiringColumn extends Colu
     private final int localExpirationTime;
     private final int timeToLive;
 
-    public ExpiringColumn(byte[] name, byte[] value, long timestamp, int timeToLive)
+    public ExpiringColumn(ByteBuffer name, ByteBuffer value, long timestamp, int timeToLive)
     {
       this(name, value, timestamp, timeToLive, (int) (System.currentTimeMillis() / 1000) + timeToLive);
     }
 
-    public ExpiringColumn(byte[] name, byte[] value, long timestamp, int timeToLive, int localExpirationTime)
+    public ExpiringColumn(ByteBuffer name, ByteBuffer value, long timestamp, int timeToLive, int localExpirationTime)
     {
         super(name, value, timestamp);
         assert timeToLive > 0;



Mime
View raw message