cassandra-commits mailing list archives

From: jbel...@apache.org
Subject: svn commit: r796108 [3/4] - in /incubator/cassandra/trunk: conf/ interface/ interface/gen-java/org/apache/cassandra/service/ src/java/org/apache/cassandra/cli/ src/java/org/apache/cassandra/config/ src/java/org/apache/cassandra/cql/common/ src/java/org...
Date: Tue, 21 Jul 2009 01:36:54 GMT
Added: incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/LongType.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/LongType.java?rev=796108&view=auto
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/LongType.java (added)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/LongType.java Tue Jul 21 01:36:52 2009
@@ -0,0 +1,19 @@
+package org.apache.cassandra.db.marshal;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+public class LongType extends AbstractType
+{
+    public int compare(byte[] o1, byte[] o2)
+    {
+        long L1 = ByteBuffer.wrap(o1).order(ByteOrder.LITTLE_ENDIAN).getLong();
+        long L2 = ByteBuffer.wrap(o2).order(ByteOrder.LITTLE_ENDIAN).getLong();
+        return new Long(L1).compareTo(L2);
+    }
+
+    public String getString(byte[] bytes)
+    {
+        return String.valueOf(ByteBuffer.wrap(bytes).getLong());
+    }
+}
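
The little-endian byte order in LongType.compare() is what clients must match when they hand in 8-byte column names; the Python system test further down does this with struct.pack('<q', n). A minimal Java sketch of the same encoding, shown here for illustration only (this helper is not part of the commit):

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    public class LongEncoding
    {
        // Encode a long as the 8 little-endian bytes LongType.compare() expects.
        public static byte[] toBytes(long value)
        {
            return ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN).putLong(value).array();
        }
    }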

Added: incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/MarshalException.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/MarshalException.java?rev=796108&view=auto
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/MarshalException.java (added)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/MarshalException.java Tue Jul 21 01:36:52 2009
@@ -0,0 +1,9 @@
+package org.apache.cassandra.db.marshal;
+
+public class MarshalException extends RuntimeException
+{
+    public MarshalException(String message)
+    {
+        super(message);
+    }
+}

Added: incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/UTF8Type.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/UTF8Type.java?rev=796108&view=auto
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/UTF8Type.java (added)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/UTF8Type.java Tue Jul 21 01:36:52 2009
@@ -0,0 +1,30 @@
+package org.apache.cassandra.db.marshal;
+
+import java.io.UnsupportedEncodingException;
+
+public class UTF8Type extends AbstractType
+{
+    public int compare(byte[] o1, byte[] o2)
+    {
+        try
+        {
+            return new String(o1, "UTF-8").compareTo(new String(o2, "UTF-8"));
+        }
+        catch (UnsupportedEncodingException e)
+        {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public String getString(byte[] bytes)
+    {
+        try
+        {
+            return new String(bytes, "UTF-8");
+        }
+        catch (UnsupportedEncodingException e)
+        {
+            throw new RuntimeException(e);
+        }
+    }
+}

Added: incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/UUIDType.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/UUIDType.java?rev=796108&view=auto
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/UUIDType.java (added)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/db/marshal/UUIDType.java Tue Jul 21 01:36:52 2009
@@ -0,0 +1,23 @@
+package org.apache.cassandra.db.marshal;
+
+import java.util.UUID;
+import java.nio.ByteBuffer;
+
+public class UUIDType extends AbstractType
+{
+    private UUID getUUID(byte[] bytes)
+    {
+        ByteBuffer bb = ByteBuffer.wrap(bytes);
+        return new UUID(bb.getLong(), bb.getLong());
+    }
+
+    public int compare(byte[] o1, byte[] o2)
+    {
+        return getUUID(o1).compareTo(getUUID(o2));
+    }
+
+    public String getString(byte[] bytes)
+    {
+        return getUUID(bytes).toString();
+    }
+}
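
UUIDType.getUUID() reads the column name back as two big-endian longs, so a client producing such names would lay out the 16 bytes the same way. A small hypothetical helper, for illustration only (not part of this commit):

    import java.nio.ByteBuffer;
    import java.util.UUID;

    public class UUIDEncoding
    {
        // Write msb then lsb, the inverse of new UUID(bb.getLong(), bb.getLong()).
        public static byte[] toBytes(UUID uuid)
        {
            ByteBuffer bb = ByteBuffer.allocate(16);
            bb.putLong(uuid.getMostSignificantBits());
            bb.putLong(uuid.getLeastSignificantBits());
            return bb.array();
        }
    }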

Modified: incubator/cassandra/trunk/src/java/org/apache/cassandra/io/IFileReader.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/io/IFileReader.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/io/IFileReader.java (original)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/io/IFileReader.java Tue Jul 21 01:36:52 2009
@@ -73,7 +73,7 @@
      * @param columnNames - The list of columns in the cfName column family
      * 					     that we want to return
     */
-    public long next(String key, DataOutputBuffer bufOut, String columnFamilyName, SortedSet<String> columnNames, long position) throws IOException;
+    public long next(String key, DataOutputBuffer bufOut, String columnFamilyName, SortedSet<byte[]> columnNames, long position) throws IOException;
 
     /**
      * Close the file after reading.

Modified: incubator/cassandra/trunk/src/java/org/apache/cassandra/io/IndexHelper.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/io/IndexHelper.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/io/IndexHelper.java (original)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/io/IndexHelper.java Tue Jul 21 01:36:52 2009
@@ -25,9 +25,8 @@
 import java.util.*;
 
 import org.apache.cassandra.config.DatabaseDescriptor;
-import org.apache.cassandra.db.IColumn;
-import org.apache.cassandra.db.ColumnComparatorFactory;
-import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.db.ColumnSerializer;
+import org.apache.cassandra.db.marshal.AbstractType;
 
 
 /**
@@ -152,21 +151,21 @@
         DataInputBuffer indexIn = new DataInputBuffer();
         indexIn.reset(indexOut.getData(), indexOut.getLength());
         
-        ColumnComparatorFactory.ComparatorType typeInfo = DatabaseDescriptor.getTypeInfo(tableName, cfName);
+        AbstractType comparator = DatabaseDescriptor.getType(tableName, cfName);
 
-        while(indexIn.available() > 0)
-        {            
-            ColumnIndexInfo cIndexInfo = ColumnIndexFactory.instance(typeInfo);
-        	cIndexInfo = cIndexInfo.deserialize(indexIn);
-        	columnIndexList.add(cIndexInfo);
+        while (indexIn.available() > 0)
+        {
+            // TODO this is all kinds of messed up
+            ColumnIndexInfo cIndexInfo = new ColumnIndexInfo(comparator);
+            cIndexInfo = cIndexInfo.deserialize(indexIn);
+            columnIndexList.add(cIndexInfo);
         }
 
-		return totalBytesRead;
+        return totalBytesRead;
 	}
 
     /**
      * Returns the range in which a given column falls in the index
-     * @param column The column whose range needs to be found
      * @param columnIndexList the in-memory representation of the column index
      * @param dataSize the total size of the data
      * @param totalNumCols total number of columns
@@ -221,40 +220,31 @@
 	 * @param totalNumCols the total number of columns
 	 * @return a list of subranges which contain all the columns in columnNames
 	 */
-	static List<ColumnRange> getMultiColumnRangesFromNameIndex(SortedSet<String> columnNames, List<IndexHelper.ColumnIndexInfo> columnIndexList, int dataSize, int totalNumCols)
+	static List<ColumnRange> getMultiColumnRangesFromNameIndex(SortedSet<byte[]> columnNames, List<IndexHelper.ColumnIndexInfo> columnIndexList, int dataSize, int totalNumCols)
 	{
-		List<ColumnRange> columnRanges = new ArrayList<ColumnRange>();				
+		List<ColumnRange> columnRanges = new ArrayList<ColumnRange>();
 
-        if ( columnIndexList.size() == 0 )
+        if (columnIndexList.size() == 0)
         {
-            columnRanges.add( new ColumnRange(0, dataSize, totalNumCols) );
+            columnRanges.add(new ColumnRange(0, dataSize, totalNumCols));
         }
         else
         {
             Map<Long, Boolean> offset = new HashMap<Long, Boolean>();
-    		for(String column : columnNames)
-    		{
-                IndexHelper.ColumnIndexInfo cIndexInfo = new IndexHelper.ColumnNameIndexInfo(column);
-    			ColumnRange columnRange = getColumnRangeFromNameIndex(cIndexInfo, columnIndexList, dataSize, totalNumCols);   
-                if ( offset.get( columnRange.coordinate().start_ ) == null ) 
+            for (byte[] column : columnNames)
+            {
+                IndexHelper.ColumnIndexInfo cIndexInfo = new IndexHelper.ColumnIndexInfo(column, 0, 0, (AbstractType)columnNames.comparator());
+                ColumnRange columnRange = getColumnRangeFromNameIndex(cIndexInfo, columnIndexList, dataSize, totalNumCols);
+                if (offset.get(columnRange.coordinate().start_) == null)
                 {
                     columnRanges.add(columnRange);
                     offset.put(columnRange.coordinate().start_, true);
                 }
-    		}
+            }
         }
 
-		return columnRanges;
+        return columnRanges;
 	}
-        
-    public static class ColumnIndexFactory
-    {
-        public static ColumnIndexInfo instance(ColumnComparatorFactory.ComparatorType typeInfo)
-        {
-            return typeInfo == ColumnComparatorFactory.ComparatorType.NAME
-                    ? new ColumnNameIndexInfo() : new ColumnTimestampIndexInfo();
-        }
-    }
 
 
     /**
@@ -290,13 +280,22 @@
 	 * A helper class to generate indexes while
      * the columns are sorted by name on disk.
 	*/
-    public static abstract class ColumnIndexInfo implements Comparable<ColumnIndexInfo>
+    public static class ColumnIndexInfo implements Comparable<ColumnIndexInfo>
     {
         private long position_;
-        private int columnCount_;        
-        
-        ColumnIndexInfo(long position, int columnCount)
+        private int columnCount_;
+        private byte[] name_;
+        private AbstractType comparator_;
+
+        public ColumnIndexInfo(AbstractType comparator_)
         {
+            this.comparator_ = comparator_;
+        }
+
+        public ColumnIndexInfo(byte[] name, long position, int columnCount, AbstractType comparator)
+        {
+            this(comparator);
+            name_ = name;
             position_ = position;
             columnCount_ = columnCount;
         }
@@ -320,135 +319,36 @@
         {
             columnCount_ = count;
         }
-                
-        public abstract void set(Object o);
-        public abstract void serialize(DataOutputStream dos) throws IOException;
-        public abstract ColumnIndexInfo deserialize(DataInputStream dis) throws IOException;
-        
-        public int size()
-        {
-            /* size of long for "position_"  + size of columnCount_ */
-            return (8 + 4);
-        }
-    }
 
-    static class ColumnNameIndexInfo extends ColumnIndexInfo
-    {
-        private String name_;       
-        
-        ColumnNameIndexInfo()
-        {
-            super(0L, 0);
-        }
-        
-        ColumnNameIndexInfo(String name)
-        {
-            this(name, 0L, 0);
-        }
-                
-        ColumnNameIndexInfo(String name, long position, int columnCount)
-        {
-            super(position, columnCount);
-            name_ = name;
-        }
-        
-        String name()
-        {
-            return name_;
-        }                
-        
-        public void set(Object o)
-        {
-            name_ = (String)o;
-        }
-        
         public int compareTo(ColumnIndexInfo rhs)
         {
-            IndexHelper.ColumnNameIndexInfo cIndexInfo = (IndexHelper.ColumnNameIndexInfo)rhs;
-            return name_.compareTo(cIndexInfo.name_);
+            return comparator_.compare(name_, rhs.name_);
         }
-        
+
         public void serialize(DataOutputStream dos) throws IOException
         {
-            dos.writeLong(position()); 
+            dos.writeLong(position());
             dos.writeInt(count());
-            dos.writeUTF(name_);        
+            ColumnSerializer.writeName(name_, dos);
         }
-        
-        public ColumnNameIndexInfo deserialize(DataInputStream dis) throws IOException
+
+        public ColumnIndexInfo deserialize(DataInputStream dis) throws IOException
         {
             long position = dis.readLong();
-            int columnCount = dis.readInt();            
-            String name = dis.readUTF();       
-            return new ColumnNameIndexInfo(name, position, columnCount);
+            int columnCount = dis.readInt();
+            byte[] name = ColumnSerializer.readName(dis);
+            return new ColumnIndexInfo(name, position, columnCount, comparator_);
         }
-        
+
         public int size()
         {
-            int size = super.size();
-            /* Size of the name_ as an UTF8 and the actual length as a short for the readUTF. */
-            size += FBUtilities.getUTF8Length(name_) + IColumn.UtfPrefix_;
-            return size;
+            // serialized size -- CS.writeName includes a 2-byte length prefix
+            return 8 + 4 + 2 + name_.length;
         }
-    }
 
-    static class ColumnTimestampIndexInfo extends ColumnIndexInfo
-    {
-        private long timestamp_;
-        
-        ColumnTimestampIndexInfo()
+        public byte[] name()
         {
-            super(0L, 0);
-        }
-        
-        ColumnTimestampIndexInfo(long timestamp)
-        {
-            this(timestamp, 0L, 0);  
-        }
-        
-        ColumnTimestampIndexInfo(long timestamp, long position, int columnCount)
-        {
-            super(position, columnCount);
-            timestamp_ = timestamp;
-        }
-        
-        public long timestamp()
-        {
-            return timestamp_;
-        }
-        
-        public void set(Object o)
-        {
-            timestamp_ = (Long)o;
-        }
-        
-        public int compareTo(ColumnIndexInfo rhs)
-        {
-            ColumnTimestampIndexInfo cIndexInfo = (ColumnTimestampIndexInfo)rhs;
-            return Long.valueOf(timestamp_).compareTo(Long.valueOf(cIndexInfo.timestamp_));
-        }
-        
-        public void serialize(DataOutputStream dos) throws IOException
-        {
-            dos.writeLong(position()); 
-            dos.writeInt(count());
-            dos.writeLong(timestamp_);        
-        }
-        
-        public ColumnTimestampIndexInfo deserialize(DataInputStream dis) throws IOException
-        {
-            long position = dis.readLong();
-            int columnCount = dis.readInt();
-            long timestamp = dis.readLong();        
-            return new ColumnTimestampIndexInfo(timestamp, position, columnCount);
-        }
-        
-        public int size()
-        {
-            int size = super.size();
-            /* add the size of the timestamp which is a long */ 
-            size += 8;
-            return size;
+            return name_;
         }
     }
 }
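
With the name- and timestamp-specific subclasses collapsed into a single ColumnIndexInfo, each index entry is serialized as an 8-byte position, a 4-byte column count, and the column name written through ColumnSerializer.writeName (which, per the size() comment above, adds a 2-byte length prefix). A sketch of that layout using plain java.io, for illustration only:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class IndexEntrySketch
    {
        // Mirrors ColumnIndexInfo.serialize(): 8 + 4 + 2 + name.length bytes.
        public static byte[] serialize(long position, int count, byte[] name) throws IOException
        {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(baos);
            dos.writeLong(position);      // position of the range in the data
            dos.writeInt(count);          // number of columns in the range
            dos.writeShort(name.length);  // 2-byte length prefix (as writeName does)
            dos.write(name);              // raw column name bytes
            return baos.toByteArray();
        }
    }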

Modified: incubator/cassandra/trunk/src/java/org/apache/cassandra/io/SSTableReader.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/io/SSTableReader.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/io/SSTableReader.java (original)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/io/SSTableReader.java Tue Jul 21 01:36:52 2009
@@ -23,7 +23,7 @@
 
 import org.apache.log4j.Logger;
 
-import org.apache.cassandra.db.RowMutation;
+import org.apache.cassandra.db.marshal.AbstractType;
 import org.apache.cassandra.dht.IPartitioner;
 import org.apache.cassandra.io.SequenceFile.ColumnGroupReader;
 import org.apache.cassandra.utils.BloomFilter;
@@ -285,7 +285,7 @@
         }
     }
 
-    public DataInputBuffer next(final String clientKey, String cfName, SortedSet<String> columnNames) throws IOException
+    public DataInputBuffer next(final String clientKey, String cfName, SortedSet<byte[]> columnNames) throws IOException
     {
         IFileReader dataReader = null;
         try
@@ -318,18 +318,10 @@
         }
     }
 
-    public DataInputBuffer next(String clientKey, String columnFamilyColumn) throws IOException
-    {
-        String[] values = RowMutation.getColumnAndColumnFamily(columnFamilyColumn);
-        String columnFamilyName = values[0];
-        SortedSet<String> columnNames = (values.length == 1) ? null : new TreeSet<String>(Arrays.asList(values[1]));
-        return next(clientKey, columnFamilyName, columnNames);
-    }
-
     /**
      * obtain a BlockReader for the getColumnSlice call.
      */
-    public ColumnGroupReader getColumnGroupReader(String key, String cfName, String startColumn, boolean isAscending) throws IOException
+    public ColumnGroupReader getColumnGroupReader(String key, String cfName, byte[] startColumn, boolean isAscending) throws IOException
     {
         IFileReader dataReader = SequenceFile.reader(dataFile);
 
@@ -338,7 +330,8 @@
             /* Morph key into actual key based on the partition type. */
             String decoratedKey = partitioner.decorateKey(key);
             long position = getPosition(decoratedKey, partitioner);
-            return new ColumnGroupReader(dataFile, decoratedKey, cfName, startColumn, isAscending, position);
+            AbstractType comparator = DatabaseDescriptor.getType(getTableName(), cfName);
+            return new ColumnGroupReader(dataFile, decoratedKey, cfName, comparator, startColumn, isAscending, position);
         }
         finally
         {
@@ -380,6 +373,11 @@
         return new FileStruct(this);
     }
 
+    public String getTableName()
+    {
+        return parseTableName(dataFile);
+    }
+
     public static void deleteAll() throws IOException
     {
         for (SSTableReader sstable : openedFiles.values())

Modified: incubator/cassandra/trunk/src/java/org/apache/cassandra/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/io/SequenceFile.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/io/SequenceFile.java (original)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/io/SequenceFile.java Tue Jul 21 01:36:52 2009
@@ -19,13 +19,14 @@
 package org.apache.cassandra.io;
 
 import java.io.*;
-import java.nio.ByteBuffer;
 import java.util.*;
 
 import org.apache.cassandra.config.DatabaseDescriptor;
 import org.apache.cassandra.utils.BloomFilter;
+import org.apache.cassandra.db.marshal.AbstractType;
 
 import org.apache.log4j.Logger;
+import org.apache.commons.lang.ArrayUtils;
 
 /**
  * This class writes key/value pairs sequentially to disk. It is
@@ -33,8 +34,6 @@
  * jump to random positions to read data from the file. This class
  * also has many implementations of the IFileWriter and IFileReader
  * interfaces which are exposed through factory methods.
- * <p/>
- * Author : Avinash Lakshman ( alakshman@facebook.com) & Prashant Malik ( pmalik@facebook.com ) & Karthik Ranganathan ( kranganathan@facebook.com )
  */
 
 public class SequenceFile
@@ -177,7 +176,6 @@
 
         public void close(byte[] footer, int size) throws IOException
         {
-            file_.writeUTF(SequenceFile.marker_);
             file_.writeInt(size);
             file_.write(footer, 0, size);
         }
@@ -230,7 +228,7 @@
         private String key_;
         private String cfName_;
         private String cfType_;
-        private int indexType_;
+        private AbstractType comparator_;
         private boolean isAscending_;
 
         private List<IndexHelper.ColumnIndexInfo> columnIndexList_;
@@ -240,10 +238,11 @@
         private int localDeletionTime_;
         private long markedForDeleteAt_;
 
-        ColumnGroupReader(String filename, String key, String cfName, String startColumn, boolean isAscending, long position) throws IOException
+        ColumnGroupReader(String filename, String key, String cfName, AbstractType comparator, byte[] startColumn, boolean isAscending, long position) throws IOException
         {
             super(filename, 128 * 1024);
             this.cfName_ = cfName;
+            this.comparator_ = comparator;
             this.key_ = key;
             this.isAscending_ = isAscending;
             init(startColumn, position);
@@ -257,7 +256,7 @@
             if (columnIndexList.size() == 0)
             {
                 /* if there is no column index, add an index entry that covers the full space. */
-                return Arrays.asList(new IndexHelper.ColumnIndexInfo[]{new IndexHelper.ColumnNameIndexInfo("", 0, totalNumCols)});
+                return Arrays.asList(new IndexHelper.ColumnIndexInfo(ArrayUtils.EMPTY_BYTE_ARRAY, 0, totalNumCols, comparator_));
             }
 
             List<IndexHelper.ColumnIndexInfo> fullColIndexList = new ArrayList<IndexHelper.ColumnIndexInfo>();
@@ -266,22 +265,24 @@
                 accumulatededCols += colPosInfo.count();
             int remainingCols = totalNumCols - accumulatededCols;
 
-            fullColIndexList.add(new IndexHelper.ColumnNameIndexInfo("", 0, columnIndexList.get(0).count()));
+            fullColIndexList.add(new IndexHelper.ColumnIndexInfo(ArrayUtils.EMPTY_BYTE_ARRAY, 0, columnIndexList.get(0).count(), comparator_));
             for (int i = 0; i < columnIndexList.size() - 1; i++)
             {
-                IndexHelper.ColumnNameIndexInfo colPosInfo = (IndexHelper.ColumnNameIndexInfo)columnIndexList.get(i);
-                fullColIndexList.add(new IndexHelper.ColumnNameIndexInfo(colPosInfo.name(),
-                                                                         colPosInfo.position(),
-                                                                         columnIndexList.get(i + 1).count()));
-            }
-            String columnName = ((IndexHelper.ColumnNameIndexInfo)columnIndexList.get(columnIndexList.size() - 1)).name();
-            fullColIndexList.add(new IndexHelper.ColumnNameIndexInfo(columnName,
-                                                                     columnIndexList.get(columnIndexList.size() - 1).position(),
-                                                                     remainingCols));
+                IndexHelper.ColumnIndexInfo colPosInfo = columnIndexList.get(i);
+                fullColIndexList.add(new IndexHelper.ColumnIndexInfo(colPosInfo.name(),
+                                                                     colPosInfo.position(),
+                                                                     columnIndexList.get(i + 1).count(),
+                                                                     comparator_));
+            }
+            byte[] columnName = columnIndexList.get(columnIndexList.size() - 1).name();
+            fullColIndexList.add(new IndexHelper.ColumnIndexInfo(columnName,
+                                                                 columnIndexList.get(columnIndexList.size() - 1).position(),
+                                                                 remainingCols,
+                                                                 comparator_));
             return fullColIndexList;
         }
 
-        private void init(String startColumn, long position) throws IOException
+        private void init(byte[] startColumn, long position) throws IOException
         {
             String keyInDisk = null;
             if (seekTo(position) >= 0)
@@ -307,16 +308,17 @@
                  * 2. calculate the size of all columns */
                 String cfName = file_.readUTF();
                 cfType_ = file_.readUTF();
-                indexType_ = file_.readInt();
+                String comparatorName = file_.readUTF();
+                assert comparatorName.equals(comparator_.getClass().getCanonicalName());
                 localDeletionTime_ = file_.readInt();
                 markedForDeleteAt_ = file_.readLong();
                 int totalNumCols = file_.readInt();
-                allColumnsSize_ = dataSize - (totalBytesRead + 2 * utfPrefix_ + cfName.length() + cfType_.length() + 4 + 4 + 8 + 4);
+                allColumnsSize_ = dataSize - (totalBytesRead + 3 * utfPrefix_ + cfName.length() + cfType_.length() + comparatorName.length() + 4 + 8 + 4);
 
                 columnStartPosition_ = file_.getFilePointer();
                 columnIndexList_ = getFullColumnIndexList(colIndexList, totalNumCols);
 
-                int index = Collections.binarySearch(columnIndexList_, new IndexHelper.ColumnNameIndexInfo(startColumn));
+                int index = Collections.binarySearch(columnIndexList_, new IndexHelper.ColumnIndexInfo(startColumn, 0, 0, comparator_));
                 curRangeIndex_ = index < 0 ? (++index) * (-1) - 1 : index;
             }
             else
@@ -345,7 +347,7 @@
             // write CF info
             bufOut.writeUTF(cfName_);
             bufOut.writeUTF(cfType_);
-            bufOut.writeInt(indexType_);
+            bufOut.writeUTF(comparator_.getClass().getCanonicalName());
             bufOut.writeInt(localDeletionTime_);
             bufOut.writeLong(markedForDeleteAt_);
             // now write the columns
@@ -433,34 +435,6 @@
         }
 
         /**
-         * Reads the column name indexes if present. If the
-         * indexes are based on time then skip over them.
-         *
-         * @param cfName
-         * @return
-         */
-        private int handleColumnTimeIndexes(String cfName, List<IndexHelper.ColumnIndexInfo> columnIndexList) throws IOException
-        {
-            /* check if we have an index */
-            boolean hasColumnIndexes = file_.readBoolean();
-            int totalBytesRead = 1;
-            /* if we do then deserialize the index */
-            if (hasColumnIndexes)
-            {
-                if (DatabaseDescriptor.isTimeSortingEnabled(null, cfName))
-                {
-                    /* read the index */
-                    totalBytesRead += IndexHelper.deserializeIndex(getTableName(), cfName, file_, columnIndexList);
-                }
-                else
-                {
-                    totalBytesRead += IndexHelper.skipIndex(file_);
-                }
-            }
-            return totalBytesRead;
-        }
-
-        /**
          * This method dumps the next key/value into the DataOuputStream
          * passed in. Always use this method to query for application
          * specific data as it will have indexes.
@@ -470,7 +444,7 @@
          * @param columnFamilyName name of the columnFamily
          * @param columnNames columnNames we are interested in
          */
-        public long next(String key, DataOutputBuffer bufOut, String columnFamilyName, SortedSet<String> columnNames, long position) throws IOException
+        public long next(String key, DataOutputBuffer bufOut, String columnFamilyName, SortedSet<byte[]> columnNames, long position) throws IOException
         {
             assert columnNames != null;
 
@@ -514,8 +488,8 @@
             return bytesRead;
         }
 
-        private void readColumns(String key, DataOutputBuffer bufOut, String columnFamilyName, SortedSet<String> cNames)
-                throws IOException
+        private void readColumns(String key, DataOutputBuffer bufOut, String columnFamilyName, SortedSet<byte[]> cNames)
+        throws IOException
         {
             int dataSize = file_.readInt();
 
@@ -556,8 +530,8 @@
                 String cfType = file_.readUTF();
                 dataSize -= (utfPrefix_ + cfType.length());
 
-                int indexType = file_.readInt();
-                dataSize -= 4;
+                String comparatorName = file_.readUTF();
+                dataSize -= (utfPrefix_ + comparatorName.length());
 
                 /* read local deletion time */
                 int localDeletionTime = file_.readInt();
@@ -590,7 +564,7 @@
                 // echo back the CF data we read
                 bufOut.writeUTF(cfName);
                 bufOut.writeUTF(cfType);
-                bufOut.writeInt(indexType);
+                bufOut.writeUTF(comparatorName);
                 bufOut.writeInt(localDeletionTime);
                 bufOut.writeLong(markedForDeleteAt);
                 /* write number of columns */
@@ -636,15 +610,6 @@
                 bytesRead = endPosition - startPosition;
             }
 
-            /*
-             * If we have read the bloom filter in the data
-             * file we know we are at the end of the file
-             * and no further key processing is required. So
-             * we return -1 indicating we are at the end of
-             * the file.
-            */
-            if (key.equals(SequenceFile.marker_))
-                bytesRead = -1L;
             return bytesRead;
         }
     }
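
The net effect on the per-row column family header read and echoed by SequenceFile is that the old int indexType field is replaced by the comparator's class name as a UTF string; the other fields are unchanged. A sketch of the resulting header layout, field names taken from the diff above, for illustration only:

    import java.io.DataOutputStream;
    import java.io.IOException;

    public class RowHeaderSketch
    {
        // Per-row column family header as written back after this change.
        public static void writeHeader(DataOutputStream out, String cfName, String cfType,
                                       String comparatorClassName, int localDeletionTime,
                                       long markedForDeleteAt, int columnCount) throws IOException
        {
            out.writeUTF(cfName);
            out.writeUTF(cfType);
            out.writeUTF(comparatorClassName); // e.g. "org.apache.cassandra.db.marshal.LongType"
            out.writeInt(localDeletionTime);
            out.writeLong(markedForDeleteAt);
            out.writeInt(columnCount);
        }
    }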

Modified: incubator/cassandra/trunk/src/java/org/apache/cassandra/service/CassandraDaemon.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/service/CassandraDaemon.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/service/CassandraDaemon.java (original)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/service/CassandraDaemon.java Tue Jul 21 01:36:52 2009
@@ -84,6 +84,7 @@
         Set<String> tables = DatabaseDescriptor.getTableToColumnFamilyMap().keySet();
         for (String table : tables)
         {
+            logger.debug("opening table " + table);
             Table tbl = Table.open(table);
             tbl.onStart();
         }

Modified: incubator/cassandra/trunk/src/java/org/apache/cassandra/service/CassandraServer.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/service/CassandraServer.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/service/CassandraServer.java (original)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/service/CassandraServer.java Tue Jul 21 01:36:52 2009
@@ -26,11 +26,14 @@
 
 import org.apache.log4j.Logger;
 
+import org.apache.commons.lang.ArrayUtils;
+
 import org.apache.cassandra.config.CFMetaData;
 import org.apache.cassandra.config.DatabaseDescriptor;
 import org.apache.cassandra.cql.common.CqlResult;
 import org.apache.cassandra.cql.driver.CqlDriver;
 import org.apache.cassandra.db.*;
+import org.apache.cassandra.db.marshal.MarshalException;
 import org.apache.cassandra.db.filter.QueryPath;
 import org.apache.cassandra.utils.LogUtil;
 import org.apache.cassandra.dht.OrderPreservingPartitioner;
@@ -121,19 +124,19 @@
     private List<Column> getSlice(ReadCommand command) throws InvalidRequestException
     {
         ColumnFamily cfamily = readColumnFamily(command);
-        if (cfamily == null || cfamily.getColumns().size() == 0)
+        if (cfamily == null || cfamily.getColumnsMap().size() == 0)
         {
             return EMPTY_COLUMNS;
         }
         if (cfamily.isSuper())
         {
-            IColumn column = cfamily.getColumns().values().iterator().next();
+            IColumn column = cfamily.getColumnsMap().values().iterator().next();
             return thriftifyColumns(column.getSubColumns());
         }
-        return thriftifyColumns(cfamily.getAllColumns());
+        return thriftifyColumns(cfamily.getSortedColumns());
     }
 
-    public List<Column> get_slice_by_names(String table, String key, ColumnParent column_parent, List<String> column_names)
+    public List<Column> get_slice_by_names(String table, String key, ColumnParent column_parent, List<byte[]> column_names)
     throws InvalidRequestException, NotFoundException
     {
         logger.debug("get_slice_by_names");
@@ -141,7 +144,7 @@
         return getSlice(new SliceByNamesReadCommand(table, key, column_parent, column_names));
     }
 
-    public List<Column> get_slice(String table, String key, ColumnParent column_parent, String start, String finish, boolean is_ascending, int count)
+    public List<Column> get_slice(String table, String key, ColumnParent column_parent, byte[] start, byte[] finish, boolean is_ascending, int count)
     throws InvalidRequestException, NotFoundException
     {
         logger.debug("get_slice_from");
@@ -151,8 +154,6 @@
             throw new InvalidRequestException("get_slice does not yet support super columns (we need to fix this)");
         if (count <= 0)
             throw new InvalidRequestException("get_slice requires positive count");
-        if (!"Name".equals(DatabaseDescriptor.getCFMetaData(table, column_parent.column_family).indexProperty_))
-            throw new InvalidRequestException("get_slice requires CF indexed by name");
 
         return getSlice(new SliceFromReadCommand(table, key, column_parent, start, finish, is_ascending, count));
     }
@@ -181,7 +182,7 @@
         }
         else
         {
-            columns = cfamily.getAllColumns();
+            columns = cfamily.getSortedColumns();
         }
         if (columns == null || columns.size() == 0)
         {
@@ -212,7 +213,7 @@
         }
 
         ColumnFamily cfamily;
-        cfamily = readColumnFamily(new SliceFromReadCommand(table, key, column_parent, "", "", true, Integer.MAX_VALUE));
+        cfamily = readColumnFamily(new SliceFromReadCommand(table, key, column_parent, ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.EMPTY_BYTE_ARRAY, true, Integer.MAX_VALUE));
         if (cfamily == null)
         {
             return 0;
@@ -228,7 +229,7 @@
         }
         else
         {
-            columns = cfamily.getAllColumns();
+            columns = cfamily.getSortedColumns();
         }
         if (columns == null || columns.size() == 0)
         {
@@ -245,7 +246,14 @@
         ThriftValidation.validateColumnPath(table, column_path);
 
         RowMutation rm = new RowMutation(table, key.trim());
-        rm.add(new QueryPath(column_path), value, timestamp);
+        try
+        {
+            rm.add(new QueryPath(column_path), value, timestamp);
+        }
+        catch (MarshalException e)
+        {
+            throw new InvalidRequestException(e.getMessage());
+        }
         doInsert(block_for, rm);
     }
 
@@ -284,7 +292,7 @@
         }
     }
 
-    public List<SuperColumn> get_slice_super_by_names(String table, String key, String column_family, List<String> super_column_names)
+    public List<SuperColumn> get_slice_super_by_names(String table, String key, String column_family, List<byte[]> super_column_names)
     throws InvalidRequestException
     {
         logger.debug("get_slice_super_by_names");
@@ -295,7 +303,7 @@
         {
             return EMPTY_SUPERCOLUMNS;
         }
-        return thriftifySuperColumns(cfamily.getAllColumns());
+        return thriftifySuperColumns(cfamily.getSortedColumns());
     }
 
     private List<SuperColumn> thriftifySuperColumns(Collection<IColumn> columns)
@@ -319,7 +327,7 @@
         return thriftSuperColumns;
     }
 
-    public List<SuperColumn> get_slice_super(String table, String key, String column_family, String start, String finish, boolean is_ascending, int count)
+    public List<SuperColumn> get_slice_super(String table, String key, String column_family, byte[] start, byte[] finish, boolean is_ascending, int count)
     throws InvalidRequestException
     {
         logger.debug("get_slice_super");
@@ -333,7 +341,7 @@
         {
             return EMPTY_SUPERCOLUMNS;
         }
-        Collection<IColumn> columns = cfamily.getAllColumns();
+        Collection<IColumn> columns = cfamily.getSortedColumns();
         return thriftifySuperColumns(columns);
     }
 
@@ -349,7 +357,7 @@
         {
             throw new NotFoundException();
         }
-        Collection<IColumn> columns = cfamily.getAllColumns();
+        Collection<IColumn> columns = cfamily.getSortedColumns();
         if (columns == null || columns.size() == 0)
         {
             throw new NotFoundException();
@@ -458,7 +466,7 @@
                 columnFamilyMetaData.n_rowKey + ", " + desc + ")";
 
             columnMap.put("desc", desc);
-            columnMap.put("sort", columnFamilyMetaData.indexProperty_);
+            columnMap.put("type", columnFamilyMetaData.comparator.getClass().getName());
             columnMap.put("flushperiod", columnFamilyMetaData.flushPeriodInMinutes + "");
             columnFamiliesMap.put(columnFamilyMetaData.cfName, columnMap);
         }

Modified: incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/BloomFilter.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/BloomFilter.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/BloomFilter.java (original)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/BloomFilter.java Tue Jul 21 01:36:52 2009
@@ -98,6 +98,14 @@
         }
     }
 
+    public void add(byte[] key)
+    {
+        for (int bucketIndex : getHashBuckets(key))
+        {
+            filter_.set(bucketIndex);
+        }
+    }
+
     public String toString()
     {
         return filter_.toString();

Modified: incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/FBUtilities.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/FBUtilities.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/FBUtilities.java (original)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/FBUtilities.java Tue Jul 21 01:36:52 2009
@@ -18,14 +18,7 @@
 
 package org.apache.cassandra.utils;
 
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.io.UnsupportedEncodingException;
+import java.io.*;
 import java.math.BigInteger;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
@@ -377,4 +370,18 @@
 
      	return length;
      }
+
+    public static void writeByteArray(byte[] bytes, DataOutput out) throws IOException
+    {
+        out.writeInt(bytes.length);
+        out.write(bytes);
+    }
+
+    public static byte[] readByteArray(DataInput in) throws IOException
+    {
+        int length = in.readInt();
+        byte[] bytes = new byte[length];
+        in.readFully(bytes);
+        return bytes;
+    }
 }
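
The new writeByteArray/readByteArray helpers frame a byte[] with a 4-byte length prefix. A minimal round-trip usage sketch (not part of the commit):

    import java.io.*;
    import java.util.Arrays;

    import org.apache.cassandra.utils.FBUtilities;

    public class ByteArrayRoundTrip
    {
        public static void main(String[] args) throws IOException
        {
            byte[] original = "column-name".getBytes();

            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            FBUtilities.writeByteArray(original, new DataOutputStream(baos));

            DataInput in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
            assert Arrays.equals(original, FBUtilities.readByteArray(in));
        }
    }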

Modified: incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/Filter.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/Filter.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/Filter.java (original)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/Filter.java Tue Jul 21 01:36:52 2009
@@ -39,6 +39,12 @@
         return Filter.getHashBuckets(key, hashCount, buckets());
     }
 
+    public int[] getHashBuckets(byte[] key)
+    {
+        return Filter.getHashBuckets(key, hashCount, buckets());
+    }
+
+
     abstract int buckets();
 
     public abstract void add(String key);
@@ -78,6 +84,11 @@
         {
             throw new RuntimeException(e);
         }
+        return getHashBuckets(b, hashCount, max);
+    }
+
+    static int[] getHashBuckets(byte[] b, int hashCount, int max)
+    {
         int[] result = new int[hashCount];
         int hash1 = hasher.hash(b, b.length, 0);
         int hash2 = hasher.hash(b, b.length, hash1);

Modified: incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/ReducingIterator.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/ReducingIterator.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/ReducingIterator.java (original)
+++ incubator/cassandra/trunk/src/java/org/apache/cassandra/utils/ReducingIterator.java Tue Jul 21 01:36:52 2009
@@ -24,12 +24,12 @@
     protected abstract T getReduced();
 
     /** override this if the keys you want to base the reduce on are not the same as the object itself (but can be generated from it) */
-    protected Object getKey(T o)
+    protected boolean isEqual(T o1, T o2)
     {
-        return o;
+        return o1.equals(o2);
     }
 
-     protected T computeNext()
+    protected T computeNext()
     {
         if (last == null && !source.hasNext())
             return endOfData();
@@ -45,7 +45,7 @@
                 break;
             }
             T current = source.next();
-            if (last != null && !getKey(current).equals(getKey(last)))
+            if (last != null && !isEqual(current, last))
                 keyChanged = true;
             last = current;
         }

Modified: incubator/cassandra/trunk/test/conf/storage-conf.xml
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/conf/storage-conf.xml?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/conf/storage-conf.xml (original)
+++ incubator/cassandra/trunk/test/conf/storage-conf.xml Tue Jul 21 01:36:52 2009
@@ -40,16 +40,16 @@
    <MemtableObjectCountInMillions>0.00002</MemtableObjectCountInMillions> <!-- 20 -->
    <Tables>
      <Table Name = "Table1">
-       <ColumnFamily ColumnSort="Name" Name="Standard1"/>
-       <ColumnFamily ColumnSort="Name" Name="Standard2"/>
-       <ColumnFamily ColumnSort="Time" Name="StandardByTime1"/>
-       <ColumnFamily ColumnSort="Time" Name="StandardByTime2"/>
+       <ColumnFamily Name="Standard1"/>
+       <ColumnFamily Name="Standard2"/>
+       <ColumnFamily CompareWith="LongType" Name="StandardByTime1"/>
+       <ColumnFamily CompareWith="LongType" Name="StandardByTime2"/>
        <ColumnFamily ColumnType="Super" Name="Super1"/>
        <ColumnFamily ColumnType="Super" Name="Super2"/>
      </Table>
      <Table Name = "Table2">
-       <ColumnFamily ColumnSort="Name" Name="Standard1"/>
-       <ColumnFamily ColumnSort="Name" Name="Standard3"/>
+       <ColumnFamily Name="Standard1"/>
+       <ColumnFamily Name="Standard3"/>
      </Table>
    </Tables>
    <Seeds>

Modified: incubator/cassandra/trunk/test/system/test_server.py
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/system/test_server.py?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/system/test_server.py (original)
+++ incubator/cassandra/trunk/test/system/test_server.py Tue Jul 21 01:36:52 2009
@@ -17,18 +17,22 @@
 # to run a single test, run from trunk/:
 # PYTHONPATH=test nosetests --tests=system.test_server:TestMutations.test_empty_range
 
-import os, sys, time
+import os, sys, time, struct
 
 from . import client, root, CassandraTester
 
 from thrift.Thrift import TApplicationException
 from ttypes import *
 
+
+def _i64(n):
+    return struct.pack('<q', n) # little endian, to match cassandra.db.marshal.LongType
+
 _SIMPLE_COLUMNS = [Column('c1', 'value1', 0),
                    Column('c2', 'value2', 0)]
-_SUPER_COLUMNS = [SuperColumn(name='sc1', columns=[Column('c4', 'value4', 0)]),
-                  SuperColumn(name='sc2', columns=[Column('c5', 'value5', 0),
-                                                   Column('c6', 'value6', 0)])]
+_SUPER_COLUMNS = [SuperColumn(name='sc1', columns=[Column(_i64(4), 'value4', 0)]),
+                  SuperColumn(name='sc2', columns=[Column(_i64(5), 'value5', 0),
+                                                   Column(_i64(6), 'value6', 0)])]
 
 def _insert_simple(block=True):
     client.insert('Table1', 'key1', ColumnPath('Standard1', column='c1'), 'value1', 0, block)
@@ -50,9 +54,9 @@
     assert L == _SIMPLE_COLUMNS, L
 
 def _insert_super():
-    client.insert('Table1', 'key1', ColumnPath('Super1', 'sc1', 'c4'), 'value4', 0, False)
-    client.insert('Table1', 'key1', ColumnPath('Super1', 'sc2', 'c5'), 'value5', 0, False)
-    client.insert('Table1', 'key1', ColumnPath('Super1', 'sc2', 'c6'), 'value6', 0, False)
+    client.insert('Table1', 'key1', ColumnPath('Super1', 'sc1', _i64(4)), 'value4', 0, False)
+    client.insert('Table1', 'key1', ColumnPath('Super1', 'sc2', _i64(5)), 'value5', 0, False)
+    client.insert('Table1', 'key1', ColumnPath('Super1', 'sc2', _i64(6)), 'value6', 0, False)
     time.sleep(0.1)
 
 def _insert_range():
@@ -75,7 +79,7 @@
 
 	 	
 def _verify_super(supercf='Super1'):
-    assert client.get_column('Table1', 'key1', ColumnPath(supercf, 'sc1', 'c4')) == Column('c4', 'value4', 0)
+    assert client.get_column('Table1', 'key1', ColumnPath(supercf, 'sc1', _i64(4))) == Column(_i64(4), 'value4', 0)
     slice = client.get_slice_super('Table1', 'key1', 'Super1', '', '', True, 1000)
     assert slice == _SUPER_COLUMNS, slice
 
@@ -203,35 +207,37 @@
         _insert_super()
 
         # Make sure remove clears out what it's supposed to, and _only_ that:
-        client.remove('Table1', 'key1', ColumnPathOrParent('Super1', 'sc2', 'c5'), 5, True)
-        _expect_missing(lambda: client.get_column('Table1', 'key1', ColumnPath('Super1', 'sc2', 'c5')))
+        client.remove('Table1', 'key1', ColumnPathOrParent('Super1', 'sc2', _i64(5)), 5, True)
+        _expect_missing(lambda: client.get_column('Table1', 'key1', ColumnPath('Super1', 'sc2', _i64(5))))
         assert client.get_slice_super('Table1', 'key1', 'Super1', '', '', True, 1000) == \
-            [SuperColumn(name='sc1', columns=[Column('c4', 'value4', 0)]),
-             SuperColumn(name='sc2', columns=[Column('c6', 'value6', 0)])]
+            [SuperColumn(name='sc1', columns=[Column(_i64(4), 'value4', 0)]),
+             SuperColumn(name='sc2', columns=[Column(_i64(6), 'value6', 0)])]
         _verify_simple()
 
         # New insert, make sure it shows up post-remove:
-        client.insert('Table1', 'key1', ColumnPath('Super1', 'sc2', 'c7'), 'value7', 0, True)
-        scs = [SuperColumn(name='sc1', columns=[Column('c4', 'value4', 0)]),
+        client.insert('Table1', 'key1', ColumnPath('Super1', 'sc2', _i64(7)), 'value7', 0, True)
+        scs = [SuperColumn(name='sc1', 
+                           columns=[Column(_i64(4), 'value4', 0)]),
                SuperColumn(name='sc2', 
-                             columns=[Column('c6', 'value6', 0), Column('c7', 'value7', 0)])]
+                           columns=[Column(_i64(6), 'value6', 0), Column(_i64(7), 'value7', 0)])]
 
-        assert client.get_slice_super('Table1', 'key1', 'Super1', '', '', True, 1000) == scs
+        actual = client.get_slice_super('Table1', 'key1', 'Super1', '', '', True, 1000)
+        assert client.get_slice_super('Table1', 'key1', 'Super1', '', '', True, 1000) == scs, actual
 
         # Test resurrection.  First, re-insert the value w/ older timestamp, 
         # and make sure it stays removed:
-        client.insert('Table1', 'key1', ColumnPath('Super1', 'sc2', 'c5'), 'value5', 0, True)
+        client.insert('Table1', 'key1', ColumnPath('Super1', 'sc2', _i64(5)), 'value5', 0, True)
         actual = client.get_slice_super('Table1', 'key1', 'Super1', '', '', True, 1000)
         assert actual == scs, actual
 
         # Next, w/ a newer timestamp; it should come back
-        client.insert('Table1', 'key1', ColumnPath('Super1', 'sc2', 'c5'), 'value5', 6, True)
+        client.insert('Table1', 'key1', ColumnPath('Super1', 'sc2', _i64(5)), 'value5', 6, True)
         actual = client.get_slice_super('Table1', 'key1', 'Super1', '', '', True, 1000)
         assert actual == \
-            [SuperColumn(name='sc1', columns=[Column('c4', 'value4', 0)]), 
-             SuperColumn(name='sc2', columns=[Column('c5', 'value5', 6), 
-                                              Column('c6', 'value6', 0), 
-                                              Column('c7', 'value7', 0)])], actual
+            [SuperColumn(name='sc1', columns=[Column(_i64(4), 'value4', 0)]), 
+             SuperColumn(name='sc2', columns=[Column(_i64(5), 'value5', 6), 
+                                              Column(_i64(6), 'value6', 0), 
+                                              Column(_i64(7), 'value7', 0)])], actual
 
     def test_super_cf_remove_supercolumn(self):
         _insert_simple()
@@ -239,10 +245,10 @@
 
         # Make sure remove clears out what it's supposed to, and _only_ that:
         client.remove('Table1', 'key1', ColumnPathOrParent('Super1', 'sc2'), 5, True)
-        _expect_missing(lambda: client.get_column('Table1', 'key1', ColumnPath('Super1', 'sc2', 'c5')))
-        actual = client.get_columns_since('Table1', 'key1', ColumnParent('Super1', 'sc2'), -1)
+        _expect_missing(lambda: client.get_column('Table1', 'key1', ColumnPath('Super1', 'sc2', _i64(5))))
+        actual = client.get_slice('Table1', 'key1', ColumnParent('Super1', 'sc2'), '', '', True, 1000)
         assert actual == [], actual
-        scs = [SuperColumn(name='sc1', columns=[Column('c4', 'value4', 0)])]
+        scs = [SuperColumn(name='sc1', columns=[Column(_i64(4), 'value4', 0)])]
         actual = client.get_slice_super('Table1', 'key1', 'Super1', '', '', True, 1000)
         assert actual == scs, actual
         _verify_simple()
@@ -318,6 +324,6 @@
         assert result[1].name == 'c2'
 
         _insert_super()
-        result = client.get_slice_by_names('Table1','key1', ColumnParent('Super1', 'sc1'), ['c4']) 
+        result = client.get_slice_by_names('Table1','key1', ColumnParent('Super1', 'sc1'), [_i64(4)]) 
         assert len(result) == 1
-        assert result[0].name == 'c4'
+        assert result[0].name == _i64(4)

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/Util.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/Util.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/Util.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/Util.java Tue Jul 21 01:36:52 2009
@@ -1,11 +1,28 @@
 package org.apache.cassandra;
 
+import java.nio.ByteBuffer;
+
 import org.apache.cassandra.db.Column;
+import org.apache.cassandra.db.RowMutation;
+import org.apache.cassandra.db.filter.QueryPath;
 
 public class Util
 {
     public static Column column(String name, String value, long timestamp)
     {
-        return new Column(name, value.getBytes(), timestamp);
+        return new Column(name.getBytes(), value.getBytes(), timestamp);
+    }
+
+    public static void addMutation(RowMutation rm, String columnFamilyName, String superColumnName, long columnName, String value, long timestamp)
+    {
+        rm.add(new QueryPath(columnFamilyName, superColumnName.getBytes(), getBytes(columnName)), value.getBytes(), timestamp);
+    }
+
+    public static byte[] getBytes(long v)
+    {
+        byte[] bytes = new byte[8];
+        ByteBuffer bb = ByteBuffer.wrap(bytes);
+        bb.putLong(v);
+        return bytes;
     }
 }

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ColumnFamilyStoreTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ColumnFamilyStoreTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ColumnFamilyStoreTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ColumnFamilyStoreTest.java Tue Jul 21 01:36:52 2009
@@ -109,15 +109,15 @@
 
         // add data
         rm = new RowMutation("Table1", "key1");
-        rm.add(new QueryPath("Standard1", null, "Column1"), "asdf".getBytes(), 0);
-        rm.add(new QueryPath("Standard1", null, "Column2"), "asdf".getBytes(), 0);
+        rm.add(new QueryPath("Standard1", null, "Column1".getBytes()), "asdf".getBytes(), 0);
+        rm.add(new QueryPath("Standard1", null, "Column2".getBytes()), "asdf".getBytes(), 0);
         rm.apply();
         store.forceBlockingFlush();
 
         List<SSTableReader> ssTables = table.getAllSSTablesOnDisk();
         assertEquals(1, ssTables.size());
         ssTables.get(0).forceBloomFilterFailures();
-        ColumnFamily cf = store.getColumnFamily(new IdentityQueryFilter("key2", new QueryPath("Standard1", null, "Column1")));
+        ColumnFamily cf = store.getColumnFamily(new IdentityQueryFilter("key2", new QueryPath("Standard1", null, "Column1".getBytes())));
         assertNull(cf);
     }
 }

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ColumnFamilyTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ColumnFamilyTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ColumnFamilyTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ColumnFamilyTest.java Tue Jul 21 01:36:52 2009
@@ -38,13 +38,10 @@
     @Test
     public void testSingleColumn() throws IOException
     {
-        Random random = new Random();
-        byte[] bytes = new byte[1024];
-        random.nextBytes(bytes);
         ColumnFamily cf;
 
         cf = ColumnFamily.create("Table1", "Standard1");
-        cf.addColumn(QueryPath.column("C"), bytes, 1);
+        cf.addColumn(column("C", "v", 1));
         DataOutputBuffer bufOut = new DataOutputBuffer();
         ColumnFamily.serializer().serialize(cf, bufOut);
 
@@ -53,7 +50,7 @@
         cf = ColumnFamily.serializer().deserialize(bufIn);
         assert cf != null;
         assert cf.name().equals("Standard1");
-        assert cf.getAllColumns().size() == 1;
+        assert cf.getSortedColumns().size() == 1;
     }
 
     @Test
@@ -61,10 +58,10 @@
     {
         ColumnFamily cf;
 
-        TreeMap<String, byte[]> map = new TreeMap<String, byte[]>();
+        TreeMap<String, String> map = new TreeMap<String, String>();
         for (int i = 100; i < 1000; ++i)
         {
-            map.put(Integer.toString(i), ("Avinash Lakshman is a good man: " + i).getBytes());
+            map.put(Integer.toString(i), "Avinash Lakshman is a good man: " + i);
         }
 
         // write
@@ -72,7 +69,7 @@
         DataOutputBuffer bufOut = new DataOutputBuffer();
         for (String cName : map.navigableKeySet())
         {
-            cf.addColumn(QueryPath.column(cName), map.get(cName), 314);
+            cf.addColumn(column(cName, map.get(cName), 314));
         }
         ColumnFamily.serializer().serialize(cf, bufOut);
 
@@ -82,9 +79,9 @@
         cf = ColumnFamily.serializer().deserialize(bufIn);
         for (String cName : map.navigableKeySet())
         {
-            assert Arrays.equals(cf.getColumn(cName).value(), map.get(cName));
+            assert new String(cf.getColumn(cName.getBytes()).value()).equals(map.get(cName));
         }
-        assert new HashSet<String>(cf.getColumns().keySet()).equals(map.keySet());
+        assert cf.getColumnNames().size() == map.size();
     }
 
     @Test
@@ -97,7 +94,7 @@
         cf.addColumn(column("col1", "", 3));
 
         assert 2 == cf.getColumnCount();
-        assert 2 == cf.getAllColumns().size();
+        assert 2 == cf.getSortedColumns().size();
     }
 
     @Test
@@ -109,7 +106,7 @@
         cf.addColumn(column("col1", "val2", 2)); // same timestamp, new value
         cf.addColumn(column("col1", "val3", 1)); // older timestamp -- should be ignored
 
-        assert Arrays.equals("val2".getBytes(), cf.getColumn("col1").value());
+        assert Arrays.equals("val2".getBytes(), cf.getColumn("col1".getBytes()).value());
     }
 
     @Test
@@ -122,18 +119,18 @@
         byte val2[] = "x value ".getBytes();
 
         // exercise addColumn(QueryPath, ...)
-        cf_new.addColumn(QueryPath.column("col1"), val, 3);
-        cf_new.addColumn(QueryPath.column("col2"), val, 4);
+        cf_new.addColumn(QueryPath.column("col1".getBytes()), val, 3);
+        cf_new.addColumn(QueryPath.column("col2".getBytes()), val, 4);
 
-        cf_old.addColumn(QueryPath.column("col2"), val2, 1);
-        cf_old.addColumn(QueryPath.column("col3"), val2, 2);
+        cf_old.addColumn(QueryPath.column("col2".getBytes()), val2, 1);
+        cf_old.addColumn(QueryPath.column("col3".getBytes()), val2, 2);
 
         cf_result.addColumns(cf_new);
         cf_result.addColumns(cf_old);
 
         assert 3 == cf_result.getColumnCount() : "Count is " + cf_new.getColumnCount();
         //addcolumns will only add if timestamp >= old timestamp
-        assert Arrays.equals(val, cf_result.getColumn("col2").value());
+        assert Arrays.equals(val, cf_result.getColumn("col2".getBytes()).value());
     }
 
     @Test
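
ColumnFamilyTest now builds its test columns through a statically imported column(name, value, timestamp) helper, presumably the same org.apache.cassandra.Util.column that SuperColumnTest imports further down. Its implementation is not part of this diff; a minimal sketch, assuming it does nothing more than the String-to-byte[] conversion the assertions above rely on:

    // hypothetical sketch of org.apache.cassandra.Util.column -- the real helper is not shown in this patch
    public static Column column(String name, String value, long timestamp)
    {
        return new Column(name.getBytes(), value.getBytes(), timestamp);
    }

Under that assumption, cf.addColumn(column("C", "v", 1)) adds a column named "C".getBytes() with value "v".getBytes() at timestamp 1.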

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/CommitLogTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/CommitLogTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/CommitLogTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/CommitLogTest.java Tue Jul 21 01:36:52 2009
@@ -44,8 +44,8 @@
         for (int i = 0; i < 10; i++)
         {
             rm = new RowMutation("Table1", "key1");
-            rm.add(new QueryPath("Standard1", null, "Column1"), value, 0);
-            rm.add(new QueryPath("Standard2", null, "Column1"), value, 0);
+            rm.add(new QueryPath("Standard1", null, "Column1".getBytes()), value, 0);
+            rm.add(new QueryPath("Standard2", null, "Column1".getBytes()), value, 0);
             rm.apply();
         }
         assert CommitLog.getSegmentCount() > 1;

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/CompactionsTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/CompactionsTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/CompactionsTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/CompactionsTest.java Tue Jul 21 01:36:52 2009
@@ -47,7 +47,7 @@
             for (int i = 0; i < ROWS_PER_SSTABLE; i++) {
                 String key = String.valueOf(i % 2);
                 RowMutation rm = new RowMutation("Table1", key);
-                rm.add(new QueryPath("Standard1", null, String.valueOf(i / 2)), new byte[0], j * ROWS_PER_SSTABLE + i);
+                rm.add(new QueryPath("Standard1", null, String.valueOf(i / 2).getBytes()), new byte[0], j * ROWS_PER_SSTABLE + i);
                 rm.apply();
                 inserted.add(key);
             }

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/NameSortTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/NameSortTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/NameSortTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/NameSortTest.java Tue Jul 21 01:36:52 2009
@@ -22,11 +22,15 @@
 import java.util.concurrent.ExecutionException;
 import java.util.Collection;
 import java.util.Arrays;
+import java.nio.ByteBuffer;
 
 import org.junit.Test;
 
 import org.apache.cassandra.CleanupHelper;
+import static org.apache.cassandra.Util.addMutation;
 import org.apache.cassandra.db.filter.QueryPath;
+import org.apache.cassandra.db.filter.IdentityQueryFilter;
+import static junit.framework.Assert.assertEquals;
 
 public class NameSortTest extends CleanupHelper
 {
@@ -66,20 +70,20 @@
             {
                 byte[] bytes = j % 2 == 0 ? "a".getBytes() : "b".getBytes();
                 rm = new RowMutation("Table1", key);
-                rm.add(new QueryPath("Standard1", null, "Column-" + j), bytes, j);
+                rm.add(new QueryPath("Standard1", null, ("Column-" + j).getBytes()), bytes, j);
                 rm.apply();
             }
 
             // super
             for (int j = 0; j < 8; ++j)
             {
+                rm = new RowMutation("Table1", key);
                 for (int k = 0; k < 4; ++k)
                 {
-                    byte[] bytes = (j + k) % 2 == 0 ? "a".getBytes() : "b".getBytes();
-                    rm = new RowMutation("Table1", key);
-                    rm.add(new QueryPath("Super1", "SuperColumn-" + j, "Column-" + k), bytes, k);
-                    rm.apply();
+                    String value = (j + k) % 2 == 0 ? "a" : "b";
+                    addMutation(rm, "Super1", "SuperColumn-" + j, k, value, k);
                 }
+                rm.apply();
             }
         }
 
@@ -98,26 +102,26 @@
             ColumnFamily cf;
 
             cf = table.get(key, "Standard1");
-            Collection<IColumn> columns = cf.getAllColumns();
+            Collection<IColumn> columns = cf.getSortedColumns();
             for (IColumn column : columns)
             {
-                int j = Integer.valueOf(column.name().split("-")[1]);
+                int j = Integer.valueOf(new String(column.name()).split("-")[1]);
                 byte[] bytes = j % 2 == 0 ? "a".getBytes() : "b".getBytes();
                 assert Arrays.equals(bytes, column.value());
             }
 
             cf = table.get(key, "Super1");
             assert cf != null : "key " + key + " is missing!";
-            Collection<IColumn> superColumns = cf.getAllColumns();
-            assert superColumns.size() == 8;
+            Collection<IColumn> superColumns = cf.getSortedColumns();
+            assert superColumns.size() == 8 : cf;
             for (IColumn superColumn : superColumns)
             {
-                int j = Integer.valueOf(superColumn.name().split("-")[1]);
+                int j = Integer.valueOf(new String(superColumn.name()).split("-")[1]);
                 Collection<IColumn> subColumns = superColumn.getSubColumns();
                 assert subColumns.size() == 4;
                 for (IColumn subColumn : subColumns)
                 {
-                    int k = Integer.valueOf(subColumn.name().split("-")[1]);
+                    long k = ByteBuffer.wrap(subColumn.name()).getLong();
                     byte[] bytes = (j + k) % 2 == 0 ? "a".getBytes() : "b".getBytes();
                     assert Arrays.equals(bytes, subColumn.value());
                 }
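
NameSortTest's super-column writes now go through Util.addMutation (statically imported in the hunk above), and subcolumn names are read back as longs via ByteBuffer.wrap(subColumn.name()).getLong(). RemoveSubColumnTest and RemoveSuperColumnTest below use the same addMutation helper, and those tests plus SuperColumnTest also call Util.getBytes. Neither helper's body appears in this diff; a sketch consistent with how the tests call them, assuming the subcolumn name is the 8-byte big-endian encoding of the column index:

    // hypothetical sketches of the org.apache.cassandra.Util helpers referenced by these tests
    public static byte[] getBytes(long n)
    {
        byte[] bytes = new byte[8];
        ByteBuffer.wrap(bytes).putLong(n);  // big-endian, so ByteBuffer.wrap(name).getLong() recovers n on read
        return bytes;
    }

    public static void addMutation(RowMutation rm, String columnFamily, String superColumn, long columnIndex, String value, long timestamp)
    {
        rm.add(new QueryPath(columnFamily, superColumn.getBytes(), getBytes(columnIndex)), value.getBytes(), timestamp);
    }

With this shape, addMutation(rm, "Super1", "SuperColumn-" + j, k, value, k) inserts subcolumn getBytes(k) with value "a" or "b" at timestamp k, which is what the loop above asserts when it reads the row back.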

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/OneCompactionTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/OneCompactionTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/OneCompactionTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/OneCompactionTest.java Tue Jul 21 01:36:52 2009
@@ -41,7 +41,7 @@
         for (int j = 0; j < insertsPerTable; j++) {
             String key = "0";
             RowMutation rm = new RowMutation("Table1", key);
-            rm.add(new QueryPath(columnFamilyName, null, "0"), new byte[0], j);
+            rm.add(new QueryPath(columnFamilyName, null, "0".getBytes()), new byte[0], j);
             rm.apply();
             inserted.add(key);
             store.forceBlockingFlush();

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ReadMessageTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ReadMessageTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ReadMessageTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/ReadMessageTest.java Tue Jul 21 01:36:52 2009
@@ -24,55 +24,48 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 
+import org.apache.commons.lang.ArrayUtils;
 import org.junit.Assert;
 import org.junit.Test;
 
 import org.apache.cassandra.io.DataInputBuffer;
 import org.apache.cassandra.io.DataOutputBuffer;
 import org.apache.cassandra.db.filter.QueryPath;
+import org.apache.cassandra.db.marshal.AsciiType;
 
 public class ReadMessageTest
 {
     @Test
-    public void testMakeReadMessage()
+    public void testMakeReadMessage() throws IOException
     {
-        ArrayList<String> colList = new ArrayList<String>();
-        colList.add("col1");
-        colList.add("col2");
+        ArrayList<byte[]> colList = new ArrayList<byte[]>();
+        colList.add("col1".getBytes());
+        colList.add("col2".getBytes());
         
         ReadCommand rm, rm2;
         
-        rm = new SliceByNamesReadCommand("Table1", "row1", new QueryPath("foo"), colList);
+        rm = new SliceByNamesReadCommand("Table1", "row1", new QueryPath("Standard1"), colList);
         rm2 = serializeAndDeserializeReadMessage(rm);
         assert rm2.toString().equals(rm.toString());
 
-        rm = new SliceFromReadCommand("Table1", "row1", new QueryPath("foo"), "", "", true, 2);
+        rm = new SliceFromReadCommand("Table1", "row1", new QueryPath("Standard1"), ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.EMPTY_BYTE_ARRAY, true, 2);
         rm2 = serializeAndDeserializeReadMessage(rm);
         assert rm2.toString().equals(rm.toString());
         
-        rm = new SliceFromReadCommand("Table1", "row1", new QueryPath("foo"), "a", "z", true, 5);
+        rm = new SliceFromReadCommand("Table1", "row1", new QueryPath("Standard1"), "a".getBytes(), "z".getBytes(), true, 5);
         rm2 = serializeAndDeserializeReadMessage(rm);
         assertEquals(rm2.toString(), rm.toString());
     }
 
-    private ReadCommand serializeAndDeserializeReadMessage(ReadCommand rm)
+    private ReadCommand serializeAndDeserializeReadMessage(ReadCommand rm) throws IOException
     {
-        ReadCommand rm2 = null;
         ReadCommandSerializer rms = ReadCommand.serializer();
         DataOutputBuffer dos = new DataOutputBuffer();
         DataInputBuffer dis = new DataInputBuffer();
 
-        try
-        {
-            rms.serialize(rm, dos);
-            dis.reset(dos.getData(), dos.getLength());
-            rm2 = rms.deserialize(dis);
-        }
-        catch (IOException e)
-        {
-            throw new RuntimeException(e);
-        }
-        return rm2;
+        rms.serialize(rm, dos);
+        dis.reset(dos.getData(), dos.getLength());
+        return rms.deserialize(dis);
     }
     
     @Test
@@ -83,13 +76,13 @@
 
         // add data
         rm = new RowMutation("Table1", "key1");
-        rm.add(new QueryPath("Standard1", null, "Column1"), "abcd".getBytes(), 0);
+        rm.add(new QueryPath("Standard1", null, "Column1".getBytes()), "abcd".getBytes(), 0);
         rm.apply();
 
-        ReadCommand command = new SliceByNamesReadCommand("Table1", "key1", new QueryPath("Standard1"), Arrays.asList("Column1"));
+        ReadCommand command = new SliceByNamesReadCommand("Table1", "key1", new QueryPath("Standard1"), Arrays.asList("Column1".getBytes()));
         Row row = command.getRow(table);
         ColumnFamily cf = row.getColumnFamily("Standard1");
-        IColumn col = cf.getColumn("Column1");
-        assert Arrays.equals(((Column)col).value(), "abcd".getBytes());  
+        IColumn col = cf.getColumn("Column1".getBytes());
+        assert Arrays.equals(col.value(), "abcd".getBytes());  
     }
 }

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyTest.java Tue Jul 21 01:36:52 2009
@@ -38,7 +38,7 @@
 
         // add data
         rm = new RowMutation("Table1", "key1");
-        rm.add(new QueryPath("Standard1", null, "Column1"), "asdf".getBytes(), 0);
+        rm.add(new QueryPath("Standard1", null, "Column1".getBytes()), "asdf".getBytes(), 0);
         rm.apply();
 
         // remove
@@ -46,9 +46,9 @@
         rm.delete(new QueryPath("Standard1"), 1);
         rm.apply();
 
-        ColumnFamily retrieved = store.getColumnFamily(new IdentityQueryFilter("key1", new QueryPath("Standard1", null, "Column1")));
+        ColumnFamily retrieved = store.getColumnFamily(new IdentityQueryFilter("key1", new QueryPath("Standard1", null, "Column1".getBytes())));
         assert retrieved.isMarkedForDelete();
-        assertNull(retrieved.getColumn("Column1"));
+        assertNull(retrieved.getColumn("Column1".getBytes()));
         assertNull(ColumnFamilyStore.removeDeleted(retrieved, Integer.MAX_VALUE));
     }
 }
\ No newline at end of file

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyWithFlush1Test.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyWithFlush1Test.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyWithFlush1Test.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyWithFlush1Test.java Tue Jul 21 01:36:52 2009
@@ -38,8 +38,8 @@
 
         // add data
         rm = new RowMutation("Table1", "key1");
-        rm.add(new QueryPath("Standard1", null, "Column1"), "asdf".getBytes(), 0);
-        rm.add(new QueryPath("Standard1", null, "Column2"), "asdf".getBytes(), 0);
+        rm.add(new QueryPath("Standard1", null, "Column1".getBytes()), "asdf".getBytes(), 0);
+        rm.add(new QueryPath("Standard1", null, "Column2".getBytes()), "asdf".getBytes(), 0);
         rm.apply();
         store.forceBlockingFlush();
 
@@ -50,7 +50,7 @@
 
         ColumnFamily retrieved = store.getColumnFamily(new IdentityQueryFilter("key1", new QueryPath("Standard1")));
         assert retrieved.isMarkedForDelete();
-        assertNull(retrieved.getColumn("Column1"));
+        assertNull(retrieved.getColumn("Column1".getBytes()));
         assertNull(ColumnFamilyStore.removeDeleted(retrieved, Integer.MAX_VALUE));
     }
 }
\ No newline at end of file

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyWithFlush2Test.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyWithFlush2Test.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyWithFlush2Test.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnFamilyWithFlush2Test.java Tue Jul 21 01:36:52 2009
@@ -38,7 +38,7 @@
 
         // add data
         rm = new RowMutation("Table1", "key1");
-        rm.add(new QueryPath("Standard1", null, "Column1"), "asdf".getBytes(), 0);
+        rm.add(new QueryPath("Standard1", null, "Column1".getBytes()), "asdf".getBytes(), 0);
         rm.apply();
         // remove
         rm = new RowMutation("Table1", "key1");
@@ -46,9 +46,9 @@
         rm.apply();
         store.forceBlockingFlush();
 
-        ColumnFamily retrieved = store.getColumnFamily(new IdentityQueryFilter("key1", new QueryPath("Standard1", null, "Column1")));
+        ColumnFamily retrieved = store.getColumnFamily(new IdentityQueryFilter("key1", new QueryPath("Standard1", null, "Column1".getBytes())));
         assert retrieved.isMarkedForDelete();
-        assertNull(retrieved.getColumn("Column1"));
+        assertNull(retrieved.getColumn("Column1".getBytes()));
         assertNull(ColumnFamilyStore.removeDeleted(retrieved, Integer.MAX_VALUE));
     }
 }
\ No newline at end of file

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveColumnTest.java Tue Jul 21 01:36:52 2009
@@ -39,17 +39,17 @@
 
         // add data
         rm = new RowMutation("Table1", "key1");
-        rm.add(new QueryPath("Standard1", null, "Column1"), "asdf".getBytes(), 0);
+        rm.add(new QueryPath("Standard1", null, "Column1".getBytes()), "asdf".getBytes(), 0);
         rm.apply();
         store.forceBlockingFlush();
 
         // remove
         rm = new RowMutation("Table1", "key1");
-        rm.delete(new QueryPath("Standard1", null, "Column1"), 1);
+        rm.delete(new QueryPath("Standard1", null, "Column1".getBytes()), 1);
         rm.apply();
 
-        ColumnFamily retrieved = store.getColumnFamily(new NamesQueryFilter("key1", new QueryPath("Standard1"), "Column1"));
-        assert retrieved.getColumn("Column1").isMarkedForDelete();
+        ColumnFamily retrieved = store.getColumnFamily(new NamesQueryFilter("key1", new QueryPath("Standard1"), "Column1".getBytes()));
+        assert retrieved.getColumn("Column1".getBytes()).isMarkedForDelete();
         assertNull(ColumnFamilyStore.removeDeleted(retrieved, Integer.MAX_VALUE));
         assertNull(ColumnFamilyStore.removeDeleted(store.getColumnFamily(new IdentityQueryFilter("key1", new QueryPath("Standard1"))), Integer.MAX_VALUE));
     }

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveSubColumnTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveSubColumnTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveSubColumnTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveSubColumnTest.java Tue Jul 21 01:36:52 2009
@@ -26,6 +26,8 @@
 import static junit.framework.Assert.assertNull;
 import org.apache.cassandra.db.filter.IdentityQueryFilter;
 import org.apache.cassandra.db.filter.QueryPath;
+import static org.apache.cassandra.Util.addMutation;
+import static org.apache.cassandra.Util.getBytes;
 
 public class RemoveSubColumnTest
 {
@@ -38,17 +40,17 @@
 
         // add data
         rm = new RowMutation("Table1", "key1");
-        rm.add(new QueryPath("Super1", "SC1", "Column1"), "asdf".getBytes(), 0);
+        addMutation(rm, "Super1", "SC1", 1, "asdf", 0);
         rm.apply();
         store.forceBlockingFlush();
 
         // remove
         rm = new RowMutation("Table1", "key1");
-        rm.delete(new QueryPath("Super1", "SC1", "Column1"), 1);
+        rm.delete(new QueryPath("Super1", "SC1".getBytes(), getBytes(1)), 1);
         rm.apply();
 
-        ColumnFamily retrieved = store.getColumnFamily(new IdentityQueryFilter("key1", new QueryPath("Super1", "SC1")));
-        assert retrieved.getColumn("SC1").getSubColumn("Column1").isMarkedForDelete();
+        ColumnFamily retrieved = store.getColumnFamily(new IdentityQueryFilter("key1", new QueryPath("Super1", "SC1".getBytes())));
+        assert retrieved.getColumn("SC1".getBytes()).getSubColumn(getBytes(1)).isMarkedForDelete();
         assertNull(ColumnFamilyStore.removeDeleted(retrieved, Integer.MAX_VALUE));
     }
 }
\ No newline at end of file

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveSuperColumnTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveSuperColumnTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveSuperColumnTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RemoveSuperColumnTest.java Tue Jul 21 01:36:52 2009
@@ -21,10 +21,7 @@
 import java.io.IOException;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
-import java.util.List;
 import java.util.Collection;
-import java.util.Arrays;
-import java.util.TreeSet;
 
 import org.junit.Test;
 import static org.junit.Assert.assertNull;
@@ -33,6 +30,8 @@
 import org.apache.cassandra.db.filter.IdentityQueryFilter;
 import org.apache.cassandra.db.filter.NamesQueryFilter;
 import org.apache.cassandra.db.filter.QueryPath;
+import static org.apache.cassandra.Util.addMutation;
+import static org.apache.cassandra.Util.getBytes;
 
 public class RemoveSuperColumnTest
 {
@@ -44,13 +43,13 @@
 
         // add data
         rm = new RowMutation("Table1", "key1");
-        rm.add(new QueryPath("Super1", "SC1", "Column1"), "asdf".getBytes(), 0);
+        addMutation(rm, "Super1", "SC1", 1, "val1", 0);
         rm.apply();
         store.forceBlockingFlush();
 
         // remove
         rm = new RowMutation("Table1", "key1");
-        rm.delete(new QueryPath("Super1", "SC1"), 1);
+        rm.delete(new QueryPath("Super1", "SC1".getBytes()), 1);
         rm.apply();
 
         validateRemoveTwoSources();
@@ -67,9 +66,9 @@
     private void validateRemoveTwoSources() throws IOException
     {
         ColumnFamilyStore store = Table.open("Table1").getColumnFamilyStore("Super1");
-        ColumnFamily resolved = store.getColumnFamily(new NamesQueryFilter("key1", new QueryPath("Super1"), "SC1"));
-        assert resolved.getAllColumns().first().getMarkedForDeleteAt() == 1;
-        assert resolved.getAllColumns().first().getSubColumns().size() == 0;
+        ColumnFamily resolved = store.getColumnFamily(new NamesQueryFilter("key1", new QueryPath("Super1"), "SC1".getBytes()));
+        assert resolved.getSortedColumns().iterator().next().getMarkedForDeleteAt() == 1;
+        assert resolved.getSortedColumns().iterator().next().getSubColumns().size() == 0;
         assertNull(ColumnFamilyStore.removeDeleted(resolved, Integer.MAX_VALUE));
         assertNull(ColumnFamilyStore.removeDeleted(store.getColumnFamily(new IdentityQueryFilter("key1", new QueryPath("Super1"))), Integer.MAX_VALUE));
     }
@@ -77,9 +76,9 @@
     private void validateRemoveCompacted() throws IOException
     {
         ColumnFamilyStore store = Table.open("Table1").getColumnFamilyStore("Super1");
-        ColumnFamily resolved = store.getColumnFamily(new NamesQueryFilter("key1", new QueryPath("Super1"), "SC1"));
-        assert resolved.getAllColumns().first().getMarkedForDeleteAt() == 1;
-        Collection<IColumn> subColumns = resolved.getAllColumns().first().getSubColumns();
+        ColumnFamily resolved = store.getColumnFamily(new NamesQueryFilter("key1", new QueryPath("Super1"), "SC1".getBytes()));
+        assert resolved.getSortedColumns().iterator().next().getMarkedForDeleteAt() == 1;
+        Collection<IColumn> subColumns = resolved.getSortedColumns().iterator().next().getSubColumns();
         assert subColumns.size() == 0;
     }
 
@@ -91,18 +90,18 @@
 
         // add data
         rm = new RowMutation("Table1", "key1");
-        rm.add(new QueryPath("Super2", "SC1", "Column1"), "asdf".getBytes(), 0);
+        addMutation(rm, "Super2", "SC1", 1, "val1", 0);
         rm.apply();
         store.forceBlockingFlush();
 
         // remove
         rm = new RowMutation("Table1", "key1");
-        rm.delete(new QueryPath("Super2", "SC1"), 1);
+        rm.delete(new QueryPath("Super2", "SC1".getBytes()), 1);
         rm.apply();
 
         // new data
         rm = new RowMutation("Table1", "key1");
-        rm.add(new QueryPath("Super2", "SC1", "Column2"), "asdf".getBytes(), 2);
+        addMutation(rm, "Super2", "SC1", 2, "val2", 2);
         rm.apply();
 
         validateRemoveWithNewData();
@@ -119,14 +118,10 @@
     private void validateRemoveWithNewData() throws IOException
     {
         ColumnFamilyStore store = Table.open("Table1").getColumnFamilyStore("Super2");
-        ColumnFamily resolved = store.getColumnFamily(new NamesQueryFilter("key1", new QueryPath("Super2", "SC1"), "Column2"));
-        validateNewDataFamily(resolved);
-    }
-
-    private void validateNewDataFamily(ColumnFamily resolved)
-    {
-        Collection<IColumn> subColumns = resolved.getAllColumns().first().getSubColumns();
+        ColumnFamily resolved = store.getColumnFamily(new NamesQueryFilter("key1", new QueryPath("Super2", "SC1".getBytes()), getBytes(2)));
+        Collection<IColumn> subColumns = resolved.getSortedColumns().iterator().next().getSubColumns();
         assert subColumns.size() == 1;
         assert subColumns.iterator().next().timestamp() == 2;
     }
+
 }

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RowTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RowTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RowTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/RowTest.java Tue Jul 21 01:36:52 2009
@@ -38,17 +38,17 @@
         cf2.delete(0, 0);
 
         ColumnFamily cfDiff = cf1.diff(cf2);
-        assertEquals(cfDiff.getColumns().size(), 0);
+        assertEquals(cfDiff.getColumnsMap().size(), 0);
         assertEquals(cfDiff.getMarkedForDeleteAt(), 0);
     }
 
     @Test
     public void testDiffSuperColumn()
     {
-        SuperColumn sc1 = new SuperColumn("one");
+        SuperColumn sc1 = new SuperColumn("one".getBytes());
         sc1.addColumn(column("subcolumn", "A", 0));
 
-        SuperColumn sc2 = new SuperColumn("one");
+        SuperColumn sc2 = new SuperColumn("one".getBytes());
         sc2.markForDeleteAt(0, 0);
 
         SuperColumn scDiff = (SuperColumn)sc1.diff(sc2);
@@ -68,15 +68,15 @@
         ColumnFamily cf2 = ColumnFamily.create("Table1", "Standard1");
         cf2.addColumn(column("one", "B", 1));
         cf2.addColumn(column("two", "C", 1));
-        ColumnFamily cf3 = ColumnFamily.create("Table2", "Standard2");
+        ColumnFamily cf3 = ColumnFamily.create("Table2", "Standard3");
         cf3.addColumn(column("three", "D", 1));
         row2.addColumnFamily(cf2);
         row2.addColumnFamily(cf3);
 
         row1.repair(row2);
         cf1 = row1.getColumnFamily("Standard1");
-        assert Arrays.equals(cf1.getColumn("one").value(), "B".getBytes());
-        assert Arrays.equals(cf2.getColumn("two").value(), "C".getBytes());
-        assert row1.getColumnFamily("Standard2") != null;
+        assert Arrays.equals(cf1.getColumn("one".getBytes()).value(), "B".getBytes());
+        assert Arrays.equals(cf2.getColumn("two".getBytes()).value(), "C".getBytes());
+        assert row1.getColumnFamily("Standard3") != null;
     }
 }

Modified: incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/SuperColumnTest.java
URL: http://svn.apache.org/viewvc/incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/SuperColumnTest.java?rev=796108&r1=796107&r2=796108&view=diff
==============================================================================
--- incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/SuperColumnTest.java (original)
+++ incubator/cassandra/trunk/test/unit/org/apache/cassandra/db/SuperColumnTest.java Tue Jul 21 01:36:52 2009
@@ -23,14 +23,15 @@
 import static junit.framework.Assert.assertNotNull;
 import static junit.framework.Assert.assertNull;
 import static org.apache.cassandra.Util.column;
+import static org.apache.cassandra.Util.getBytes;
 
 public class SuperColumnTest
 {   
     @Test
     public void testMissingSubcolumn() {
-    	SuperColumn sc = new SuperColumn("sc1");
-    	sc.addColumn(column("col1","sample value",1L));
-    	assertNotNull(sc.getSubColumn("col1"));
-    	assertNull(sc.getSubColumn("col2"));
+    	SuperColumn sc = new SuperColumn("sc1".getBytes());
+    	sc.addColumn(new Column(getBytes(1), "value".getBytes(), 1));
+    	assertNotNull(sc.getSubColumn(getBytes(1)));
+    	assertNull(sc.getSubColumn(getBytes(2)));
     }
 }


