cassandra-commits mailing list archives

From: slebre...@apache.org
Subject: [01/11] cassandra git commit: Remove pre-3.0 compatibility code for 4.0
Date: Wed, 30 Nov 2016 09:49:54 GMT
Repository: cassandra
Updated Branches:
  refs/heads/trunk 3fabc3350 -> 4a2464192


http://git-wip-us.apache.org/repos/asf/cassandra/blob/4a246419/test/unit/org/apache/cassandra/schema/LegacySchemaMigratorTest.java
----------------------------------------------------------------------
diff --git a/test/unit/org/apache/cassandra/schema/LegacySchemaMigratorTest.java b/test/unit/org/apache/cassandra/schema/LegacySchemaMigratorTest.java
deleted file mode 100644
index 239a90d..0000000
--- a/test/unit/org/apache/cassandra/schema/LegacySchemaMigratorTest.java
+++ /dev/null
@@ -1,845 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.cassandra.schema;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.*;
-import java.util.stream.Collectors;
-
-import com.google.common.collect.ImmutableList;
-import org.junit.Test;
-
-import org.apache.cassandra.SchemaLoader;
-import org.apache.cassandra.config.CFMetaData;
-import org.apache.cassandra.config.ColumnDefinition;
-import org.apache.cassandra.config.Schema;
-import org.apache.cassandra.config.SchemaConstants;
-import org.apache.cassandra.cql3.CQLTester;
-import org.apache.cassandra.cql3.ColumnIdentifier;
-import org.apache.cassandra.cql3.FieldIdentifier;
-import org.apache.cassandra.cql3.functions.*;
-import org.apache.cassandra.db.*;
-import org.apache.cassandra.db.rows.Row;
-import org.apache.cassandra.db.marshal.*;
-import org.apache.cassandra.index.TargetParser;
-import org.apache.cassandra.thrift.ThriftConversion;
-import org.apache.cassandra.utils.*;
-
-import static java.lang.String.format;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertFalse;
-import static junit.framework.Assert.assertTrue;
-import static org.apache.cassandra.cql3.QueryProcessor.executeOnceInternal;
-import static org.apache.cassandra.utils.ByteBufferUtil.bytes;
-import static org.apache.cassandra.utils.FBUtilities.json;
-
-@SuppressWarnings("deprecation")
-public class LegacySchemaMigratorTest
-{
-    private static final long TIMESTAMP = 1435908994000000L;
-
-    private static final String KEYSPACE_PREFIX = "LegacySchemaMigratorTest";
-
-    /*
-     * 1. Write a variety of different keyspaces/tables/types/function in the legacy manner, using legacy schema tables
-     * 2. Run the migrator
-     * 3. Read all the keyspaces from the new schema tables
-     * 4. Make sure that we've read *exactly* the same set of keyspaces/tables/types/functions
-     * 5. Validate that the legacy schema tables are now empty
-     */
-    @Test
-    public void testMigrate() throws IOException
-    {
-        CQLTester.cleanupAndLeaveDirs();
-
-        Keyspaces expected = keyspacesToMigrate();
-
-        // write the keyspaces into the legacy tables
-        expected.forEach(LegacySchemaMigratorTest::legacySerializeKeyspace);
-
-        // run the migration
-        LegacySchemaMigrator.migrate();
-
-        // read back all the metadata from the new schema tables
-        Keyspaces actual = SchemaKeyspace.fetchNonSystemKeyspaces();
-
-        // need to load back CFMetaData of those tables (CFS instances will still be loaded)
-        loadLegacySchemaTables();
-
-        // verify that nothing's left in the old schema tables
-        for (CFMetaData table : LegacySchemaMigrator.LegacySchemaTables)
-        {
-            String query = format("SELECT * FROM %s.%s", SchemaConstants.SYSTEM_KEYSPACE_NAME, table.cfName);
-            //noinspection ConstantConditions
-            assertTrue(executeOnceInternal(query).isEmpty());
-        }
-
-        // make sure that we've read *exactly* the same set of keyspaces/tables/types/functions
-        assertEquals(expected.diff(actual).toString(), expected, actual);
-
-        // check that the build status of all indexes has been updated to use the new
-        // format of index name: the index_name column of system.IndexInfo used to
-        // contain table_name.index_name. Now it should contain just the index_name.
-        expected.forEach(LegacySchemaMigratorTest::verifyIndexBuildStatus);
-    }
-
-    private static FieldIdentifier field(String field)
-    {
-        return FieldIdentifier.forQuoted(field);
-    }
-
-    private static void loadLegacySchemaTables()
-    {
-        KeyspaceMetadata systemKeyspace = Schema.instance.getKSMetaData(SchemaConstants.SYSTEM_KEYSPACE_NAME);
-
-        Tables systemTables = systemKeyspace.tables;
-        for (CFMetaData table : LegacySchemaMigrator.LegacySchemaTables)
-            systemTables = systemTables.with(table);
-
-        LegacySchemaMigrator.LegacySchemaTables.forEach(Schema.instance::load);
-
-        Schema.instance.setKeyspaceMetadata(systemKeyspace.withSwapped(systemTables));
-    }
-
-    private static Keyspaces keyspacesToMigrate()
-    {
-        Keyspaces.Builder keyspaces = Keyspaces.builder();
-
-        // A whole bucket of shorthand
-        String ks1 = KEYSPACE_PREFIX + "Keyspace1";
-        String ks2 = KEYSPACE_PREFIX + "Keyspace2";
-        String ks3 = KEYSPACE_PREFIX + "Keyspace3";
-        String ks4 = KEYSPACE_PREFIX + "Keyspace4";
-        String ks5 = KEYSPACE_PREFIX + "Keyspace5";
-        String ks6 = KEYSPACE_PREFIX + "Keyspace6";
-        String ks_rcs = KEYSPACE_PREFIX + "RowCacheSpace";
-        String ks_nocommit = KEYSPACE_PREFIX + "NoCommitlogSpace";
-        String ks_prsi = KEYSPACE_PREFIX + "PerRowSecondaryIndex";
-        String ks_cql = KEYSPACE_PREFIX + "cql_keyspace";
-
-        // Make it easy to test compaction
-        Map<String, String> compactionOptions = new HashMap<>();
-        compactionOptions.put("tombstone_compaction_interval", "1");
-
-        Map<String, String> leveledOptions = new HashMap<>();
-        leveledOptions.put("sstable_size_in_mb", "1");
-
-        keyspaces.add(KeyspaceMetadata.create(ks1,
-                                              KeyspaceParams.simple(1),
-                                              Tables.of(SchemaLoader.standardCFMD(ks1, "Standard1")
-                                                                    .compaction(CompactionParams.scts(compactionOptions)),
-                                                        SchemaLoader.standardCFMD(ks1, "StandardGCGS0").gcGraceSeconds(0),
-                                                        SchemaLoader.standardCFMD(ks1, "StandardLong1"),
-                                                        SchemaLoader.superCFMD(ks1, "Super1", LongType.instance),
-                                                        SchemaLoader.superCFMD(ks1, "Super2", UTF8Type.instance),
-                                                        SchemaLoader.superCFMD(ks1, "Super5", BytesType.instance),
-                                                        SchemaLoader.superCFMD(ks1, "Super6", LexicalUUIDType.instance, UTF8Type.instance),
-                                                        SchemaLoader.keysIndexCFMD(ks1, "Indexed1", true),
-                                                        SchemaLoader.keysIndexCFMD(ks1, "Indexed2", false),
-                                                        SchemaLoader.superCFMD(ks1, "SuperDirectGC", BytesType.instance)
-                                                                    .gcGraceSeconds(0),
-                                                        SchemaLoader.jdbcCFMD(ks1, "JdbcUtf8", UTF8Type.instance)
-                                                                    .addColumnDefinition(SchemaLoader.utf8Column(ks1, "JdbcUtf8")),
-                                                        SchemaLoader.jdbcCFMD(ks1, "JdbcLong", LongType.instance),
-                                                        SchemaLoader.jdbcCFMD(ks1, "JdbcBytes", BytesType.instance),
-                                                        SchemaLoader.jdbcCFMD(ks1, "JdbcAscii", AsciiType.instance),
-                                                        SchemaLoader.standardCFMD(ks1, "StandardLeveled")
-                                                                    .compaction(CompactionParams.lcs(leveledOptions)),
-                                                        SchemaLoader.standardCFMD(ks1, "legacyleveled")
-                                                                    .compaction(CompactionParams.lcs(leveledOptions)),
-                                                        SchemaLoader.standardCFMD(ks1, "StandardLowIndexInterval")
-                                                                    .minIndexInterval(8)
-                                                                    .maxIndexInterval(256)
-                                                                    .caching(CachingParams.CACHE_NOTHING))));
-
-        // Keyspace 2
-        keyspaces.add(KeyspaceMetadata.create(ks2,
-                                              KeyspaceParams.simple(1),
-                                              Tables.of(SchemaLoader.standardCFMD(ks2, "Standard1"),
-                                                        SchemaLoader.superCFMD(ks2, "Super3", BytesType.instance),
-                                                        SchemaLoader.superCFMD(ks2, "Super4", TimeUUIDType.instance),
-                                                        SchemaLoader.keysIndexCFMD(ks2, "Indexed1", true),
-                                                        SchemaLoader.compositeIndexCFMD(ks2, "Indexed2", true),
-                                                        SchemaLoader.compositeIndexCFMD(ks2, "Indexed3", true)
-                                                                    .gcGraceSeconds(0))));
-
-        // Keyspace 3
-        keyspaces.add(KeyspaceMetadata.create(ks3,
-                                              KeyspaceParams.simple(5),
-                                              Tables.of(SchemaLoader.standardCFMD(ks3, "Standard1"),
-                                                        SchemaLoader.keysIndexCFMD(ks3, "Indexed1", true))));
-
-        // Keyspace 4
-        keyspaces.add(KeyspaceMetadata.create(ks4,
-                                              KeyspaceParams.simple(3),
-                                              Tables.of(SchemaLoader.standardCFMD(ks4, "Standard1"),
-                                                        SchemaLoader.superCFMD(ks4, "Super3", BytesType.instance),
-                                                        SchemaLoader.superCFMD(ks4, "Super4", TimeUUIDType.instance),
-                                                        SchemaLoader.superCFMD(ks4, "Super5", TimeUUIDType.instance, BytesType.instance))));
-
-        // Keyspace 5
-        keyspaces.add(KeyspaceMetadata.create(ks5,
-                                              KeyspaceParams.simple(2),
-                                              Tables.of(SchemaLoader.standardCFMD(ks5, "Standard1"))));
-
-        // Keyspace 6
-        keyspaces.add(KeyspaceMetadata.create(ks6,
-                                              KeyspaceParams.simple(1),
-                                              Tables.of(SchemaLoader.keysIndexCFMD(ks6, "Indexed1", true))));
-
-        // RowCacheSpace
-        keyspaces.add(KeyspaceMetadata.create(ks_rcs,
-                                              KeyspaceParams.simple(1),
-                                              Tables.of(SchemaLoader.standardCFMD(ks_rcs, "CFWithoutCache")
-                                                                    .caching(CachingParams.CACHE_NOTHING),
-                                                        SchemaLoader.standardCFMD(ks_rcs, "CachedCF")
-                                                                    .caching(CachingParams.CACHE_EVERYTHING),
-                                                        SchemaLoader.standardCFMD(ks_rcs, "CachedIntCF")
-                                                                    .caching(new CachingParams(true, 100)))));
-
-        keyspaces.add(KeyspaceMetadata.create(ks_nocommit,
-                                              KeyspaceParams.simpleTransient(1),
-                                              Tables.of(SchemaLoader.standardCFMD(ks_nocommit, "Standard1"))));
-
-        // PerRowSecondaryIndexTest
-        keyspaces.add(KeyspaceMetadata.create(ks_prsi,
-                                              KeyspaceParams.simple(1),
-                                              Tables.of(SchemaLoader.perRowIndexedCFMD(ks_prsi, "Indexed1"))));
-
-        // CQLKeyspace
-        keyspaces.add(KeyspaceMetadata.create(ks_cql,
-                                              KeyspaceParams.simple(1),
-                                              Tables.of(CFMetaData.compile("CREATE TABLE table1 ("
-                                                                           + "k int PRIMARY KEY,"
-                                                                           + "v1 text,"
-                                                                           + "v2 int"
-                                                                           + ')', ks_cql),
-
-                                                        CFMetaData.compile("CREATE TABLE table2 ("
-                                                                           + "k text,"
-                                                                           + "c text,"
-                                                                           + "v text,"
-                                                                           + "PRIMARY KEY (k, c))", ks_cql),
-
-                                                        CFMetaData.compile("CREATE TABLE foo ("
-                                                                           + "bar text, "
-                                                                           + "baz text, "
-                                                                           + "qux text, "
-                                                                           + "PRIMARY KEY(bar, baz) ) "
-                                                                           + "WITH COMPACT STORAGE", ks_cql),
-
-                                                        CFMetaData.compile("CREATE TABLE compact_pkonly ("
-                                                                           + "k int, "
-                                                                           + "c int, "
-                                                                           + "PRIMARY KEY (k, c)) "
-                                                                           + "WITH COMPACT STORAGE",
-                                                                           ks_cql),
-
-                                                        CFMetaData.compile("CREATE TABLE foofoo ("
-                                                                           + "bar text, "
-                                                                           + "baz text, "
-                                                                           + "qux text, "
-                                                                           + "quz text, "
-                                                                           + "foo text, "
-                                                                           + "PRIMARY KEY((bar, baz), qux, quz) ) "
-                                                                           + "WITH COMPACT STORAGE", ks_cql))));
-
-        // NTS keyspace
-        keyspaces.add(KeyspaceMetadata.create("nts", KeyspaceParams.nts("dc1", 1, "dc2", 2)));
-
-        keyspaces.add(keyspaceWithDroppedCollections());
-        keyspaces.add(keyspaceWithTriggers());
-        keyspaces.add(keyspaceWithUDTs());
-        keyspaces.add(keyspaceWithUDFs());
-        keyspaces.add(keyspaceWithUDFsAndUDTs());
-        keyspaces.add(keyspaceWithUDAs());
-        keyspaces.add(keyspaceWithUDAsAndUDTs());
-
-        return keyspaces.build();
-    }
-
-    private static KeyspaceMetadata keyspaceWithDroppedCollections()
-    {
-        String keyspace = KEYSPACE_PREFIX + "DroppedCollections";
-
-        CFMetaData table =
-            CFMetaData.compile("CREATE TABLE dropped_columns ("
-                               + "foo text,"
-                               + "bar text,"
-                               + "map1 map<text, text>,"
-                               + "map2 map<int, int>,"
-                               + "set1 set<ascii>,"
-                               + "list1 list<blob>,"
-                               + "PRIMARY KEY ((foo), bar))",
-                               keyspace);
-
-        String[] collectionColumnNames = { "map1", "map2", "set1", "list1" };
-        for (String name : collectionColumnNames)
-        {
-            ColumnDefinition column = table.getColumnDefinition(bytes(name));
-            table.recordColumnDrop(column, FBUtilities.timestampMicros());
-            table.removeColumnDefinition(column);
-        }
-
-        return KeyspaceMetadata.create(keyspace, KeyspaceParams.simple(1), Tables.of(table));
-    }
-
-    private static KeyspaceMetadata keyspaceWithTriggers()
-    {
-        String keyspace = KEYSPACE_PREFIX + "Triggers";
-
-        Triggers.Builder triggers = Triggers.builder();
-        CFMetaData table = SchemaLoader.standardCFMD(keyspace, "WithTriggers");
-        for (int i = 0; i < 10; i++)
-            triggers.add(new TriggerMetadata("trigger" + i, "DummyTrigger" + i));
-        table.triggers(triggers.build());
-
-        return KeyspaceMetadata.create(keyspace, KeyspaceParams.simple(1), Tables.of(table));
-    }
-
-    private static KeyspaceMetadata keyspaceWithUDTs()
-    {
-        String keyspace = KEYSPACE_PREFIX + "UDTs";
-
-        UserType udt1 = new UserType(keyspace,
-                                     bytes("udt1"),
-                                     new ArrayList<FieldIdentifier>() {{ add(field("col1")); add(field("col2")); }},
-                                     new ArrayList<AbstractType<?>>() {{ add(UTF8Type.instance); add(Int32Type.instance); }},
-                                     true);
-
-        UserType udt2 = new UserType(keyspace,
-                                     bytes("udt2"),
-                                     new ArrayList<FieldIdentifier>() {{ add(field("col3")); add(field("col4")); }},
-                                     new ArrayList<AbstractType<?>>() {{ add(BytesType.instance); add(BooleanType.instance); }},
-                                     true);
-
-        UserType udt3 = new UserType(keyspace,
-                                     bytes("udt3"),
-                                     new ArrayList<FieldIdentifier>() {{ add(field("col5")); }},
-                                     new ArrayList<AbstractType<?>>() {{ add(AsciiType.instance); }},
-                                     true);
-
-        return KeyspaceMetadata.create(keyspace,
-                                       KeyspaceParams.simple(1),
-                                       Tables.none(),
-                                       Views.none(),
-                                       Types.of(udt1, udt2, udt3),
-                                       Functions.none());
-    }
-
-    private static KeyspaceMetadata keyspaceWithUDFs()
-    {
-        String keyspace = KEYSPACE_PREFIX + "UDFs";
-
-        UDFunction udf1 = UDFunction.create(new FunctionName(keyspace, "udf"),
-                                            ImmutableList.of(new ColumnIdentifier("col1", false), new ColumnIdentifier("col2", false)),
-                                            ImmutableList.of(BytesType.instance, Int32Type.instance),
-                                            LongType.instance,
-                                            false,
-                                            "java",
-                                            "return 42L;");
-
-        // an overload with the same name, not a typo
-        UDFunction udf2 = UDFunction.create(new FunctionName(keyspace, "udf"),
-                                            ImmutableList.of(new ColumnIdentifier("col3", false), new ColumnIdentifier("col4", false)),
-                                            ImmutableList.of(AsciiType.instance, LongType.instance),
-                                            Int32Type.instance,
-                                            true,
-                                            "java",
-                                            "return 42;");
-
-        UDFunction udf3 = UDFunction.create(new FunctionName(keyspace, "udf3"),
-                                            ImmutableList.of(new ColumnIdentifier("col4", false)),
-                                            ImmutableList.of(UTF8Type.instance),
-                                            BooleanType.instance,
-                                            false,
-                                            "java",
-                                            "return true;");
-
-        return KeyspaceMetadata.create(keyspace,
-                                       KeyspaceParams.simple(1),
-                                       Tables.none(),
-                                       Views.none(),
-                                       Types.none(),
-                                       Functions.of(udf1, udf2, udf3));
-    }
-
-    private static KeyspaceMetadata keyspaceWithUDAs()
-    {
-        String keyspace = KEYSPACE_PREFIX + "UDAs";
-
-        UDFunction udf1 = UDFunction.create(new FunctionName(keyspace, "udf1"),
-                                            ImmutableList.of(new ColumnIdentifier("col1", false), new ColumnIdentifier("col2", false)),
-                                            ImmutableList.of(Int32Type.instance, Int32Type.instance),
-                                            Int32Type.instance,
-                                            false,
-                                            "java",
-                                            "return 42;");
-
-        UDFunction udf2 = UDFunction.create(new FunctionName(keyspace, "udf2"),
-                                            ImmutableList.of(new ColumnIdentifier("col1", false), new ColumnIdentifier("col2", false)),
-                                            ImmutableList.of(LongType.instance, Int32Type.instance),
-                                            LongType.instance,
-                                            false,
-                                            "java",
-                                            "return 42L;");
-
-        UDFunction udf3 = UDFunction.create(new FunctionName(keyspace, "udf3"),
-                                            ImmutableList.of(new ColumnIdentifier("col1", false)),
-                                            ImmutableList.of(LongType.instance),
-                                            DoubleType.instance,
-                                            false,
-                                            "java",
-                                            "return 42d;");
-
-        Functions udfs = Functions.builder().add(udf1).add(udf2).add(udf3).build();
-
-        UDAggregate uda1 = UDAggregate.create(udfs, new FunctionName(keyspace, "uda1"),
-                                              ImmutableList.of(udf1.argTypes().get(1)),
-                                              udf1.returnType(),
-                                              udf1.name(),
-                                              null,
-                                              udf1.argTypes().get(0),
-                                              null
-        );
-
-        UDAggregate uda2 = UDAggregate.create(udfs, new FunctionName(keyspace, "uda2"),
-                                              ImmutableList.of(udf2.argTypes().get(1)),
-                                              udf3.returnType(),
-                                              udf2.name(),
-                                              udf3.name(),
-                                              udf2.argTypes().get(0),
-                                              LongType.instance.decompose(0L)
-        );
-
-        return KeyspaceMetadata.create(keyspace,
-                                       KeyspaceParams.simple(1),
-                                       Tables.none(),
-                                       Views.none(),
-                                       Types.none(),
-                                       Functions.of(udf1, udf2, udf3, uda1, uda2));
-    }
-
-    private static KeyspaceMetadata keyspaceWithUDFsAndUDTs()
-    {
-        String keyspace = KEYSPACE_PREFIX + "UDFUDTs";
-
-        UserType udt1 = new UserType(keyspace,
-                                     bytes("udt1"),
-                                     new ArrayList<FieldIdentifier>() {{ add(field("col1")); add(field("col2")); }},
-                                     new ArrayList<AbstractType<?>>() {{ add(UTF8Type.instance); add(Int32Type.instance); }},
-                                     true);
-
-        UserType udt2 = new UserType(keyspace,
-                                     bytes("udt2"),
-                                     new ArrayList<FieldIdentifier>() {{ add(field("col1")); add(field("col2")); }},
-                                     new ArrayList<AbstractType<?>>() {{ add(ListType.getInstance(udt1, false)); add(Int32Type.instance); }},
-                                     true);
-
-        UDFunction udf1 = UDFunction.create(new FunctionName(keyspace, "udf"),
-                                            ImmutableList.of(new ColumnIdentifier("col1", false), new ColumnIdentifier("col2", false)),
-                                            ImmutableList.of(udt1, udt2),
-                                            LongType.instance,
-                                            false,
-                                            "java",
-                                            "return 42L;");
-
-        // an overload with the same name, not a typo
-        UDFunction udf2 = UDFunction.create(new FunctionName(keyspace, "udf"),
-                                            ImmutableList.of(new ColumnIdentifier("col3", false), new ColumnIdentifier("col4", false)),
-                                            ImmutableList.of(AsciiType.instance, LongType.instance),
-                                            Int32Type.instance,
-                                            true,
-                                            "java",
-                                            "return 42;");
-
-        UDFunction udf3 = UDFunction.create(new FunctionName(keyspace, "udf3"),
-                                            ImmutableList.of(new ColumnIdentifier("col4", false)),
-                                            ImmutableList.of(new TupleType(Arrays.asList(udt1, udt2))),
-                                            BooleanType.instance,
-                                            false,
-                                            "java",
-                                            "return true;");
-
-        return KeyspaceMetadata.create(keyspace,
-                                       KeyspaceParams.simple(1),
-                                       Tables.none(),
-                                       Views.none(),
-                                       Types.of(udt1, udt2),
-                                       Functions.of(udf1, udf2, udf3));
-    }
-
-    private static KeyspaceMetadata keyspaceWithUDAsAndUDTs()
-    {
-        String keyspace = KEYSPACE_PREFIX + "UDAUDTs";
-
-        UserType udt1 = new UserType(keyspace,
-                                     bytes("udt1"),
-                                     new ArrayList<FieldIdentifier>() {{ add(field("col1")); add(field("col2")); }},
-                                     new ArrayList<AbstractType<?>>() {{ add(UTF8Type.instance); add(Int32Type.instance); }},
-                                     true);
-
-        UserType udt2 = new UserType(keyspace,
-                                     bytes("udt2"),
-                                     new ArrayList<FieldIdentifier>() {{ add(field("col1")); add(field("col2")); }},
-                                     new ArrayList<AbstractType<?>>() {{ add(ListType.getInstance(udt1, false)); add(Int32Type.instance); }},
-                                     true);
-
-        UDFunction udf1 = UDFunction.create(new FunctionName(keyspace, "udf1"),
-                                            ImmutableList.of(new ColumnIdentifier("col1", false), new ColumnIdentifier("col2", false)),
-                                            ImmutableList.of(udt1, udt2),
-                                            udt1,
-                                            false,
-                                            "java",
-                                            "return null;");
-
-        UDFunction udf2 = UDFunction.create(new FunctionName(keyspace, "udf2"),
-                                            ImmutableList.of(new ColumnIdentifier("col1", false), new ColumnIdentifier("col2", false)),
-                                            ImmutableList.of(udt2, udt1),
-                                            udt2,
-                                            false,
-                                            "java",
-                                            "return null;");
-
-        UDFunction udf3 = UDFunction.create(new FunctionName(keyspace, "udf3"),
-                                            ImmutableList.of(new ColumnIdentifier("col1", false)),
-                                            ImmutableList.of(udt2),
-                                            DoubleType.instance,
-                                            false,
-                                            "java",
-                                            "return 42d;");
-
-        Functions udfs = Functions.builder().add(udf1).add(udf2).add(udf3).build();
-
-        UDAggregate uda1 = UDAggregate.create(udfs, new FunctionName(keyspace, "uda1"),
-                                              ImmutableList.of(udf1.argTypes().get(1)),
-                                              udf1.returnType(),
-                                              udf1.name(),
-                                              null,
-                                              udf1.argTypes().get(0),
-                                              null
-        );
-
-        ByteBuffer twoNullEntries = ByteBuffer.allocate(8);
-        twoNullEntries.putInt(-1);
-        twoNullEntries.putInt(-1);
-        twoNullEntries.flip();
-        UDAggregate uda2 = UDAggregate.create(udfs, new FunctionName(keyspace, "uda2"),
-                                              ImmutableList.of(udf2.argTypes().get(1)),
-                                              udf3.returnType(),
-                                              udf2.name(),
-                                              udf3.name(),
-                                              udf2.argTypes().get(0),
-                                              twoNullEntries
-        );
-
-        return KeyspaceMetadata.create(keyspace,
-                                       KeyspaceParams.simple(1),
-                                       Tables.none(),
-                                       Views.none(),
-                                       Types.of(udt1, udt2),
-                                       Functions.of(udf1, udf2, udf3, uda1, uda2));
-    }
-
-    /*
-     * Serializing keyspaces
-     */
-
-    private static void legacySerializeKeyspace(KeyspaceMetadata keyspace)
-    {
-        makeLegacyCreateKeyspaceMutation(keyspace, TIMESTAMP).apply();
-        setLegacyIndexStatus(keyspace);
-    }
-
-    private static DecoratedKey decorate(CFMetaData metadata, Object value)
-    {
-        return metadata.decorateKey(((AbstractType)metadata.getKeyValidator()).decompose(value));
-    }
-
-    private static Mutation makeLegacyCreateKeyspaceMutation(KeyspaceMetadata keyspace, long timestamp)
-    {
-        Mutation.SimpleBuilder builder = Mutation.simpleBuilder(SchemaConstants.SYSTEM_KEYSPACE_NAME, decorate(SystemKeyspace.LegacyKeyspaces, keyspace.name))
-                                                 .timestamp(timestamp);
-
-        builder.update(SystemKeyspace.LegacyKeyspaces)
-               .row()
-               .add("durable_writes", keyspace.params.durableWrites)
-               .add("strategy_class", keyspace.params.replication.klass.getName())
-               .add("strategy_options", json(keyspace.params.replication.options));
-
-        keyspace.tables.forEach(table -> addTableToSchemaMutation(table, true, builder));
-        keyspace.types.forEach(type -> addTypeToSchemaMutation(type, builder));
-        keyspace.functions.udfs().forEach(udf -> addFunctionToSchemaMutation(udf, builder));
-        keyspace.functions.udas().forEach(uda -> addAggregateToSchemaMutation(uda, builder));
-
-        return builder.build();
-    }
-
-    /*
-     * Serializing tables
-     */
-
-    private static void addTableToSchemaMutation(CFMetaData table, boolean withColumnsAndTriggers, Mutation.SimpleBuilder builder)
-    {
-        // For property that can be null (and can be changed), we insert tombstones, to make sure
-        // we don't keep a property the user has removed
-        Row.SimpleBuilder adder = builder.update(SystemKeyspace.LegacyColumnfamilies)
-                                         .row(table.cfName);
-
-        adder.add("cf_id", table.cfId)
-             .add("type", table.isSuper() ? "Super" : "Standard");
-
-        if (table.isSuper())
-        {
-            adder.add("comparator", table.comparator.subtype(0).toString())
-                 .add("subcomparator", ((MapType)table.compactValueColumn().type).getKeysType().toString());
-        }
-        else
-        {
-            adder.add("comparator", LegacyLayout.makeLegacyComparator(table).toString());
-        }
-
-        adder.add("bloom_filter_fp_chance", table.params.bloomFilterFpChance)
-             .add("caching", cachingToString(table.params.caching))
-             .add("comment", table.params.comment)
-             .add("compaction_strategy_class", table.params.compaction.klass().getName())
-             .add("compaction_strategy_options", json(table.params.compaction.options()))
-             .add("compression_parameters", json(ThriftConversion.compressionParametersToThrift(table.params.compression)))
-             .add("default_time_to_live", table.params.defaultTimeToLive)
-             .add("gc_grace_seconds", table.params.gcGraceSeconds)
-             .add("key_validator", table.getKeyValidator().toString())
-             .add("local_read_repair_chance", table.params.dcLocalReadRepairChance)
-             .add("max_compaction_threshold", table.params.compaction.maxCompactionThreshold())
-             .add("max_index_interval", table.params.maxIndexInterval)
-             .add("memtable_flush_period_in_ms", table.params.memtableFlushPeriodInMs)
-             .add("min_compaction_threshold", table.params.compaction.minCompactionThreshold())
-             .add("min_index_interval", table.params.minIndexInterval)
-             .add("read_repair_chance", table.params.readRepairChance)
-             .add("speculative_retry", table.params.speculativeRetry.toString());
-
-        Map<String, Long> dropped = new HashMap<>();
-        for (Map.Entry<ByteBuffer, CFMetaData.DroppedColumn> entry : table.getDroppedColumns().entrySet())
-        {
-            String name = UTF8Type.instance.getString(entry.getKey());
-            CFMetaData.DroppedColumn column = entry.getValue();
-            dropped.put(name, column.droppedTime);
-        }
-        adder.add("dropped_columns", dropped);
-
-        adder.add("is_dense", table.isDense());
-
-        adder.add("default_validator", table.makeLegacyDefaultValidator().toString());
-
-        if (withColumnsAndTriggers)
-        {
-            for (ColumnDefinition column : table.allColumns())
-                addColumnToSchemaMutation(table, column, builder);
-
-            for (TriggerMetadata trigger : table.getTriggers())
-                addTriggerToSchemaMutation(table, trigger, builder);
-        }
-    }
-
-    private static String cachingToString(CachingParams caching)
-    {
-        return format("{\"keys\":\"%s\", \"rows_per_partition\":\"%s\"}",
-                      caching.keysAsString(),
-                      caching.rowsPerPartitionAsString());
-    }
-
-    private static void addColumnToSchemaMutation(CFMetaData table, ColumnDefinition column, Mutation.SimpleBuilder builder)
-    {
-        // We need to special case pk-only dense tables. See CASSANDRA-9874.
-        String name = table.isDense() && column.kind == ColumnDefinition.Kind.REGULAR && column.type instanceof EmptyType
-                    ? ""
-                    : column.name.toString();
-
-        final Row.SimpleBuilder adder = builder.update(SystemKeyspace.LegacyColumns).row(table.cfName, name);
-
-        adder.add("validator", column.type.toString())
-             .add("type", serializeKind(column.kind, table.isDense()))
-             .add("component_index", column.position());
-
-        Optional<IndexMetadata> index = findIndexForColumn(table.getIndexes(), table, column);
-        if (index.isPresent())
-        {
-            IndexMetadata i = index.get();
-            adder.add("index_name", i.name);
-            adder.add("index_type", i.kind.toString());
-            adder.add("index_options", json(i.options));
-        }
-        else
-        {
-            adder.add("index_name", null);
-            adder.add("index_type", null);
-            adder.add("index_options", null);
-        }
-    }
-
-    private static Optional<IndexMetadata> findIndexForColumn(Indexes indexes,
-                                                              CFMetaData table,
-                                                              ColumnDefinition column)
-    {
-        // makes the assumptions that the string option denoting the
-        // index targets can be parsed by CassandraIndex.parseTarget
-        // which should be true for any pre-3.0 index
-        for (IndexMetadata index : indexes)
-          if (TargetParser.parse(table, index).left.equals(column))
-                return Optional.of(index);
-
-        return Optional.empty();
-    }
-
-    private static String serializeKind(ColumnDefinition.Kind kind, boolean isDense)
-    {
-        // For backward compatibility, we special case CLUSTERING and the case where the table is dense.
-        if (kind == ColumnDefinition.Kind.CLUSTERING)
-            return "clustering_key";
-
-        if (kind == ColumnDefinition.Kind.REGULAR && isDense)
-            return "compact_value";
-
-        return kind.toString().toLowerCase();
-    }
-
-    private static void addTriggerToSchemaMutation(CFMetaData table, TriggerMetadata trigger, Mutation.SimpleBuilder builder)
-    {
-        builder.update(SystemKeyspace.LegacyTriggers)
-               .row(table.cfName, trigger.name)
-               .add("trigger_options", Collections.singletonMap("class", trigger.classOption));
-    }
-
-    /*
-     * Serializing types
-     */
-
-    private static void addTypeToSchemaMutation(UserType type, Mutation.SimpleBuilder builder)
-    {
-        Row.SimpleBuilder adder = builder.update(SystemKeyspace.LegacyUsertypes)
-                                         .row(type.getNameAsString());
-
-        List<String> names = new ArrayList<>();
-        List<String> types = new ArrayList<>();
-        for (int i = 0; i < type.size(); i++)
-        {
-            names.add(type.fieldName(i).toString());
-            types.add(type.fieldType(i).toString());
-        }
-
-        adder.add("field_names", names)
-             .add("field_types", types);
-    }
-
-    /*
-     * Serializing functions
-     */
-
-    private static void addFunctionToSchemaMutation(UDFunction function, Mutation.SimpleBuilder builder)
-    {
-        Row.SimpleBuilder adder = builder.update(SystemKeyspace.LegacyFunctions)
-                                         .row(function.name().name, functionSignatureWithTypes(function));
-
-        adder.add("body", function.body())
-             .add("language", function.language())
-             .add("return_type", function.returnType().toString())
-             .add("called_on_null_input", function.isCalledOnNullInput());
-
-        List<ByteBuffer> names = new ArrayList<>();
-        List<String> types = new ArrayList<>();
-        for (int i = 0; i < function.argNames().size(); i++)
-        {
-            names.add(function.argNames().get(i).bytes);
-            types.add(function.argTypes().get(i).toString());
-        }
-        adder.add("argument_names", names)
-             .add("argument_types", types);
-    }
-
-    /*
-     * Serializing aggregates
-     */
-
-    private static void addAggregateToSchemaMutation(UDAggregate aggregate, Mutation.SimpleBuilder builder)
-    {
-        Row.SimpleBuilder adder = builder.update(SystemKeyspace.LegacyAggregates)
-                                 .row(aggregate.name().name, functionSignatureWithTypes(aggregate));
-
-        adder.add("return_type", aggregate.returnType().toString())
-             .add("state_func", aggregate.stateFunction().name().name);
-
-        if (aggregate.stateType() != null)
-            adder.add("state_type", aggregate.stateType().toString());
-        if (aggregate.finalFunction() != null)
-            adder.add("final_func", aggregate.finalFunction().name().name);
-        if (aggregate.initialCondition() != null)
-            adder.add("initcond", aggregate.initialCondition());
-
-        List<String> types = new ArrayList<>();
-        for (AbstractType<?> argType : aggregate.argTypes())
-            types.add(argType.toString());
-
-        adder.add("argument_types", types);
-    }
-
-    // We allow method overloads, so a function is not uniquely identified by its name only, but
-    // also by its argument types. To distinguish overloads of given function name in the schema
-    // we use a "signature" which is just a list of it's CQL argument types.
-    public static ByteBuffer functionSignatureWithTypes(AbstractFunction fun)
-    {
-        List<String> arguments =
-            fun.argTypes()
-               .stream()
-               .map(argType -> argType.asCQL3Type().toString())
-               .collect(Collectors.toList());
-
-        return ListType.getInstance(UTF8Type.instance, false).decompose(arguments);
-    }
-
-    private static void setLegacyIndexStatus(KeyspaceMetadata keyspace)
-    {
-        keyspace.tables.forEach(LegacySchemaMigratorTest::setLegacyIndexStatus);
-    }
-
-    private static void setLegacyIndexStatus(CFMetaData table)
-    {
-        table.getIndexes().forEach((index) -> setLegacyIndexStatus(table.ksName, table.cfName, index));
-    }
-
-    private static void setLegacyIndexStatus(String keyspace, String table, IndexMetadata index)
-    {
-        SystemKeyspace.setIndexBuilt(keyspace, table + '.' + index.name);
-    }
-
-    private static void verifyIndexBuildStatus(KeyspaceMetadata keyspace)
-    {
-        keyspace.tables.forEach(LegacySchemaMigratorTest::verifyIndexBuildStatus);
-    }
-
-    private static void verifyIndexBuildStatus(CFMetaData table)
-    {
-        table.getIndexes().forEach(index -> verifyIndexBuildStatus(table.ksName, table.cfName, index));
-    }
-
-    private static void verifyIndexBuildStatus(String keyspace, String table, IndexMetadata index)
-    {
-        assertFalse(SystemKeyspace.isIndexBuilt(keyspace, table + '.' + index.name));
-        assertTrue(SystemKeyspace.isIndexBuilt(keyspace, index.name));
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/cassandra/blob/4a246419/test/unit/org/apache/cassandra/service/SerializationsTest.java
----------------------------------------------------------------------
diff --git a/test/unit/org/apache/cassandra/service/SerializationsTest.java b/test/unit/org/apache/cassandra/service/SerializationsTest.java
index 4df112a..4f3c80e 100644
--- a/test/unit/org/apache/cassandra/service/SerializationsTest.java
+++ b/test/unit/org/apache/cassandra/service/SerializationsTest.java
@@ -61,7 +61,7 @@ public class SerializationsTest extends AbstractSerializationsTester
         partitionerSwitcher = Util.switchPartitioner(RandomPartitioner.instance);
         RANDOM_UUID = UUID.fromString("b5c3d033-75aa-4c2f-a819-947aac7a0c54");
         FULL_RANGE = new Range<>(Util.testPartitioner().getMinimumToken(), Util.testPartitioner().getMinimumToken());
-        DESC = new RepairJobDesc(getVersion() < MessagingService.VERSION_21 ? null : RANDOM_UUID, RANDOM_UUID, "Keyspace1", "Standard1", Arrays.asList(FULL_RANGE));
+        DESC = new RepairJobDesc(RANDOM_UUID, RANDOM_UUID, "Keyspace1", "Standard1", Arrays.asList(FULL_RANGE));
     }
 
     @AfterClass

http://git-wip-us.apache.org/repos/asf/cassandra/blob/4a246419/test/unit/org/apache/cassandra/streaming/compression/CompressedInputStreamTest.java
----------------------------------------------------------------------
diff --git a/test/unit/org/apache/cassandra/streaming/compression/CompressedInputStreamTest.java b/test/unit/org/apache/cassandra/streaming/compression/CompressedInputStreamTest.java
index c0fc277..f3d0b52 100644
--- a/test/unit/org/apache/cassandra/streaming/compression/CompressedInputStreamTest.java
+++ b/test/unit/org/apache/cassandra/streaming/compression/CompressedInputStreamTest.java
@@ -85,8 +85,9 @@ public class CompressedInputStreamTest
         assert valuesToCheck != null && valuesToCheck.length > 0;
 
         // write compressed data file of longs
-        File tmp = new File(File.createTempFile("cassandra", "unittest").getParent(), "ks-cf-ib-1-Data.db");
-        Descriptor desc = Descriptor.fromFilename(tmp.getAbsolutePath());
+        File parentDir = new File(System.getProperty("java.io.tmpdir"));
+        Descriptor desc = new Descriptor(parentDir, "ks", "cf", 1);
+        File tmp = new File(desc.filenameFor(Component.DATA));
         MetadataCollector collector = new MetadataCollector(new ClusteringComparator(BytesType.instance));
         CompressionParams param = CompressionParams.snappy(32);
         Map<Long, Long> index = new HashMap<Long, Long>();

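The hunk above also shows how this test now derives its temporary data file: instead of parsing a hand-assembled "ks-cf-ib-1-Data.db" filename with Descriptor.fromFilename, it builds a Descriptor directly and asks it for the Data component path. A minimal sketch of that pattern, assuming the usual org.apache.cassandra.io.sstable package for Descriptor and Component; the class and method names below are illustrative, and the "ks"/"cf" names and generation 1 are just the placeholders the test uses:

    import java.io.File;

    import org.apache.cassandra.io.sstable.Component;
    import org.apache.cassandra.io.sstable.Descriptor;

    public class DescriptorPathSketch
    {
        // Sketch only: build an sstable data-file path from an explicit Descriptor,
        // the way the updated test does, rather than by parsing a filename.
        static File dataFileFor(File parentDir)
        {
            Descriptor desc = new Descriptor(parentDir, "ks", "cf", 1); // keyspace, table, generation
            return new File(desc.filenameFor(Component.DATA));
        }
    }
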
http://git-wip-us.apache.org/repos/asf/cassandra/blob/4a246419/test/unit/org/apache/cassandra/utils/BitSetTest.java
----------------------------------------------------------------------
diff --git a/test/unit/org/apache/cassandra/utils/BitSetTest.java b/test/unit/org/apache/cassandra/utils/BitSetTest.java
index 0f51531..4dab17e 100644
--- a/test/unit/org/apache/cassandra/utils/BitSetTest.java
+++ b/test/unit/org/apache/cassandra/utils/BitSetTest.java
@@ -44,13 +44,8 @@ public class BitSetTest
     @Test
     public void compareBitSets()
     {
-        compareBitSets(false);
-        compareBitSets(true);
-    }
-    private static void compareBitSets(boolean oldBfHashOrder)
-    {
-        BloomFilter bf2 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false, oldBfHashOrder);
-        BloomFilter bf3 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, true, oldBfHashOrder);
+        BloomFilter bf2 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false);
+        BloomFilter bf3 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, true);
 
         RandomStringGenerator gen1 = new KeyGenerator.RandomStringGenerator(new Random().nextInt(), FilterTestHelper.ELEMENTS);
 

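This hunk is the first of several below that drop the trailing oldBfHashOrder flag from FilterFactory.getFilter, leaving only the expected element count, the target false-positive rate, and the offheap flag. A before/after sketch of the call, based solely on the invocations visible in these diffs (the wrapper class and method names are illustrative):

    import org.apache.cassandra.utils.FilterFactory;
    import org.apache.cassandra.utils.IFilter;

    public class FilterFactorySketch
    {
        static IFilter makeFilter(long numElements, double maxFalsePositiveRate, boolean offheap)
        {
            // Pre-4.0: FilterFactory.getFilter(numElements, maxFalsePositiveRate, offheap, oldBfHashOrder);
            // After this commit the legacy hash-order flag is gone:
            return FilterFactory.getFilter(numElements, maxFalsePositiveRate, offheap);
        }
    }
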
http://git-wip-us.apache.org/repos/asf/cassandra/blob/4a246419/test/unit/org/apache/cassandra/utils/BloomFilterTest.java
----------------------------------------------------------------------
diff --git a/test/unit/org/apache/cassandra/utils/BloomFilterTest.java b/test/unit/org/apache/cassandra/utils/BloomFilterTest.java
index 818af9c..07cbc5a 100644
--- a/test/unit/org/apache/cassandra/utils/BloomFilterTest.java
+++ b/test/unit/org/apache/cassandra/utils/BloomFilterTest.java
@@ -38,7 +38,6 @@ import org.apache.cassandra.utils.KeyGenerator.RandomStringGenerator;
 
 public class BloomFilterTest
 {
-    public IFilter bfOldFormat;
     public IFilter bfInvHashes;
 
     public BloomFilterTest()
@@ -46,14 +45,14 @@ public class BloomFilterTest
 
     }
 
-    public static IFilter testSerialize(IFilter f, boolean oldBfHashOrder) throws IOException
+    public static IFilter testSerialize(IFilter f) throws IOException
     {
         f.add(FilterTestHelper.bytes("a"));
         DataOutputBuffer out = new DataOutputBuffer();
         FilterFactory.serialize(f, out);
 
         ByteArrayInputStream in = new ByteArrayInputStream(out.getData(), 0, out.getLength());
-        IFilter f2 = FilterFactory.deserialize(new DataInputStream(in), true, oldBfHashOrder);
+        IFilter f2 = FilterFactory.deserialize(new DataInputStream(in), true);
 
         assert f2.isPresent(FilterTestHelper.bytes("a"));
         assert !f2.isPresent(FilterTestHelper.bytes("b"));
@@ -64,14 +63,12 @@ public class BloomFilterTest
     @Before
     public void setup()
     {
-        bfOldFormat = FilterFactory.getFilter(10000L, FilterTestHelper.MAX_FAILURE_RATE, true, true);
-        bfInvHashes = FilterFactory.getFilter(10000L, FilterTestHelper.MAX_FAILURE_RATE, true, false);
+        bfInvHashes = FilterFactory.getFilter(10000L, FilterTestHelper.MAX_FAILURE_RATE, true);
     }
 
     @After
     public void destroy()
     {
-        bfOldFormat.close();
         bfInvHashes.close();
     }
 
@@ -91,10 +88,6 @@ public class BloomFilterTest
     @Test
     public void testOne()
     {
-        bfOldFormat.add(FilterTestHelper.bytes("a"));
-        assert bfOldFormat.isPresent(FilterTestHelper.bytes("a"));
-        assert !bfOldFormat.isPresent(FilterTestHelper.bytes("b"));
-
         bfInvHashes.add(FilterTestHelper.bytes("a"));
         assert bfInvHashes.isPresent(FilterTestHelper.bytes("a"));
         assert !bfInvHashes.isPresent(FilterTestHelper.bytes("b"));
@@ -103,16 +96,12 @@ public class BloomFilterTest
     @Test
     public void testFalsePositivesInt()
     {
-        FilterTestHelper.testFalsePositives(bfOldFormat, FilterTestHelper.intKeys(), FilterTestHelper.randomKeys2());
-
         FilterTestHelper.testFalsePositives(bfInvHashes, FilterTestHelper.intKeys(), FilterTestHelper.randomKeys2());
     }
 
     @Test
     public void testFalsePositivesRandom()
     {
-        FilterTestHelper.testFalsePositives(bfOldFormat, FilterTestHelper.randomKeys(), FilterTestHelper.randomKeys2());
-
         FilterTestHelper.testFalsePositives(bfInvHashes, FilterTestHelper.randomKeys(), FilterTestHelper.randomKeys2());
     }
 
@@ -123,39 +112,28 @@ public class BloomFilterTest
         {
             return;
         }
-        IFilter bf2 = FilterFactory.getFilter(KeyGenerator.WordGenerator.WORDS / 2, FilterTestHelper.MAX_FAILURE_RATE, true, false);
+        IFilter bf2 = FilterFactory.getFilter(KeyGenerator.WordGenerator.WORDS / 2, FilterTestHelper.MAX_FAILURE_RATE, true);
         int skipEven = KeyGenerator.WordGenerator.WORDS % 2 == 0 ? 0 : 2;
         FilterTestHelper.testFalsePositives(bf2,
                                             new KeyGenerator.WordGenerator(skipEven, 2),
                                             new KeyGenerator.WordGenerator(1, 2));
         bf2.close();
-
-        // new, swapped hash values bloom filter
-        bf2 = FilterFactory.getFilter(KeyGenerator.WordGenerator.WORDS / 2, FilterTestHelper.MAX_FAILURE_RATE, true, true);
-        FilterTestHelper.testFalsePositives(bf2,
-                                            new KeyGenerator.WordGenerator(skipEven, 2),
-                                            new KeyGenerator.WordGenerator(1, 2));
-        bf2.close();
     }
 
     @Test
     public void testSerialize() throws IOException
     {
-        BloomFilterTest.testSerialize(bfOldFormat, true).close();
-
-        BloomFilterTest.testSerialize(bfInvHashes, false).close();
+        BloomFilterTest.testSerialize(bfInvHashes).close();
     }
 
     @Test
     @Ignore
     public void testManyRandom()
     {
-        testManyRandom(FilterTestHelper.randomKeys(), false);
-
-        testManyRandom(FilterTestHelper.randomKeys(), true);
+        testManyRandom(FilterTestHelper.randomKeys());
     }
 
-    private static void testManyRandom(Iterator<ByteBuffer> keys, boolean oldBfHashOrder)
+    private static void testManyRandom(Iterator<ByteBuffer> keys)
     {
         int MAX_HASH_COUNT = 128;
         Set<Long> hashes = new HashSet<>();
@@ -164,7 +142,7 @@ public class BloomFilterTest
         {
             hashes.clear();
             FilterKey buf = FilterTestHelper.wrap(keys.next());
-            BloomFilter bf = (BloomFilter) FilterFactory.getFilter(10, 1, false, oldBfHashOrder);
+            BloomFilter bf = (BloomFilter) FilterFactory.getFilter(10, 1, false);
             for (long hashIndex : bf.getHashBuckets(buf, MAX_HASH_COUNT, 1024 * 1024))
             {
                 hashes.add(hashIndex);
@@ -179,41 +157,15 @@ public class BloomFilterTest
     public void testOffHeapException()
     {
         long numKeys = ((long)Integer.MAX_VALUE) * 64L + 1L; // approx 128 Billion
-        FilterFactory.getFilter(numKeys, 0.01d, true, true).close();
+        FilterFactory.getFilter(numKeys, 0.01d, true).close();
     }
 
     @Test
-    public void compareCachedKeyOldHashOrder()
+    public void compareCachedKey()
     {
-        BloomFilter bf1 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false, true);
-        BloomFilter bf2 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false, true);
-        BloomFilter bf3 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false, true);
-
-        RandomStringGenerator gen1 = new KeyGenerator.RandomStringGenerator(new Random().nextInt(), FilterTestHelper.ELEMENTS);
-
-        // make sure all bitsets are empty.
-        BitSetTest.compare(bf1.bitset, bf2.bitset);
-        BitSetTest.compare(bf1.bitset, bf3.bitset);
-
-        while (gen1.hasNext())
-        {
-            ByteBuffer key = gen1.next();
-            FilterKey cached = FilterTestHelper.wrapCached(key);
-            bf1.add(FilterTestHelper.wrap(key));
-            bf2.add(cached);
-            bf3.add(cached);
-        }
-
-        BitSetTest.compare(bf1.bitset, bf2.bitset);
-        BitSetTest.compare(bf1.bitset, bf3.bitset);
-    }
-
-    @Test
-    public void compareCachedKeyNewHashOrder()
-    {
-        try (BloomFilter bf1 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false, false);
-             BloomFilter bf2 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false, false);
-             BloomFilter bf3 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false, false))
+        try (BloomFilter bf1 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false);
+             BloomFilter bf2 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false);
+             BloomFilter bf3 = (BloomFilter) FilterFactory.getFilter(FilterTestHelper.ELEMENTS / 2, FilterTestHelper.MAX_FAILURE_RATE, false))
         {
             RandomStringGenerator gen1 = new KeyGenerator.RandomStringGenerator(new Random().nextInt(), FilterTestHelper.ELEMENTS);
 
@@ -239,16 +191,10 @@ public class BloomFilterTest
     @Ignore
     public void testHugeBFSerialization() throws IOException
     {
-        hugeBFSerialization(false);
-        hugeBFSerialization(true);
-    }
-
-    static void hugeBFSerialization(boolean oldBfHashOrder) throws IOException
-    {
         ByteBuffer test = ByteBuffer.wrap(new byte[] {0, 1});
 
         File file = FileUtils.createTempFile("bloomFilterTest-", ".dat");
-        BloomFilter filter = (BloomFilter) FilterFactory.getFilter(((long) Integer.MAX_VALUE / 8) + 1, 0.01d, true, oldBfHashOrder);
+        BloomFilter filter = (BloomFilter) FilterFactory.getFilter(((long) Integer.MAX_VALUE / 8) + 1, 0.01d, true);
         filter.add(FilterTestHelper.wrap(test));
         DataOutputStreamPlus out = new BufferedDataOutputStreamPlus(new FileOutputStream(file));
         FilterFactory.serialize(filter, out);
@@ -257,7 +203,7 @@ public class BloomFilterTest
         filter.close();
 
         DataInputStream in = new DataInputStream(new FileInputStream(file));
-        BloomFilter filter2 = (BloomFilter) FilterFactory.deserialize(in, true, oldBfHashOrder);
+        BloomFilter filter2 = (BloomFilter) FilterFactory.deserialize(in, true);
         Assert.assertTrue(filter2.isPresent(FilterTestHelper.wrap(test)));
         FileUtils.closeQuietly(in);
         filter2.close();

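(A note on the BloomFilterTest hunks above: every FilterFactory.getFilter call drops its trailing oldBfHashOrder flag, leaving the three-argument (numElements, maxFalsePositiveProbability, offheap) form. Below is a minimal, hypothetical sketch of that post-change call pattern, built only from calls visible in this diff; the class name, key bytes and sizing constants are illustrative, and it assumes FilterTestHelper lives in the same org.apache.cassandra.utils test package as BloomFilterTest.)

    import java.nio.ByteBuffer;

    import org.apache.cassandra.utils.FilterFactory;
    import org.apache.cassandra.utils.FilterTestHelper;
    import org.apache.cassandra.utils.IFilter;

    public class GetFilterSketch
    {
        public static void main(String[] args)
        {
            // Post-commit 3-arg form: expected key count, target false-positive rate, offheap flag.
            // The 1000 / 0.01d sizing is illustrative, not taken from the commit.
            try (IFilter bf = FilterFactory.getFilter(1000, 0.01d, false))
            {
                ByteBuffer key = ByteBuffer.wrap(new byte[]{ 0, 1 });
                bf.add(FilterTestHelper.wrap(key));                       // same wrap() helper used in the hunks above
                System.out.println(bf.isPresent(FilterTestHelper.wrap(key))); // expected: true
            }
        }
    }
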
http://git-wip-us.apache.org/repos/asf/cassandra/blob/4a246419/test/unit/org/apache/cassandra/utils/SerializationsTest.java
----------------------------------------------------------------------
diff --git a/test/unit/org/apache/cassandra/utils/SerializationsTest.java b/test/unit/org/apache/cassandra/utils/SerializationsTest.java
index ac5a6a7..7973964 100644
--- a/test/unit/org/apache/cassandra/utils/SerializationsTest.java
+++ b/test/unit/org/apache/cassandra/utils/SerializationsTest.java
@@ -46,27 +46,27 @@ public class SerializationsTest extends AbstractSerializationsTester
         DatabaseDescriptor.daemonInitialization();
     }
 
-    private static void testBloomFilterWrite(boolean offheap, boolean oldBfHashOrder) throws IOException
+    private static void testBloomFilterWrite(boolean offheap) throws IOException
     {
         IPartitioner partitioner = Util.testPartitioner();
-        try (IFilter bf = FilterFactory.getFilter(1000000, 0.0001, offheap, oldBfHashOrder))
+        try (IFilter bf = FilterFactory.getFilter(1000000, 0.0001, offheap))
         {
             for (int i = 0; i < 100; i++)
                 bf.add(partitioner.decorateKey(partitioner.getTokenFactory().toByteArray(partitioner.getRandomToken())));
-            try (DataOutputStreamPlus out = getOutput(oldBfHashOrder ? "2.1" : "3.0", "utils.BloomFilter.bin"))
+            try (DataOutputStreamPlus out = getOutput("3.0", "utils.BloomFilter.bin"))
             {
                 FilterFactory.serialize(bf, out);
             }
         }
     }
 
-    private static void testBloomFilterWrite1000(boolean offheap, boolean oldBfHashOrder) throws IOException
+    private static void testBloomFilterWrite1000(boolean offheap) throws IOException
     {
-        try (IFilter bf = FilterFactory.getFilter(1000000, 0.0001, offheap, oldBfHashOrder))
+        try (IFilter bf = FilterFactory.getFilter(1000000, 0.0001, offheap))
         {
             for (int i = 0; i < 1000; i++)
                 bf.add(Util.dk(Int32Type.instance.decompose(i)));
-            try (DataOutputStreamPlus out = getOutput(oldBfHashOrder ? "2.1" : "3.0", "utils.BloomFilter1000.bin"))
+            try (DataOutputStreamPlus out = getOutput("3.0", "utils.BloomFilter1000.bin"))
             {
                 FilterFactory.serialize(bf, out);
             }
@@ -77,29 +77,10 @@ public class SerializationsTest extends AbstractSerializationsTester
     public void testBloomFilterRead1000() throws IOException
     {
         if (EXECUTE_WRITES)
-        {
-            testBloomFilterWrite1000(true, false);
-            testBloomFilterWrite1000(true, true);
-        }
+            testBloomFilterWrite1000(true);
 
         try (DataInputStream in = getInput("3.0", "utils.BloomFilter1000.bin");
-             IFilter filter = FilterFactory.deserialize(in, true, false))
-        {
-            boolean present;
-            for (int i = 0 ; i < 1000 ; i++)
-            {
-                present = filter.isPresent(Util.dk(Int32Type.instance.decompose(i)));
-                Assert.assertTrue(present);
-            }
-            for (int i = 1000 ; i < 2000 ; i++)
-            {
-                present = filter.isPresent(Util.dk(Int32Type.instance.decompose(i)));
-                Assert.assertFalse(present);
-            }
-        }
-
-        try (DataInputStream in = getInput("2.1", "utils.BloomFilter1000.bin");
-             IFilter filter = FilterFactory.deserialize(in, true, true))
+             IFilter filter = FilterFactory.deserialize(in, true))
         {
             boolean present;
             for (int i = 0 ; i < 1000 ; i++)
@@ -113,44 +94,20 @@ public class SerializationsTest extends AbstractSerializationsTester
                 Assert.assertFalse(present);
             }
         }
-
-        // eh - reading version 'ka' (2.1) with 3.0 BloomFilter
-        int falsePositive = 0;
-        int falseNegative = 0;
-        try (DataInputStream in = getInput("2.1", "utils.BloomFilter1000.bin");
-             IFilter filter = FilterFactory.deserialize(in, true, false))
-        {
-            boolean present;
-            for (int i = 0 ; i < 1000 ; i++)
-            {
-                present = filter.isPresent(Util.dk(Int32Type.instance.decompose(i)));
-                if (!present)
-                    falseNegative ++;
-            }
-            for (int i = 1000 ; i < 2000 ; i++)
-            {
-                present = filter.isPresent(Util.dk(Int32Type.instance.decompose(i)));
-                if (present)
-                    falsePositive ++;
-            }
-        }
-        Assert.assertEquals(1000, falseNegative);
-        Assert.assertEquals(0, falsePositive);
     }
 
     @Test
     public void testBloomFilterTable() throws Exception
     {
-        testBloomFilterTable("test/data/bloom-filter/ka/foo/foo-atable-ka-1-Filter.db", true);
-        testBloomFilterTable("test/data/bloom-filter/la/foo/la-1-big-Filter.db", false);
+        testBloomFilterTable("test/data/bloom-filter/la/foo/la-1-big-Filter.db");
     }
 
-    private static void testBloomFilterTable(String file, boolean oldBfHashOrder) throws Exception
+    private static void testBloomFilterTable(String file) throws Exception
     {
         Murmur3Partitioner partitioner = new Murmur3Partitioner();
 
         try (DataInputStream in = new DataInputStream(new FileInputStream(new File(file)));
-             IFilter filter = FilterFactory.deserialize(in, true, oldBfHashOrder))
+             IFilter filter = FilterFactory.deserialize(in, true))
         {
             for (int i = 1; i <= 10; i++)
             {
@@ -173,31 +130,6 @@ public class SerializationsTest extends AbstractSerializationsTester
         }
     }
 
-    @Test
-    public void testBloomFilterReadMURMUR3() throws IOException
-    {
-        if (EXECUTE_WRITES)
-            testBloomFilterWrite(true, true);
-
-        try (DataInputStream in = getInput("3.0", "utils.BloomFilter.bin");
-             IFilter filter = FilterFactory.deserialize(in, true, true))
-        {
-            Assert.assertNotNull(filter);
-        }
-    }
-
-    @Test
-    public void testBloomFilterReadMURMUR3pre30() throws IOException
-    {
-        if (EXECUTE_WRITES)
-            testBloomFilterWrite(true, false);
-
-        try (DataInputStream in = getInput("2.1", "utils.BloomFilter.bin");
-             IFilter filter = FilterFactory.deserialize(in, true, false))
-        {
-            Assert.assertNotNull(filter);
-        }
-    }
 
     private static void testEstimatedHistogramWrite() throws IOException
     {


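(And on the SerializationsTest side: FilterFactory.deserialize now takes only (in, offheap), and the "2.1" write/read variants are gone. Below is a hypothetical round-trip sketch stitched together from the calls shown in this diff; the class name, temp-file prefix, key bytes and sizing are illustrative, and the daemonInitialization() call mirrors the static setup at the top of the SerializationsTest hunk.)

    import java.io.DataInputStream;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;

    import org.apache.cassandra.config.DatabaseDescriptor;
    import org.apache.cassandra.io.util.BufferedDataOutputStreamPlus;
    import org.apache.cassandra.io.util.DataOutputStreamPlus;
    import org.apache.cassandra.io.util.FileUtils;
    import org.apache.cassandra.utils.FilterFactory;
    import org.apache.cassandra.utils.FilterTestHelper;
    import org.apache.cassandra.utils.IFilter;

    public class BloomFilterRoundTripSketch
    {
        public static void main(String[] args) throws IOException
        {
            DatabaseDescriptor.daemonInitialization();   // same setup SerializationsTest performs statically

            File file = FileUtils.createTempFile("bfRoundTrip-", ".dat");
            ByteBuffer key = ByteBuffer.wrap(new byte[]{ 0, 1 });

            // Write: build an off-heap filter and serialize it, as the testBloomFilterWrite helpers now do.
            try (IFilter bf = FilterFactory.getFilter(1000, 0.01d, true);
                 DataOutputStreamPlus out = new BufferedDataOutputStreamPlus(new FileOutputStream(file)))
            {
                bf.add(FilterTestHelper.wrap(key));
                FilterFactory.serialize(bf, out);
            }

            // Read back with the 2-arg deserialize(in, offheap) used throughout the updated tests.
            try (DataInputStream in = new DataInputStream(new FileInputStream(file));
                 IFilter bf2 = FilterFactory.deserialize(in, true))
            {
                if (!bf2.isPresent(FilterTestHelper.wrap(key)))
                    throw new AssertionError("round-tripped filter lost the key");
            }
        }
    }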