Return-Path: X-Original-To: apmail-cassandra-commits-archive@www.apache.org Delivered-To: apmail-cassandra-commits-archive@www.apache.org Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by minotaur.apache.org (Postfix) with SMTP id D7054DF29 for ; Thu, 12 Jul 2012 15:40:41 +0000 (UTC) Received: (qmail 54180 invoked by uid 500); 12 Jul 2012 15:40:41 -0000 Delivered-To: apmail-cassandra-commits-archive@cassandra.apache.org Received: (qmail 54157 invoked by uid 500); 12 Jul 2012 15:40:41 -0000 Mailing-List: contact commits-help@cassandra.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: dev@cassandra.apache.org Delivered-To: mailing list commits@cassandra.apache.org Received: (qmail 54149 invoked by uid 99); 12 Jul 2012 15:40:41 -0000 Received: from tyr.zones.apache.org (HELO tyr.zones.apache.org) (140.211.11.114) by apache.org (qpsmtpd/0.29) with ESMTP; Thu, 12 Jul 2012 15:40:41 +0000 Received: by tyr.zones.apache.org (Postfix, from userid 65534) id 1488113587; Thu, 12 Jul 2012 15:40:41 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: yukim@apache.org To: commits@cassandra.apache.org X-Mailer: ASF-Git Admin Mailer Subject: git commit: new json format with row level deletion; patch by David Alves, reviewed by yukim for CASSANDRA-4054 Message-Id: <20120712154041.1488113587@tyr.zones.apache.org> Date: Thu, 12 Jul 2012 15:40:41 +0000 (UTC) Updated Branches: refs/heads/trunk bd9ab5923 -> d569f873d new json format with row level deletion; patch by David Alves, reviewed by yukim for CASSANDRA-4054 Project: http://git-wip-us.apache.org/repos/asf/cassandra/repo Commit: http://git-wip-us.apache.org/repos/asf/cassandra/commit/d569f873 Tree: http://git-wip-us.apache.org/repos/asf/cassandra/tree/d569f873 Diff: http://git-wip-us.apache.org/repos/asf/cassandra/diff/d569f873 Branch: refs/heads/trunk Commit: d569f873de40f0336a9a34e260c1942866e48950 
Parents: bd9ab59 Author: Yuki Morishita Authored: Thu Jul 12 10:40:24 2012 -0500 Committer: Yuki Morishita Committed: Thu Jul 12 10:40:24 2012 -0500 ---------------------------------------------------------------------- CHANGES.txt | 1 + .../org/apache/cassandra/tools/SSTableExport.java | 120 ++++++-- .../org/apache/cassandra/tools/SSTableImport.java | 245 +++++++++------ test/resources/CounterCF.json | 6 +- test/resources/SimpleCF.json | 8 +- test/resources/SimpleCF.oldformat.json | 8 +- test/resources/SimpleCFWithDeletionInfo.json | 4 + test/resources/SuperCF.json | 8 +- test/resources/UnsortedCF.json | 4 + test/resources/UnsortedSuperCF.json | 10 +- .../apache/cassandra/tools/SSTableExportTest.java | 172 ++++++++--- .../apache/cassandra/tools/SSTableImportTest.java | 85 ++++-- 12 files changed, 467 insertions(+), 204 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/CHANGES.txt ---------------------------------------------------------------------- diff --git a/CHANGES.txt b/CHANGES.txt index 60ee225..9a58ed6 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,4 +1,5 @@ 1.2-dev + * Introduce new json format with row level deletion (CASSANDRA-4054) * remove redundant "name" column from schema_keyspaces (CASSANDRA-4433) * improve "nodetool ring" handling of multi-dc clusters (CASSANDRA-3047) * update NTS calculateNaturalEndpoints to be O(N log N) (CASSANDRA-3881) http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/src/java/org/apache/cassandra/tools/SSTableExport.java ---------------------------------------------------------------------- diff --git a/src/java/org/apache/cassandra/tools/SSTableExport.java b/src/java/org/apache/cassandra/tools/SSTableExport.java index 861f8ab..7017232 100644 --- a/src/java/org/apache/cassandra/tools/SSTableExport.java +++ b/src/java/org/apache/cassandra/tools/SSTableExport.java @@ -17,30 +17,55 @@ */ package 
org.apache.cassandra.tools; +import static org.apache.cassandra.utils.ByteBufferUtil.bytesToHex; +import static org.apache.cassandra.utils.ByteBufferUtil.hexToBytes; + import java.io.File; import java.io.IOException; import java.io.PrintStream; import java.nio.ByteBuffer; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.apache.commons.cli.PosixParser; import org.apache.cassandra.config.CFMetaData; +import org.apache.cassandra.config.ConfigurationException; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.config.Schema; -import org.apache.cassandra.db.*; +import org.apache.cassandra.db.AbstractColumnContainer; +import org.apache.cassandra.db.ColumnFamily; +import org.apache.cassandra.db.CounterColumn; +import org.apache.cassandra.db.DecoratedKey; +import org.apache.cassandra.db.DeletedColumn; +import org.apache.cassandra.db.DeletionInfo; +import org.apache.cassandra.db.DeletionTime; +import org.apache.cassandra.db.ExpiringColumn; +import org.apache.cassandra.db.IColumn; +import org.apache.cassandra.db.OnDiskAtom; +import org.apache.cassandra.db.RangeTombstone; +import org.apache.cassandra.db.SuperColumn; import org.apache.cassandra.db.marshal.AbstractType; - -import org.apache.commons.cli.*; - -import org.apache.cassandra.config.ConfigurationException; import org.apache.cassandra.dht.IPartitioner; -import org.apache.cassandra.io.sstable.*; +import org.apache.cassandra.io.sstable.Descriptor; +import org.apache.cassandra.io.sstable.KeyIterator; +import org.apache.cassandra.io.sstable.SSTableIdentityIterator; +import 
org.apache.cassandra.io.sstable.SSTableReader; +import org.apache.cassandra.io.sstable.SSTableScanner; import org.apache.cassandra.utils.ByteBufferUtil; import org.codehaus.jackson.JsonGenerator; import org.codehaus.jackson.map.ObjectMapper; -import static org.apache.cassandra.utils.ByteBufferUtil.bytesToHex; -import static org.apache.cassandra.utils.ByteBufferUtil.hexToBytes; - /** * Export SSTables to JSON format. */ @@ -87,6 +112,56 @@ public class SSTableExport } /** + * JSON ColumnFamily metadata serializer.
Serializes: + *
    + *
  • column family deletion info (if present)
  • + *
+ * + * @param out + * The output steam to write data + * @param columnFamily + * to which the metadata belongs + */ + private static void writeMeta(PrintStream out, AbstractColumnContainer columnContainer) + { + if (columnContainer instanceof ColumnFamily) + { + ColumnFamily columnFamily = (ColumnFamily) columnContainer; + if (!columnFamily.deletionInfo().equals(DeletionInfo.LIVE)) + { + // begin meta + writeKey(out, "metadata"); + writeDeletionInfo(out, columnFamily.deletionInfo().getTopLevelDeletion()); + out.print(","); + } + return; + } + + if (columnContainer instanceof SuperColumn) + { + SuperColumn superColumn = (SuperColumn) columnContainer; + DeletionInfo deletionInfo = new DeletionInfo(superColumn.getMarkedForDeleteAt(), + superColumn.getLocalDeletionTime()); + if (!deletionInfo.equals(DeletionInfo.LIVE)) + { + writeKey(out, "metadata"); + writeDeletionInfo(out, deletionInfo.getTopLevelDeletion()); + out.print(","); + } + return; + } + } + + private static void writeDeletionInfo(PrintStream out, DeletionTime deletionTime) + { + out.print("{"); + writeKey(out, "deletionInfo"); + // only store topLevelDeletion (serializeForSSTable only uses this) + writeJSON(out, deletionTime); + out.print("}"); + } + + /** * Serialize columns using given column iterator * * @param columns column iterator @@ -196,21 +271,26 @@ public class SSTableExport CFMetaData cfMetaData = columnFamily.metadata(); AbstractType comparator = columnFamily.getComparator(); - writeKey(out, bytesToHex(key.key)); + out.print("{"); + writeKey(out, "key"); + writeJSON(out, bytesToHex(key.key)); + out.print(","); + + writeMeta(out, columnFamily); + + writeKey(out, "columns"); out.print(isSuperCF ? 
"{" : "["); if (isSuperCF) { while (row.hasNext()) { - OnDiskAtom scol = row.next(); + SuperColumn scol = (SuperColumn)row.next(); assert scol instanceof IColumn; IColumn column = (IColumn)scol; writeKey(out, comparator.getString(column.name())); out.print("{"); - writeKey(out, "deletedAt"); - out.print(column.getMarkedForDeleteAt()); - out.print(", "); + writeMeta(out, scol); writeKey(out, "subColumns"); out.print("["); serializeIColumns(column.getSubColumns().iterator(), out, columnFamily.getSubComparator(), cfMetaData); @@ -227,7 +307,7 @@ public class SSTableExport } out.print(isSuperCF ? "}" : "]"); - + out.print("}"); } /** @@ -276,7 +356,7 @@ public class SSTableExport if (excludes != null) toExport.removeAll(Arrays.asList(excludes)); - outs.println("{"); + outs.println("["); int i = 0; @@ -309,7 +389,7 @@ public class SSTableExport i++; } - outs.println("\n}"); + outs.println("\n]"); outs.flush(); scanner.close(); @@ -328,7 +408,7 @@ public class SSTableExport SSTableIdentityIterator row; SSTableScanner scanner = reader.getDirectScanner(); - outs.println("{"); + outs.println("["); int i = 0; @@ -349,7 +429,7 @@ public class SSTableExport i++; } - outs.println("\n}"); + outs.println("\n]"); outs.flush(); scanner.close(); http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/src/java/org/apache/cassandra/tools/SSTableImport.java ---------------------------------------------------------------------- diff --git a/src/java/org/apache/cassandra/tools/SSTableImport.java b/src/java/org/apache/cassandra/tools/SSTableImport.java index bae198e..7549689 100644 --- a/src/java/org/apache/cassandra/tools/SSTableImport.java +++ b/src/java/org/apache/cassandra/tools/SSTableImport.java @@ -17,33 +17,50 @@ */ package org.apache.cassandra.tools; +import static org.apache.cassandra.utils.ByteBufferUtil.hexToBytes; + import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; -import java.util.*; +import java.util.List; +import java.util.Map; +import 
java.util.SortedMap; +import java.util.TreeMap; -import org.apache.cassandra.config.Schema; -import org.apache.cassandra.db.marshal.AbstractType; -import org.apache.cassandra.db.marshal.BytesType; -import org.apache.cassandra.db.marshal.MarshalException; -import org.apache.cassandra.utils.ByteBufferUtil; -import org.apache.commons.cli.*; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.apache.commons.cli.PosixParser; + +import org.apache.pig.parser.AliasMasker.cond_return; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.config.ConfigurationException; import org.apache.cassandra.config.DatabaseDescriptor; -import org.apache.cassandra.db.*; +import org.apache.cassandra.config.Schema; +import org.apache.cassandra.db.AbstractColumnContainer; +import org.apache.cassandra.db.ColumnFamily; +import org.apache.cassandra.db.ColumnFamilyType; +import org.apache.cassandra.db.CounterColumn; +import org.apache.cassandra.db.DecoratedKey; +import org.apache.cassandra.db.DeletionInfo; +import org.apache.cassandra.db.ExpiringColumn; +import org.apache.cassandra.db.RangeTombstone; +import org.apache.cassandra.db.SuperColumn; import org.apache.cassandra.db.filter.QueryPath; +import org.apache.cassandra.db.marshal.AbstractType; +import org.apache.cassandra.db.marshal.BytesType; +import org.apache.cassandra.db.marshal.MarshalException; import org.apache.cassandra.dht.IPartitioner; import org.apache.cassandra.io.sstable.SSTableWriter; -import org.codehaus.jackson.type.TypeReference; - +import org.apache.cassandra.utils.ByteBufferUtil; import org.codehaus.jackson.JsonFactory; -import org.codehaus.jackson.map.MappingJsonFactory; - import org.codehaus.jackson.JsonParser; - -import static org.apache.cassandra.utils.ByteBufferUtil.hexToBytes; +import org.codehaus.jackson.JsonToken; 
+import org.codehaus.jackson.map.MappingJsonFactory; +import org.codehaus.jackson.type.TypeReference; /** * Create SSTables from JSON input @@ -58,10 +75,11 @@ public class SSTableImport private static final Options options = new Options(); private static CommandLine cmd; - private static Integer keyCountToImport = null; - private static boolean isSorted = false; + private Integer keyCountToImport; + private final boolean isSorted; - private static final JsonFactory factory = new MappingJsonFactory().configure(JsonParser.Feature.INTERN_FIELD_NAMES, false); + private static final JsonFactory factory = new MappingJsonFactory().configure( + JsonParser.Feature.INTERN_FIELD_NAMES, false); static { @@ -175,7 +193,23 @@ public class SSTableImport } } - private static void addToStandardCF(List row, ColumnFamily cfamily) + public SSTableImport() + { + this(null, false); + } + + public SSTableImport(boolean isSorted) + { + this(null, isSorted); + } + + public SSTableImport(Integer keyCountToImport, boolean isSorted) + { + this.keyCountToImport = keyCountToImport; + this.isSorted = isSorted; + } + + private void addToStandardCF(List row, ColumnFamily cfamily) { addColumnsToCF(row, null, cfamily); } @@ -187,7 +221,7 @@ public class SSTableImport * @param superName name of the super column if any * @param cfamily the column family to add columns to */ - private static void addColumnsToCF(List row, ByteBuffer superName, ColumnFamily cfamily) + private void addColumnsToCF(List row, ByteBuffer superName, ColumnFamily cfamily) { CFMetaData cfm = cfamily.metadata(); assert cfm != null; @@ -220,13 +254,27 @@ public class SSTableImport } } + private void parseMeta(Map map, AbstractColumnContainer columnContainer) + { + + // deletionInfo is the only metadata we store for now + if (map.containsKey("deletionInfo")) + { + Map unparsedDeletionInfo = (Map) map.get("deletionInfo"); + Number number = (Number) unparsedDeletionInfo.get("markedForDeleteAt"); + long markedForDeleteAt = number 
instanceof Long ? (Long) number : ((Integer) number).longValue(); + int localDeletionTime = (Integer) unparsedDeletionInfo.get("localDeletionTime"); + columnContainer.setDeletionInfo(new DeletionInfo(markedForDeleteAt, localDeletionTime)); + } + } + /** * Add super columns to a column family. * * @param row the super columns associated with a row * @param cfamily the column family to add columns to */ - private static void addToSuperCF(Map row, ColumnFamily cfamily) + private void addToSuperCF(Map row, ColumnFamily cfamily) { CFMetaData metaData = cfamily.metadata(); assert metaData != null; @@ -238,12 +286,14 @@ public class SSTableImport { Map data = (Map) entry.getValue(); - addColumnsToCF((List) data.get("subColumns"), stringAsType((String) entry.getKey(), comparator), cfamily); + ByteBuffer superName = stringAsType((String) entry.getKey(), comparator); + + addColumnsToCF((List) data.get("subColumns"), superName, cfamily); - // *WARNING* markForDeleteAt has been DEPRECATED at Cassandra side - //BigInteger deletedAt = (BigInteger) data.get("deletedAt"); - //SuperColumn superColumn = (SuperColumn) cfamily.getColumn(superName); - //superColumn.markForDeleteAt((int) (System.currentTimeMillis()/1000), deletedAt); + if (data.containsKey("metadata")) + { + parseMeta((Map) data.get("metadata"), (SuperColumn) cfamily.getColumn(superName)); + } } } @@ -257,52 +307,63 @@ public class SSTableImport * * @throws IOException for errors reading/writing input/output */ - public static void importJson(String jsonFile, String keyspace, String cf, String ssTablePath) throws IOException + public int importJson(String jsonFile, String keyspace, String cf, String ssTablePath) throws IOException { ColumnFamily columnFamily = ColumnFamily.create(keyspace, cf); IPartitioner partitioner = DatabaseDescriptor.getPartitioner(); int importedKeys = (isSorted) ? 
importSorted(jsonFile, columnFamily, ssTablePath, partitioner) - : importUnsorted(getParser(jsonFile), columnFamily, ssTablePath, partitioner); + : importUnsorted(jsonFile, columnFamily, ssTablePath, partitioner); if (importedKeys != -1) System.out.printf("%d keys imported successfully.%n", importedKeys); + + return importedKeys; } - private static int importUnsorted(JsonParser parser, ColumnFamily columnFamily, String ssTablePath, IPartitioner partitioner) throws IOException + private int importUnsorted(String jsonFile, ColumnFamily columnFamily, String ssTablePath, IPartitioner partitioner) throws IOException { int importedKeys = 0; long start = System.currentTimeMillis(); - Map data = parser.readValueAs(new TypeReference>() {}); - keyCountToImport = (keyCountToImport == null) ? data.size() : keyCountToImport; + JsonParser parser = getParser(jsonFile); + + Object[] data = parser.readValueAs(new TypeReference(){}); + + keyCountToImport = (keyCountToImport == null) ? data.length : keyCountToImport; SSTableWriter writer = new SSTableWriter(ssTablePath, keyCountToImport); System.out.printf("Importing %s keys...%n", keyCountToImport); // sort by dk representation, but hold onto the hex version - SortedMap decoratedKeys = new TreeMap(); + SortedMap> decoratedKeys = new TreeMap>(); - for (Object keyObject : data.keySet()) + for (Object row : data) { - String key = (String) keyObject; - decoratedKeys.put(partitioner.decorateKey(hexToBytes(key)), key); + Map rowAsMap = (Map)row; + decoratedKeys.put(partitioner.decorateKey(hexToBytes((String)rowAsMap.get("key"))), rowAsMap); } - for (Map.Entry rowKey : decoratedKeys.entrySet()) + for (Map.Entry> row : decoratedKeys.entrySet()) { - if (columnFamily.getType() == ColumnFamilyType.Super) + if (row.getValue().containsKey("metadata")) { - addToSuperCF((Map) data.get(rowKey.getValue()), columnFamily); + parseMeta((Map) row.getValue().get("metadata"), columnFamily); } + + Object columns = row.getValue().get("columns"); + if 
(columnFamily.getType() == ColumnFamilyType.Super) + addToSuperCF((Map) columns, columnFamily); else - { - addToStandardCF((List) data.get(rowKey.getValue()), columnFamily); - } + addToStandardCF((List) columns, columnFamily); + - writer.append(rowKey.getKey(), columnFamily); + writer.append(row.getKey(), columnFamily); columnFamily.clear(); + // ready the column family for the next row since we might have read deletionInfo metadata + columnFamily.delete(DeletionInfo.LIVE); + importedKeys++; long current = System.currentTimeMillis(); @@ -322,7 +383,8 @@ public class SSTableImport return importedKeys; } - public static int importSorted(String jsonFile, ColumnFamily columnFamily, String ssTablePath, IPartitioner partitioner) throws IOException + private int importSorted(String jsonFile, ColumnFamily columnFamily, String ssTablePath, + IPartitioner partitioner) throws IOException { int importedKeys = 0; // already imported keys count long start = System.currentTimeMillis(); @@ -334,12 +396,12 @@ public class SSTableImport keyCountToImport = 0; System.out.println("Counting keys to import, please wait... 
(NOTE: to skip this use -n )"); - parser.nextToken(); // START_OBJECT + parser.nextToken(); // START_ARRAY while (parser.nextToken() != null) { - parser.nextToken(); parser.skipChildren(); - if (parser.getCurrentName() == null) continue; + if (parser.getCurrentToken() == JsonToken.END_ARRAY) + break; keyCountToImport++; } @@ -353,66 +415,49 @@ public class SSTableImport int lineNumber = 1; DecoratedKey prevStoredKey = null; + parser.nextToken(); // START_ARRAY while (parser.nextToken() != null) { String key = parser.getCurrentName(); + Map row = parser.readValueAs(new TypeReference>(){}); + DecoratedKey currentKey = partitioner.decorateKey(hexToBytes((String) row.get("key"))); - if (key != null) - { - String tokenName = parser.nextToken().name(); + if (row.containsKey("metadata")) + parseMeta((Map) row.get("metadata"), columnFamily); - if (tokenName.equals("START_ARRAY")) - { - if (columnFamily.getType() == ColumnFamilyType.Super) - { - throw new RuntimeException("Can't write Standard columns to the Super Column Family."); - } - List columns = parser.readValueAs(new TypeReference>() {}); - addToStandardCF(columns, columnFamily); - } - else if (tokenName.equals("START_OBJECT")) - { - if (columnFamily.getType() == ColumnFamilyType.Standard) - { - throw new RuntimeException("Can't write Super columns to the Standard Column Family."); - } - - Map columns = parser.readValueAs(new TypeReference>() {}); - addToSuperCF(columns, columnFamily); - } - else - { - throw new UnsupportedOperationException("Only Array or Hash allowed as row content."); - } + if (columnFamily.getType() == ColumnFamilyType.Super) + addToSuperCF((Map)row.get("columns"), columnFamily); + else + addToStandardCF((List)row.get("columns"), columnFamily); - DecoratedKey currentKey = partitioner.decorateKey(hexToBytes(key)); + if (prevStoredKey != null && prevStoredKey.compareTo(currentKey) != -1) + { + System.err + .printf("Line %d: Key %s is greater than previous, collection is not sorted properly. 
Aborting import. You might need to delete SSTables manually.%n", + lineNumber, key); + return -1; + } - if (prevStoredKey != null && prevStoredKey.compareTo(currentKey) != -1) - { - System.err.printf("Line %d: Key %s is greater than previous, collection is not sorted properly. Aborting import. You might need to delete SSTables manually.%n", lineNumber, key); - return -1; - } + // saving decorated key + writer.append(currentKey, columnFamily); + columnFamily.clear(); - // saving decorated key - writer.append(currentKey, columnFamily); - columnFamily.clear(); + prevStoredKey = currentKey; + importedKeys++; + lineNumber++; - prevStoredKey = currentKey; - importedKeys++; - lineNumber++; + long current = System.currentTimeMillis(); - long current = System.currentTimeMillis(); + if (current - start >= 5000) // 5 secs. + { + System.out.printf("Currently imported %d keys.%n", importedKeys); + start = current; + } - if (current - start >= 5000) // 5 secs. - { - System.out.printf("Currently imported %d keys.%n", importedKeys); - start = current; - } + if (keyCountToImport == importedKeys) + break; - if (keyCountToImport == importedKeys) - break; - } } writer.closeAndOpenReader(); @@ -426,7 +471,7 @@ public class SSTableImport * @return json parser instance for given file * @throws IOException if any I/O error. 
*/ - private static JsonParser getParser(String fileName) throws IOException + private JsonParser getParser(String fileName) throws IOException { return factory.createJsonParser(new File(fileName)); } @@ -466,6 +511,9 @@ public class SSTableImport String keyspace = cmd.getOptionValue(KEYSPACE_OPTION); String cfamily = cmd.getOptionValue(COLUMN_FAMILY_OPTION); + Integer keyCountToImport = null; + boolean isSorted = false; + if (cmd.hasOption(KEY_COUNT_OPTION)) { keyCountToImport = Integer.valueOf(cmd.getOptionValue(KEY_COUNT_OPTION)); @@ -486,7 +534,7 @@ public class SSTableImport try { - importJson(json, keyspace, cfamily, ssTable); + new SSTableImport(keyCountToImport,isSorted).importJson(json, keyspace, cfamily, ssTable); } catch (Exception e) { @@ -512,15 +560,6 @@ public class SSTableImport } /** - * Used by test framework to set key count - * @param keyCount numbers of keys to import - */ - public static void setKeyCountToImport(Integer keyCount) - { - keyCountToImport = keyCount; - } - - /** * Convert a string to bytes (ByteBuffer) according to type * @param content string to convert * @param type type to use for conversion http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/test/resources/CounterCF.json ---------------------------------------------------------------------- diff --git a/test/resources/CounterCF.json b/test/resources/CounterCF.json index 7a9225a..b6bca73 100644 --- a/test/resources/CounterCF.json +++ b/test/resources/CounterCF.json @@ -1,3 +1,3 @@ -{ - "726f7741": [["636f6c4141", "000100008c619170467411e00000fe8ebeead9ee0000000000000001000000000000002a", 1294532915068, "c", 0]] -} +[ + {"key": "726f7741", "columns": [["636f6c4141", "000100008c619170467411e00000fe8ebeead9ee0000000000000001000000000000002a", 1294532915068, "c", 0]]} +] http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/test/resources/SimpleCF.json ---------------------------------------------------------------------- diff --git 
a/test/resources/SimpleCF.json b/test/resources/SimpleCF.json index 098467d..45f57eb 100644 --- a/test/resources/SimpleCF.json +++ b/test/resources/SimpleCF.json @@ -1,4 +1,4 @@ -{ - "726f7741": [["636f6c4141", "76616c4141", 1294532915068], ["636f6c4142", "76616c4142", 1294532915069], ["636f6c4143", "76616c4143", 1294532915071, "e", 42, 2000000000]], - "726f7742": [["636f6c4241", "76616c4241", 1294532915070], ["636f6c4242", "76616c4242", 1294532915073]] -} +[ + {"key": "726f7741", "columns": [["636f6c4141", "76616c4141", 1294532915068], ["636f6c4142", "76616c4142", 1294532915069], ["636f6c4143", "76616c4143", 1294532915071, "e", 42, 2000000000]]}, + {"key": "726f7742", "columns": [["636f6c4241", "76616c4241", 1294532915070], ["636f6c4242", "76616c4242", 1294532915073]]} +] http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/test/resources/SimpleCF.oldformat.json ---------------------------------------------------------------------- diff --git a/test/resources/SimpleCF.oldformat.json b/test/resources/SimpleCF.oldformat.json index e14fe2b..d920cfb 100644 --- a/test/resources/SimpleCF.oldformat.json +++ b/test/resources/SimpleCF.oldformat.json @@ -1,4 +1,4 @@ -{ - "726f7741": [["636f6c4141", "76616c4141", 1294532915068, false], ["636f6c4142", "76616c4142", 1294532915069, false], ["636f6c4143", "76616c4143", 1294532915071, false, 42, 2000000000 ]], - "726f7742": [["636f6c4241", "76616c4241", 1294532915070, false], ["636f6c4242", "76616c4242", 1294532915073, false]] -} +[ + {"key": "726f7741", "columns": [["636f6c4141", "76616c4141", 1294532915068, false], ["636f6c4142", "76616c4142", 1294532915069, false], ["636f6c4143", "76616c4143", 1294532915071, false, 42, 2000000000 ]]}, + {"key": "726f7742", "columns": [["636f6c4241", "76616c4241", 1294532915070, false], ["636f6c4242", "76616c4242", 1294532915073, false]]} +] http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/test/resources/SimpleCFWithDeletionInfo.json 
---------------------------------------------------------------------- diff --git a/test/resources/SimpleCFWithDeletionInfo.json b/test/resources/SimpleCFWithDeletionInfo.json new file mode 100644 index 0000000..ef673f2 --- /dev/null +++ b/test/resources/SimpleCFWithDeletionInfo.json @@ -0,0 +1,4 @@ +[ + {"key": "726f7741","metadata":{"deletionInfo":{"markedForDeleteAt":0,"localDeletionTime":0}}, "columns": [["636f6c4141", "76616c4141", 1294532915068], ["636f6c4142", "76616c4142", 1294532915069], ["636f6c4143", "76616c4143", 1294532915071, "e", 42, 2000000000]]}, + {"key": "726f7742","metadata":{"deletionInfo":{"markedForDeleteAt":0,"localDeletionTime":0}}, "columns": [["636f6c4241", "76616c4241", 1294532915070], ["636f6c4242", "76616c4242", 1294532915073]]} +] http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/test/resources/SuperCF.json ---------------------------------------------------------------------- diff --git a/test/resources/SuperCF.json b/test/resources/SuperCF.json index cb3ea27..5afec7f 100644 --- a/test/resources/SuperCF.json +++ b/test/resources/SuperCF.json @@ -1,4 +1,4 @@ -{ - "726f7741": {"737570657241": {"deletedAt": -9223372036854775808, "subColumns": [["636f6c4141", "76616c75654141", 1294532915069], ["636f6c4142", "76616c75654142", 1294532915069]]}}, - "726f7742": {"737570657242": {"deletedAt": -9223372036854775808, "subColumns": [["636f6c4241", "76616c75654241", 1294532915069], ["636f6c4242", "76616c75654242", 1294532915069]]}} -} +[ + {"key": "726f7741", "columns": {"737570657241": {"metadata": {"deletionInfo": {"markedForDeleteAt":0,"localDeletionTime":0}}, "subColumns": [["636f6c4141", "76616c75654141", 1294532915069], ["636f6c4142", "76616c75654142", 1294532915069]]}}}, + {"key": "726f7742", "columns": {"737570657242": {"subColumns": [["636f6c4241", "76616c75654241", 1294532915069], ["636f6c4242", "76616c75654242", 1294532915069]]}}} +] 
http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/test/resources/UnsortedCF.json ---------------------------------------------------------------------- diff --git a/test/resources/UnsortedCF.json b/test/resources/UnsortedCF.json new file mode 100644 index 0000000..814f182 --- /dev/null +++ b/test/resources/UnsortedCF.json @@ -0,0 +1,4 @@ +[ + {"key": "726f7742", "columns": [["636f6c4241", "76616c4241", 1294532915070], ["636f6c4242", "76616c4242", 1294532915073]]}, + {"key": "726f7741", "columns": [["636f6c4141", "76616c4141", 1294532915068], ["636f6c4142", "76616c4142", 1294532915069], ["636f6c4143", "76616c4143", 1294532915071, "e", 42, 2000000000]]} +] http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/test/resources/UnsortedSuperCF.json ---------------------------------------------------------------------- diff --git a/test/resources/UnsortedSuperCF.json b/test/resources/UnsortedSuperCF.json index b55833d..bd07e81 100644 --- a/test/resources/UnsortedSuperCF.json +++ b/test/resources/UnsortedSuperCF.json @@ -1,5 +1,5 @@ -{ - "303935": { "5330": {"deletedAt": -9223372036854775808, "subColumns": [["4330", "366338333439636337323630", 1294656637116, false], ["4331", "366338333439636337323630", 1294656637116, false], ["4332", "366338333439636337323630", 1294656637116, false], ["4333", "366338333439636337323630", 1294656637116, false], ["4334", "366338333439636337323630", 1294656637116, false]]}} , - "303630": { "5330": {"deletedAt": -9223372036854775808, "subColumns": [["4330", "643364393434363830326134", 1294656636902, false], ["4331", "643364393434363830326134", 1294656636902, false], ["4332", "643364393434363830326134", 1294656636902, false], ["4333", "643364393434363830326134", 1294656636902, false], ["4334", "643364393434363830326134", 1294656636902, false]]}} , - "303638": { "5330": {"deletedAt": -9223372036854775808, "subColumns": [["4330", "366634393232663435353638", 1294656636885, false], ["4331", "366634393232663435353638", 
1294656636885, false], ["4332", "366634393232663435353638", 1294656636885, false], ["4333", "366634393232663435353638", 1294656636885, false], ["4334", "366634393232663435353638", 1294656636885, false]]}} -} +[ + {"key": "303935", "columns": { "5330": {"deletedAt": -9223372036854775808, "subColumns": [["4330", "366338333439636337323630", 1294656637116, false], ["4331", "366338333439636337323630", 1294656637116, false], ["4332", "366338333439636337323630", 1294656637116, false], ["4333", "366338333439636337323630", 1294656637116, false], ["4334", "366338333439636337323630", 1294656637116, false]]}}} , + {"key": "303630", "columns": { "5330": {"deletedAt": -9223372036854775808, "subColumns": [["4330", "643364393434363830326134", 1294656636902, false], ["4331", "643364393434363830326134", 1294656636902, false], ["4332", "643364393434363830326134", 1294656636902, false], ["4333", "643364393434363830326134", 1294656636902, false], ["4334", "643364393434363830326134", 1294656636902, false]]}}} , + {"key": "303638", "columns": { "5330": {"deletedAt": -9223372036854775808, "subColumns": [["4330", "366634393232663435353638", 1294656636885, false], ["4331", "366634393232663435353638", 1294656636885, false], ["4332", "366634393232663435353638", 1294656636885, false], ["4333", "366634393232663435353638", 1294656636885, false], ["4334", "366634393232663435353638", 1294656636885, false]]}}} +] http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/test/unit/org/apache/cassandra/tools/SSTableExportTest.java ---------------------------------------------------------------------- diff --git a/test/unit/org/apache/cassandra/tools/SSTableExportTest.java b/test/unit/org/apache/cassandra/tools/SSTableExportTest.java index fae4d60..aace9ef 100644 --- a/test/unit/org/apache/cassandra/tools/SSTableExportTest.java +++ b/test/unit/org/apache/cassandra/tools/SSTableExportTest.java @@ -18,17 +18,29 @@ */ package org.apache.cassandra.tools; +import static 
junit.framework.Assert.assertEquals; +import static junit.framework.Assert.assertNotNull; +import static org.apache.cassandra.io.sstable.SSTableUtils.tempSSTableFile; +import static org.apache.cassandra.utils.ByteBufferUtil.bytesToHex; +import static org.apache.cassandra.utils.ByteBufferUtil.hexToBytes; +import static org.junit.Assert.assertTrue; + import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.PrintStream; import java.util.Collections; +import org.junit.Test; + import org.apache.cassandra.SchemaLoader; +import org.apache.cassandra.Util; +import org.apache.cassandra.db.Column; import org.apache.cassandra.db.ColumnFamily; import org.apache.cassandra.db.CounterColumn; +import org.apache.cassandra.db.DeletionInfo; import org.apache.cassandra.db.ExpiringColumn; -import org.apache.cassandra.db.Column; +import org.apache.cassandra.db.SuperColumn; import org.apache.cassandra.db.filter.QueryFilter; import org.apache.cassandra.db.filter.QueryPath; import org.apache.cassandra.db.marshal.UTF8Type; @@ -36,19 +48,10 @@ import org.apache.cassandra.io.sstable.Descriptor; import org.apache.cassandra.io.sstable.SSTableReader; import org.apache.cassandra.io.sstable.SSTableWriter; import org.apache.cassandra.utils.ByteBufferUtil; - -import static org.apache.cassandra.io.sstable.SSTableUtils.tempSSTableFile; -import static org.apache.cassandra.utils.ByteBufferUtil.bytesToHex; -import static org.apache.cassandra.utils.ByteBufferUtil.hexToBytes; -import static org.junit.Assert.assertTrue; - -import org.apache.cassandra.Util; - import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.JSONValue; import org.json.simple.parser.ParseException; -import org.junit.Test; public class SSTableExportTest extends SchemaLoader { @@ -91,7 +94,7 @@ public class SSTableExportTest extends SchemaLoader } @Test - public void testExportSimpleCf() throws IOException + public void testExportSimpleCf() throws IOException, 
ParseException { File tempSS = tempSSTableFile("Keyspace1", "Standard1"); ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "Standard1"); @@ -120,26 +123,33 @@ public class SSTableExportTest extends SchemaLoader File tempJson = File.createTempFile("Standard1", ".json"); SSTableExport.export(reader, new PrintStream(tempJson.getPath()), new String[]{asHex("rowExclude")}); - JSONObject json = (JSONObject)JSONValue.parse(new FileReader(tempJson)); + JSONArray json = (JSONArray)JSONValue.parseWithException(new FileReader(tempJson)); + assertEquals("unexpected number of rows", 2, json.size()); + + JSONObject rowA = (JSONObject)json.get(0); + assertEquals("unexpected number of keys", 2, rowA.keySet().size()); + assertEquals("unexpected row key",asHex("rowA"),rowA.get("key")); - JSONArray rowA = (JSONArray)json.get(asHex("rowA")); - JSONArray colA = (JSONArray)rowA.get(0); + JSONArray colsA = (JSONArray)rowA.get("columns"); + JSONArray colA = (JSONArray)colsA.get(0); assert hexToBytes((String)colA.get(1)).equals(ByteBufferUtil.bytes("valA")); - JSONArray colExp = (JSONArray)rowA.get(1); + JSONArray colExp = (JSONArray)colsA.get(1); assert ((Long)colExp.get(4)) == 42; assert ((Long)colExp.get(5)) == nowInSec; - JSONArray rowB = (JSONArray)json.get(asHex("rowB")); - JSONArray colB = (JSONArray)rowB.get(0); + JSONObject rowB = (JSONObject)json.get(1); + assertEquals("unexpected number of keys", 2, rowB.keySet().size()); + assertEquals("unexpected row key",asHex("rowB"),rowB.get("key")); + + JSONArray colsB = (JSONArray)rowB.get("columns"); + JSONArray colB = (JSONArray)colsB.get(0); assert colB.size() == 3; - JSONArray rowExclude = (JSONArray)json.get(asHex("rowExclude")); - assert rowExclude == null; } @Test - public void testExportSuperCf() throws IOException + public void testExportSuperCf() throws IOException, ParseException { File tempSS = tempSSTableFile("Keyspace1", "Super4"); ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "Super4"); @@ -147,6 +157,8 @@ 
public class SSTableExportTest extends SchemaLoader // Add rowA cfamily.addColumn(new QueryPath("Super4", ByteBufferUtil.bytes("superA"), ByteBufferUtil.bytes("colA")), ByteBufferUtil.bytes("valA"), System.currentTimeMillis()); + // set deletion info on the super col + ((SuperColumn) cfamily.getColumn(ByteBufferUtil.bytes("superA"))).setDeletionInfo(new DeletionInfo(0, 0)); writer.append(Util.dk("rowA"), cfamily); cfamily.clear(); @@ -164,18 +176,26 @@ public class SSTableExportTest extends SchemaLoader // Export to JSON and verify File tempJson = File.createTempFile("Super4", ".json"); - SSTableExport.export(reader, new PrintStream(tempJson.getPath()), new String[]{asHex("rowExclude")}); + SSTableExport.export(reader, new PrintStream(tempJson.getPath()), new String[] { asHex("rowExclude") }); - JSONObject json = (JSONObject)JSONValue.parse(new FileReader(tempJson)); + JSONArray json = (JSONArray) JSONValue.parseWithException(new FileReader(tempJson)); + assertEquals("unexpected number of rows", 2, json.size()); - JSONObject rowA = (JSONObject)json.get(asHex("rowA")); - JSONObject superA = (JSONObject)rowA.get(cfamily.getComparator().getString(ByteBufferUtil.bytes("superA"))); - JSONArray subColumns = (JSONArray)superA.get("subColumns"); - JSONArray colA = (JSONArray)subColumns.get(0); - JSONObject rowExclude = (JSONObject)json.get(asHex("rowExclude")); - assert hexToBytes((String)colA.get(1)).equals(ByteBufferUtil.bytes("valA")); + // make sure only the two rows we expect are there + JSONObject rowA = (JSONObject) json.get(0); + assertEquals("unexpected number of keys", 2, rowA.keySet().size()); + assertEquals("unexpected row key", asHex("rowA"), rowA.get("key")); + JSONObject rowB = (JSONObject) json.get(1); + assertEquals("unexpected number of keys", 2, rowB.keySet().size()); + assertEquals("unexpected row key", asHex("rowB"), rowB.get("key")); + + JSONObject cols = (JSONObject) rowA.get("columns"); + + JSONObject superA = (JSONObject)
cols.get(cfamily.getComparator().getString(ByteBufferUtil.bytes("superA"))); + JSONArray subColumns = (JSONArray) superA.get("subColumns"); + JSONArray colA = (JSONArray) subColumns.get(0); + assert hexToBytes((String) colA.get(1)).equals(ByteBufferUtil.bytes("valA")); assert colA.size() == 3; - assert rowExclude == null; } @Test @@ -203,7 +223,7 @@ public class SSTableExportTest extends SchemaLoader // Import JSON to another SSTable file File tempSS2 = tempSSTableFile("Keyspace1", "Standard1"); - SSTableImport.importJson(tempJson.getPath(), "Keyspace1", "Standard1", tempSS2.getPath()); + new SSTableImport().importJson(tempJson.getPath(), "Keyspace1", "Standard1", tempSS2.getPath()); reader = SSTableReader.open(Descriptor.fromFilename(tempSS2.getPath())); QueryFilter qf = QueryFilter.getNamesFilter(Util.dk("rowA"), new QueryPath("Standard1", null, null), ByteBufferUtil.bytes("name")); @@ -218,7 +238,7 @@ public class SSTableExportTest extends SchemaLoader } @Test - public void testExportCounterCf() throws IOException + public void testExportCounterCf() throws IOException, ParseException { File tempSS = tempSSTableFile("Keyspace1", "Counter1"); ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "Counter1"); @@ -234,18 +254,22 @@ public class SSTableExportTest extends SchemaLoader // Export to JSON and verify File tempJson = File.createTempFile("Counter1", ".json"); SSTableExport.export(reader, new PrintStream(tempJson.getPath()), new String[0]); + JSONArray json = (JSONArray)JSONValue.parseWithException(new FileReader(tempJson)); + assertEquals("unexpected number of rows", 1, json.size()); - JSONObject json = (JSONObject)JSONValue.parse(new FileReader(tempJson)); + JSONObject row = (JSONObject)json.get(0); + assertEquals("unexpected number of keys", 2, row.keySet().size()); + assertEquals("unexpected row key",asHex("rowA"),row.get("key")); - JSONArray rowA = (JSONArray)json.get(asHex("rowA")); - JSONArray colA = (JSONArray)rowA.get(0); + JSONArray cols = 
(JSONArray)row.get("columns"); + JSONArray colA = (JSONArray)cols.get(0); assert hexToBytes((String)colA.get(0)).equals(ByteBufferUtil.bytes("colA")); assert ((String) colA.get(3)).equals("c"); assert (Long) colA.get(4) == Long.MIN_VALUE; } @Test - public void testEscapingDoubleQuotes() throws IOException + public void testEscapingDoubleQuotes() throws IOException, ParseException { File tempSS = tempSSTableFile("Keyspace1", "ValuesWithQuotes"); ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "ValuesWithQuotes"); @@ -262,11 +286,79 @@ public class SSTableExportTest extends SchemaLoader File tempJson = File.createTempFile("ValuesWithQuotes", ".json"); SSTableExport.export(reader, new PrintStream(tempJson.getPath()), new String[0]); - JSONObject json = (JSONObject) JSONValue.parse(new FileReader(tempJson)); + JSONArray json = (JSONArray)JSONValue.parseWithException(new FileReader(tempJson)); + assertEquals("unexpected number of rows", 1, json.size()); + + JSONObject row = (JSONObject)json.get(0); + assertEquals("unexpected number of keys", 2, row.keySet().size()); + assertEquals("unexpected row key",asHex("rowA"),row.get("key")); + + JSONArray cols = (JSONArray)row.get("columns"); + JSONArray colA = (JSONArray)cols.get(0); + assert hexToBytes((String)colA.get(0)).equals(ByteBufferUtil.bytes("data")); + assert colA.get(1).equals("{\"foo\":\"bar\"}"); + } + + @Test + public void testExportColumnsWithMetadata() throws IOException, ParseException + { + + File tempSS = tempSSTableFile("Keyspace1", "Standard1"); + ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "Standard1"); + SSTableWriter writer = new SSTableWriter(tempSS.getPath(), 2); + + // Add rowA + cfamily.addColumn(new QueryPath("Standard1", null, ByteBufferUtil.bytes("colName")), + ByteBufferUtil.bytes("val"), System.currentTimeMillis()); + cfamily.addColumn(new QueryPath("Standard1", null, ByteBufferUtil.bytes("colName1")), + ByteBufferUtil.bytes("val1"), System.currentTimeMillis()); + 
cfamily.delete(new DeletionInfo(0, 0)); + writer.append(Util.dk("rowA"), cfamily); + + SSTableReader reader = writer.closeAndOpenReader(); + // Export to JSON and verify + File tempJson = File.createTempFile("CFWithDeletionInfo", ".json"); + SSTableExport.export(reader, new PrintStream(tempJson.getPath()), new String[0]); + + JSONArray json = (JSONArray)JSONValue.parseWithException(new FileReader(tempJson)); + System.out.println(json.toJSONString()); + assertEquals("unexpected number of rows", 1, json.size()); + + JSONObject row = (JSONObject)json.get(0); + assertEquals("unexpected number of keys", 3, row.keySet().size()); + assertEquals("unexpected row key",asHex("rowA"),row.get("key")); + + // check that the row key is there and present + String rowKey = (String) row.get("key"); + assertNotNull("expecting key to be present", rowKey); + assertEquals("key did not match", ByteBufferUtil.bytes("rowA"), hexToBytes(rowKey)); + + // check that there is metadata and that it contains deletionInfo + JSONObject meta = (JSONObject) row.get("metadata"); + assertNotNull("expecting metadata to be present", meta); + + assertEquals("unexpected number of metadata entries", 1, meta.keySet().size()); + + JSONObject serializedDeletionInfo = (JSONObject) meta.get("deletionInfo"); + assertNotNull("expecting deletionInfo to be present", serializedDeletionInfo); + + assertEquals( + "unexpected serialization format for topLevelDeletion", + "{\"markedForDeleteAt\":0,\"localDeletionTime\":0}", + serializedDeletionInfo.toJSONString()); + + // check the columns are what we put in + JSONArray cols = (JSONArray) row.get("columns"); + assertNotNull("expecting columns to be present", cols); + assertEquals("expecting two columns", 2, cols.size()); + + JSONArray col1 = (JSONArray) cols.get(0); + assertEquals("column name did not match", ByteBufferUtil.bytes("colName"), hexToBytes((String) col1.get(0))); + assertEquals("column value did not match", ByteBufferUtil.bytes("val"), hexToBytes((String)
col1.get(1))); + + JSONArray col2 = (JSONArray) cols.get(1); + assertEquals("column name did not match", ByteBufferUtil.bytes("colName1"), hexToBytes((String) col2.get(0))); + assertEquals("column value did not match", ByteBufferUtil.bytes("val1"), hexToBytes((String) col2.get(1))); - JSONArray rowA = (JSONArray)json.get(asHex("rowA")); - JSONArray data = (JSONArray)rowA.get(0); - assert hexToBytes((String)data.get(0)).equals(ByteBufferUtil.bytes("data")); - assert data.get(1).equals("{\"foo\":\"bar\"}"); } } http://git-wip-us.apache.org/repos/asf/cassandra/blob/d569f873/test/unit/org/apache/cassandra/tools/SSTableImportTest.java ---------------------------------------------------------------------- diff --git a/test/unit/org/apache/cassandra/tools/SSTableImportTest.java b/test/unit/org/apache/cassandra/tools/SSTableImportTest.java index 1b59696..6bed245 100644 --- a/test/unit/org/apache/cassandra/tools/SSTableImportTest.java +++ b/test/unit/org/apache/cassandra/tools/SSTableImportTest.java @@ -18,35 +18,34 @@ */ package org.apache.cassandra.tools; +import static junit.framework.Assert.assertEquals; +import static org.apache.cassandra.io.sstable.SSTableUtils.tempSSTableFile; +import static org.apache.cassandra.utils.ByteBufferUtil.hexToBytes; + import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Collections; +import org.junit.Test; + import org.apache.cassandra.SchemaLoader; -import org.apache.cassandra.config.DatabaseDescriptor; +import org.apache.cassandra.Util; import org.apache.cassandra.db.ColumnFamily; import org.apache.cassandra.db.CounterColumn; import org.apache.cassandra.db.DeletedColumn; +import org.apache.cassandra.db.DeletionInfo; import org.apache.cassandra.db.ExpiringColumn; import org.apache.cassandra.db.IColumn; +import org.apache.cassandra.db.SuperColumn; +import org.apache.cassandra.db.columniterator.OnDiskAtomIterator; import org.apache.cassandra.db.filter.QueryFilter; import 
org.apache.cassandra.db.filter.QueryPath; -import org.apache.cassandra.db.columniterator.OnDiskAtomIterator; -import org.apache.cassandra.dht.IPartitioner; import org.apache.cassandra.io.sstable.Descriptor; import org.apache.cassandra.io.sstable.SSTableReader; - import org.apache.cassandra.utils.ByteBufferUtil; - -import static org.apache.cassandra.utils.ByteBufferUtil.hexToBytes; - -import static org.apache.cassandra.io.sstable.SSTableUtils.tempSSTableFile; -import org.apache.cassandra.Util; - import org.json.simple.parser.ParseException; -import org.junit.Test; public class SSTableImportTest extends SchemaLoader { @@ -56,7 +55,7 @@ public class SSTableImportTest extends SchemaLoader // Import JSON to temp SSTable file String jsonUrl = resourcePath("SimpleCF.json"); File tempSS = tempSSTableFile("Keyspace1", "Standard1"); - SSTableImport.importJson(jsonUrl, "Keyspace1", "Standard1", tempSS.getPath()); + new SSTableImport(true).importJson(jsonUrl, "Keyspace1", "Standard1", tempSS.getPath()); // Verify results SSTableReader reader = SSTableReader.open(Descriptor.fromFilename(tempSS.getPath())); @@ -85,7 +84,7 @@ public class SSTableImportTest extends SchemaLoader // Import JSON to temp SSTable file String jsonUrl = resourcePath("SimpleCF.oldformat.json"); File tempSS = tempSSTableFile("Keyspace1", "Standard1"); - SSTableImport.importJson(jsonUrl, "Keyspace1", "Standard1", tempSS.getPath()); + new SSTableImport(true).importJson(jsonUrl, "Keyspace1", "Standard1", tempSS.getPath()); // Verify results SSTableReader reader = SSTableReader.open(Descriptor.fromFilename(tempSS.getPath())); @@ -106,7 +105,7 @@ public class SSTableImportTest extends SchemaLoader { String jsonUrl = resourcePath("SuperCF.json"); File tempSS = tempSSTableFile("Keyspace1", "Super4"); - SSTableImport.importJson(jsonUrl, "Keyspace1", "Super4", tempSS.getPath()); + new SSTableImport(true).importJson(jsonUrl, "Keyspace1", "Super4", tempSS.getPath()); // Verify results SSTableReader reader = 
SSTableReader.open(Descriptor.fromFilename(tempSS.getPath())); @@ -114,6 +113,8 @@ public class SSTableImportTest extends SchemaLoader ColumnFamily cf = qf.getSSTableColumnIterator(reader).getColumnFamily(); qf.collateOnDiskAtom(cf, Collections.singletonList(qf.getSSTableColumnIterator(reader)), Integer.MIN_VALUE); IColumn superCol = cf.getColumn(ByteBufferUtil.bytes("superA")); + assertEquals("supercolumn deletion time did not match the expected time", new DeletionInfo(0, 0), + ((SuperColumn) superCol).deletionInfo()); assert superCol != null; assert superCol.getSubColumns().size() > 0; IColumn subColumn = superCol.getSubColumn(ByteBufferUtil.bytes("636f6c4141")); @@ -121,26 +122,68 @@ public class SSTableImportTest extends SchemaLoader } @Test - public void testImportUnsortedMode() throws IOException, URISyntaxException + public void testImportUnsortedDataWithSortedOptionFails() throws IOException, URISyntaxException { String jsonUrl = resourcePath("UnsortedSuperCF.json"); File tempSS = tempSSTableFile("Keyspace1", "Super4"); - ColumnFamily columnFamily = ColumnFamily.create("Keyspace1", "Super4"); - IPartitioner partitioner = DatabaseDescriptor.getPartitioner(); - - SSTableImport.setKeyCountToImport(3); - int result = SSTableImport.importSorted(jsonUrl, columnFamily, tempSS.getPath(), partitioner); + int result = new SSTableImport(3,true).importJson(jsonUrl, "Keyspace1","Super4", tempSS.getPath()); assert result == -1; } @Test + public void testImportUnsortedMode() throws IOException, URISyntaxException + { + String jsonUrl = resourcePath("UnsortedCF.json"); + File tempSS = tempSSTableFile("Keyspace1", "Standard1"); + + new SSTableImport().importJson(jsonUrl, "Keyspace1", "Standard1", tempSS.getPath()); + + SSTableReader reader = SSTableReader.open(Descriptor.fromFilename(tempSS.getPath())); + QueryFilter qf = QueryFilter.getIdentityFilter(Util.dk("rowA"), new QueryPath("Standard1")); + OnDiskAtomIterator iter = qf.getSSTableColumnIterator(reader); + 
ColumnFamily cf = iter.getColumnFamily(); + while (iter.hasNext()) + cf.addAtom(iter.next()); + assert cf.getColumn(ByteBufferUtil.bytes("colAA")).value().equals(hexToBytes("76616c4141")); + assert !(cf.getColumn(ByteBufferUtil.bytes("colAA")) instanceof DeletedColumn); + IColumn expCol = cf.getColumn(ByteBufferUtil.bytes("colAC")); + assert expCol.value().equals(hexToBytes("76616c4143")); + assert expCol instanceof ExpiringColumn; + assert ((ExpiringColumn) expCol).getTimeToLive() == 42 && expCol.getLocalDeletionTime() == 2000000000; + } + + @Test + public void testImportWithDeletionInfoMetadata() throws IOException, URISyntaxException + { + // Import JSON to temp SSTable file + String jsonUrl = resourcePath("SimpleCFWithDeletionInfo.json"); + File tempSS = tempSSTableFile("Keyspace1", "Standard1"); + new SSTableImport(true).importJson(jsonUrl, "Keyspace1", "Standard1", tempSS.getPath()); + + // Verify results + SSTableReader reader = SSTableReader.open(Descriptor.fromFilename(tempSS.getPath())); + QueryFilter qf = QueryFilter.getIdentityFilter(Util.dk("rowA"), new QueryPath("Standard1")); + OnDiskAtomIterator iter = qf.getSSTableColumnIterator(reader); + ColumnFamily cf = iter.getColumnFamily(); + assertEquals(cf.deletionInfo(), new DeletionInfo(0, 0)); + while (iter.hasNext()) + cf.addAtom(iter.next()); + assert cf.getColumn(ByteBufferUtil.bytes("colAA")).value().equals(hexToBytes("76616c4141")); + assert !(cf.getColumn(ByteBufferUtil.bytes("colAA")) instanceof DeletedColumn); + IColumn expCol = cf.getColumn(ByteBufferUtil.bytes("colAC")); + assert expCol.value().equals(hexToBytes("76616c4143")); + assert expCol instanceof ExpiringColumn; + assert ((ExpiringColumn) expCol).getTimeToLive() == 42 && expCol.getLocalDeletionTime() == 2000000000; + } + + @Test public void testImportCounterCf() throws IOException, URISyntaxException { // Import JSON to temp SSTable file String jsonUrl = resourcePath("CounterCF.json"); File tempSS = tempSSTableFile("Keyspace1", 
"Counter1"); - SSTableImport.importJson(jsonUrl, "Keyspace1", "Counter1", tempSS.getPath()); + new SSTableImport(true).importJson(jsonUrl, "Keyspace1", "Counter1", tempSS.getPath()); // Verify results SSTableReader reader = SSTableReader.open(Descriptor.fromFilename(tempSS.getPath()));