cassandra-commits mailing list archives

From yu...@apache.org
Subject [1/3] cassandra git commit: Improve tombstone printing in sstabledump
Date Fri, 29 Apr 2016 19:33:59 GMT
Repository: cassandra
Updated Branches:
  refs/heads/cassandra-3.0 b15983e83 -> 620efdc8c
  refs/heads/trunk ff4d0f9ab -> c6778c5af


Improve tombstone printing in sstabledump

patch by clohfink; reviewed by Wei Deng for CASSANDRA-11655
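
For context, a rough standalone sketch of the conversion this patch introduces (illustration only, not the committed class; the real logic is the dateString() helper added to JsonTransformer in the diff below, and the timestamp values here are arbitrary examples):

import java.time.Instant;
import java.util.concurrent.TimeUnit;

// Mirrors JsonTransformer.dateString(): split a timestamp in the given unit into
// whole seconds plus the sub-second remainder in nanoseconds, then render it as
// an ISO-8601 instant.
public class TimestampFormatDemo
{
    static String dateString(TimeUnit from, long time)
    {
        long secs = from.toSeconds(time);
        long offset = Math.floorMod(from.toNanos(time), 1_000_000_000L); // nanos within the second
        return Instant.ofEpochSecond(secs, offset).toString();
    }

    public static void main(String[] args)
    {
        // Cell write timestamps ("tstamp") are microseconds since the epoch ...
        System.out.println(dateString(TimeUnit.MICROSECONDS, 1461958469123456L));
        // ... while local deletion and expiration times are seconds since the epoch.
        System.out.println(dateString(TimeUnit.SECONDS, 1461958469L));
    }
}

For these example values this prints 2016-04-29T19:34:29.123456Z and 2016-04-29T19:34:29Z instead of the raw long values. The committed helper additionally short-circuits to the raw number when the new -t flag (RAW_TIMESTAMPS in SSTableExport) is passed.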


Project: http://git-wip-us.apache.org/repos/asf/cassandra/repo
Commit: http://git-wip-us.apache.org/repos/asf/cassandra/commit/620efdc8
Tree: http://git-wip-us.apache.org/repos/asf/cassandra/tree/620efdc8
Diff: http://git-wip-us.apache.org/repos/asf/cassandra/diff/620efdc8

Branch: refs/heads/cassandra-3.0
Commit: 620efdc8c4968e45994496b23cd7dcdfbccdad6d
Parents: b15983e
Author: Chris Lohfink <Chris.Lohfink@datastax.com>
Authored: Thu Apr 28 20:34:29 2016 -0500
Committer: Yuki Morishita <yukim@apache.org>
Committed: Fri Apr 29 14:01:02 2016 -0500

----------------------------------------------------------------------
 CHANGES.txt                                     |   1 +
 .../apache/cassandra/tools/JsonTransformer.java | 112 +++++++++++++------
 .../apache/cassandra/tools/SSTableExport.java   |  11 +-
 3 files changed, 85 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/cassandra/blob/620efdc8/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 3184cce..64bcbd8 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,4 +1,5 @@
 3.0.6
+ * Improve tombstone printing in sstabledump (CASSANDRA-11655)
  * Fix paging for range queries where all clustering columns are specified (CASSANDRA-11669)
  * Don't require HEAP_NEW_SIZE to be set when using G1 (CASSANDRA-11600)
  * Fix sstabledump not showing cells after tombstone marker (CASSANDRA-11654)

http://git-wip-us.apache.org/repos/asf/cassandra/blob/620efdc8/src/java/org/apache/cassandra/tools/JsonTransformer.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/tools/JsonTransformer.java b/src/java/org/apache/cassandra/tools/JsonTransformer.java
index 93bb686..364070e 100644
--- a/src/java/org/apache/cassandra/tools/JsonTransformer.java
+++ b/src/java/org/apache/cassandra/tools/JsonTransformer.java
@@ -4,7 +4,9 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.nio.ByteBuffer;
+import java.time.Instant;
 import java.util.List;
+import java.util.concurrent.TimeUnit;
 import java.util.stream.Stream;
 
 import org.apache.cassandra.config.CFMetaData;
@@ -18,6 +20,8 @@ import org.apache.cassandra.db.marshal.AbstractType;
 import org.apache.cassandra.db.marshal.CollectionType;
 import org.apache.cassandra.db.marshal.CompositeType;
 import org.apache.cassandra.db.rows.Cell;
+import org.apache.cassandra.db.rows.ColumnData;
+import org.apache.cassandra.db.rows.ComplexColumnData;
 import org.apache.cassandra.db.rows.RangeTombstoneBoundMarker;
 import org.apache.cassandra.db.rows.RangeTombstoneBoundaryMarker;
 import org.apache.cassandra.db.rows.RangeTombstoneMarker;
@@ -51,13 +55,16 @@ public final class JsonTransformer
 
     private final ISSTableScanner currentScanner;
 
+    private boolean rawTime = false;
+
     private long currentPosition = 0;
 
-    private JsonTransformer(JsonGenerator json, ISSTableScanner currentScanner, CFMetaData metadata)
+    private JsonTransformer(JsonGenerator json, ISSTableScanner currentScanner, boolean rawTime, CFMetaData metadata)
     {
         this.json = json;
         this.metadata = metadata;
         this.currentScanner = currentScanner;
+        this.rawTime = rawTime;
 
         DefaultPrettyPrinter prettyPrinter = new DefaultPrettyPrinter();
         prettyPrinter.indentObjectsWith(objectIndenter);
@@ -65,23 +72,23 @@ public final class JsonTransformer
         json.setPrettyPrinter(prettyPrinter);
     }
 
-    public static void toJson(ISSTableScanner currentScanner, Stream<UnfilteredRowIterator> partitions, CFMetaData metadata, OutputStream out)
+    public static void toJson(ISSTableScanner currentScanner, Stream<UnfilteredRowIterator> partitions, boolean rawTime, CFMetaData metadata, OutputStream out)
             throws IOException
     {
         try (JsonGenerator json = jsonFactory.createJsonGenerator(new OutputStreamWriter(out, "UTF-8")))
         {
-            JsonTransformer transformer = new JsonTransformer(json, currentScanner, metadata);
+            JsonTransformer transformer = new JsonTransformer(json, currentScanner, rawTime, metadata);
             json.writeStartArray();
             partitions.forEach(transformer::serializePartition);
             json.writeEndArray();
         }
     }
 
-    public static void keysToJson(ISSTableScanner currentScanner, Stream<DecoratedKey> keys, CFMetaData metadata, OutputStream out) throws IOException
+    public static void keysToJson(ISSTableScanner currentScanner, Stream<DecoratedKey> keys, boolean rawTime, CFMetaData metadata, OutputStream out) throws IOException
     {
         try (JsonGenerator json = jsonFactory.createJsonGenerator(new OutputStreamWriter(out, "UTF-8")))
         {
-            JsonTransformer transformer = new JsonTransformer(json, currentScanner, metadata);
+            JsonTransformer transformer = new JsonTransformer(json, currentScanner, rawTime, metadata);
             json.writeStartArray();
             keys.forEach(transformer::serializePartitionKey);
             json.writeEndArray();
@@ -165,16 +172,7 @@ public final class JsonTransformer
 
             if (!partition.partitionLevelDeletion().isLive())
             {
-                json.writeFieldName("deletion_info");
-                objectIndenter.setCompact(true);
-                json.writeStartObject();
-                json.writeFieldName("deletion_time");
-                json.writeNumber(partition.partitionLevelDeletion().markedForDeleteAt());
-                json.writeFieldName("tstamp");
-                json.writeNumber(partition.partitionLevelDeletion().localDeletionTime());
-                json.writeEndObject();
-                objectIndenter.setCompact(false);
-                json.writeEndObject();
+                serializeDeletion(partition.partitionLevelDeletion());
             }
             else
             {
@@ -236,13 +234,12 @@ public final class JsonTransformer
                 objectIndenter.setCompact(true);
                 json.writeStartObject();
                 json.writeFieldName("tstamp");
-                json.writeNumber(liveInfo.timestamp());
+                json.writeString(dateString(TimeUnit.MICROSECONDS, liveInfo.timestamp()));
                 if (liveInfo.isExpiring())
                 {
-                    json.writeFieldName("ttl");
-                    json.writeNumber(liveInfo.ttl());
+                    json.writeNumberField("ttl", liveInfo.ttl());
                     json.writeFieldName("expires_at");
-                    json.writeNumber(liveInfo.localExpirationTime());
+                    json.writeString(dateString(TimeUnit.SECONDS, liveInfo.localExpirationTime()));
                     json.writeFieldName("expired");
                     json.writeBoolean(liveInfo.localExpirationTime() < (System.currentTimeMillis() / 1000));
                 }
@@ -253,19 +250,14 @@ public final class JsonTransformer
             // If this is a deletion, indicate that, otherwise write cells.
             if (!row.deletion().isLive())
             {
-                json.writeFieldName("deletion_info");
-                objectIndenter.setCompact(true);
-                json.writeStartObject();
-                json.writeFieldName("deletion_time");
-                json.writeNumber(row.deletion().time().markedForDeleteAt());
-                json.writeFieldName("tstamp");
-                json.writeNumber(row.deletion().time().localDeletionTime());
-                json.writeEndObject();
-                objectIndenter.setCompact(false);
+                serializeDeletion(row.deletion().time());
             }
             json.writeFieldName("cells");
             json.writeStartArray();
-            row.cells().forEach(c -> serializeCell(c, liveInfo));
+            for (ColumnData cd : row)
+            {
+                serializeColumnData(cd, liveInfo);
+            }
             json.writeEndArray();
             json.writeEndObject();
         }
@@ -348,14 +340,48 @@ public final class JsonTransformer
         json.writeFieldName("deletion_info");
         objectIndenter.setCompact(true);
         json.writeStartObject();
-        json.writeFieldName("deletion_time");
-        json.writeNumber(deletion.markedForDeleteAt());
-        json.writeFieldName("tstamp");
-        json.writeNumber(deletion.localDeletionTime());
+        json.writeFieldName("marked_deleted");
+        json.writeString(dateString(TimeUnit.MICROSECONDS, deletion.markedForDeleteAt()));
+        json.writeFieldName("local_delete_time");
+        json.writeString(dateString(TimeUnit.SECONDS, deletion.localDeletionTime()));
         json.writeEndObject();
         objectIndenter.setCompact(false);
     }
 
+    private void serializeColumnData(ColumnData cd, LivenessInfo liveInfo)
+    {
+        if (cd.column().isSimple())
+        {
+            serializeCell((Cell) cd, liveInfo);
+        }
+        else
+        {
+            ComplexColumnData complexData = (ComplexColumnData) cd;
+            if (!complexData.complexDeletion().isLive())
+            {
+                try
+                {
+                    objectIndenter.setCompact(true);
+                    json.writeStartObject();
+                    json.writeFieldName("name");
+                    AbstractType<?> type = cd.column().type;
+                    json.writeString(cd.column().name.toCQLString());
+                    serializeDeletion(complexData.complexDeletion());
+                    objectIndenter.setCompact(true);
+                    json.writeEndObject();
+                    objectIndenter.setCompact(false);
+                }
+                catch (IOException e)
+                {
+                    logger.error("Failure parsing ColumnData.", e);
+                }
+            }
+            for (Cell cell : complexData){
+                serializeCell(cell, liveInfo);
+            }
+        }
+    }
+
     private void serializeCell(Cell cell, LivenessInfo liveInfo)
     {
         try
@@ -381,8 +407,13 @@ public final class JsonTransformer
             }
             if (cell.isTombstone())
             {
-                json.writeFieldName("deletion_time");
-                json.writeNumber(cell.localDeletionTime());
+                json.writeFieldName("deletion_info");
+                objectIndenter.setCompact(true);
+                json.writeStartObject();
+                json.writeFieldName("local_delete_time");
+                json.writeString(dateString(TimeUnit.SECONDS, cell.localDeletionTime()));
+                json.writeEndObject();
+                objectIndenter.setCompact(false);
             }
             else
             {
@@ -392,14 +423,14 @@ public final class JsonTransformer
             if (liveInfo.isEmpty() || cell.timestamp() != liveInfo.timestamp())
             {
                 json.writeFieldName("tstamp");
-                json.writeNumber(cell.timestamp());
+                json.writeString(dateString(TimeUnit.MICROSECONDS, cell.timestamp()));
             }
             if (cell.isExpiring() && (liveInfo.isEmpty() || cell.ttl() != liveInfo.ttl()))
             {
                 json.writeFieldName("ttl");
                 json.writeNumber(cell.ttl());
                 json.writeFieldName("expires_at");
-                json.writeNumber(cell.localDeletionTime());
+                json.writeString(dateString(TimeUnit.SECONDS, cell.localDeletionTime()));
                 json.writeFieldName("expired");
                 json.writeBoolean(!cell.isLive((int) (System.currentTimeMillis() / 1000)));
             }
@@ -412,6 +443,13 @@ public final class JsonTransformer
         }
     }
 
+    private String dateString(TimeUnit from, long time)
+    {
+        long secs = from.toSeconds(time);
+        long offset = Math.floorMod(from.toNanos(time), 1000_000_000L); // nanos per sec
+        return rawTime? Long.toString(time) : Instant.ofEpochSecond(secs, offset).toString();
+    }
+
     /**
      * A specialized {@link Indenter} that enables a 'compact' mode which puts all subsequent json values on the same
      * line. This is manipulated via {@link CompactIndenter#setCompact(boolean)}

http://git-wip-us.apache.org/repos/asf/cassandra/blob/620efdc8/src/java/org/apache/cassandra/tools/SSTableExport.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/tools/SSTableExport.java b/src/java/org/apache/cassandra/tools/SSTableExport.java
index d918ff4..09dbbed 100644
--- a/src/java/org/apache/cassandra/tools/SSTableExport.java
+++ b/src/java/org/apache/cassandra/tools/SSTableExport.java
@@ -57,6 +57,7 @@ public class SSTableExport
     private static final String DEBUG_OUTPUT_OPTION = "d";
     private static final String EXCLUDE_KEY_OPTION = "x";
     private static final String ENUMERATE_KEYS_OPTION = "e";
+    private static final String RAW_TIMESTAMPS = "t";
 
     private static final Options options = new Options();
     private static CommandLine cmd;
@@ -80,6 +81,9 @@ public class SSTableExport
 
         Option debugOutput = new Option(DEBUG_OUTPUT_OPTION, false, "CQL row per line internal representation");
         options.addOption(debugOutput);
+
+        Option rawTimestamps = new Option(RAW_TIMESTAMPS, false, "Print raw timestamps instead of iso8601 date strings");
+        options.addOption(rawTimestamps);
     }
 
     /**
@@ -180,7 +184,10 @@ public class SSTableExport
             CFMetaData metadata = metadataFromSSTable(desc);
             if (cmd.hasOption(ENUMERATE_KEYS_OPTION))
             {
-                JsonTransformer.keysToJson(null, iterToStream(new KeyIterator(desc, metadata)), metadata, System.out);
+                JsonTransformer.keysToJson(null, iterToStream(new KeyIterator(desc, metadata)),
+                                                              cmd.hasOption(RAW_TIMESTAMPS),
+                                                              metadata,
+                                                              System.out);
             }
             else
             {
@@ -233,7 +240,7 @@ public class SSTableExport
                 }
                 else
                 {
-                    JsonTransformer.toJson(currentScanner, partitions, metadata, System.out);
+                    JsonTransformer.toJson(currentScanner, partitions, cmd.hasOption(RAW_TIMESTAMPS), metadata, System.out);
                 }
             }
         }
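
With this change, sstabledump renders write timestamps, deletion times, and expiration times as ISO-8601 date strings by default; passing the new -t option added above (for example, sstabledump <path-to-Data.db> -t, with the sstable path as a placeholder) falls back to the previous raw numeric output.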

