cassandra-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jji...@apache.org
Subject [4/6] cassandra git commit: Merge branch 'cassandra-3.0' into cassandra-3.11
Date Tue, 05 Sep 2017 16:56:54 GMT
Merge branch 'cassandra-3.0' into cassandra-3.11


Project: http://git-wip-us.apache.org/repos/asf/cassandra/repo
Commit: http://git-wip-us.apache.org/repos/asf/cassandra/commit/d44a0d25
Tree: http://git-wip-us.apache.org/repos/asf/cassandra/tree/d44a0d25
Diff: http://git-wip-us.apache.org/repos/asf/cassandra/diff/d44a0d25

Branch: refs/heads/trunk
Commit: d44a0d25bb9a07f5e43a9951bd13f8259dfccd36
Parents: 03155b0 ef5ac1a
Author: Jeff Jirsa <jjirsa@apple.com>
Authored: Tue Sep 5 09:54:36 2017 -0700
Committer: Jeff Jirsa <jjirsa@apple.com>
Committed: Tue Sep 5 09:55:26 2017 -0700

----------------------------------------------------------------------
 CHANGES.txt                                     |   1 +
 .../apache/cassandra/db/ColumnFamilyStore.java  |   2 +-
 .../io/sstable/metadata/MetadataCollector.java  |  10 +-
 .../cassandra/utils/StreamingHistogram.java     | 333 ++++++++++---------
 .../microbench/StreamingHistogramBench.java     | 192 ++++++-----
 .../cassandra/utils/StreamingHistogramTest.java |  43 +--
 6 files changed, 306 insertions(+), 275 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/cassandra/blob/d44a0d25/CHANGES.txt
----------------------------------------------------------------------
diff --cc CHANGES.txt
index 7056075,853bf61..0faed3d
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@@ -1,17 -1,9 +1,18 @@@
 -3.0.15
 +3.11.1
 + * Add a compaction option to TWCS to ignore sstables overlapping checks (CASSANDRA-13418)
 + * BTree.Builder memory leak (CASSANDRA-13754)
 + * Revert CASSANDRA-10368 of supporting non-pk column filtering due to correctness (CASSANDRA-13798)
 + * Fix cassandra-stress hang issues when an error during cluster connection happens (CASSANDRA-12938)
 + * Better bootstrap failure message when blocked by (potential) range movement (CASSANDRA-13744)
 + * "ignore" option is ignored in sstableloader (CASSANDRA-13721)
 + * Deadlock in AbstractCommitLogSegmentManager (CASSANDRA-13652)
 + * Duplicate the buffer before passing it to analyser in SASI operation (CASSANDRA-13512)
 + * Properly evict pstmts from prepared statements cache (CASSANDRA-13641)
 +Merged from 3.0:
+  * StreamingHistogram is not thread safe (CASSANDRA-13756)
   * Fix MV timestamp issues (CASSANDRA-11500)
 - * Better tolerate improperly formatted bcrypt hashes (CASSANDRA-13626) 
 + * Better tolerate improperly formatted bcrypt hashes (CASSANDRA-13626)
   * Fix race condition in read command serialization (CASSANDRA-13363)
 - * Enable segement creation before recovering commitlogs (CASSANDRA-13587)
   * Fix AssertionError in short read protection (CASSANDRA-13747)
   * Don't skip corrupted sstables on startup (CASSANDRA-13620)
   * Fix the merging of cells with different user type versions (CASSANDRA-13776)

http://git-wip-us.apache.org/repos/asf/cassandra/blob/d44a0d25/src/java/org/apache/cassandra/db/ColumnFamilyStore.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/cassandra/blob/d44a0d25/src/java/org/apache/cassandra/io/sstable/metadata/MetadataCollector.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/cassandra/blob/d44a0d25/src/java/org/apache/cassandra/utils/StreamingHistogram.java
----------------------------------------------------------------------
diff --cc src/java/org/apache/cassandra/utils/StreamingHistogram.java
index 9114c7d,6500a1a..df49d8d
--- a/src/java/org/apache/cassandra/utils/StreamingHistogram.java
+++ b/src/java/org/apache/cassandra/utils/StreamingHistogram.java
@@@ -24,6 -24,6 +24,7 @@@ import com.google.common.base.Objects
  
  import org.apache.cassandra.db.TypeSizes;
  import org.apache.cassandra.io.ISerializer;
++import org.apache.cassandra.io.sstable.SSTable;
  import org.apache.cassandra.io.util.DataInputPlus;
  import org.apache.cassandra.io.util.DataOutputPlus;
  
@@@ -39,172 -39,15 +40,33 @@@ public class StreamingHistogra
      public static final StreamingHistogramSerializer serializer = new StreamingHistogramSerializer();
  
      // TreeMap to hold bins of histogram.
 -    private final TreeMap<Double, Long> bin;
 +    // The key is a numeric type so we can avoid boxing/unboxing streams of different key types
 +    // The value is a unboxed long array always of length == 1
 +    // Serialized Histograms always writes with double keys for backwards compatibility
 +    private final TreeMap<Number, long[]> bin;
 +
-     // Keep a second, larger buffer to spool data in, before finalizing it into `bin`
-     private final TreeMap<Number, long[]> spool;
- 
 +    // maximum bin size for this histogram
      private final int maxBinSize;
  
-     // maximum size of the spool
-     private final int maxSpoolSize;
- 
-     // voluntarily give up resolution for speed
-     private final int roundSeconds;
 -    private StreamingHistogram(int maxBinSize, Map<Double, Long> bin)
 +
 +    /**
 +     * Creates a new histogram with max bin size of maxBinSize
 +     * @param maxBinSize maximum number of bins this histogram can have
++     * @param source the existing bins in map form
 +     */
-     public StreamingHistogram(int maxBinSize, int maxSpoolSize, int roundSeconds)
++    private StreamingHistogram(int maxBinSize, Map<Number, long[]> source)
      {
          this.maxBinSize = maxBinSize;
-         this.maxSpoolSize = maxSpoolSize;
-         this.roundSeconds = roundSeconds;
-         bin = new TreeMap<>((o1, o2) -> {
-             if (o1.getClass().equals(o2.getClass()))
-                 return ((Comparable)o1).compareTo(o2);
-             else
-             	return Double.compare(o1.doubleValue(), o2.doubleValue());
-         });
-         spool = new TreeMap<>((o1, o2) -> {
 -        this.bin = new TreeMap<>(bin);
++        this.bin = new TreeMap<>((o1, o2) -> {
 +            if (o1.getClass().equals(o2.getClass()))
 +                return ((Comparable)o1).compareTo(o2);
 +            else
 +                return Double.compare(o1.doubleValue(), o2.doubleValue());
 +        });
- 
-     }
- 
-     private StreamingHistogram(int maxBinSize, int maxSpoolSize,  int roundSeconds, Map<Double, Long> bin)
-     {
-         this(maxBinSize, maxSpoolSize, roundSeconds);
-         for (Map.Entry<Double, Long> entry : bin.entrySet())
-             this.bin.put(entry.getKey(), new long[]{entry.getValue()});
-     }
- 
-     /**
-      * Adds new point p to this histogram.
-      * @param p
-      */
-     public void update(Number p)
-     {
-         update(p, 1L);
-     }
- 
-     /**
-      * Adds new point p with value m to this histogram.
-      * @param p
-      * @param m
-      */
-     public void update(Number p, long m)
-     {
-         Number d = p.longValue() % this.roundSeconds;
-         if (d.longValue() > 0)
-             p =p.longValue() + (this.roundSeconds - d.longValue());
- 
-         long[] mi = spool.get(p);
-         if (mi != null)
-         {
-             // we found the same p so increment that counter
-             mi[0] += m;
-         }
-         else
-         {
-             mi = new long[]{m};
-             spool.put(p, mi);
-         }
- 
-         // If spool has overflowed, compact it
-         if(spool.size() > maxSpoolSize)
-             flushHistogram();
-     }
- 
-     /**
-      * Drain the temporary spool into the final bins
-      */
-     public void flushHistogram()
-     {
-         if (spool.size() > 0)
-         {
-             long[] spoolValue;
-             long[] binValue;
- 
-             // Iterate over the spool, copying the value into the primary bin map
-             // and compacting that map as necessary
-             for (Map.Entry<Number, long[]> entry : spool.entrySet())
-             {
-                 Number key = entry.getKey();
-                 spoolValue = entry.getValue();
-                 binValue = bin.get(key);
- 
-                 // If this value is already in the final histogram bins
-                 // Simply increment and update, otherwise, insert a new long[1] value
-                 if(binValue != null)
-                 {
-                     binValue[0] += spoolValue[0];
-                     bin.put(key, binValue);
-                 }
-                 else
-                 {
-                     bin.put(key, new long[]{spoolValue[0]});
-                 }
- 
-                 if (bin.size() > maxBinSize)
-                 {
-                     // find points p1, p2 which have smallest difference
-                     Iterator<Number> keys = bin.keySet().iterator();
-                     double p1 = keys.next().doubleValue();
-                     double p2 = keys.next().doubleValue();
-                     double smallestDiff = p2 - p1;
-                     double q1 = p1, q2 = p2;
-                     while (keys.hasNext())
-                     {
-                         p1 = p2;
-                         p2 = keys.next().doubleValue();
-                         double diff = p2 - p1;
-                         if (diff < smallestDiff)
-                         {
-                             smallestDiff = diff;
-                             q1 = p1;
-                             q2 = p2;
-                         }
-                     }
-                     // merge those two
-                     long[] a1 = bin.remove(q1);
-                     long[] a2 = bin.remove(q2);
-                     long k1 = a1[0];
-                     long k2 = a2[0];
- 
-                     a1[0] += k2;
-                     bin.put((q1 * k1 + q2 * k2) / (k1 + k2), a1);
- 
-                 }
-             }
-             spool.clear();
-         }
-     }
- 
-     /**
-      * Merges given histogram with this histogram.
-      *
-      * @param other histogram to merge
-      */
-     public void merge(StreamingHistogram other)
-     {
-         if (other == null)
-             return;
- 
-         other.flushHistogram();
- 
-         for (Map.Entry<Number, long[]> entry : other.getAsMap().entrySet())
-             update(entry.getKey(), entry.getValue()[0]);
++        for (Map.Entry<Number, long[]> entry : source.entrySet())
++            this.bin.put(entry.getKey(), new long[]{entry.getValue()[0]});
      }
--
++    
      /**
       * Calculates estimated number of points in interval [-inf,b].
       *
@@@ -213,10 -56,9 +75,9 @@@
       */
      public double sum(double b)
      {
-         flushHistogram();
          double sum = 0;
          // find the points pi, pnext which satisfy pi <= b < pnext
 -        Map.Entry<Double, Long> pnext = bin.higherEntry(b);
 +        Map.Entry<Number, long[]> pnext = bin.higherEntry(b);
          if (pnext == null)
          {
              // if b is greater than any key in this histogram,
@@@ -241,24 -83,163 +102,191 @@@
          return sum;
      }
  
 -    public Map<Double, Long> getAsMap()
 +    public Map<Number, long[]> getAsMap()
      {
-         flushHistogram();
          return Collections.unmodifiableMap(bin);
      }
  
+     public static class StreamingHistogramBuilder
+     {
+         // TreeMap to hold bins of histogram.
 -        private final TreeMap<Double, Long> bin;
++        // The key is a numeric type so we can avoid boxing/unboxing streams of different key types
++        // The value is a unboxed long array always of length == 1
++        // Serialized Histograms always writes with double keys for backwards compatibility
++        private final TreeMap<Number, long[]> bin;
+ 
+         // Keep a second, larger buffer to spool data in, before finalizing it into `bin`
 -        private final TreeMap<Double, Long> spool;
++        private final TreeMap<Number, long[]> spool;
+ 
+         // maximum bin size for this histogram
+         private final int maxBinSize;
+ 
+         // maximum size of the spool
+         private final int maxSpoolSize;
+ 
+         // voluntarily give up resolution for speed
+         private final int roundSeconds;
++
+         /**
+          * Creates a new histogram with max bin size of maxBinSize
+          * @param maxBinSize maximum number of bins this histogram can have
+          */
+         public StreamingHistogramBuilder(int maxBinSize, int maxSpoolSize, int roundSeconds)
+         {
+             this.maxBinSize = maxBinSize;
+             this.maxSpoolSize = maxSpoolSize;
+             this.roundSeconds = roundSeconds;
 -            bin = new TreeMap<>();
 -            spool = new TreeMap<>();
++            bin = new TreeMap<>((o1, o2) -> {
++                if (o1.getClass().equals(o2.getClass()))
++                    return ((Comparable)o1).compareTo(o2);
++                else
++                    return Double.compare(o1.doubleValue(), o2.doubleValue());
++            });
++            spool = new TreeMap<>((o1, o2) -> {
++                if (o1.getClass().equals(o2.getClass()))
++                    return ((Comparable)o1).compareTo(o2);
++                else
++                    return Double.compare(o1.doubleValue(), o2.doubleValue());
++            });
++
+         }
+ 
+         public StreamingHistogram build()
+         {
+             flushHistogram();
 -            return new StreamingHistogram(maxBinSize, bin);
++            return new StreamingHistogram(maxBinSize,  bin);
+         }
++
+         /**
+          * Adds new point p to this histogram.
+          * @param p
+          */
 -        public void update(double p)
++        public void update(Number p)
+         {
 -            update(p, 1);
++            update(p, 1L);
+         }
+ 
+         /**
+          * Adds new point p with value m to this histogram.
+          * @param p
+          * @param m
+          */
 -        public void update(double p, long m)
++        public void update(Number p, long m)
+         {
 -            double d = p % this.roundSeconds;
 -            if (d > 0)
 -                p = p + (this.roundSeconds - d);
++            Number d = p.longValue() % this.roundSeconds;
++            if (d.longValue() > 0)
++                p =p.longValue() + (this.roundSeconds - d.longValue());
+ 
 -            Long mi = spool.get(p);
++            long[] mi = spool.get(p);
+             if (mi != null)
+             {
+                 // we found the same p so increment that counter
 -                spool.put(p, mi + m);
++                mi[0] += m;
+             }
+             else
+             {
 -                spool.put(p, m);
++                mi = new long[]{m};
++                spool.put(p, mi);
+             }
++
++            // If spool has overflowed, compact it
+             if(spool.size() > maxSpoolSize)
+                 flushHistogram();
+         }
+ 
++
++
+         /**
+          * Drain the temporary spool into the final bins
+          */
+         public void flushHistogram()
+         {
 -            if(spool.size() > 0)
++            if (spool.size() > 0)
+             {
 -                Long spoolValue;
 -                Long binValue;
++                long[] spoolValue;
++                long[] binValue;
+ 
+                 // Iterate over the spool, copying the value into the primary bin map
+                 // and compacting that map as necessary
 -                for (Map.Entry<Double, Long> entry : spool.entrySet())
++                for (Map.Entry<Number, long[]> entry : spool.entrySet())
+                 {
 -                    Double key = entry.getKey();
++                    Number key = entry.getKey();
+                     spoolValue = entry.getValue();
+                     binValue = bin.get(key);
+ 
 -                    if (binValue != null)
++                    // If this value is already in the final histogram bins
++                    // Simply increment and update, otherwise, insert a new long[1] value
++                    if(binValue != null)
+                     {
 -                        binValue += spoolValue;
++                        binValue[0] += spoolValue[0];
+                         bin.put(key, binValue);
 -                    } else
++                    }
++                    else
+                     {
 -                        bin.put(key, spoolValue);
++                        bin.put(key, new long[]{spoolValue[0]});
+                     }
+ 
 -                    // if bin size exceeds maximum bin size then trim down to max size
+                     if (bin.size() > maxBinSize)
+                     {
+                         // find points p1, p2 which have smallest difference
 -                        Iterator<Double> keys = bin.keySet().iterator();
 -                        double p1 = keys.next();
 -                        double p2 = keys.next();
++                        Iterator<Number> keys = bin.keySet().iterator();
++                        double p1 = keys.next().doubleValue();
++                        double p2 = keys.next().doubleValue();
+                         double smallestDiff = p2 - p1;
+                         double q1 = p1, q2 = p2;
 -                        while (keys.hasNext()) {
++                        while (keys.hasNext())
++                        {
+                             p1 = p2;
 -                            p2 = keys.next();
++                            p2 = keys.next().doubleValue();
+                             double diff = p2 - p1;
 -                            if (diff < smallestDiff) {
++                            if (diff < smallestDiff)
++                            {
+                                 smallestDiff = diff;
+                                 q1 = p1;
+                                 q2 = p2;
+                             }
+                         }
+                         // merge those two
 -                        long k1 = bin.remove(q1);
 -                        long k2 = bin.remove(q2);
 -                        bin.put((q1 * k1 + q2 * k2) / (k1 + k2), k1 + k2);
++                        long[] a1 = bin.remove(q1);
++                        long[] a2 = bin.remove(q2);
++                        long k1 = a1[0];
++                        long k2 = a2[0];
++
++                        a1[0] += k2;
++                        bin.put((q1 * k1 + q2 * k2) / (k1 + k2), a1);
++
+                     }
+                 }
+                 spool.clear();
+             }
+         }
+ 
+         /**
 -        * Merges given histogram with this histogram.
 -        *
 -        * @param other histogram to merge
 -        */
++         * Merges given histogram with this histogram.
++         *
++         * @param other histogram to merge
++         */
+         public void merge(StreamingHistogram other)
+         {
+             if (other == null)
+                 return;
+ 
 -            flushHistogram();
 -
 -            for (Map.Entry<Double, Long> entry : other.getAsMap().entrySet())
 -                update(entry.getKey(), entry.getValue());
++            for (Map.Entry<Number, long[]> entry : other.getAsMap().entrySet())
++                update(entry.getKey(), entry.getValue()[0]);
+         }
+     }
+ 
      public static class StreamingHistogramSerializer implements ISerializer<StreamingHistogram>
      {
          public void serialize(StreamingHistogram histogram, DataOutputPlus out) throws IOException
          {
-             histogram.flushHistogram();
              out.writeInt(histogram.maxBinSize);
 -            Map<Double, Long> entries = histogram.getAsMap();
 +            Map<Number, long[]> entries = histogram.getAsMap();
              out.writeInt(entries.size());
 -            for (Map.Entry<Double, Long> entry : entries.entrySet())
 +            for (Map.Entry<Number, long[]> entry : entries.entrySet())
              {
 -                out.writeDouble(entry.getKey());
 -                out.writeLong(entry.getValue());
 +                out.writeDouble(entry.getKey().doubleValue());
 +                out.writeLong(entry.getValue()[0]);
              }
          }
  
@@@ -266,13 -247,13 +294,13 @@@
          {
              int maxBinSize = in.readInt();
              int size = in.readInt();
--            Map<Double, Long> tmp = new HashMap<>(size);
++            Map<Number, long[]> tmp = new HashMap<>(size);
              for (int i = 0; i < size; i++)
              {
--                tmp.put(in.readDouble(), in.readLong());
++                tmp.put(in.readDouble(), new long[]{in.readLong()});
              }
  
-             return new StreamingHistogram(maxBinSize, maxBinSize, 1, tmp);
+             return new StreamingHistogram(maxBinSize, tmp);
          }
  
          public long serializedSize(StreamingHistogram histogram)
@@@ -296,15 -277,13 +324,14 @@@
              return false;
  
          StreamingHistogram that = (StreamingHistogram) o;
 -        return maxBinSize == that.maxBinSize
 -               && bin.equals(that.bin);
 +        return maxBinSize == that.maxBinSize &&
-                spool.equals(that.spool) &&
 +               bin.equals(that.bin);
      }
  
      @Override
      public int hashCode()
      {
-         return Objects.hashCode(bin.hashCode(), spool.hashCode(), maxBinSize);
+         return Objects.hashCode(bin.hashCode(), maxBinSize);
      }
 +
  }

http://git-wip-us.apache.org/repos/asf/cassandra/blob/d44a0d25/test/unit/org/apache/cassandra/utils/StreamingHistogramTest.java
----------------------------------------------------------------------
diff --cc test/unit/org/apache/cassandra/utils/StreamingHistogramTest.java
index f64cbd9,b107600..dcb6703
--- a/test/unit/org/apache/cassandra/utils/StreamingHistogramTest.java
+++ b/test/unit/org/apache/cassandra/utils/StreamingHistogramTest.java
@@@ -33,13 -32,13 +33,13 @@@ public class StreamingHistogramTes
      @Test
      public void testFunction() throws Exception
      {
-         StreamingHistogram hist = new StreamingHistogram(5, 0, 1);
 -        StreamingHistogram.StreamingHistogramBuilder hist = new StreamingHistogram.StreamingHistogramBuilder(5, 5, 1);
++        StreamingHistogram.StreamingHistogramBuilder histogramBuilder = new StreamingHistogram.StreamingHistogramBuilder(5, 0, 1);
          long[] samples = new long[]{23, 19, 10, 16, 36, 2, 9, 32, 30, 45};
  
          // add 7 points to histogram of 5 bins
          for (int i = 0; i < 7; i++)
          {
--            hist.update(samples[i]);
++            histogramBuilder.update(samples[i]);
          }
  
          // should end up (2,1),(9.5,2),(17.5,2),(23,1),(36,1)
@@@ -51,20 -50,21 +51,20 @@@
          expected1.put(36.0, 1L);
  
          Iterator<Map.Entry<Double, Long>> expectedItr = expected1.entrySet().iterator();
-         for (Map.Entry<Number, long[]> actual : hist.getAsMap().entrySet())
 -        for (Map.Entry<Double, Long> actual : hist.build().getAsMap().entrySet())
++        for (Map.Entry<Number, long[]> actual : histogramBuilder.build().getAsMap().entrySet())
          {
              Map.Entry<Double, Long> entry = expectedItr.next();
 -            assertEquals(entry.getKey(), actual.getKey(), 0.01);
 -            assertEquals(entry.getValue(), actual.getValue());
 +            assertEquals(entry.getKey(), actual.getKey().doubleValue(), 0.01);
 +            assertEquals(entry.getValue().longValue(), actual.getValue()[0]);
          }
  
          // merge test
-         StreamingHistogram hist2 = new StreamingHistogram(3, 3, 1);
 -        StreamingHistogram.StreamingHistogramBuilder hist2 = new StreamingHistogram.StreamingHistogramBuilder(3, 0, 1);
++        StreamingHistogram.StreamingHistogramBuilder hist2 = new StreamingHistogram.StreamingHistogramBuilder(3, 3, 1);
          for (int i = 7; i < samples.length; i++)
          {
              hist2.update(samples[i]);
          }
-         hist.merge(hist2);
 -        hist.merge(hist2.build());
 -        StreamingHistogram histBuilt = hist.build();
++        histogramBuilder.merge(hist2.build());
          // should end up (2,1),(9.5,2),(19.33,3),(32.67,3),(45,1)
          Map<Double, Long> expected2 = new LinkedHashMap<Double, Long>(5);
          expected2.put(2.0, 1L);
@@@ -73,11 -73,11 +73,12 @@@
          expected2.put(32.67, 3L);
          expected2.put(45.0, 1L);
          expectedItr = expected2.entrySet().iterator();
 -        for (Map.Entry<Double, Long> actual : histBuilt.getAsMap().entrySet())
++        StreamingHistogram hist = histogramBuilder.build();
 +        for (Map.Entry<Number, long[]> actual : hist.getAsMap().entrySet())
          {
              Map.Entry<Double, Long> entry = expectedItr.next();
 -            assertEquals(entry.getKey(), actual.getKey(), 0.01);
 -            assertEquals(entry.getValue(), actual.getValue());
 +            assertEquals(entry.getKey(), actual.getKey().doubleValue(), 0.01);
 +            assertEquals(entry.getValue().longValue(), actual.getValue()[0]);
          }
  
          // sum test
@@@ -89,17 -89,18 +90,17 @@@
      @Test
      public void testSerDe() throws Exception
      {
-         StreamingHistogram hist = new StreamingHistogram(5, 0, 1);
 -        StreamingHistogram.StreamingHistogramBuilder histogramBuilder = new StreamingHistogram.StreamingHistogramBuilder(5, 0, 1);
++        StreamingHistogram.StreamingHistogramBuilder hist = new StreamingHistogram.StreamingHistogramBuilder(5, 0, 1);
          long[] samples = new long[]{23, 19, 10, 16, 36, 2, 9};
  
          // add 7 points to histogram of 5 bins
          for (int i = 0; i < samples.length; i++)
          {
 -            histogramBuilder.update(samples[i]);
 +            hist.update(samples[i]);
          }
 -        StreamingHistogram hist = histogramBuilder.build();
  
          DataOutputBuffer out = new DataOutputBuffer();
--        StreamingHistogram.serializer.serialize(hist, out);
++        StreamingHistogram.serializer.serialize(hist.build(), out);
          byte[] bytes = out.toByteArray();
  
         StreamingHistogram deserialized = StreamingHistogram.serializer.deserialize(new DataInputBuffer(bytes));
@@@ -121,64 -122,37 +122,68 @@@
          }
      }
  
 +
 +    @Test
 +    public void testNumericTypes() throws Exception
 +    {
-         StreamingHistogram hist = new StreamingHistogram(5, 0, 1);
++        StreamingHistogram.StreamingHistogramBuilder hist = new StreamingHistogram.StreamingHistogramBuilder(5, 0, 1);
 +
 +        hist.update(2);
 +        hist.update(2.0);
 +        hist.update(2L);
 +
-         Map<Number, long[]> asMap = hist.getAsMap();
++        Map<Number, long[]> asMap = hist.build().getAsMap();
 +
 +        assertEquals(1, asMap.size());
 +        assertEquals(3L, asMap.get(2)[0]);
 +
 +        //Make sure it's working with Serde
 +        DataOutputBuffer out = new DataOutputBuffer();
-         StreamingHistogram.serializer.serialize(hist, out);
++        StreamingHistogram.serializer.serialize(hist.build(), out);
 +        byte[] bytes = out.toByteArray();
 +
 +        StreamingHistogram deserialized = StreamingHistogram.serializer.deserialize(new DataInputBuffer(bytes));
 +
-         deserialized.update(2L);
++        StreamingHistogram.StreamingHistogramBuilder hist2Builder = new StreamingHistogram.StreamingHistogramBuilder(5, 0, 1);
++        hist2Builder.merge(deserialized);
++        hist2Builder.update(2L);
 +
-         asMap = deserialized.getAsMap();
++        asMap = hist2Builder.build().getAsMap();
 +        assertEquals(1, asMap.size());
 +        assertEquals(4L, asMap.get(2)[0]);
 +    }
 +
      @Test
      public void testOverflow() throws Exception
      {
-         StreamingHistogram hist = new StreamingHistogram(5, 10, 1);
 -        StreamingHistogram.StreamingHistogramBuilder histogramBuilder = new StreamingHistogram.StreamingHistogramBuilder(5, 10, 1);
++        StreamingHistogram.StreamingHistogramBuilder hist = new StreamingHistogram.StreamingHistogramBuilder(5, 10, 1);
          long[] samples = new long[]{23, 19, 10, 16, 36, 2, 9, 32, 30, 45, 31,
 -                32, 32, 33, 34, 35, 70, 78, 80, 90, 100,
 -                32, 32, 33, 34, 35, 70, 78, 80, 90, 100
 -        };
 +                                    32, 32, 33, 34, 35, 70, 78, 80, 90, 100,
 +                                    32, 32, 33, 34, 35, 70, 78, 80, 90, 100
 +                                    };
  
          // Hit the spool cap, force it to make bins
          for (int i = 0; i < samples.length; i++)
          {
 -            histogramBuilder.update(samples[i]);
 +            hist.update(samples[i]);
          }
- 
-         assertEquals(5, hist.getAsMap().keySet().size());
 -        assertEquals(5, histogramBuilder.build().getAsMap().keySet().size());
++        StreamingHistogram histogram = hist.build();
++        assertEquals(5, histogram.getAsMap().keySet().size());
  
      }
  
      @Test
      public void testRounding() throws Exception
      {
-         StreamingHistogram hist = new StreamingHistogram(5, 10, 60);
 -        StreamingHistogram.StreamingHistogramBuilder histogramBuilder = new StreamingHistogram.StreamingHistogramBuilder(5, 10, 60);
++        StreamingHistogram.StreamingHistogramBuilder hist = new StreamingHistogram.StreamingHistogramBuilder(5, 10, 60);
         long[] samples = new long[] { 59, 60, 119, 180, 181, 300 }; // 60, 60, 120, 180, 240, 300
          for (int i = 0 ; i < samples.length ; i++)
 -            histogramBuilder.update(samples[i]);
 +            hist.update(samples[i]);
-         assertEquals(hist.getAsMap().keySet().size(), 5);
-         assertEquals(hist.getAsMap().get(60)[0], 2);
-         assertEquals(hist.getAsMap().get(120)[0], 1);
+ 
 -        StreamingHistogram hist = histogramBuilder.build();
 -        assertEquals(hist.getAsMap().keySet().size(), (int) 5);
 -        assertEquals((long) hist.getAsMap().get(Double.valueOf(60)), (long) 2L);
 -        assertEquals((long) hist.getAsMap().get(Double.valueOf(120)), (long) 1L);
++        StreamingHistogram histogram = hist.build();
++        assertEquals(histogram.getAsMap().keySet().size(), 5);
++        assertEquals(histogram.getAsMap().get(60)[0], 2);
++        assertEquals(histogram.getAsMap().get(120)[0], 1);
  
      }
 -
  }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@cassandra.apache.org
For additional commands, e-mail: commits-help@cassandra.apache.org


Mime
View raw message